From 18b3585a4e1a69284817a9d1139742fff0a5a14a Mon Sep 17 00:00:00 2001 From: Nika Layzell Date: Mon, 5 Sep 2022 14:02:16 -0400 Subject: [PATCH] proc_macro/bridge: Eagerly pass TokenStream contents into the client This change could improve performance, especially in cases like the cross-thread backend, which has expensive RPC. It handles this by moving more of the logic for TokenStream into the client, storing an `Rc<Vec<TokenTree>>` directly, rather than converting to/from the compiler representation over RPC for every change. As this eagerly decomposes interpolated AST fragments, this ended up changing the output of `TokenStream::to_string()`. To keep up compatibility for now, formatting is still performed using RPC and `rustc_ast_pretty`; however, in the future we may look into decoupling proc-macro formatting from `rustc_ast_pretty`, and doing it in the client. One side-effect of this was that the workaround for the procedural-masquerade crate had to be updated to work with the new approach, as we couldn't depend on the AST fragment printer to format the input. To do this, a real `,` token is now inserted when passing the enum to a proc-macro rather than depending on the pretty-printing behaviour. From local tests it appears that the new output should be compatible enough to work with older versions of procedural-masquerade. The new conversion between the proc-macro and compiler representations of a `TokenStream` is now handled by the `RpcContext` type provided by the proc-macro server. This type is a distinct value which will be fetched and stored by the server before commands are run, such that the decoders can have access to relevant state. In the future, we may also change the symbol handling code to work off of `RpcContext` rather than being static methods on the `Server` trait. 
--- compiler/rustc_expand/src/base.rs | 68 ++-- compiler/rustc_expand/src/proc_macro.rs | 19 +- .../rustc_expand/src/proc_macro_server.rs | 299 ++++++++--------- library/proc_macro/src/bridge/client.rs | 17 +- library/proc_macro/src/bridge/mod.rs | 25 +- library/proc_macro/src/bridge/server.rs | 66 +++- library/proc_macro/src/bridge/token_stream.rs | 71 ++++ library/proc_macro/src/lib.rs | 159 ++------- .../intra-doc/through-proc-macro.stderr | 9 +- .../capture-macro-rules-invoke.stdout | 4 +- .../proc-macro/capture-unglued-token.stdout | 3 +- .../proc-macro/doc-comment-preserved.stdout | 10 +- .../expr-stmt-nonterminal-tokens.stdout | 15 +- .../proc-macro/inert-attribute-order.stdout | 5 +- .../issue-73933-procedural-masquerade-full.rs | 14 + ...ue-73933-procedural-masquerade-full.stderr | 25 ++ ...ue-73933-procedural-masquerade-full.stdout | 118 +++++++ .../issue-73933-procedural-masquerade.rs | 6 - .../issue-73933-procedural-masquerade.stderr | 68 +--- .../issue-73933-procedural-masquerade.stdout | 10 +- .../issue-78675-captured-inner-attrs.stdout | 5 +- .../proc-macro/nonterminal-expansion.stdout | 3 +- .../nonterminal-token-hygiene.stdout | 3 +- .../src/abis/abi_sysroot/ra_server.rs | 306 ++++++++---------- 24 files changed, 678 insertions(+), 650 deletions(-) create mode 100644 library/proc_macro/src/bridge/token_stream.rs create mode 100644 src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.rs create mode 100644 src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stderr create mode 100644 src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stdout diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 2bb522caa2d41..32ad54e67197e 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -3,8 +3,8 @@ use crate::module::DirOwnership; use rustc_ast::attr::MarkedAttrs; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Nonterminal}; -use 
rustc_ast::tokenstream::TokenStream; +use rustc_ast::token; +use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::visit::{AssocCtxt, Visitor}; use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind}; use rustc_attr::{self as attr, Deprecation, Stability}; @@ -1407,7 +1407,16 @@ pub fn parse_macro_name_and_helper_attrs( /// asserts in old versions of those crates and their wide use in the ecosystem. /// See issue #73345 for more details. /// FIXME(#73933): Remove this eventually. -fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool { +fn pretty_printing_compatibility_hack(ann: &Annotatable, sess: &ParseSess) -> bool { + let item = match ann { + Annotatable::Item(item) => item, + Annotatable::Stmt(stmt) => match &stmt.kind { + ast::StmtKind::Item(item) => item, + _ => return false, + }, + _ => return false, + }; + let name = item.ident.name; if name == sym::ProceduralMasqueradeDummyType { if let ast::ItemKind::Enum(enum_def, _) = &item.kind { @@ -1430,26 +1439,35 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool { false } -pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &ParseSess) -> bool { - let item = match ann { - Annotatable::Item(item) => item, - Annotatable::Stmt(stmt) => match &stmt.kind { - ast::StmtKind::Item(item) => item, - _ => return false, - }, - _ => return false, - }; - pretty_printing_compatibility_hack(item, sess) -} - -pub(crate) fn nt_pretty_printing_compatibility_hack(nt: &Nonterminal, sess: &ParseSess) -> bool { - let item = match nt { - Nonterminal::NtItem(item) => item, - Nonterminal::NtStmt(stmt) => match &stmt.kind { - ast::StmtKind::Item(item) => item, - _ => return false, - }, - _ => return false, - }; - pretty_printing_compatibility_hack(item, sess) +/// Helper to get the TokenStream for a Annotatable, taking the +/// `pretty_printing_compatibility_hack` into account. 
+/// +/// This will inject an artificial comma token into the token stream for the +/// `ProceduralMasqueradeDummyType` enum, to make the tokens presented to the +/// macro match those expected by the old `pprust` output. +pub(crate) fn get_ann_tokenstream_with_pretty_printing_compatibility_hack( + ann: &Annotatable, + sess: &ParseSess, +) -> TokenStream { + let stream = ann.to_tokens(); + // FIXME: It needs to be removed, but there are some compatibility issues + // (see #73345). + if pretty_printing_compatibility_hack(ann, sess) { + let mut tts: Vec<_> = stream.into_trees().collect(); + if let Some(TokenTree::Delimited(span, _, inner)) = tts.last_mut() { + if !matches!( + inner.trees().last(), + Some(TokenTree::Token(token::Token { kind: token::Comma, .. }, ..)) + ) { + *inner = inner + .trees() + .cloned() + .chain([TokenTree::token_alone(token::Comma, span.close)]) + .collect(); + } + } + tts.into_iter().collect() + } else { + stream + } } diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs index 1a2ab9d190ebd..a13b4cf66846c 100644 --- a/compiler/rustc_expand/src/proc_macro.rs +++ b/compiler/rustc_expand/src/proc_macro.rs @@ -3,14 +3,12 @@ use crate::proc_macro_server; use rustc_ast as ast; use rustc_ast::ptr::P; -use rustc_ast::token; use rustc_ast::tokenstream::TokenStream; -use rustc_data_structures::sync::Lrc; use rustc_errors::ErrorGuaranteed; use rustc_parse::parser::ForceCollect; use rustc_session::config::ProcMacroExecutionStrategy; use rustc_span::profiling::SpannedEventArgRecorder; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::Span; struct CrossbeamMessagePipe { tx: crossbeam_channel::Sender, @@ -116,17 +114,10 @@ impl MultiItemModifier for DeriveProcMacro { // We need special handling for statement items // (e.g. 
`fn foo() { #[derive(Debug)] struct Bar; }`) let is_stmt = matches!(item, Annotatable::Stmt(..)); - let hack = crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess.parse_sess); - let input = if hack { - let nt = match item { - Annotatable::Item(item) => token::NtItem(item), - Annotatable::Stmt(stmt) => token::NtStmt(stmt), - _ => unreachable!(), - }; - TokenStream::token_alone(token::Interpolated(Lrc::new(nt)), DUMMY_SP) - } else { - item.to_tokens() - }; + let input = crate::base::get_ann_tokenstream_with_pretty_printing_compatibility_hack( + &item, + &ecx.sess.parse_sess, + ); let stream = { let _timer = diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 59a7b668a83ce..fa67c5a7986aa 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -83,8 +83,27 @@ impl ToInternal for LitKind { } } -impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec> { - fn from_internal((stream, rustc): (TokenStream, &mut Rustc<'_, '_>)) -> Self { +impl ToInternal for Level { + fn to_internal(self) -> rustc_errors::Level { + match self { + Level::Error => rustc_errors::Level::Error { lint: false }, + Level::Warning => rustc_errors::Level::Warning(None), + Level::Note => rustc_errors::Level::Note, + Level::Help => rustc_errors::Level::Help, + _ => unreachable!("unknown proc_macro::Level variant: {:?}", self), + } + } +} + +pub(crate) struct RpcContext<'a> { + sess: &'a ParseSess, +} + +impl<'a> server::RpcContext> for RpcContext<'a> { + fn tts_from_tokenstream( + &mut self, + stream: TokenStream, + ) -> Vec> { use rustc_ast::token::*; // Estimate the capacity as `stream.len()` rounded up to the next power @@ -98,7 +117,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec)> for Vec)> for Vec { - let stream = TokenStream::from_nonterminal_ast(&nt); - // A hack used to pass AST fragments to attribute and derive - // macros as a single 
nonterminal token instead of a token - // stream. Such token needs to be "unwrapped" and not - // represented as a delimited group. - // FIXME: It needs to be removed, but there are some - // compatibility issues (see #73345). - if crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.sess()) { - trees.extend(Self::from_internal((stream, rustc))); - } else { - trees.push(TokenTree::Group(Group { - delimiter: pm::Delimiter::None, - stream: Some(stream), - span: DelimSpan::from_single(span), - })) - } + trees.push(TokenTree::Group(Group { + delimiter: pm::Delimiter::None, + stream: TokenStream::from_nonterminal_ast(&nt), + span: DelimSpan::from_single(span), + })) } OpenDelim(..) | CloseDelim(..) => unreachable!(), @@ -235,103 +243,105 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec for (TokenTree, &mut Rustc<'_, '_>) { - fn to_internal(self) -> TokenStream { + fn tokenstream_from_tts( + &mut self, + trees: impl Iterator>, + ) -> TokenStream { use rustc_ast::token::*; - let (tree, rustc) = self; - let (ch, joint, span) = match tree { - TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span), - TokenTree::Group(Group { delimiter, stream, span: DelimSpan { open, close, .. 
} }) => { - return tokenstream::TokenStream::delimited( - tokenstream::DelimSpan { open, close }, - delimiter.to_internal(), - stream.unwrap_or_default(), - ); - } - TokenTree::Ident(self::Ident { sym, is_raw, span }) => { - rustc.sess().symbol_gallery.insert(sym, span); - return tokenstream::TokenStream::token_alone(Ident(sym, is_raw), span); - } - TokenTree::Literal(self::Literal { - kind: self::LitKind::Integer, - symbol, - suffix, - span, - }) if symbol.as_str().starts_with('-') => { - let minus = BinOp(BinOpToken::Minus); - let symbol = Symbol::intern(&symbol.as_str()[1..]); - let integer = TokenKind::lit(token::Integer, symbol, suffix); - let a = tokenstream::TokenTree::token_alone(minus, span); - let b = tokenstream::TokenTree::token_alone(integer, span); - return [a, b].into_iter().collect(); - } - TokenTree::Literal(self::Literal { - kind: self::LitKind::Float, - symbol, - suffix, - span, - }) if symbol.as_str().starts_with('-') => { - let minus = BinOp(BinOpToken::Minus); - let symbol = Symbol::intern(&symbol.as_str()[1..]); - let float = TokenKind::lit(token::Float, symbol, suffix); - let a = tokenstream::TokenTree::token_alone(minus, span); - let b = tokenstream::TokenTree::token_alone(float, span); - return [a, b].into_iter().collect(); - } - TokenTree::Literal(self::Literal { kind, symbol, suffix, span }) => { - return tokenstream::TokenStream::token_alone( - TokenKind::lit(kind.to_internal(), symbol, suffix), + let mut builder = tokenstream::TokenStreamBuilder::new(); + for tree in trees { + let (ch, joint, span) = match tree { + TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span), + TokenTree::Group(Group { + delimiter, + stream, + span: DelimSpan { open, close, .. 
}, + }) => { + builder.push(tokenstream::TokenStream::delimited( + tokenstream::DelimSpan { open, close }, + delimiter.to_internal(), + stream, + )); + continue; + } + TokenTree::Ident(self::Ident { sym, is_raw, span }) => { + self.sess.symbol_gallery.insert(sym, span); + builder.push(tokenstream::TokenStream::token_alone(Ident(sym, is_raw), span)); + continue; + } + TokenTree::Literal(self::Literal { + kind: self::LitKind::Integer, + symbol, + suffix, span, - ); - } - }; + }) if symbol.as_str().starts_with('-') => { + let minus = BinOp(BinOpToken::Minus); + let symbol = Symbol::intern(&symbol.as_str()[1..]); + let integer = TokenKind::lit(token::Integer, symbol, suffix); + let a = tokenstream::TokenTree::token_alone(minus, span); + let b = tokenstream::TokenTree::token_alone(integer, span); + builder.push([a, b].into_iter().collect()); + continue; + } + TokenTree::Literal(self::Literal { + kind: self::LitKind::Float, + symbol, + suffix, + span, + }) if symbol.as_str().starts_with('-') => { + let minus = BinOp(BinOpToken::Minus); + let symbol = Symbol::intern(&symbol.as_str()[1..]); + let float = TokenKind::lit(token::Float, symbol, suffix); + let a = tokenstream::TokenTree::token_alone(minus, span); + let b = tokenstream::TokenTree::token_alone(float, span); + builder.push([a, b].into_iter().collect()); + continue; + } + TokenTree::Literal(self::Literal { kind, symbol, suffix, span }) => { + builder.push(tokenstream::TokenStream::token_alone( + TokenKind::lit(kind.to_internal(), symbol, suffix), + span, + )); + continue; + } + }; - let kind = match ch { - b'=' => Eq, - b'<' => Lt, - b'>' => Gt, - b'!' => Not, - b'~' => Tilde, - b'+' => BinOp(Plus), - b'-' => BinOp(Minus), - b'*' => BinOp(Star), - b'/' => BinOp(Slash), - b'%' => BinOp(Percent), - b'^' => BinOp(Caret), - b'&' => BinOp(And), - b'|' => BinOp(Or), - b'@' => At, - b'.' => Dot, - b',' => Comma, - b';' => Semi, - b':' => Colon, - b'#' => Pound, - b'$' => Dollar, - b'?' 
=> Question, - b'\'' => SingleQuote, - _ => unreachable!(), - }; + let kind = match ch { + b'=' => Eq, + b'<' => Lt, + b'>' => Gt, + b'!' => Not, + b'~' => Tilde, + b'+' => BinOp(Plus), + b'-' => BinOp(Minus), + b'*' => BinOp(Star), + b'/' => BinOp(Slash), + b'%' => BinOp(Percent), + b'^' => BinOp(Caret), + b'&' => BinOp(And), + b'|' => BinOp(Or), + b'@' => At, + b'.' => Dot, + b',' => Comma, + b';' => Semi, + b':' => Colon, + b'#' => Pound, + b'$' => Dollar, + b'?' => Question, + b'\'' => SingleQuote, + _ => unreachable!(), + }; - if joint { - tokenstream::TokenStream::token_joint(kind, span) - } else { - tokenstream::TokenStream::token_alone(kind, span) + builder.push(if joint { + tokenstream::TokenStream::token_joint(kind, span) + } else { + tokenstream::TokenStream::token_alone(kind, span) + }); } - } -} -impl ToInternal for Level { - fn to_internal(self) -> rustc_errors::Level { - match self { - Level::Error => rustc_errors::Level::Error { lint: false }, - Level::Warning => rustc_errors::Level::Warning(None), - Level::Note => rustc_errors::Level::Note, - Level::Help => rustc_errors::Level::Help, - _ => unreachable!("unknown proc_macro::Level variant: {:?}", self), - } + builder.build() } } @@ -359,17 +369,19 @@ impl<'a, 'b> Rustc<'a, 'b> { } } - fn sess(&self) -> &ParseSess { + fn sess(&self) -> &'a ParseSess { self.ecx.parse_sess() } } -impl server::Types for Rustc<'_, '_> { +impl<'a> server::Types for Rustc<'a, '_> { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; type SourceFile = Lrc; type Span = Span; type Symbol = Symbol; + + type RpcContext = RpcContext<'a>; } impl server::FreeFunctions for Rustc<'_, '_> { @@ -452,10 +464,6 @@ impl server::FreeFunctions for Rustc<'_, '_> { } impl server::TokenStream for Rustc<'_, '_> { - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } - fn from_str(&mut self, src: &str) -> Self::TokenStream { parse_stream_from_source_str( FileName::proc_macro_source_code(src), @@ 
-465,18 +473,15 @@ impl server::TokenStream for Rustc<'_, '_> { ) } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { - pprust::tts_to_string(stream) + fn to_string(&mut self, stream: Self::TokenStream) -> String { + pprust::tts_to_string(&stream) } - fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result { + fn expand_expr(&mut self, stream: Self::TokenStream) -> Result { // Parse the expression from our tokenstream. let expr: PResult<'_, _> = try { - let mut p = rustc_parse::stream_to_parser( - self.sess(), - stream.clone(), - Some("proc_macro expand expr"), - ); + let mut p = + rustc_parse::stream_to_parser(self.sess(), stream, Some("proc_macro expand expr")); let expr = p.parse_expr()?; if p.token != token::Eof { p.unexpected()?; @@ -528,50 +533,6 @@ impl server::TokenStream for Rustc<'_, '_> { _ => Err(()), } } - - fn from_token_tree( - &mut self, - tree: TokenTree, - ) -> Self::TokenStream { - (tree, &mut *self).to_internal() - } - - fn concat_trees( - &mut self, - base: Option, - trees: Vec>, - ) -> Self::TokenStream { - let mut builder = tokenstream::TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for tree in trees { - builder.push((tree, &mut *self).to_internal()); - } - builder.build() - } - - fn concat_streams( - &mut self, - base: Option, - streams: Vec, - ) -> Self::TokenStream { - let mut builder = tokenstream::TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for stream in streams { - builder.push(stream); - } - builder.build() - } - - fn into_trees( - &mut self, - stream: Self::TokenStream, - ) -> Vec> { - FromInternal::from_internal((stream, self)) - } } impl server::SourceFile for Rustc<'_, '_> { @@ -743,6 +704,10 @@ impl server::Server for Rustc<'_, '_> { } } + fn rpc_context(&mut self) -> Self::RpcContext { + RpcContext { sess: self.sess() } + } + fn intern_symbol(string: &str) -> Self::Symbol { Symbol::intern(string) } diff --git 
a/library/proc_macro/src/bridge/client.rs b/library/proc_macro/src/bridge/client.rs index 4461b21802adb..2ae449cbd1890 100644 --- a/library/proc_macro/src/bridge/client.rs +++ b/library/proc_macro/src/bridge/client.rs @@ -31,13 +31,15 @@ macro_rules! define_handles { // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`. #[allow(non_snake_case)] pub(super) struct HandleStore { + pub(super) rpc_context: S::RpcContext, $($oty: handle::OwnedStore,)* $($ity: handle::InternedStore,)* } impl HandleStore { - pub(super) fn new(handle_counters: &'static HandleCounters) -> Self { + pub(super) fn new(handle_counters: &'static HandleCounters, rpc_context: S::RpcContext) -> Self { HandleStore { + rpc_context, $($oty: handle::OwnedStore::new(&handle_counters.$oty),)* $($ity: handle::InternedStore::new(&handle_counters.$ity),)* } @@ -174,7 +176,6 @@ macro_rules! define_handles { define_handles! { 'owned: FreeFunctions, - TokenStream, SourceFile, 'interned: @@ -187,12 +188,6 @@ define_handles! { // Alternatively, special "modes" could be listed of types in with_api // instead of pattern matching on methods, here and in server decl. -impl Clone for TokenStream { - fn clone(&self) -> Self { - self.clone() - } -} - impl Clone for SourceFile { fn clone(&self) -> Self { self.clone() @@ -219,6 +214,8 @@ impl fmt::Debug for Span { } } +pub(crate) use super::token_stream::TokenStream; + pub(crate) use super::symbol::Symbol; macro_rules! 
define_client_side { @@ -432,7 +429,7 @@ impl Client { Client { get_handle_counters: HandleCounters::get, run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| { - run_client(bridge, |input| f(crate::TokenStream(Some(input))).0) + run_client(bridge, |input| f(crate::TokenStream(input)).0) }), _marker: PhantomData, } @@ -447,7 +444,7 @@ impl Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> { get_handle_counters: HandleCounters::get, run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| { run_client(bridge, |(input, input2)| { - f(crate::TokenStream(Some(input)), crate::TokenStream(Some(input2))).0 + f(crate::TokenStream(input), crate::TokenStream(input2)).0 }) }), _marker: PhantomData, diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs index 4c1e196b5ad16..c24b7ebad636c 100644 --- a/library/proc_macro/src/bridge/mod.rs +++ b/library/proc_macro/src/bridge/mod.rs @@ -60,26 +60,9 @@ macro_rules! 
with_api { fn emit_diagnostic(diagnostic: Diagnostic<$S::Span>); }, TokenStream { - fn drop($self: $S::TokenStream); - fn clone($self: &$S::TokenStream) -> $S::TokenStream; - fn is_empty($self: &$S::TokenStream) -> bool; - fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>; + fn expand_expr($self: $S::TokenStream) -> Result<$S::TokenStream, ()>; fn from_str(src: &str) -> $S::TokenStream; - fn to_string($self: &$S::TokenStream) -> String; - fn from_token_tree( - tree: TokenTree<$S::TokenStream, $S::Span, $S::Symbol>, - ) -> $S::TokenStream; - fn concat_trees( - base: Option<$S::TokenStream>, - trees: Vec>, - ) -> $S::TokenStream; - fn concat_streams( - base: Option<$S::TokenStream>, - streams: Vec<$S::TokenStream>, - ) -> $S::TokenStream; - fn into_trees( - $self: $S::TokenStream - ) -> Vec>; + fn to_string($self: $S::TokenStream) -> String; }, SourceFile { fn drop($self: $S::SourceFile); @@ -154,6 +137,8 @@ mod selfless_reify; pub mod server; #[allow(unsafe_code)] mod symbol; +#[forbid(unsafe_code)] +mod token_stream; use buffer::Buffer; pub use rpc::PanicMessage; @@ -444,7 +429,7 @@ compound_traits!(struct DelimSpan { open, close, entire }); #[derive(Clone)] pub struct Group { pub delimiter: Delimiter, - pub stream: Option, + pub stream: TokenStream, pub span: DelimSpan, } diff --git a/library/proc_macro/src/bridge/server.rs b/library/proc_macro/src/bridge/server.rs index 8202c40d63170..96b737e348388 100644 --- a/library/proc_macro/src/bridge/server.rs +++ b/library/proc_macro/src/bridge/server.rs @@ -14,6 +14,23 @@ pub trait Types { type SourceFile: 'static + Clone; type Span: 'static + Copy + Eq + Hash; type Symbol: 'static; + + type RpcContext: RpcContext; +} + +pub trait RpcContext { + /// Convert a Self::TokenStream into a sequence of `TokenTree`s, + /// calling the callback with each `TokenTree` in order. 
+ fn tts_from_tokenstream( + &mut self, + stream: S::TokenStream, + ) -> Vec>; + + /// Convert a sequence of `TokenTree`s into a `Self::TokenStream`. + fn tokenstream_from_tts( + &mut self, + trees: impl Iterator>, + ) -> S::TokenStream; } /// Declare an associated fn of one of the traits below, adding necessary @@ -39,6 +56,10 @@ macro_rules! declare_server_traits { pub trait Server: Types $(+ $name)* { fn globals(&mut self) -> ExpnGlobals; + /// Get a helper type which can be used to serialize/deserialize + /// types such as TokenStream. + fn rpc_context(&mut self) -> Self::RpcContext; + /// Intern a symbol received from RPC fn intern_symbol(ident: &str) -> Self::Symbol; @@ -61,6 +82,9 @@ impl Server for MarkedTypes { fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { S::with_symbol_string(symbol.unmark(), f) } + fn rpc_context(&mut self) -> Self::RpcContext { + MarkedRpcContext(self.0.rpc_context()) + } } macro_rules! define_mark_types_impls { @@ -69,6 +93,8 @@ macro_rules! define_mark_types_impls { }),* $(,)?) => { impl Types for MarkedTypes { $(type $name = Marked;)* + + type RpcContext = MarkedRpcContext; } $(impl $name for MarkedTypes { @@ -80,6 +106,35 @@ macro_rules! 
define_mark_types_impls { } with_api!(Self, self_, define_mark_types_impls); +pub(super) struct MarkedRpcContext(R); + +impl> RpcContext> for MarkedRpcContext { + fn tts_from_tokenstream( + &mut self, + stream: as Types>::TokenStream, + ) -> Vec< + TokenTree< + as Types>::TokenStream, + as Types>::Span, + as Types>::Symbol, + >, + > { + self.0.tts_from_tokenstream(stream.unmark()).into_iter().map(|tt| <_>::mark(tt)).collect() + } + fn tokenstream_from_tts( + &mut self, + trees: impl Iterator< + Item = TokenTree< + as Types>::TokenStream, + as Types>::Span, + as Types>::Symbol, + >, + >, + ) -> as Types>::TokenStream { + <_>::mark(self.0.tokenstream_from_tts(trees.map(|tt| tt.unmark()))) + } +} + struct Dispatcher { handle_store: HandleStore, server: S, @@ -304,8 +359,11 @@ fn run_server< run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer, force_show_panics: bool, ) -> Result { - let mut dispatcher = - Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) }; + let mut server = MarkedTypes(server); + let mut dispatcher = Dispatcher { + handle_store: HandleStore::new(handle_counters, server.rpc_context()), + server, + }; let globals = dispatcher.server.globals(); @@ -338,7 +396,7 @@ impl client::Client { run, force_show_panics, ) - .map(|s| as Types>::TokenStream>>::unmark(s).unwrap_or_default()) + .map(|s| < as Types>::TokenStream>::unmark(s)) } } @@ -367,6 +425,6 @@ impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream run, force_show_panics, ) - .map(|s| as Types>::TokenStream>>::unmark(s).unwrap_or_default()) + .map(|s| < as Types>::TokenStream>::unmark(s)) } } diff --git a/library/proc_macro/src/bridge/token_stream.rs b/library/proc_macro/src/bridge/token_stream.rs new file mode 100644 index 0000000000000..500de61c58bd2 --- /dev/null +++ b/library/proc_macro/src/bridge/token_stream.rs @@ -0,0 +1,71 @@ +use super::server::RpcContext; +use super::*; + +use std::rc::Rc; + +#[derive(Clone)] +pub(crate) 
struct TokenStream { + pub(crate) tokens: Rc>, +} + +impl TokenStream { + pub(crate) fn new(tokens: Vec) -> Self { + TokenStream { tokens: Rc::new(tokens) } + } + + pub(crate) fn is_empty(&self) -> bool { + self.tokens.is_empty() + } +} + +impl Encode for TokenStream { + fn encode(self, w: &mut Writer, s: &mut S) { + let tts: Vec<_> = self + .tokens + .iter() + .map(|tt| match tt { + crate::TokenTree::Group(group) => TokenTree::Group(group.0.clone()), + crate::TokenTree::Punct(punct) => TokenTree::Punct(punct.0.clone()), + crate::TokenTree::Ident(ident) => TokenTree::Ident(ident.0.clone()), + crate::TokenTree::Literal(literal) => TokenTree::Literal(literal.0.clone()), + }) + .collect(); + tts.encode(w, s) + } +} + +impl DecodeMut<'_, '_, client::HandleStore>> + for Marked +{ + fn decode(r: &mut Reader<'_>, s: &mut client::HandleStore>) -> Self { + let tts: Vec<_> = DecodeMut::decode(r, s); + s.rpc_context.tokenstream_from_tts(tts.into_iter()) + } +} + +impl Encode>> + for Marked +{ + fn encode(self, w: &mut Writer, s: &mut client::HandleStore>) { + let tts = s.rpc_context.tts_from_tokenstream(self); + tts.encode(w, s); + } +} + +impl DecodeMut<'_, '_, S> for TokenStream { + fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { + TokenStream::new( + >::decode(r, s) + .into_iter() + .map(|tt| match tt { + TokenTree::Group(group) => crate::TokenTree::Group(crate::Group(group)), + TokenTree::Punct(punct) => crate::TokenTree::Punct(crate::Punct(punct)), + TokenTree::Ident(ident) => crate::TokenTree::Ident(crate::Ident(ident)), + TokenTree::Literal(literal) => { + crate::TokenTree::Literal(crate::Literal(literal)) + } + }) + .collect(), + ) + } +} diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index 8e478cd7bc8a2..eea462e8f45de 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -24,6 +24,7 @@ #![feature(staged_api)] #![feature(allow_internal_unstable)] #![feature(decl_macro)] +#![feature(let_else)] 
#![feature(local_key_cell_methods)] #![feature(maybe_uninit_write_slice)] #![feature(negative_impls)] @@ -46,6 +47,7 @@ pub use diagnostic::{Diagnostic, Level, MultiSpan}; use std::cmp::Ordering; use std::ops::RangeBounds; use std::path::PathBuf; +use std::rc::Rc; use std::str::FromStr; use std::{error, fmt, iter}; @@ -76,7 +78,7 @@ pub fn is_available() -> bool { /// and `#[proc_macro_derive]` definitions. #[stable(feature = "proc_macro_lib", since = "1.15.0")] #[derive(Clone)] -pub struct TokenStream(Option); +pub struct TokenStream(bridge::client::TokenStream); #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl !Send for TokenStream {} @@ -130,13 +132,13 @@ impl TokenStream { /// Returns an empty `TokenStream` containing no token trees. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] pub fn new() -> TokenStream { - TokenStream(None) + TokenStream(bridge::client::TokenStream::new(Vec::new())) } /// Checks if this `TokenStream` is empty. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] pub fn is_empty(&self) -> bool { - self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true) + self.0.is_empty() } /// Parses this `TokenStream` as an expression and attempts to expand any @@ -151,9 +153,8 @@ impl TokenStream { /// considered errors, is unspecified and may change in the future. 
#[unstable(feature = "proc_macro_expand", issue = "90765")] pub fn expand_expr(&self) -> Result { - let stream = self.0.as_ref().ok_or(ExpandError)?; - match bridge::client::TokenStream::expand_expr(stream) { - Ok(stream) => Ok(TokenStream(Some(stream))), + match bridge::client::TokenStream::expand_expr(self.0.clone()) { + Ok(stream) => Ok(TokenStream(stream)), Err(_) => Err(ExpandError), } } @@ -171,7 +172,7 @@ impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result { - Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src)))) + Ok(TokenStream(bridge::client::TokenStream::from_str(src))) } } @@ -180,7 +181,7 @@ impl FromStr for TokenStream { #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl ToString for TokenStream { fn to_string(&self) -> String { - self.0.as_ref().map(|t| t.to_string()).unwrap_or_default() + self.0.clone().to_string() } } @@ -213,97 +214,11 @@ impl Default for TokenStream { #[unstable(feature = "proc_macro_quote", issue = "54722")] pub use quote::{quote, quote_span}; -fn tree_to_bridge_tree( - tree: TokenTree, -) -> bridge::TokenTree { - match tree { - TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0), - TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0), - TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0), - TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0), - } -} - /// Creates a token stream containing a single token tree. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl From for TokenStream { fn from(tree: TokenTree) -> TokenStream { - TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree)))) - } -} - -/// Non-generic helper for implementing `FromIterator` and -/// `Extend` with less monomorphization in calling crates. 
-struct ConcatTreesHelper { - trees: Vec< - bridge::TokenTree< - bridge::client::TokenStream, - bridge::client::Span, - bridge::client::Symbol, - >, - >, -} - -impl ConcatTreesHelper { - fn new(capacity: usize) -> Self { - ConcatTreesHelper { trees: Vec::with_capacity(capacity) } - } - - fn push(&mut self, tree: TokenTree) { - self.trees.push(tree_to_bridge_tree(tree)); - } - - fn build(self) -> TokenStream { - if self.trees.is_empty() { - TokenStream(None) - } else { - TokenStream(Some(bridge::client::TokenStream::concat_trees(None, self.trees))) - } - } - - fn append_to(self, stream: &mut TokenStream) { - if self.trees.is_empty() { - return; - } - stream.0 = Some(bridge::client::TokenStream::concat_trees(stream.0.take(), self.trees)) - } -} - -/// Non-generic helper for implementing `FromIterator` and -/// `Extend` with less monomorphization in calling crates. -struct ConcatStreamsHelper { - streams: Vec, -} - -impl ConcatStreamsHelper { - fn new(capacity: usize) -> Self { - ConcatStreamsHelper { streams: Vec::with_capacity(capacity) } - } - - fn push(&mut self, stream: TokenStream) { - if let Some(stream) = stream.0 { - self.streams.push(stream); - } - } - - fn build(mut self) -> TokenStream { - if self.streams.len() <= 1 { - TokenStream(self.streams.pop()) - } else { - TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams))) - } - } - - fn append_to(mut self, stream: &mut TokenStream) { - if self.streams.is_empty() { - return; - } - let base = stream.0.take(); - if base.is_none() && self.streams.len() == 1 { - stream.0 = self.streams.pop(); - } else { - stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams)); - } + TokenStream(bridge::client::TokenStream::new(vec![tree])) } } @@ -311,10 +226,7 @@ impl ConcatStreamsHelper { #[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl iter::FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { - let iter = trees.into_iter(); - let mut builder = 
ConcatTreesHelper::new(iter.size_hint().0); - iter.for_each(|tree| builder.push(tree)); - builder.build() + TokenStream(bridge::client::TokenStream::new(trees.into_iter().collect())) } } @@ -323,72 +235,59 @@ impl iter::FromIterator for TokenStream { #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl iter::FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { - let iter = streams.into_iter(); - let mut builder = ConcatStreamsHelper::new(iter.size_hint().0); - iter.for_each(|stream| builder.push(stream)); - builder.build() + TokenStream(bridge::client::TokenStream::new(streams.into_iter().flatten().collect())) } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend for TokenStream { fn extend>(&mut self, trees: I) { - let iter = trees.into_iter(); - let mut builder = ConcatTreesHelper::new(iter.size_hint().0); - iter.for_each(|tree| builder.push(tree)); - builder.append_to(self); + Rc::make_mut(&mut self.0.tokens).extend(trees) } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend for TokenStream { fn extend>(&mut self, streams: I) { - let iter = streams.into_iter(); - let mut builder = ConcatStreamsHelper::new(iter.size_hint().0); - iter.for_each(|stream| builder.push(stream)); - builder.append_to(self); + Rc::make_mut(&mut self.0.tokens).extend(streams.into_iter().flatten()) } } /// Public implementation details for the `TokenStream` type, such as iterators. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] pub mod token_stream { - use crate::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree}; + use crate::{TokenStream, TokenTree}; + use std::rc::Rc; /// An iterator over `TokenStream`'s `TokenTree`s. /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, /// and returns whole groups as token trees. 
#[derive(Clone)] #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - pub struct IntoIter( - std::vec::IntoIter< - bridge::TokenTree< - bridge::client::TokenStream, - bridge::client::Span, - bridge::client::Symbol, - >, - >, - ); + pub struct IntoIter { + index: usize, + trees: Rc>, + } #[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl Iterator for IntoIter { type Item = TokenTree; fn next(&mut self) -> Option { - self.0.next().map(|tree| match tree { - bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)), - bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)), - bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)), - bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)), - }) + let rv = self.trees.get(self.index).cloned(); + if rv.is_some() { + self.index += 1; + } + rv } fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() + let len = self.trees.len() - self.index; + (len, Some(len)) } fn count(self) -> usize { - self.0.count() + self.size_hint().0 } } @@ -398,7 +297,7 @@ pub mod token_stream { type IntoIter = IntoIter; fn into_iter(self) -> IntoIter { - IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter()) + IntoIter { index: 0, trees: self.0.tokens } } } } diff --git a/src/test/rustdoc-ui/intra-doc/through-proc-macro.stderr b/src/test/rustdoc-ui/intra-doc/through-proc-macro.stderr index f0a7ed1785b43..3ea209c0d22ad 100644 --- a/src/test/rustdoc-ui/intra-doc/through-proc-macro.stderr +++ b/src/test/rustdoc-ui/intra-doc/through-proc-macro.stderr @@ -1,14 +1,19 @@ warning: unresolved link to `Oooops` - --> $DIR/through-proc-macro.rs:13:10 + --> $DIR/through-proc-macro.rs:13:5 | LL | /// [Oooops] - | ^^^^^^ no item named `Oooops` in scope + | ^^^^^^^^^^^^ | note: the lint level is defined here --> $DIR/through-proc-macro.rs:7:9 | LL | #![warn(rustdoc::broken_intra_doc_links)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: the link appears in this line: + + [Oooops] + ^^^^^^ + = note: no 
item named `Oooops` in scope = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]` warning: 1 warning emitted diff --git a/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout b/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout index 4de8746a1b460..256ab6501f1ab 100644 --- a/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout +++ b/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout @@ -11,9 +11,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:21:21: 21:26 (#4), }, ] -PRINT-BANG INPUT (DISPLAY): 1 + 1, { "a" }, let a = 1;, String, my_name, 'a, my_val = 30, -std::option::Option, pub(in some::path) , [a b c], -30 -PRINT-BANG RE-COLLECTED (DISPLAY): 1 + 1, { "a" }, let a = 1, String, my_name, 'a, my_val = 30, +PRINT-BANG INPUT (DISPLAY): 1 + 1, { "a" }, let a = 1, String, my_name, 'a, my_val = 30, std :: option :: Option, pub(in some :: path), [a b c], - 30 PRINT-BANG INPUT (DEBUG): TokenStream [ Group { diff --git a/src/test/ui/proc-macro/capture-unglued-token.stdout b/src/test/ui/proc-macro/capture-unglued-token.stdout index 7e6b540332c79..1cb59624e0de3 100644 --- a/src/test/ui/proc-macro/capture-unglued-token.stdout +++ b/src/test/ui/proc-macro/capture-unglued-token.stdout @@ -1,5 +1,4 @@ -PRINT-BANG INPUT (DISPLAY): Vec -PRINT-BANG RE-COLLECTED (DISPLAY): Vec < u8 > +PRINT-BANG INPUT (DISPLAY): Vec < u8 > PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/test/ui/proc-macro/doc-comment-preserved.stdout b/src/test/ui/proc-macro/doc-comment-preserved.stdout index f4160d7da80e5..99f5845536b58 100644 --- a/src/test/ui/proc-macro/doc-comment-preserved.stdout +++ b/src/test/ui/proc-macro/doc-comment-preserved.stdout @@ -1,12 +1,4 @@ -PRINT-BANG INPUT (DISPLAY): /** -******* -* DOC * -* DOC * -* DOC * -******* -*/ - pub struct S ; -PRINT-BANG RE-COLLECTED (DISPLAY): #[doc = "\n*******\n* DOC *\n* DOC *\n* DOC *\n*******\n"] pub struct S ; +PRINT-BANG INPUT 
(DISPLAY): #[doc = "\n*******\n* DOC *\n* DOC *\n* DOC *\n*******\n"] pub struct S ; PRINT-BANG INPUT (DEBUG): TokenStream [ Punct { ch: '#', diff --git a/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout b/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout index 686d53e887660..74d787d8589c2 100644 --- a/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout +++ b/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout @@ -1,5 +1,4 @@ -PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = #[allow(warnings)] 0 ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = #[allow(warnings)] #[allow(warnings)] 0 ; 0 }, } +PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = #[allow(warnings)] #[allow(warnings)] 0 ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -122,8 +121,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #4 bytes(306..355), }, ] -PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 } ; 0 }, } +PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -280,8 +278,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #12 bytes(430..483), }, ] -PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH } ; 0 }, } +PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -358,8 +355,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #16 bytes(430..483), }, ] -PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 + 1 } ; 0 }, } +PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1 } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -449,8 
+445,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #20 bytes(430..483), }, ] -PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH + 1 } ; 0 }, } +PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1 } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", diff --git a/src/test/ui/proc-macro/inert-attribute-order.stdout b/src/test/ui/proc-macro/inert-attribute-order.stdout index cc215545952df..6b6f1f2492a53 100644 --- a/src/test/ui/proc-macro/inert-attribute-order.stdout +++ b/src/test/ui/proc-macro/inert-attribute-order.stdout @@ -1,7 +1,4 @@ -PRINT-ATTR INPUT (DISPLAY): /// 1 -#[rustfmt :: attr2] #[doc = "3"] #[doc = "4"] #[rustfmt :: attr5] /// 6 -#[print_attr(nodebug)] struct S ; -PRINT-ATTR RE-COLLECTED (DISPLAY): #[doc = " 1"] #[rustfmt :: attr2] #[doc = "3"] #[doc = "4"] +PRINT-ATTR INPUT (DISPLAY): #[doc = " 1"] #[rustfmt :: attr2] #[doc = "3"] #[doc = "4"] #[rustfmt :: attr5] #[doc = " 6"] #[print_attr(nodebug)] struct S ; PRINT-ATTR INPUT (DISPLAY): #[doc = " 1"] #[rustfmt :: attr2] #[doc = "3"] #[doc = "4"] #[rustfmt :: attr5] #[doc = " 6"] struct S ; diff --git a/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.rs b/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.rs new file mode 100644 index 0000000000000..2fc091323b5ea --- /dev/null +++ b/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.rs @@ -0,0 +1,14 @@ +// aux-build:test-macros.rs + +#[macro_use] +extern crate test_macros; + +#[derive(Print)] +#[allow(unused)] +enum ProceduralMasqueradeDummyType { +//~^ ERROR using +//~| WARN this was previously + Input = (0, stringify!(input tokens!?)).0 +} + +fn main() {} diff --git a/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stderr b/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stderr new file mode 100644 index 0000000000000..9debe3b186aaf --- /dev/null 
+++ b/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stderr @@ -0,0 +1,25 @@ +error: using `procedural-masquerade` crate + --> $DIR/issue-73933-procedural-masquerade-full.rs:8:6 + | +LL | enum ProceduralMasqueradeDummyType { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[deny(proc_macro_back_compat)]` on by default + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + = note: for more information, see issue #83125 + = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. + +error: aborting due to previous error + +Future incompatibility report: Future breakage diagnostic: +error: using `procedural-masquerade` crate + --> $DIR/issue-73933-procedural-masquerade-full.rs:8:6 + | +LL | enum ProceduralMasqueradeDummyType { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[deny(proc_macro_back_compat)]` on by default + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + = note: for more information, see issue #83125 + = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. + diff --git a/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stdout b/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stdout new file mode 100644 index 0000000000000..8b249664a8774 --- /dev/null +++ b/src/test/ui/proc-macro/issue-73933-procedural-masquerade-full.stdout @@ -0,0 +1,118 @@ +PRINT-DERIVE INPUT (DISPLAY): #[allow(unused)] enum ProceduralMasqueradeDummyType +{ Input = (0, stringify! (input tokens! 
?)).0, } +PRINT-DERIVE INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: #0 bytes(86..87), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: #0 bytes(88..93), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "unused", + span: #0 bytes(94..100), + }, + ], + span: #0 bytes(93..101), + }, + ], + span: #0 bytes(87..102), + }, + Ident { + ident: "enum", + span: #0 bytes(103..107), + }, + Ident { + ident: "ProceduralMasqueradeDummyType", + span: #0 bytes(108..137), + }, + Group { + delimiter: Brace, + stream: TokenStream [ + Ident { + ident: "Input", + span: #0 bytes(191..196), + }, + Punct { + ch: '=', + spacing: Alone, + span: #0 bytes(197..198), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: #0 bytes(200..201), + }, + Punct { + ch: ',', + spacing: Alone, + span: #0 bytes(201..202), + }, + Ident { + ident: "stringify", + span: #0 bytes(203..212), + }, + Punct { + ch: '!', + spacing: Alone, + span: #0 bytes(212..213), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "input", + span: #0 bytes(214..219), + }, + Ident { + ident: "tokens", + span: #0 bytes(220..226), + }, + Punct { + ch: '!', + spacing: Joint, + span: #0 bytes(226..227), + }, + Punct { + ch: '?', + spacing: Alone, + span: #0 bytes(227..228), + }, + ], + span: #0 bytes(213..229), + }, + ], + span: #0 bytes(199..230), + }, + Punct { + ch: '.', + spacing: Alone, + span: #0 bytes(230..231), + }, + Literal { + kind: Integer, + symbol: "0", + suffix: None, + span: #0 bytes(231..232), + }, + Punct { + ch: ',', + spacing: Alone, + span: #0 bytes(233..234), + }, + ], + span: #0 bytes(138..234), + }, +] diff --git a/src/test/ui/proc-macro/issue-73933-procedural-masquerade.rs b/src/test/ui/proc-macro/issue-73933-procedural-masquerade.rs index 0c1c51c01a884..6eeed6ba3533a 100644 --- 
a/src/test/ui/proc-macro/issue-73933-procedural-masquerade.rs +++ b/src/test/ui/proc-macro/issue-73933-procedural-masquerade.rs @@ -6,12 +6,6 @@ extern crate test_macros; #[derive(Print)] enum ProceduralMasqueradeDummyType { //~^ ERROR using -//~| WARN this was previously -//~| ERROR using -//~| WARN this was previously -//~| ERROR using -//~| WARN this was previously -//~| ERROR using //~| WARN this was previously Input } diff --git a/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stderr b/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stderr index be4239089e881..dff71c9eacd4d 100644 --- a/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stderr +++ b/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stderr @@ -9,37 +9,7 @@ LL | enum ProceduralMasqueradeDummyType { = note: for more information, see issue #83125 = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. -error: using `procedural-masquerade` crate - --> $DIR/issue-73933-procedural-masquerade.rs:7:6 - | -LL | enum ProceduralMasqueradeDummyType { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #83125 - = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. - -error: using `procedural-masquerade` crate - --> $DIR/issue-73933-procedural-masquerade.rs:7:6 - | -LL | enum ProceduralMasqueradeDummyType { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #83125 - = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. 
Versions of this crate below 0.1.7 will eventually stop compiling. - -error: using `procedural-masquerade` crate - --> $DIR/issue-73933-procedural-masquerade.rs:7:6 - | -LL | enum ProceduralMasqueradeDummyType { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #83125 - = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. - -error: aborting due to 4 previous errors +error: aborting due to previous error Future incompatibility report: Future breakage diagnostic: error: using `procedural-masquerade` crate @@ -53,39 +23,3 @@ LL | enum ProceduralMasqueradeDummyType { = note: for more information, see issue #83125 = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. -Future breakage diagnostic: -error: using `procedural-masquerade` crate - --> $DIR/issue-73933-procedural-masquerade.rs:7:6 - | -LL | enum ProceduralMasqueradeDummyType { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: `#[deny(proc_macro_back_compat)]` on by default - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #83125 - = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. 
- -Future breakage diagnostic: -error: using `procedural-masquerade` crate - --> $DIR/issue-73933-procedural-masquerade.rs:7:6 - | -LL | enum ProceduralMasqueradeDummyType { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: `#[deny(proc_macro_back_compat)]` on by default - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #83125 - = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. - -Future breakage diagnostic: -error: using `procedural-masquerade` crate - --> $DIR/issue-73933-procedural-masquerade.rs:7:6 - | -LL | enum ProceduralMasqueradeDummyType { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: `#[deny(proc_macro_back_compat)]` on by default - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #83125 - = note: The `procedural-masquerade` crate has been unnecessary since Rust 1.30.0. Versions of this crate below 0.1.7 will eventually stop compiling. 
- diff --git a/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stdout b/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stdout index 50334589d0bba..d581a481da8f9 100644 --- a/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stdout +++ b/src/test/ui/proc-macro/issue-73933-procedural-masquerade.stdout @@ -1,5 +1,4 @@ PRINT-DERIVE INPUT (DISPLAY): enum ProceduralMasqueradeDummyType { Input, } -PRINT-DERIVE RE-COLLECTED (DISPLAY): enum ProceduralMasqueradeDummyType { Input } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -14,9 +13,14 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ stream: TokenStream [ Ident { ident: "Input", - span: #0 bytes(315..320), + span: #0 bytes(174..179), + }, + Punct { + ch: ',', + spacing: Alone, + span: #0 bytes(180..181), }, ], - span: #0 bytes(121..322), + span: #0 bytes(121..181), }, ] diff --git a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout index 60a400a5deabf..a932c2dcda088 100644 --- a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout +++ b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout @@ -1,7 +1,4 @@ -PRINT-BANG INPUT (DISPLAY): foo! { #[fake_attr] mod bar { - #![doc = r" Foo"] -} } -PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): foo! { #[fake_attr] mod bar { #! [doc = r" Foo"] } } +PRINT-BANG INPUT (DISPLAY): foo! { #[fake_attr] mod bar { #! [doc = r" Foo"] } } PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "foo", diff --git a/src/test/ui/proc-macro/nonterminal-expansion.stdout b/src/test/ui/proc-macro/nonterminal-expansion.stdout index 4d884348f2ca4..3b0f2e82d56d0 100644 --- a/src/test/ui/proc-macro/nonterminal-expansion.stdout +++ b/src/test/ui/proc-macro/nonterminal-expansion.stdout @@ -1,5 +1,4 @@ -PRINT-ATTR_ARGS INPUT (DISPLAY): a, line!(), b -PRINT-ATTR_ARGS RE-COLLECTED (DISPLAY): a, line! (), b +PRINT-ATTR_ARGS INPUT (DISPLAY): a, line! 
(), b PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "a", diff --git a/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout b/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout index c08e5308138c9..54776eb8ff131 100644 --- a/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout +++ b/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout @@ -1,5 +1,4 @@ -PRINT-BANG INPUT (DISPLAY): struct S; -PRINT-BANG RE-COLLECTED (DISPLAY): struct S ; +PRINT-BANG INPUT (DISPLAY): struct S ; PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs index e4e43e97dde82..dcb88c53d54c2 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs @@ -50,6 +50,7 @@ impl server::Types for RustAnalyzer { type SourceFile = SourceFile; type Span = Span; type Symbol = Symbol; + type RpcContext = RpcContext; } impl server::FreeFunctions for RustAnalyzer { @@ -78,179 +79,16 @@ impl server::FreeFunctions for RustAnalyzer { } impl server::TokenStream for RustAnalyzer { - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } fn from_str(&mut self, src: &str) -> Self::TokenStream { use std::str::FromStr; Self::TokenStream::from_str(src).expect("cannot parse string") } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { + fn to_string(&mut self, stream: Self::TokenStream) -> String { stream.to_string() } - fn from_token_tree( - &mut self, - tree: bridge::TokenTree, - ) -> Self::TokenStream { - match tree { - bridge::TokenTree::Group(group) => { - let group = Group { - delimiter: delim_to_internal(group.delimiter), - token_trees: match group.stream { - Some(stream) => stream.into_iter().collect(), - None => Vec::new(), - }, - }; - let 
tree = TokenTree::from(group); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Ident(ident) => { - // FIXME: handle raw idents - let text = ident.sym.text(); - let ident: tt::Ident = tt::Ident { text, id: ident.span }; - let leaf = tt::Leaf::from(ident); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Literal(literal) => { - let literal = LiteralFormatter(literal); - let text = literal - .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied())); - - let literal = tt::Literal { text, id: literal.0.span }; - let leaf = tt::Leaf::from(literal); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Punct(p) => { - let punct = tt::Punct { - char: p.ch as char, - spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, - id: p.span, - }; - let leaf = tt::Leaf::from(punct); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - } - } - - fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { - Ok(self_.clone()) - } - - fn concat_trees( - &mut self, - base: Option, - trees: Vec>, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for tree in trees { - builder.push(self.from_token_tree(tree)); - } - builder.build() - } - - fn concat_streams( - &mut self, - base: Option, - streams: Vec, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for stream in streams { - builder.push(stream); - } - builder.build() - } - - fn into_trees( - &mut self, - stream: Self::TokenStream, - ) -> Vec> { - stream - .into_iter() - .map(|tree| match tree { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(bridge::Ident { - sym: Symbol::intern(&ident.text), - // FIXME: handle raw idents - is_raw: false, - span: ident.id, 
- }) - } - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { - bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Err, - symbol: Symbol::intern(&lit.text), - // FIXME: handle suffixes - suffix: None, - span: lit.id, - }) - } - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { - bridge::TokenTree::Punct(bridge::Punct { - ch: punct.char as u8, - joint: punct.spacing == Spacing::Joint, - span: punct.id, - }) - } - tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { - delimiter: delim_to_external(subtree.delimiter), - stream: if subtree.token_trees.is_empty() { - None - } else { - Some(subtree.token_trees.into_iter().collect()) - }, - span: bridge::DelimSpan::from_single( - subtree.delimiter.map_or(Span::unspecified(), |del| del.id), - ), - }), - }) - .collect() - } -} - -fn delim_to_internal(d: proc_macro::Delimiter) -> Option { - let kind = match d { - proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, - proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace, - proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket, - proc_macro::Delimiter::None => return None, - }; - Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) -} - -fn delim_to_external(d: Option) -> proc_macro::Delimiter { - match d.map(|it| it.kind) { - Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis, - Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace, - Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket, - None => proc_macro::Delimiter::None, - } -} - -fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing { - match spacing { - proc_macro::Spacing::Alone => Spacing::Alone, - proc_macro::Spacing::Joint => Spacing::Joint, - } -} - -fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { - match spacing { - Spacing::Alone => proc_macro::Spacing::Alone, - Spacing::Joint => proc_macro::Spacing::Joint, + fn expand_expr(&mut 
self, self_: Self::TokenStream) -> Result { + Ok(self_) } } @@ -349,6 +187,10 @@ impl server::Server for RustAnalyzer { } } + fn rpc_context(&mut self) -> Self::RpcContext { + RpcContext {} + } + fn intern_symbol(ident: &str) -> Self::Symbol { Symbol::intern(&tt::SmolStr::from(ident)) } @@ -358,6 +200,138 @@ impl server::Server for RustAnalyzer { } } +pub struct RpcContext; + +impl server::RpcContext for RpcContext { + fn tts_from_tokenstream( + &mut self, + stream: TokenStream, + ) -> Vec> { + stream + .into_iter() + .map(|tree| match tree { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(bridge::Ident { + sym: Symbol::intern(&ident.text), + // FIXME: handle raw idents + is_raw: false, + span: ident.id, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + bridge::TokenTree::Literal(bridge::Literal { + // FIXME: handle literal kinds + kind: bridge::LitKind::Err, + symbol: Symbol::intern(&lit.text), + // FIXME: handle suffixes + suffix: None, + span: lit.id, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + bridge::TokenTree::Punct(bridge::Punct { + ch: punct.char as u8, + joint: punct.spacing == Spacing::Joint, + span: punct.id, + }) + } + tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { + delimiter: delim_to_external(subtree.delimiter), + stream: subtree.token_trees.into_iter().collect(), + span: bridge::DelimSpan::from_single( + subtree.delimiter.map_or(Span::unspecified(), |del| del.id), + ), + }), + }) + .collect() + } + + fn tokenstream_from_tts( + &mut self, + trees: impl Iterator>, + ) -> TokenStream { + let mut builder = TokenStreamBuilder::new(); + for tree in trees { + builder.push(match tree { + bridge::TokenTree::Group(group) => { + let group = Group { + delimiter: delim_to_internal(group.delimiter), + token_trees: group.stream.into_iter().collect(), + }; + let tree = TokenTree::from(group); + TokenStream::from_iter(vec![tree]) + } + + bridge::TokenTree::Ident(ident) => { + // 
FIXME: handle raw idents + let text = ident.sym.text(); + let ident: tt::Ident = tt::Ident { text, id: ident.span }; + let leaf = tt::Leaf::from(ident); + let tree = TokenTree::from(leaf); + TokenStream::from_iter(vec![tree]) + } + + bridge::TokenTree::Literal(literal) => { + let literal = LiteralFormatter(literal); + let text = literal.with_stringify_parts(|parts| { + tt::SmolStr::from_iter(parts.iter().copied()) + }); + + let literal = tt::Literal { text, id: literal.0.span }; + let leaf = tt::Leaf::from(literal); + let tree = TokenTree::from(leaf); + TokenStream::from_iter(vec![tree]) + } + + bridge::TokenTree::Punct(p) => { + let punct = tt::Punct { + char: p.ch as char, + spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, + id: p.span, + }; + let leaf = tt::Leaf::from(punct); + let tree = TokenTree::from(leaf); + TokenStream::from_iter(vec![tree]) + } + }); + } + builder.build() + } +} + +fn delim_to_internal(d: proc_macro::Delimiter) -> Option { + let kind = match d { + proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, + proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace, + proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket, + proc_macro::Delimiter::None => return None, + }; + Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) +} + +fn delim_to_external(d: Option) -> proc_macro::Delimiter { + match d.map(|it| it.kind) { + Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis, + Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace, + Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket, + None => proc_macro::Delimiter::None, + } +} + +fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing { + match spacing { + proc_macro::Spacing::Alone => Spacing::Alone, + proc_macro::Spacing::Joint => Spacing::Joint, + } +} + +fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { + match spacing { + Spacing::Alone => proc_macro::Spacing::Alone, + 
Spacing::Joint => proc_macro::Spacing::Joint, + } +} + struct LiteralFormatter(bridge::Literal); impl LiteralFormatter {