@@ -93,18 +93,16 @@ pub struct Parser<'a> {
     /// Use span from this token if you need an isolated span.
     pub token: Token,
     /// The current non-normalized token if it's different from `token`.
-    /// Preferable use is through the `unnormalized_token()` getter.
     /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    unnormalized_token: Option<Token>,
+    unnormalized_token: Token,
     /// The previous normalized token.
     /// Use span from this token if you need an isolated span.
     prev_token: Token,
     /// The previous non-normalized token if it's different from `prev_token`.
-    /// Preferable use is through the `unnormalized_prev_token()` getter.
     /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    unnormalized_prev_token: Option<Token>,
-    /// Equivalent to `unnormalized_prev_token().span`.
-    /// FIXME: Remove in favor of `(unnormalized_)prev_token().span`.
+    unnormalized_prev_token: Token,
+    /// Equivalent to `unnormalized_prev_token.span`.
+    /// FIXME: Remove in favor of `(unnormalized_)prev_token.span`.
     pub prev_span: Span,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
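With the `Option` wrapper gone, the non-normalized fields always hold a token; when no normalization applies they are simply equal to `token`/`prev_token`. A minimal stand-in sketch of that invariant (hypothetical `Tok`/`P` types, not the compiler's `Token`/`Parser`):

```rust
// Illustrative stand-in types; shows the invariant that the unnormalized
// field always holds a token and matches the normalized one when nothing
// needed normalizing.
#[derive(Clone, Default, PartialEq, Debug)]
struct Tok(&'static str);

struct P {
    token: Tok,              // normalized view used by grammar rules
    unnormalized_token: Tok, // exactly what the token stream produced
}

impl P {
    fn set_token(&mut self, raw: Tok) {
        self.unnormalized_token = raw.clone();
        self.token = raw; // nothing to normalize in this toy model
    }
}

fn main() {
    let mut p = P { token: Tok::default(), unnormalized_token: Tok::default() };
    p.set_token(Tok("fn"));
    assert_eq!(p.token, p.unnormalized_token);
}
```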
@@ -378,9 +376,9 @@ impl<'a> Parser<'a> {
         let mut parser = Parser {
             sess,
             token: Token::dummy(),
-            unnormalized_token: None,
+            unnormalized_token: Token::dummy(),
             prev_token: Token::dummy(),
-            unnormalized_prev_token: None,
+            unnormalized_prev_token: Token::dummy(),
             prev_span: DUMMY_SP,
             restrictions: Restrictions::empty(),
             recurse_into_file_modules,
@@ -404,7 +402,8 @@ impl<'a> Parser<'a> {
             subparser_name,
         };
 
-        parser.token = parser.next_tok();
+        // Make parser point to the first token.
+        parser.bump();
 
         if let Some(directory) = directory {
            parser.directory = directory;
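Instead of assigning the first token directly from `next_tok()`, construction now reuses `bump()`, so the very first token goes through the same retire/normalize path as every later one. A rough sketch of this "prime via bump" pattern with placeholder types (not the real `Parser`):

```rust
// Hypothetical mini-parser: construct with dummy tokens, then call the normal
// advance routine once so the first token flows through the usual path.
#[derive(Clone, Default, Debug)]
struct Tok(u32);

struct MiniParser {
    stream: Vec<Tok>,
    pos: usize,
    token: Tok,
    prev_token: Tok,
}

impl MiniParser {
    fn new(stream: Vec<Tok>) -> Self {
        let mut p =
            MiniParser { stream, pos: 0, token: Tok::default(), prev_token: Tok::default() };
        // Make the parser point to the first token.
        p.bump();
        p
    }

    fn bump(&mut self) {
        // Retire the current token, then fetch the next one from the stream.
        self.prev_token = std::mem::take(&mut self.token);
        self.token = self.stream.get(self.pos).cloned().unwrap_or_default();
        self.pos += 1;
    }
}

fn main() {
    let p = MiniParser::new(vec![Tok(1), Tok(2)]);
    assert_eq!(p.token.0, 1);
    assert_eq!(p.prev_token.0, 0); // still the dummy
}
```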
@@ -418,27 +417,18 @@ impl<'a> Parser<'a> {
             }
         }
 
-        parser.process_potential_macro_variable();
         parser
     }
 
-    fn unnormalized_token(&self) -> &Token {
-        self.unnormalized_token.as_ref().unwrap_or(&self.token)
-    }
-
-    fn unnormalized_prev_token(&self) -> &Token {
-        self.unnormalized_prev_token.as_ref().unwrap_or(&self.prev_token)
-    }
-
-    fn next_tok(&mut self) -> Token {
+    fn next_tok(&mut self, fallback_span: Span) -> Token {
         let mut next = if self.desugar_doc_comments {
             self.token_cursor.next_desugared()
         } else {
             self.token_cursor.next()
         };
         if next.span.is_dummy() {
             // Tweak the location for better diagnostics, but keep syntactic context intact.
-            next.span = self.unnormalized_token().span.with_ctxt(next.span.ctxt());
+            next.span = fallback_span.with_ctxt(next.span.ctxt());
         }
         next
     }
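The fixup in `next_tok` patches only the *location* of a dummy span while preserving its own *syntax context* (hygiene), now taking that location from the caller-supplied fallback span rather than reading `unnormalized_token` through the removed getter. A self-contained stand-in sketch of that behaviour (toy `Sp` type, not rustc's `Span`):

```rust
// Rough stand-in for `fallback_span.with_ctxt(next.span.ctxt())`.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Sp {
    lo: u32,
    hi: u32,
    ctxt: u32,
}

impl Sp {
    fn is_dummy(self) -> bool {
        self.lo == 0 && self.hi == 0
    }
    fn with_ctxt(self, ctxt: u32) -> Sp {
        Sp { ctxt, ..self }
    }
}

fn fixup(next: Sp, fallback: Sp) -> Sp {
    // Dummy spans get the fallback location but keep their own hygiene context.
    if next.is_dummy() { fallback.with_ctxt(next.ctxt) } else { next }
}

fn main() {
    let dummy = Sp { lo: 0, hi: 0, ctxt: 7 }; // e.g. produced by expansion
    let prev = Sp { lo: 10, hi: 12, ctxt: 0 }; // span of the previous token
    assert_eq!(fixup(dummy, prev), Sp { lo: 10, hi: 12, ctxt: 7 });
}
```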
@@ -896,6 +886,23 @@ impl<'a> Parser<'a> {
         self.parse_delim_comma_seq(token::Paren, f)
     }
 
+    // Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`)
+    // tokens are replaced with usual identifier and lifetime tokens,
+    // so the former are never encountered during normal parsing.
+    crate fn set_token(&mut self, token: Token) {
+        self.unnormalized_token = token;
+        self.token = match &self.unnormalized_token.kind {
+            token::Interpolated(nt) => match **nt {
+                token::NtIdent(ident, is_raw) => {
+                    Token::new(token::Ident(ident.name, is_raw), ident.span)
+                }
+                token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
+                _ => self.unnormalized_token.clone(),
+            },
+            _ => self.unnormalized_token.clone(),
+        }
+    }
+
     /// Advance the parser by one token.
     pub fn bump(&mut self) {
         if self.prev_token.kind == TokenKind::Eof {
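`set_token` now performs the normalization eagerly whenever the current token is replaced, rather than leaving it to a separate post-bump pass. A self-contained sketch of the same idea using a hypothetical token enum instead of rustc's `TokenKind`/`Nonterminal`:

```rust
// Interpolated identifier and lifetime fragments are unwrapped eagerly, so the
// grammar only ever sees plain `Ident`/`Lifetime` tokens.
#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Ident(String),
    Lifetime(String),
    InterpolatedIdent(String),    // stand-in for `NtIdent`
    InterpolatedLifetime(String), // stand-in for `NtLifetime`
    Other(String),
}

fn normalize(unnormalized: &Tok) -> Tok {
    match unnormalized {
        Tok::InterpolatedIdent(name) => Tok::Ident(name.clone()),
        Tok::InterpolatedLifetime(name) => Tok::Lifetime(name.clone()),
        other => other.clone(),
    }
}

fn main() {
    // `$i: ident` bound to `foo` is seen by the grammar as a plain identifier.
    assert_eq!(normalize(&Tok::InterpolatedIdent("foo".into())), Tok::Ident("foo".into()));
    // Anything else passes through unchanged.
    assert_eq!(normalize(&Tok::Other(";".into())), Tok::Other(";".into()));
}
```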
@@ -905,16 +912,15 @@ impl<'a> Parser<'a> {
         }
 
         // Update the current and previous tokens.
-        let next_token = self.next_tok();
-        self.prev_token = mem::replace(&mut self.token, next_token);
+        self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
+        let next_token = self.next_tok(self.unnormalized_prev_token.span);
+        self.set_token(next_token);
 
         // Update fields derived from the previous token.
-        self.prev_span = self.unnormalized_prev_token().span;
+        self.prev_span = self.unnormalized_prev_token.span;
 
         self.expected_tokens.clear();
-        // Check after each token.
-        self.process_potential_macro_variable();
     }
 
     /// Advances the parser using provided token as a next one. Use this when
@@ -924,12 +930,12 @@ impl<'a> Parser<'a> {
     /// Correct token kinds and spans need to be calculated instead.
     fn bump_with(&mut self, next: TokenKind, span: Span) {
         // Update the current and previous tokens.
-        let next_token = Token::new(next, span);
-        self.prev_token = mem::replace(&mut self.token, next_token);
+        self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
+        self.set_token(Token::new(next, span));
 
         // Update fields derived from the previous token.
-        self.prev_span = self.unnormalized_prev_token().span.with_hi(span.lo());
+        self.prev_span = self.unnormalized_prev_token.span.with_hi(span.lo());
 
         self.expected_tokens.clear();
     }
@@ -1066,39 +1072,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub fn process_potential_macro_variable(&mut self) {
-        let normalized_token = match self.token.kind {
-            token::Dollar
-                if self.token.span.from_expansion() && self.look_ahead(1, |t| t.is_ident()) =>
-            {
-                self.bump();
-                let name = match self.token.kind {
-                    token::Ident(name, _) => name,
-                    _ => unreachable!(),
-                };
-                let span = self.prev_span.to(self.token.span);
-                self.struct_span_err(span, &format!("unknown macro variable `{}`", name))
-                    .span_label(span, "unknown macro variable")
-                    .emit();
-                self.bump();
-                return;
-            }
-            token::Interpolated(ref nt) => {
-                // Interpolated identifier and lifetime tokens are replaced with usual identifier
-                // and lifetime tokens, so the former are never encountered during normal parsing.
-                match **nt {
-                    token::NtIdent(ident, is_raw) => {
-                        Token::new(token::Ident(ident.name, is_raw), ident.span)
-                    }
-                    token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
-                    _ => return,
-                }
-            }
-            _ => return,
-        };
-        self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
-    }
-
     /// Parses a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
@@ -1107,15 +1080,14 @@ impl<'a> Parser<'a> {
                     &mut self.token_cursor.frame,
                     self.token_cursor.stack.pop().unwrap(),
                 );
-                self.token.span = frame.span.entire();
+                self.set_token(Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close));
                 self.bump();
                 TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream.into())
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
-                let token = self.token.clone();
                 self.bump();
-                TokenTree::Token(token)
+                TokenTree::Token(self.prev_token.clone())
             }
         }
     }