@@ -18,21 +18,26 @@ use tokenstream;

use std::rc::Rc;

+/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
+/// that the delimiter itself might be `NoDelim`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Delimited {
    pub delim: token::DelimToken,
    pub tts: Vec<TokenTree>,
}

impl Delimited {
+    /// Return the opening delimiter (possibly `NoDelim`).
    pub fn open_token(&self) -> token::Token {
        token::OpenDelim(self.delim)
    }

+    /// Return the closing delimiter (possibly `NoDelim`).
    pub fn close_token(&self) -> token::Token {
        token::CloseDelim(self.delim)
    }
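
// [Editor's note: illustrative sketch, not part of this change.] Given the definitions above,
// a parenthesized group would round-trip through these accessors roughly as follows (the
// `Delimited` value here is hypothetical):
//
//     let d = Delimited { delim: token::Paren, tts: Vec::new() };
//     assert_eq!(d.open_token(), token::OpenDelim(token::Paren));
//     assert_eq!(d.close_token(), token::CloseDelim(token::Paren));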

+    /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
    pub fn open_tt(&self, span: Span) -> TokenTree {
        let open_span = if span == DUMMY_SP {
            DUMMY_SP
@@ -42,6 +47,7 @@ impl Delimited {
        TokenTree::Token(open_span, self.open_token())
    }

+    /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
    pub fn close_tt(&self, span: Span) -> TokenTree {
        let close_span = if span == DUMMY_SP {
            DUMMY_SP
@@ -75,7 +81,7 @@ pub enum KleeneOp {
}

/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
-/// are "first-class" token trees.
+/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum TokenTree {
    Token(Span, token::Token),
@@ -93,6 +99,7 @@ pub enum TokenTree {
}
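
// [Editor's note: illustrative sketch, not part of this change.] The enum variants elided by
// this hunk are the ones used further down in this file. Informally, a matcher fragment such
// as `$( $e:expr ),*` is parsed into a `TokenTree::Sequence` whose sub-trees describe the
// `$e:expr` metavariable declaration, with `,` as the separator and `*` as the Kleene
// operator, while a plain `$e` in a macro body becomes a `TokenTree::MetaVar`. The surface
// syntax this parser consumes is the left- and right-hand side of a rule like:
//
//     macro_rules! example {
//         ( $( $e:expr ),* ) => { /* ... */ };
//     }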

impl TokenTree {
+    /// Return the number of tokens in the tree.
    pub fn len(&self) -> usize {
        match *self {
            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
@@ -104,6 +111,8 @@ impl TokenTree {
        }
    }

+    /// Returns true if the given token tree contains no other tokens. This is vacuously true for
+    /// single tokens or metavar/decls, but may be false for delimited trees or sequences.
    pub fn is_empty(&self) -> bool {
        match *self {
            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
@@ -115,6 +124,7 @@ impl TokenTree {
        }
    }

+    /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
    pub fn get_tt(&self, index: usize) -> TokenTree {
        match (self, index) {
            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@@ -146,15 +156,39 @@ impl TokenTree {
    }
}
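
// [Editor's note: illustrative sketch, not part of this change.] From the docs above, and
// assuming the match arms elided by this diff count a delimited group's own delimiters as
// tokens, one would expect roughly the following for the token tree of `(a b)`:
//
//     // len()      == 4      -- `(`, `a`, `b`, `)`
//     // is_empty() == false  -- the group contains other tokens
//     // get_tt(0)  == the opening `(` as a `TokenTree::Token`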

+/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
+/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
+/// collection of `TokenTree` for use in parsing a macro.
+///
+/// # Parameters
+///
+/// - `input`: a token stream to read from, the contents of which we are parsing.
+/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a
+/// macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with
+/// their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and
+/// `ident` are "matchers". They are not present in the body of a macro rule -- just in the
+/// pattern, so we pass a parameter to indicate whether to expect them or not.
+/// - `sess`: the parsing session. Any errors will be emitted to this session.
+///
+/// # Returns
+///
+/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
pub fn parse(
    input: tokenstream::TokenStream,
    expect_matchers: bool,
    sess: &ParseSess,
) -> Vec<TokenTree> {
+    // Will contain the final collection of `self::TokenTree`
    let mut result = Vec::new();
+
+    // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
+    // additional trees if need be.
    let mut trees = input.trees();
    while let Some(tree) = trees.next() {
        let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
+
+        // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
+        // parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`).
        match tree {
            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                let span = match trees.next() {
@@ -182,12 +216,27 @@ pub fn parse(
                    keywords::Invalid.ident(),
                ));
            }
+
+            // Not a metavar or no matchers allowed, so just return the tree
            _ => result.push(tree),
        }
    }
    result
}
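
// [Editor's note: illustrative sketch, not part of this change.] A caller lowering the
// left-hand side of a `macro_rules!` rule might invoke `parse` along these lines, assuming a
// `ParseSess` named `sess` and the rule's token stream `lhs_tokens` are in scope (both names
// are hypothetical):
//
//     let matchers: Vec<TokenTree> = parse(lhs_tokens, true, &sess);
//
// Passing `false` for `expect_matchers` would instead parse a rule body, where fragment
// specifiers such as `:expr` are not expected.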

+/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
+/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
+/// for use in parsing a macro.
+///
+/// Converting the given tree may involve reading more tokens.
+///
+/// # Parameters
+///
+/// - `tree`: the tree we wish to convert.
+/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
+/// converting `tree`.
+/// - `expect_matchers`: same as for `parse` (see above).
+/// - `sess`: the parsing session. Any errors will be emitted to this session.
fn parse_tree<I>(
    tree: tokenstream::TokenTree,
    trees: &mut I,
@@ -197,16 +246,24 @@ fn parse_tree<I>(
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
+    // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
+        // `tree` is a `$` token. Look at the next token in `trees`
        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+            // `tree` is followed by a delimited set of token trees. This indicates the beginning
+            // of a repetition sequence in the macro (e.g. `$(pat)*`).
            Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
+                // Must have `(`, not `{` or `[`
                if delimited.delim != token::Paren {
                    let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                    let msg = format!("expected `(`, found `{}`", tok);
                    sess.span_diagnostic.span_err(span, &msg);
                }
+                // Parse the contents of the sequence itself
                let sequence = parse(delimited.tts.into(), expect_matchers, sess);
+                // Get the Kleene operator and optional separator
                let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
+                // Count the number of captured "names" (i.e. named metavars)
                let name_captures = macro_parser::count_names(&sequence);
                TokenTree::Sequence(
                    span,
@@ -218,6 +275,9 @@ where
                    }),
                )
            }
+
+            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
+            // metavariable that names the crate of the invocation.
            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
                let ident = token.ident().unwrap();
                let span = ident_span.with_lo(span.lo());
@@ -231,6 +291,8 @@ where
                    TokenTree::MetaVar(span, ident)
                }
            }
+
+            // `tree` is followed by a random token. This is an error.
            Some(tokenstream::TokenTree::Token(span, tok)) => {
                let msg = format!(
                    "expected identifier, found `{}`",
@@ -239,9 +301,16 @@ where
                sess.span_diagnostic.span_err(span, &msg);
                TokenTree::MetaVar(span, keywords::Invalid.ident())
            }
+
+            // There are no more tokens. Just return the `$` we already have.
            None => TokenTree::Token(span, token::Dollar),
        },
+
+        // `tree` is an arbitrary token. Keep it.
        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+
+        // `tree` is the beginning of a delimited set of tokens (e.g. `(` or `{`). We need to
+        // descend into the delimited set and further parse it.
        tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
            span,
            Rc::new(Delimited {
@@ -257,8 +326,8 @@ where
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
-/// itself. Note that here we are parsing the _pattern_ itself, rather than trying to match some
-/// stream of tokens against the pattern.
+/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
+/// stream of tokens in an invocation of a macro.
///
/// This function will take some input iterator `input` corresponding to `span` and a parsing
/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene