diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 281bd72deeb80..99ca8c43cfbe2 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -257,7 +257,7 @@ impl<'a> Classifier<'a> {
token::Question => Class::QuestionMark,
token::Dollar => {
- if self.lexer.peek().kind.is_ident() {
+ if self.lexer.peek().is_ident() {
self.in_macro_nonterminal = true;
Class::MacroNonTerminal
} else {
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index b5d9b761773b4..d7e43f645df7b 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned};
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::parser::Parser;
use crate::parse::{self, ParseSess, PResult};
-use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::token::{self, Token};
use crate::ptr::P;
use crate::symbol::{sym, Symbol};
use crate::ThinVec;
@@ -467,8 +467,7 @@ impl MetaItem {
segment.ident.span.ctxt());
idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
}
- idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
- segment.ident.span).into());
+ idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
last_pos = segment.ident.span.hi();
}
self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index f98e1433356c2..4758b6a50e520 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -78,7 +78,7 @@ use crate::ast::{Ident, Name};
use crate::ext::tt::quoted::{self, TokenTree};
use crate::parse::{Directory, ParseSess};
use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
+use crate::parse::token::{self, DocComment, Nonterminal, Token};
use crate::print::pprust;
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{DelimSpan, TokenStream};
@@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> {
seq_op: Option<quoted::KleeneOp>,
/// The separator if we are in a repetition.
- sep: Option<TokenKind>,
+ sep: Option<Token>,
/// The "parent" matcher position if we are in a repetition. That is, the matcher position just
/// before we enter the sequence.
@@ -417,24 +417,24 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: TokenKind) -> String {
- match tok {
+pub fn parse_failure_msg(tok: &Token) -> String {
+ match tok.kind {
token::Eof => "unexpected end of macro invocation".to_string(),
_ => format!(
"no rules expected the token `{}`",
- pprust::token_to_string(&tok)
+ pprust::token_to_string(tok)
),
}
}
/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
-fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool {
- if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) {
- name1 == name2 && is_raw1 == is_raw2
- } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) {
- name1 == name2
+fn token_name_eq(t1: &Token, t2: &Token) -> bool {
+ if let (Some((ident1, is_raw1)), Some((ident2, is_raw2))) = (t1.ident(), t2.ident()) {
+ ident1.name == ident2.name && is_raw1 == is_raw2
+ } else if let (Some(ident1), Some(ident2)) = (t1.lifetime(), t2.lifetime()) {
+ ident1.name == ident2.name
} else {
- *t1 == *t2
+ t1.kind == t2.kind
}
}
@@ -712,7 +712,7 @@ pub fn parse(
// If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
// either the parse is ambiguous (which should never happen) or there is a syntax error.
- if token_name_eq(&parser.token, &token::Eof) {
+ if parser.token == token::Eof {
if eof_items.len() == 1 {
let matches = eof_items[0]
.matches
@@ -804,8 +804,8 @@ pub fn parse(
/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
- match *token {
+fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
+ match token.kind {
token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
_ => None,
}
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 4998129fdee51..e7da195c0055f 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -17,7 +17,7 @@ use crate::symbol::{Symbol, kw, sym};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
use errors::FatalError;
-use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
+use syntax_pos::{Span, symbol::Ident};
use log::debug;
use rustc_data_structures::fx::{FxHashMap};
@@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
let (token, label) = best_failure.expect("ran no matchers");
let span = token.span.substitute_dummy(sp);
- let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
+ let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
err.span_label(span, label);
if let Some(sp) = def_span {
if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
@@ -266,17 +266,19 @@ pub fn compile(
let argument_gram = vec![
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
tts: vec![
- quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
- quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
- quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
+ quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, ast::Ident::from_str("tt")),
+ quoted::TokenTree::token(token::FatArrow, def.span),
+ quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, ast::Ident::from_str("tt")),
],
- separator: Some(if body.legacy { token::Semi } else { token::Comma }),
+ separator: Some(Token::new(
+ if body.legacy { token::Semi } else { token::Comma }, def.span
+ )),
op: quoted::KleeneOp::OneOrMore,
num_captures: 2,
})),
// to phase into semicolon-termination instead of semicolon-separation
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
- tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
+ tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
separator: None,
op: quoted::KleeneOp::ZeroOrMore,
num_captures: 0
@@ -286,7 +288,7 @@ pub fn compile(
let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
Success(m) => m,
Failure(token, msg) => {
- let s = parse_failure_msg(token.kind);
+ let s = parse_failure_msg(&token);
let sp = token.span.substitute_dummy(def.span);
let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
err.span_label(sp, msg);
@@ -608,9 +610,8 @@ impl FirstSets {
// If the sequence contents can be empty, then the first
// token could be the separator token itself.
- if let (Some(ref sep), true) = (seq_rep.separator.clone(),
- subfirst.maybe_empty) {
- first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
+ if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+ first.add_one_maybe(TokenTree::Token(sep.clone()));
}
// Reverse scan: Sequence comes before `first`.
@@ -658,9 +659,8 @@ impl FirstSets {
// If the sequence contents can be empty, then the first
// token could be the separator token itself.
- if let (Some(ref sep), true) = (seq_rep.separator.clone(),
- subfirst.maybe_empty) {
- first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
+ if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+ first.add_one_maybe(TokenTree::Token(sep.clone()));
}
assert!(first.maybe_empty);
@@ -851,7 +851,7 @@ fn check_matcher_core(sess: &ParseSess,
// against SUFFIX
continue 'each_token;
}
- TokenTree::Sequence(sp, ref seq_rep) => {
+ TokenTree::Sequence(_, ref seq_rep) => {
suffix_first = build_suffix_first();
// The trick here: when we check the interior, we want
// to include the separator (if any) as a potential
@@ -864,9 +864,9 @@ fn check_matcher_core(sess: &ParseSess,
// work of cloning it? But then again, this way I may
// get a "tighter" span?
let mut new;
- let my_suffix = if let Some(ref u) = seq_rep.separator {
+ let my_suffix = if let Some(sep) = &seq_rep.separator {
new = suffix_first.clone();
- new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
+ new.add_one_maybe(TokenTree::Token(sep.clone()));
&new
} else {
&suffix_first
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index ec7d7f705d893..707fb65bcc52b 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -23,16 +23,6 @@ pub struct Delimited {
}
impl Delimited {
- /// Returns the opening delimiter (possibly `NoDelim`).
- pub fn open_token(&self) -> TokenKind {
- token::OpenDelim(self.delim)
- }
-
- /// Returns the closing delimiter (possibly `NoDelim`).
- pub fn close_token(&self) -> TokenKind {
- token::CloseDelim(self.delim)
- }
-
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
pub fn open_tt(&self, span: Span) -> TokenTree {
let open_span = if span.is_dummy() {
@@ -40,7 +30,7 @@ impl Delimited {
} else {
span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
};
- TokenTree::token(self.open_token(), open_span)
+ TokenTree::token(token::OpenDelim(self.delim), open_span)
}
/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +40,7 @@ impl Delimited {
} else {
span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
};
- TokenTree::token(self.close_token(), close_span)
+ TokenTree::token(token::CloseDelim(self.delim), close_span)
}
}
@@ -59,7 +49,7 @@ pub struct SequenceRepetition {
/// The sequence of token trees
pub tts: Vec<TokenTree>,
/// The optional separator
- pub separator: Option<TokenKind>,
+ pub separator: Option<Token>,
/// Whether the sequence can be repeated zero (*), or one or more times (+)
pub op: KleeneOp,
/// The number of `Match`s that appear in the sequence (and subsequences)
@@ -282,7 +272,7 @@ where
Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
// Must have `(` not `{` or `[`
if delim != token::Paren {
- let tok = pprust::token_to_string(&token::OpenDelim(delim));
+ let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
let msg = format!("expected `(`, found `{}`", tok);
sess.span_diagnostic.span_err(span.entire(), &msg);
}
@@ -371,8 +361,8 @@ where
/// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
/// `None`.
-fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
- match *token {
+fn kleene_op(token: &Token) -> Option {
+ match token.kind {
token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
token::Question => Some(KleeneOp::ZeroOrOne),
@@ -424,7 +414,7 @@ fn parse_sep_and_kleene_op(
attrs: &[ast::Attribute],
edition: Edition,
macro_node_id: NodeId,
-) -> (Option<TokenKind>, KleeneOp)
+) -> (Option<Token>, KleeneOp)
where
I: Iterator<Item = tokenstream::TokenTree>,
{
@@ -449,7 +439,7 @@ fn parse_sep_and_kleene_op_2015(
_features: &Features,
_attrs: &[ast::Attribute],
macro_node_id: NodeId,
-) -> (Option<TokenKind>, KleeneOp)
+) -> (Option<Token>, KleeneOp)
where
I: Iterator<Item = tokenstream::TokenTree>,
{
@@ -502,7 +492,7 @@ where
a hard error in an upcoming edition",
);
- return (Some(token::Question), op);
+ return (Some(Token::new(token::Question, op1_span)), op);
}
// #2 is a random token (this is an error) :(
@@ -541,7 +531,7 @@ where
}
// #2 is a KleeneOp :D
- Ok(Ok((op, _))) => return (Some(token.kind), op),
+ Ok(Ok((op, _))) => return (Some(token), op),
// #2 is a random token :(
Ok(Err(token)) => token.span,
@@ -567,7 +557,7 @@ fn parse_sep_and_kleene_op_2018(
sess: &ParseSess,
_features: &Features,
_attrs: &[ast::Attribute],
-) -> (Option<TokenKind>, KleeneOp)
+) -> (Option<Token>, KleeneOp)
where
I: Iterator<Item = tokenstream::TokenTree>,
{
@@ -596,7 +586,7 @@ where
}
// #2 is a KleeneOp :D
- Ok(Ok((op, _))) => return (Some(token.kind), op),
+ Ok(Ok((op, _))) => return (Some(token), op),
// #2 is a random token :(
Ok(Err(token)) => token.span,
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 90a9cc8f34d2d..c51f4b20c31c0 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -4,11 +4,10 @@ use crate::ext::expand::Marker;
use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
use crate::ext::tt::quoted;
use crate::mut_visit::noop_visit_tt;
-use crate::parse::token::{self, NtTT, TokenKind};
+use crate::parse::token::{self, NtTT, Token};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use smallvec::{smallvec, SmallVec};
-use syntax_pos::DUMMY_SP;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
@@ -18,7 +17,7 @@ use std::rc::Rc;
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame {
Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
- Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<TokenKind> },
+ Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
}
impl Frame {
@@ -109,17 +108,13 @@ pub fn transcribe(
else {
// Otherwise, if we have just reached the end of a sequence and we can keep repeating,
// go back to the beginning of the sequence.
- if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
- let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
+ if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
+ let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
*repeat_idx += 1;
- if *repeat_idx < repeat_len {
+ if repeat_idx < repeat_len {
*idx = 0;
- if let Some(sep) = sep.clone() {
- let prev_span = match result.last() {
- Some((tt, _)) => tt.span(),
- None => DUMMY_SP,
- };
- result.push(TokenTree::token(sep, prev_span).into());
+ if let Some(sep) = sep {
+ result.push(TokenTree::Token(sep.clone()).into());
}
continue;
}
@@ -242,7 +237,7 @@ pub fn transcribe(
Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
sp = sp.apply_mark(cx.current_expansion.mark);
result.push(TokenTree::token(token::Dollar, sp).into());
- result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into());
+ result.push(TokenTree::Token(Token::from_ast_ident(ident)).into());
}
}
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index c4db9a9df45a9..9d2ac5b4b5168 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -729,7 +729,7 @@ impl<'a> Parser<'a> {
&mut self,
t: &TokenKind,
) -> PResult<'a, bool /* recovered */> {
- let token_str = pprust::token_to_string(t);
+ let token_str = pprust::token_kind_to_string(t);
let this_token_str = self.this_token_descr();
let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
// Point at the end of the macro call when reaching end of macro arguments.
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index e3d959c2c54c4..2f4c48d4bf9e0 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1501,7 +1501,7 @@ fn char_at(s: &str, byte: usize) -> char {
mod tests {
use super::*;
- use crate::ast::{Ident, CrateConfig};
+ use crate::ast::CrateConfig;
use crate::symbol::Symbol;
use crate::source_map::{SourceMap, FilePathMapping};
use crate::feature_gate::UnstableFeatures;
@@ -1562,7 +1562,7 @@ mod tests {
assert_eq!(string_reader.next_token(), token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = Token::new(
- token::Ident(Symbol::intern("fn"), false),
+ mk_ident("fn"),
Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
);
assert_eq!(tok1.kind, tok2.kind);
@@ -1593,7 +1593,7 @@ mod tests {
// make the identifier by looking up the string in the interner
fn mk_ident(id: &str) -> TokenKind {
- TokenKind::from_ast_ident(Ident::from_str(id))
+ token::Ident(Symbol::intern(id), false)
}
fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index b809f99beba33..99d9d40a45b93 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -211,7 +211,7 @@ impl<'a> TokenTreesReader<'a> {
let raw = self.string_reader.peek_span_src_raw;
self.real_token();
let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
- && token::is_op(&self.token);
+ && self.token.is_op();
Ok((tt, if is_joint { Joint } else { NonJoint }))
}
}
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 1d708d39a1379..cde35681988db 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -9,7 +9,7 @@ use crate::parse::parser::emit_unclosed_delims;
use crate::parse::token::TokenKind;
use crate::tokenstream::{TokenStream, TokenTree};
use crate::diagnostics::plugin::ErrorMap;
-use crate::print::pprust::token_to_string;
+use crate::print::pprust;
use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
use rustc_data_structures::sync::{Lrc, Lock};
@@ -312,7 +312,7 @@ pub fn maybe_file_to_stream(
for unmatched in unmatched_braces {
let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
"incorrect close delimiter: `{}`",
- token_to_string(&token::CloseDelim(unmatched.found_delim)),
+ pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
));
db.span_label(unmatched.found_span, "incorrect close delimiter");
if let Some(sp) = unmatched.candidate_span {
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 3acd708814560..d9eba3bbadb68 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -401,7 +401,7 @@ crate enum TokenType {
impl TokenType {
crate fn to_string(&self) -> String {
match *self {
- TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
+ TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
@@ -418,7 +418,7 @@ impl TokenType {
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
-fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
+fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
t == &token::ModSep || t == &token::Lt ||
t == &token::BinOp(token::Shl)
}
@@ -586,10 +586,10 @@ impl<'a> Parser<'a> {
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
- if edible.contains(&self.token) {
+ if edible.contains(&self.token.kind) {
self.bump();
Ok(false)
- } else if inedible.contains(&self.token) {
+ } else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(false)
} else if self.last_unexpected_token_span == Some(self.token.span) {
@@ -951,7 +951,7 @@ impl<'a> Parser<'a> {
Err(mut e) => {
// Attempt to keep parsing if it was a similar separator
if let Some(ref tokens) = t.similar_tokens() {
- if tokens.contains(&self.token) {
+ if tokens.contains(&self.token.kind) {
self.bump();
}
}
@@ -1756,7 +1756,7 @@ impl<'a> Parser<'a> {
fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;
- let is_args_start = |token: &TokenKind| match *token {
+ let is_args_start = |token: &Token| match token.kind {
token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
| token::LArrow => true,
_ => false,
@@ -2627,9 +2627,11 @@ impl<'a> Parser<'a> {
token::Ident(name, _) => name,
_ => unreachable!()
};
- let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
- err.span_label(self.token.span, "unknown macro variable");
- err.emit();
+ let span = self.prev_span.to(self.token.span);
+ self.diagnostic()
+ .struct_span_fatal(span, &format!("unknown macro variable `{}`", name))
+ .span_label(span, "unknown macro variable")
+ .emit();
self.bump();
return
}
@@ -2820,7 +2822,7 @@ impl<'a> Parser<'a> {
LhsExpr::AttributesParsed(attrs) => Some(attrs),
_ => None,
};
- if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
+ if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
return self.parse_prefix_range_expr(attrs);
} else {
self.parse_prefix_expr(attrs)?
@@ -3097,7 +3099,7 @@ impl<'a> Parser<'a> {
self.err_dotdotdot_syntax(self.token.span);
}
- debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
+ debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
"parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
self.token);
let tok = self.token.clone();
@@ -7865,7 +7867,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
for unmatched in unclosed_delims.iter() {
let mut err = handler.struct_span_err(unmatched.found_span, &format!(
"incorrect close delimiter: `{}`",
- pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
+ pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
));
err.span_label(unmatched.found_span, "incorrect close delimiter");
if let Some(sp) = unmatched.candidate_span {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 28a733728bf7b..cc34883e2e815 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -17,7 +17,6 @@ use log::info;
use std::fmt;
use std::mem;
-use std::ops::Deref;
#[cfg(target_arch = "x86_64")]
use rustc_data_structures::static_assert_size;
use rustc_data_structures::sync::Lrc;
@@ -242,20 +241,57 @@ pub struct Token {
}
impl TokenKind {
- /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary.
- pub fn from_ast_ident(ident: ast::Ident) -> TokenKind {
- Ident(ident.name, ident.is_raw_guess())
+ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
+ Literal(Lit::new(kind, symbol, suffix))
}
- crate fn is_like_plus(&self) -> bool {
+ /// Returns tokens that are likely to be typed accidentally instead of the current token.
+ /// Enables better error recovery when the wrong token is found.
+ crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
match *self {
- BinOp(Plus) | BinOpEq(Plus) => true,
- _ => false,
+ Comma => Some(vec![Dot, Lt, Semi]),
+ Semi => Some(vec![Colon, Comma]),
+ _ => None
}
}
}
impl Token {
+ crate fn new(kind: TokenKind, span: Span) -> Self {
+ Token { kind, span }
+ }
+
+ /// Some token that will be thrown away later.
+ crate fn dummy() -> Self {
+ Token::new(TokenKind::Whitespace, DUMMY_SP)
+ }
+
+ /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
+ crate fn from_ast_ident(ident: ast::Ident) -> Self {
+ Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
+ }
+
+ /// Return this token by value and leave a dummy token in its place.
+ crate fn take(&mut self) -> Self {
+ mem::replace(self, Token::dummy())
+ }
+
+ crate fn is_op(&self) -> bool {
+ match self.kind {
+ OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
+ Ident(..) | Lifetime(..) | Interpolated(..) |
+ Whitespace | Comment | Shebang(..) | Eof => false,
+ _ => true,
+ }
+ }
+
+ crate fn is_like_plus(&self) -> bool {
+ match self.kind {
+ BinOp(Plus) | BinOpEq(Plus) => true,
+ _ => false,
+ }
+ }
+
/// Returns `true` if the token can appear at the start of an expression.
crate fn can_begin_expr(&self) -> bool {
match self.kind {
@@ -310,12 +346,10 @@ impl Token {
_ => false,
}
}
-}
-impl TokenKind {
/// Returns `true` if the token can appear at the start of a const param.
- pub fn can_begin_const_arg(&self) -> bool {
- match self {
+ crate fn can_begin_const_arg(&self) -> bool {
+ match self.kind {
OpenDelim(Brace) => true,
Interpolated(ref nt) => match **nt {
NtExpr(..) => true,
@@ -326,31 +360,23 @@ impl TokenKind {
_ => self.can_begin_literal_or_bool(),
}
}
-}
-impl Token {
/// Returns `true` if the token can appear at the start of a generic bound.
crate fn can_begin_bound(&self) -> bool {
self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
self == &Question || self == &OpenDelim(Paren)
}
-}
-
-impl TokenKind {
- pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
- Literal(Lit::new(kind, symbol, suffix))
- }
/// Returns `true` if the token is any literal
crate fn is_lit(&self) -> bool {
- match *self {
+ match self.kind {
Literal(..) => true,
_ => false,
}
}
crate fn expect_lit(&self) -> Lit {
- match *self {
+ match self.kind {
Literal(lit) => lit,
_=> panic!("`expect_lit` called on non-literal"),
}
@@ -359,7 +385,7 @@ impl TokenKind {
/// Returns `true` if the token is any literal, a minus (which can prefix a literal,
/// for example a '-42', or one of the boolean idents).
crate fn can_begin_literal_or_bool(&self) -> bool {
- match *self {
+ match self.kind {
Literal(..) => true,
BinOp(Minus) => true,
Ident(name, false) if name == kw::True => true,
@@ -371,9 +397,7 @@ impl TokenKind {
_ => false,
}
}
-}
-impl Token {
/// Returns an identifier if this token is an identifier.
pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
match self.kind {
@@ -397,49 +421,25 @@ impl Token {
_ => None,
}
}
-}
-impl TokenKind {
- /// Returns an identifier name if this token is an identifier.
- pub fn ident_name(&self) -> Option<(ast::Name, /* is_raw */ bool)> {
- match *self {
- Ident(name, is_raw) => Some((name, is_raw)),
- Interpolated(ref nt) => match **nt {
- NtIdent(ident, is_raw) => Some((ident.name, is_raw)),
- _ => None,
- },
- _ => None,
- }
- }
- /// Returns a lifetime name if this token is a lifetime.
- pub fn lifetime_name(&self) -> Option<ast::Name> {
- match *self {
- Lifetime(name) => Some(name),
- Interpolated(ref nt) => match **nt {
- NtLifetime(ident) => Some(ident.name),
- _ => None,
- },
- _ => None,
- }
- }
/// Returns `true` if the token is an identifier.
pub fn is_ident(&self) -> bool {
- self.ident_name().is_some()
+ self.ident().is_some()
}
/// Returns `true` if the token is a lifetime.
crate fn is_lifetime(&self) -> bool {
- self.lifetime_name().is_some()
+ self.lifetime().is_some()
}
/// Returns `true` if the token is a identifier whose name is the given
/// string slice.
crate fn is_ident_named(&self, name: Symbol) -> bool {
- self.ident_name().map_or(false, |(ident_name, _)| ident_name == name)
+ self.ident().map_or(false, |(ident, _)| ident.name == name)
}
/// Returns `true` if the token is an interpolated path.
fn is_path(&self) -> bool {
- if let Interpolated(ref nt) = *self {
+ if let Interpolated(ref nt) = self.kind {
if let NtPath(..) = **nt {
return true;
}
@@ -456,33 +456,27 @@ impl TokenKind {
crate fn is_qpath_start(&self) -> bool {
self == &Lt || self == &BinOp(Shl)
}
-}
-impl Token {
crate fn is_path_start(&self) -> bool {
self == &ModSep || self.is_qpath_start() || self.is_path() ||
self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
}
-}
-impl TokenKind {
/// Returns `true` if the token is a given keyword, `kw`.
pub fn is_keyword(&self, kw: Symbol) -> bool {
- self.ident_name().map(|(name, is_raw)| name == kw && !is_raw).unwrap_or(false)
+ self.ident().map(|(id, is_raw)| id.name == kw && !is_raw).unwrap_or(false)
}
- pub fn is_path_segment_keyword(&self) -> bool {
- match self.ident_name() {
- Some((name, false)) => name.is_path_segment_keyword(),
+ crate fn is_path_segment_keyword(&self) -> bool {
+ match self.ident() {
+ Some((id, false)) => id.is_path_segment_keyword(),
_ => false,
}
}
-}
-impl Token {
// Returns true for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
- pub fn is_special_ident(&self) -> bool {
+ crate fn is_special_ident(&self) -> bool {
match self.ident() {
Some((id, false)) => id.is_special(),
_ => false,
@@ -512,55 +506,53 @@ impl Token {
_ => false,
}
}
-}
-impl TokenKind {
- crate fn glue(self, joint: TokenKind) -> Option<TokenKind> {
- Some(match self {
- Eq => match joint {
+ crate fn glue(self, joint: Token) -> Option<Token> {
+ let kind = match self.kind {
+ Eq => match joint.kind {
Eq => EqEq,
Gt => FatArrow,
_ => return None,
},
- Lt => match joint {
+ Lt => match joint.kind {
Eq => Le,
Lt => BinOp(Shl),
Le => BinOpEq(Shl),
BinOp(Minus) => LArrow,
_ => return None,
},
- Gt => match joint {
+ Gt => match joint.kind {
Eq => Ge,
Gt => BinOp(Shr),
Ge => BinOpEq(Shr),
_ => return None,
},
- Not => match joint {
+ Not => match joint.kind {
Eq => Ne,
_ => return None,
},
- BinOp(op) => match joint {
+ BinOp(op) => match joint.kind {
Eq => BinOpEq(op),
BinOp(And) if op == And => AndAnd,
BinOp(Or) if op == Or => OrOr,
Gt if op == Minus => RArrow,
_ => return None,
},
- Dot => match joint {
+ Dot => match joint.kind {
Dot => DotDot,
DotDot => DotDotDot,
_ => return None,
},
- DotDot => match joint {
+ DotDot => match joint.kind {
Dot => DotDotDot,
Eq => DotDotEq,
_ => return None,
},
- Colon => match joint {
+ Colon => match joint.kind {
Colon => ModSep,
_ => return None,
},
- SingleQuote => match joint {
+ SingleQuote => match joint.kind {
Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
_ => return None,
},
@@ -570,26 +562,18 @@ impl TokenKind {
Question | OpenDelim(..) | CloseDelim(..) |
Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
Whitespace | Comment | Shebang(..) | Eof => return None,
- })
- }
+ };
- /// Returns tokens that are likely to be typed accidentally instead of the current token.
- /// Enables better error recovery when the wrong token is found.
- crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
- match *self {
- Comma => Some(vec![Dot, Lt, Semi]),
- Semi => Some(vec![Colon, Comma]),
- _ => None
- }
+ Some(Token::new(kind, self.span.to(joint.span)))
}
// See comments in `Nonterminal::to_tokenstream` for why we care about
// *probably* equal here rather than actual equality
- crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool {
- if mem::discriminant(self) != mem::discriminant(other) {
+ crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
+ if mem::discriminant(&self.kind) != mem::discriminant(&other.kind) {
return false
}
- match (self, other) {
+ match (&self.kind, &other.kind) {
(&Eq, &Eq) |
(&Lt, &Lt) |
(&Le, &Le) |
@@ -643,36 +627,12 @@ impl TokenKind {
}
}
-impl Token {
- crate fn new(kind: TokenKind, span: Span) -> Self {
- Token { kind, span }
- }
-
- /// Some token that will be thrown away later.
- crate fn dummy() -> Self {
- Token::new(TokenKind::Whitespace, DUMMY_SP)
- }
-
- /// Return this token by value and leave a dummy token in its place.
- crate fn take(&mut self) -> Self {
- mem::replace(self, Token::dummy())
- }
-}
-
impl PartialEq<TokenKind> for Token {
fn eq(&self, rhs: &TokenKind) -> bool {
self.kind == *rhs
}
}
-// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
-impl Deref for Token {
- type Target = TokenKind;
- fn deref(&self) -> &Self::Target {
- &self.kind
- }
-}
-
#[derive(Clone, RustcEncodable, RustcDecodable)]
/// For interpolation during macro expansion.
pub enum Nonterminal {
@@ -812,15 +772,6 @@ impl Nonterminal {
}
}
-crate fn is_op(tok: &TokenKind) -> bool {
- match *tok {
- OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
- Ident(..) | Lifetime(..) | Interpolated(..) |
- Whitespace | Comment | Shebang(..) | Eof => false,
- _ => true,
- }
-}
-
fn prepend_attrs(sess: &ParseSess,
attrs: &[ast::Attribute],
tokens: Option<&tokenstream::TokenStream>,
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index d922e1896cc9c..4cbe590d44bfe 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -6,7 +6,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg};
use crate::util::parser::{self, AssocOp, Fixity};
use crate::attr;
use crate::source_map::{self, SourceMap, Spanned};
-use crate::parse::token::{self, BinOpToken, Nonterminal, TokenKind};
+use crate::parse::token::{self, BinOpToken, Nonterminal, Token, TokenKind};
use crate::parse::lexer::comments;
use crate::parse::{self, ParseSess};
use crate::print::pp::{self, Breaks};
@@ -189,7 +189,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
out
}
-pub fn token_to_string(tok: &TokenKind) -> String {
+pub fn token_kind_to_string(tok: &TokenKind) -> String {
match *tok {
token::Eq => "=".to_string(),
token::Lt => "<".to_string(),
@@ -250,6 +250,10 @@ pub fn token_to_string(tok: &TokenKind) -> String {
}
}
+pub fn token_to_string(token: &Token) -> String {
+ token_kind_to_string(&token.kind)
+}
+
pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
match *nt {
token::NtExpr(ref e) => expr_to_string(e),
@@ -734,11 +738,11 @@ pub trait PrintState<'a> {
}
}
TokenTree::Delimited(_, delim, tts) => {
- self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
+ self.writer().word(token_kind_to_string(&token::OpenDelim(delim)))?;
self.writer().space()?;
self.print_tts(tts)?;
self.writer().space()?;
- self.writer().word(token_to_string(&token::CloseDelim(delim)))
+ self.writer().word(token_kind_to_string(&token::CloseDelim(delim)))
},
}
}
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 9dea3a4dcc144..2daec9702798f 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -126,14 +126,6 @@ impl TokenTree {
}
}
- /// Indicates if the stream is a token that is equal to the provided token.
- pub fn eq_token(&self, t: TokenKind) -> bool {
- match self {
- TokenTree::Token(token) => *token == t,
- _ => false,
- }
- }
-
pub fn joint(self) -> TokenStream {
TokenStream::new(vec![(self, Joint)])
}
@@ -430,11 +422,10 @@ impl TokenStreamBuilder {
let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
- if let Some(glued_tok) = last_token.kind.glue(token.kind) {
+ if let Some(glued_tok) = last_token.glue(token) {
let last_stream = self.0.pop().unwrap();
self.push_all_but_last_tree(&last_stream);
- let glued_span = last_token.span.to(token.span);
- let glued_tt = TokenTree::token(glued_tok, glued_span);
+ let glued_tt = TokenTree::Token(glued_tok);
let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
self.0.push(glued_tokenstream);
self.push_all_but_first_tree(&stream);
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index 9e26f1bf7d374..69dd96625cc02 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -1,4 +1,4 @@
-use crate::parse::token::{self, TokenKind, BinOpToken};
+use crate::parse::token::{self, Token, BinOpToken};
use crate::symbol::kw;
use crate::ast::{self, BinOpKind};
@@ -69,9 +69,9 @@ pub enum Fixity {
impl AssocOp {
/// Creates a new AssocOP from a token
- pub fn from_token(t: &TokenKind) -> Option<AssocOp> {
+ pub fn from_token(t: &Token) -> Option<AssocOp> {
use AssocOp::*;
- match *t {
+ match t.kind {
token::BinOpEq(k) => Some(AssignOp(k)),
token::Eq => Some(Assign),
token::BinOp(BinOpToken::Star) => Some(Multiply),
diff --git a/src/test/ui/macros/macro-input-future-proofing.stderr b/src/test/ui/macros/macro-input-future-proofing.stderr
index a35f6283afb2e..542486927dfd1 100644
--- a/src/test/ui/macros/macro-input-future-proofing.stderr
+++ b/src/test/ui/macros/macro-input-future-proofing.stderr
@@ -55,10 +55,10 @@ LL | ($($a:ty, $b:ty)* -) => ();
= note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`
error: `$ty:ty` is followed by `-`, which is not allowed for `ty` fragments
- --> $DIR/macro-input-future-proofing.rs:18:7
+ --> $DIR/macro-input-future-proofing.rs:18:15
|
LL | ($($ty:ty)-+) => ();
- | ^^^^^^^^ not allowed after `ty` fragments
+ | ^ not allowed after `ty` fragments
|
= note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`