Skip to content

Commit 3bc5453

Browse files
committed
auto merge of #18365 : bjz/rust/token, r=alexcrichton
[breaking-change] (for syntax-extensions) - Token variant identifiers have been converted to PascalCase for consistency with Rust coding standards - Some free-functions in `syntax::token` have been converted to methods on `syntax::token::Token`: - `can_begin_expr` -> `Token::can_begin_expr` - `close_delimiter_for` -> `Token::get_close_delimiter` - `is_lit` -> `Token::is_lit` - `is_ident` -> `Token::is_ident` - `is_path` -> `Token::is_path` - `is_plain_ident` -> `Token::is_plain_ident` - `is_lifetime` -> `Token::is_lifetime` - `is_mutability` -> `Token::is_mutability` - `to_binop` -> `Token::to_binop` - `is_keyword` -> `Token::is_keyword` - `is_any_keyword` -> `Token::is_any_keyword` - `is_strict_keyword` -> `Token::is_strict_keyword` - `is_reserved_keyword` -> `Token::is_reserved_keyword` - `mtwt_token_eq` -> `Token::mtwt_eq` - `token::Ident` now takes an enum instead of a boolean for clarity - `token::{to_string, binop_to_string}` were moved to `pprust::{token_to_string, binop_to_string}`
2 parents 124508d + 665ad9c commit 3bc5453

29 files changed

+1501
-1421
lines changed

src/doc/guide-plugin.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ extern crate syntax;
5555
extern crate rustc;
5656
5757
use syntax::codemap::Span;
58-
use syntax::parse::token::{IDENT, get_ident};
58+
use syntax::parse::token;
5959
use syntax::ast::{TokenTree, TtToken};
6060
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
6161
use syntax::ext::build::AstBuilder; // trait for expr_uint
@@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
7171
("I", 1)];
7272
7373
let text = match args {
74-
[TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
74+
[TtToken(_, token::Ident(s, _))] => token::get_ident(s).to_string(),
7575
_ => {
7676
cx.span_err(sp, "argument should be a single identifier");
7777
return DummyResult::any(sp);

src/grammar/verify.rs

+103-100
Original file line numberDiff line numberDiff line change
@@ -30,12 +30,12 @@ use rustc::driver::{session, config};
3030

3131
use syntax::ast;
3232
use syntax::ast::Name;
33-
use syntax::parse::token::*;
33+
use syntax::parse::token;
3434
use syntax::parse::lexer::TokenAndSpan;
3535

3636
fn parse_token_list(file: &str) -> HashMap<String, Token> {
3737
fn id() -> Token {
38-
IDENT(ast::Ident { name: Name(0), ctxt: 0, }, false)
38+
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain)
3939
}
4040

4141
let mut res = HashMap::new();
@@ -52,64 +52,64 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
5252
let num = line.slice_from(eq + 1);
5353

5454
let tok = match val {
55-
"SHR" => BINOP(SHR),
56-
"DOLLAR" => DOLLAR,
57-
"LT" => LT,
58-
"STAR" => BINOP(STAR),
59-
"FLOAT_SUFFIX" => id(),
60-
"INT_SUFFIX" => id(),
61-
"SHL" => BINOP(SHL),
62-
"LBRACE" => LBRACE,
63-
"RARROW" => RARROW,
64-
"LIT_STR" => LIT_STR(Name(0)),
65-
"DOTDOT" => DOTDOT,
66-
"MOD_SEP" => MOD_SEP,
67-
"DOTDOTDOT" => DOTDOTDOT,
68-
"NOT" => NOT,
69-
"AND" => BINOP(AND),
70-
"LPAREN" => LPAREN,
71-
"ANDAND" => ANDAND,
72-
"AT" => AT,
73-
"LBRACKET" => LBRACKET,
74-
"LIT_STR_RAW" => LIT_STR_RAW(Name(0), 0),
75-
"RPAREN" => RPAREN,
76-
"SLASH" => BINOP(SLASH),
77-
"COMMA" => COMMA,
78-
"LIFETIME" => LIFETIME(ast::Ident { name: Name(0), ctxt: 0 }),
79-
"CARET" => BINOP(CARET),
80-
"TILDE" => TILDE,
81-
"IDENT" => id(),
82-
"PLUS" => BINOP(PLUS),
83-
"LIT_CHAR" => LIT_CHAR(Name(0)),
84-
"LIT_BYTE" => LIT_BYTE(Name(0)),
85-
"EQ" => EQ,
86-
"RBRACKET" => RBRACKET,
87-
"COMMENT" => COMMENT,
88-
"DOC_COMMENT" => DOC_COMMENT(Name(0)),
89-
"DOT" => DOT,
90-
"EQEQ" => EQEQ,
91-
"NE" => NE,
92-
"GE" => GE,
93-
"PERCENT" => BINOP(PERCENT),
94-
"RBRACE" => RBRACE,
95-
"BINOP" => BINOP(PLUS),
96-
"POUND" => POUND,
97-
"OROR" => OROR,
98-
"LIT_INTEGER" => LIT_INTEGER(Name(0)),
99-
"BINOPEQ" => BINOPEQ(PLUS),
100-
"LIT_FLOAT" => LIT_FLOAT(Name(0)),
101-
"WHITESPACE" => WS,
102-
"UNDERSCORE" => UNDERSCORE,
103-
"MINUS" => BINOP(MINUS),
104-
"SEMI" => SEMI,
105-
"COLON" => COLON,
106-
"FAT_ARROW" => FAT_ARROW,
107-
"OR" => BINOP(OR),
108-
"GT" => GT,
109-
"LE" => LE,
110-
"LIT_BINARY" => LIT_BINARY(Name(0)),
111-
"LIT_BINARY_RAW" => LIT_BINARY_RAW(Name(0), 0),
112-
_ => continue
55+
"SHR" => token::BinOp(token::Shr),
56+
"DOLLAR" => token::Dollar,
57+
"LT" => token::Lt,
58+
"STAR" => token::BinOp(token::Star),
59+
"FLOAT_SUFFIX" => id(),
60+
"INT_SUFFIX" => id(),
61+
"SHL" => token::BinOp(token::Shl),
62+
"LBRACE" => token::LBrace,
63+
"RARROW" => token::RArrow,
64+
"LIT_STR" => token::LitStr(Name(0)),
65+
"DOTDOT" => token::DotDot,
66+
"MOD_SEP" => token::ModSep,
67+
"DOTDOTDOT" => token::DotDotDot,
68+
"NOT" => token::Not,
69+
"AND" => token::BinOp(token::And),
70+
"LPAREN" => token::LParen,
71+
"ANDAND" => token::AndAnd,
72+
"AT" => token::At,
73+
"LBRACKET" => token::LBracket,
74+
"LIT_STR_RAW" => token::LitStrRaw(Name(0), 0),
75+
"RPAREN" => token::RParen,
76+
"SLASH" => token::BinOp(token::Slash),
77+
"COMMA" => token::Comma,
78+
"LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
79+
"CARET" => token::BinOp(token::Caret),
80+
"TILDE" => token::Tilde,
81+
"IDENT" => id(),
82+
"PLUS" => token::BinOp(token::Plus),
83+
"LIT_CHAR" => token::LitChar(Name(0)),
84+
"LIT_BYTE" => token::LitByte(Name(0)),
85+
"EQ" => token::Eq,
86+
"RBRACKET" => token::RBracket,
87+
"COMMENT" => token::Comment,
88+
"DOC_COMMENT" => token::DocComment(Name(0)),
89+
"DOT" => token::Dot,
90+
"EQEQ" => token::EqEq,
91+
"NE" => token::Ne,
92+
"GE" => token::Ge,
93+
"PERCENT" => token::BinOp(token::Percent),
94+
"RBRACE" => token::RBrace,
95+
"BINOP" => token::BinOp(token::Plus),
96+
"POUND" => token::Pound,
97+
"OROR" => token::OrOr,
98+
"LIT_INTEGER" => token::LitInteger(Name(0)),
99+
"BINOPEQ" => token::BinOpEq(token::Plus),
100+
"LIT_FLOAT" => token::LitFloat(Name(0)),
101+
"WHITESPACE" => token::Whitespace,
102+
"UNDERSCORE" => token::Underscore,
103+
"MINUS" => token::BinOp(token::Minus),
104+
"SEMI" => token::Semi,
105+
"COLON" => token::Colon,
106+
"FAT_ARROW" => token::FatArrow,
107+
"OR" => token::BinOp(token::Or),
108+
"GT" => token::Gt,
109+
"LE" => token::Le,
110+
"LIT_BINARY" => token::LitBinary(Name(0)),
111+
"LIT_BINARY_RAW" => token::LitBinaryRaw(Name(0), 0),
112+
_ => continue,
113113
};
114114

115115
res.insert(num.to_string(), tok);
@@ -119,19 +119,19 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
119119
res
120120
}
121121

122-
fn str_to_binop(s: &str) -> BinOp {
122+
fn str_to_binop(s: &str) -> BinOpToken {
123123
match s {
124-
"+" => PLUS,
125-
"/" => SLASH,
126-
"-" => MINUS,
127-
"*" => STAR,
128-
"%" => PERCENT,
129-
"^" => CARET,
130-
"&" => AND,
131-
"|" => OR,
132-
"<<" => SHL,
133-
">>" => SHR,
134-
_ => fail!("Bad binop str `{}`", s)
124+
"+" => token::Plus,
125+
"/" => token::Slash,
126+
"-" => token::Minus,
127+
"*" => token::Star,
128+
"%" => token::Percent,
129+
"^" => token::Caret,
130+
"&" => token::And,
131+
"|" => token::Or,
132+
"<<" => token::Shl,
133+
">>" => token::Shr,
134+
_ => fail!("Bad binop str `{}`", s),
135135
}
136136
}
137137

@@ -186,19 +186,21 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
186186
debug!("What we got: content (`{}`), proto: {}", content, proto_tok);
187187

188188
let real_tok = match *proto_tok {
189-
BINOP(..) => BINOP(str_to_binop(content)),
190-
BINOPEQ(..) => BINOPEQ(str_to_binop(content.slice_to(content.len() - 1))),
191-
LIT_STR(..) => LIT_STR(fix(content)),
192-
LIT_STR_RAW(..) => LIT_STR_RAW(fix(content), count(content)),
193-
LIT_CHAR(..) => LIT_CHAR(fixchar(content)),
194-
LIT_BYTE(..) => LIT_BYTE(fixchar(content)),
195-
DOC_COMMENT(..) => DOC_COMMENT(nm),
196-
LIT_INTEGER(..) => LIT_INTEGER(nm),
197-
LIT_FLOAT(..) => LIT_FLOAT(nm),
198-
LIT_BINARY(..) => LIT_BINARY(nm),
199-
LIT_BINARY_RAW(..) => LIT_BINARY_RAW(fix(content), count(content)),
200-
IDENT(..) => IDENT(ast::Ident { name: nm, ctxt: 0 }, true),
201-
LIFETIME(..) => LIFETIME(ast::Ident { name: nm, ctxt: 0 }),
189+
token::BinOp(..) => token::BinOp(str_to_binop(content)),
190+
token::BinOpEq(..) => token::BinOpEq(str_to_binop(content.slice_to(
191+
content.len() - 1))),
192+
token::LitStr(..) => token::LitStr(fix(content)),
193+
token::LitStrRaw(..) => token::LitStrRaw(fix(content), count(content)),
194+
token::LitChar(..) => token::LitChar(fixchar(content)),
195+
token::LitByte(..) => token::LitByte(fixchar(content)),
196+
token::DocComment(..) => token::DocComment(nm),
197+
token::LitInteger(..) => token::LitInteger(nm),
198+
token::LitFloat(..) => token::LitFloat(nm),
199+
token::LitBinary(..) => token::LitBinary(nm),
200+
token::LitBinaryRaw(..) => token::LitBinaryRaw(fix(content), count(content)),
201+
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 },
202+
token::ModName),
203+
token::Lifetime(..) => token::Lifetime(ast::Ident { name: nm, ctxt: 0 }),
202204
ref t => t.clone()
203205
};
204206

@@ -222,8 +224,8 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
222224

223225
fn tok_cmp(a: &Token, b: &Token) -> bool {
224226
match a {
225-
&IDENT(id, _) => match b {
226-
&IDENT(id2, _) => id == id2,
227+
&token::Ident(id, _) => match b {
228+
&token::Ident(id2, _) => id == id2,
227229
_ => false
228230
},
229231
_ => a == b
@@ -281,19 +283,20 @@ fn main() {
281283
)
282284
)
283285

284-
matches!(LIT_BYTE(..),
285-
LIT_CHAR(..),
286-
LIT_INTEGER(..),
287-
LIT_FLOAT(..),
288-
LIT_STR(..),
289-
LIT_STR_RAW(..),
290-
LIT_BINARY(..),
291-
LIT_BINARY_RAW(..),
292-
IDENT(..),
293-
LIFETIME(..),
294-
INTERPOLATED(..),
295-
DOC_COMMENT(..),
296-
SHEBANG(..)
286+
matches!(
287+
LitByte(..),
288+
LitChar(..),
289+
LitInteger(..),
290+
LitFloat(..),
291+
LitStr(..),
292+
LitStrRaw(..),
293+
LitBinary(..),
294+
LitBinaryRaw(..),
295+
Ident(..),
296+
Lifetime(..),
297+
Interpolated(..),
298+
DocComment(..),
299+
Shebang(..)
297300
);
298301
}
299302
}

src/libregex_macros/lib.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -634,7 +634,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<String> {
634634
return None
635635
}
636636
};
637-
if !parser.eat(&token::EOF) {
637+
if !parser.eat(&token::Eof) {
638638
cx.span_err(parser.span, "only one string literal allowed");
639639
return None;
640640
}

src/librustc/middle/save/mod.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -428,7 +428,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
428428
let qualname = format!("{}::{}", qualname, name);
429429
let typ = ppaux::ty_to_string(&self.analysis.ty_cx,
430430
(*self.analysis.ty_cx.node_types.borrow())[field.node.id as uint]);
431-
match self.span.sub_span_before_token(field.span, token::COLON) {
431+
match self.span.sub_span_before_token(field.span, token::Colon) {
432432
Some(sub_span) => self.fmt.field_str(field.span,
433433
Some(sub_span),
434434
field.node.id,
@@ -1175,7 +1175,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
11751175
// 'use' always introduces an alias, if there is not an explicit
11761176
// one, there is an implicit one.
11771177
let sub_span =
1178-
match self.span.sub_span_before_token(path.span, token::EQ) {
1178+
match self.span.sub_span_before_token(path.span, token::Eq) {
11791179
Some(sub_span) => Some(sub_span),
11801180
None => sub_span,
11811181
};

0 commit comments

Comments
 (0)