 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![feature(plugin, rustc_private, str_char, collections)]
+#![feature(plugin, rustc_private)]
 
 extern crate syntax;
 extern crate rustc;
@@ -24,6 +24,7 @@ use std::path::Path;
 
 use syntax::parse;
 use syntax::parse::lexer;
+use rustc::dep_graph::DepGraph;
 use rustc::session::{self, config};
 use rustc::middle::cstore::DummyCrateStore;
 
@@ -32,17 +33,17 @@ use syntax::ast;
 use syntax::ast::Name;
 use syntax::codemap;
 use syntax::codemap::Pos;
-use syntax::parse::token;
+use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
 use syntax::parse::lexer::TokenAndSpan;
 
 fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
     fn id() -> token::Token {
-        token::Ident(ast::Ident::with_empty_ctxt(Name(0)), token::Plain)
+        Token::Ident(ast::Ident::with_empty_ctxt(Name(0)))
     }
 
     let mut res = HashMap::new();
 
-    res.insert("-1".to_string(), token::Eof);
+    res.insert("-1".to_string(), Token::Eof);
 
     for line in file.split('\n') {
         let eq = match line.trim().rfind('=') {
@@ -54,65 +55,65 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
         let num = &line[eq + 1..];
 
         let tok = match val {
-            "SHR" => token::BinOp(token::Shr),
-            "DOLLAR" => token::Dollar,
-            "LT" => token::Lt,
-            "STAR" => token::BinOp(token::Star),
+            "SHR" => Token::BinOp(BinOpToken::Shr),
+            "DOLLAR" => Token::Dollar,
+            "LT" => Token::Lt,
+            "STAR" => Token::BinOp(BinOpToken::Star),
             "FLOAT_SUFFIX" => id(),
             "INT_SUFFIX" => id(),
-            "SHL" => token::BinOp(token::Shl),
-            "LBRACE" => token::OpenDelim(token::Brace),
-            "RARROW" => token::RArrow,
-            "LIT_STR" => token::Literal(token::Str_(Name(0)), None),
-            "DOTDOT" => token::DotDot,
-            "MOD_SEP" => token::ModSep,
-            "DOTDOTDOT" => token::DotDotDot,
-            "NOT" => token::Not,
-            "AND" => token::BinOp(token::And),
-            "LPAREN" => token::OpenDelim(token::Paren),
-            "ANDAND" => token::AndAnd,
-            "AT" => token::At,
-            "LBRACKET" => token::OpenDelim(token::Bracket),
-            "LIT_STR_RAW" => token::Literal(token::StrRaw(Name(0), 0), None),
-            "RPAREN" => token::CloseDelim(token::Paren),
-            "SLASH" => token::BinOp(token::Slash),
-            "COMMA" => token::Comma,
-            "LIFETIME" => token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))),
-            "CARET" => token::BinOp(token::Caret),
-            "TILDE" => token::Tilde,
+            "SHL" => Token::BinOp(BinOpToken::Shl),
+            "LBRACE" => Token::OpenDelim(DelimToken::Brace),
+            "RARROW" => Token::RArrow,
+            "LIT_STR" => Token::Literal(Lit::Str_(Name(0)), None),
+            "DOTDOT" => Token::DotDot,
+            "MOD_SEP" => Token::ModSep,
+            "DOTDOTDOT" => Token::DotDotDot,
+            "NOT" => Token::Not,
+            "AND" => Token::BinOp(BinOpToken::And),
+            "LPAREN" => Token::OpenDelim(DelimToken::Paren),
+            "ANDAND" => Token::AndAnd,
+            "AT" => Token::At,
+            "LBRACKET" => Token::OpenDelim(DelimToken::Bracket),
+            "LIT_STR_RAW" => Token::Literal(Lit::StrRaw(Name(0), 0), None),
+            "RPAREN" => Token::CloseDelim(DelimToken::Paren),
+            "SLASH" => Token::BinOp(BinOpToken::Slash),
+            "COMMA" => Token::Comma,
+            "LIFETIME" => Token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))),
+            "CARET" => Token::BinOp(BinOpToken::Caret),
+            "TILDE" => Token::Tilde,
             "IDENT" => id(),
-            "PLUS" => token::BinOp(token::Plus),
-            "LIT_CHAR" => token::Literal(token::Char(Name(0)), None),
-            "LIT_BYTE" => token::Literal(token::Byte(Name(0)), None),
-            "EQ" => token::Eq,
-            "RBRACKET" => token::CloseDelim(token::Bracket),
-            "COMMENT" => token::Comment,
-            "DOC_COMMENT" => token::DocComment(Name(0)),
-            "DOT" => token::Dot,
-            "EQEQ" => token::EqEq,
-            "NE" => token::Ne,
-            "GE" => token::Ge,
-            "PERCENT" => token::BinOp(token::Percent),
-            "RBRACE" => token::CloseDelim(token::Brace),
-            "BINOP" => token::BinOp(token::Plus),
-            "POUND" => token::Pound,
-            "OROR" => token::OrOr,
-            "LIT_INTEGER" => token::Literal(token::Integer(Name(0)), None),
-            "BINOPEQ" => token::BinOpEq(token::Plus),
-            "LIT_FLOAT" => token::Literal(token::Float(Name(0)), None),
-            "WHITESPACE" => token::Whitespace,
-            "UNDERSCORE" => token::Underscore,
-            "MINUS" => token::BinOp(token::Minus),
-            "SEMI" => token::Semi,
-            "COLON" => token::Colon,
-            "FAT_ARROW" => token::FatArrow,
-            "OR" => token::BinOp(token::Or),
-            "GT" => token::Gt,
-            "LE" => token::Le,
-            "LIT_BYTE_STR" => token::Literal(token::ByteStr(Name(0)), None),
-            "LIT_BYTE_STR_RAW" => token::Literal(token::ByteStrRaw(Name(0), 0), None),
-            "QUESTION" => token::Question,
-            "SHEBANG" => token::Shebang(Name(0)),
+            "PLUS" => Token::BinOp(BinOpToken::Plus),
+            "LIT_CHAR" => Token::Literal(Lit::Char(Name(0)), None),
+            "LIT_BYTE" => Token::Literal(Lit::Byte(Name(0)), None),
+            "EQ" => Token::Eq,
+            "RBRACKET" => Token::CloseDelim(DelimToken::Bracket),
+            "COMMENT" => Token::Comment,
+            "DOC_COMMENT" => Token::DocComment(Name(0)),
+            "DOT" => Token::Dot,
+            "EQEQ" => Token::EqEq,
+            "NE" => Token::Ne,
+            "GE" => Token::Ge,
+            "PERCENT" => Token::BinOp(BinOpToken::Percent),
+            "RBRACE" => Token::CloseDelim(DelimToken::Brace),
+            "BINOP" => Token::BinOp(BinOpToken::Plus),
+            "POUND" => Token::Pound,
+            "OROR" => Token::OrOr,
+            "LIT_INTEGER" => Token::Literal(Lit::Integer(Name(0)), None),
+            "BINOPEQ" => Token::BinOpEq(BinOpToken::Plus),
+            "LIT_FLOAT" => Token::Literal(Lit::Float(Name(0)), None),
+            "WHITESPACE" => Token::Whitespace,
+            "UNDERSCORE" => Token::Underscore,
+            "MINUS" => Token::BinOp(BinOpToken::Minus),
+            "SEMI" => Token::Semi,
+            "COLON" => Token::Colon,
+            "FAT_ARROW" => Token::FatArrow,
+            "OR" => Token::BinOp(BinOpToken::Or),
+            "GT" => Token::Gt,
+            "LE" => Token::Le,
+            "LIT_BINARY" => Token::Literal(Lit::ByteStr(Name(0)), None),
+            "LIT_BINARY_RAW" => Token::Literal(Lit::ByteStrRaw(Name(0), 0), None),
+            "QUESTION" => Token::Question,
+            "SHEBANG" => Token::Shebang(Name(0)),
             _ => continue,
         };
 
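For context, `parse_token_list` maps IDs from the ANTLR-generated token file to prototype rustc tokens. The input format is not shown in this hunk, so the standalone sketch below assumes `NAME=<number>` lines and stubs the token construction out with plain strings; the function name and sample input are invented for illustration, not part of the patch.

use std::collections::HashMap;

// Sketch of the line-splitting step above, assuming `NAME=<number>` lines.
// The real code stores a prototype `token::Token` instead of the name string.
fn parse_names(file: &str) -> HashMap<String, String> {
    let mut res = HashMap::new();
    for line in file.split('\n') {
        let trimmed = line.trim();
        let eq = match trimmed.rfind('=') {
            Some(eq) => eq,
            None => continue, // skip lines that are not NAME=<number>
        };
        let val = &trimmed[..eq];
        let num = &trimmed[eq + 1..];
        res.insert(num.to_string(), val.to_string());
    }
    res
}

fn main() {
    let sample = "PLUS=41\nSEMI=32\nnot a token line";
    let map = parse_names(sample);
    assert_eq!(map.get("41").map(String::as_str), Some("PLUS"));
}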
@@ -125,30 +126,31 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
 
 fn str_to_binop(s: &str) -> token::BinOpToken {
     match s {
-        "+" => token::Plus,
-        "/" => token::Slash,
-        "-" => token::Minus,
-        "*" => token::Star,
-        "%" => token::Percent,
-        "^" => token::Caret,
-        "&" => token::And,
-        "|" => token::Or,
-        "<<" => token::Shl,
-        ">>" => token::Shr,
+        "+" => BinOpToken::Plus,
+        "/" => BinOpToken::Slash,
+        "-" => BinOpToken::Minus,
+        "*" => BinOpToken::Star,
+        "%" => BinOpToken::Percent,
+        "^" => BinOpToken::Caret,
+        "&" => BinOpToken::And,
+        "|" => BinOpToken::Or,
+        "<<" => BinOpToken::Shl,
+        ">>" => BinOpToken::Shr,
         _ => panic!("Bad binop str `{}`", s),
     }
 }
 
 /// Assuming a string/byte string literal, strip out the leading/trailing
 /// hashes and surrounding quotes/raw/byte prefix.
 fn fix(mut lit: &str) -> ast::Name {
-    if lit.char_at(0) == 'r' {
-        if lit.char_at(1) == 'b' {
+    let prefix: Vec<char> = lit.chars().take(2).collect();
+    if prefix[0] == 'r' {
+        if prefix[1] == 'b' {
             lit = &lit[2..]
         } else {
            lit = &lit[1..];
         }
-    } else if lit.char_at(0) == 'b' {
+    } else if prefix[0] == 'b' {
         lit = &lit[1..];
     }
 
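The unstable `str_char` feature, whose `char_at` method these helpers used, is dropped from the feature list above, and the first one or two characters are now gathered through the stable `chars()` iterator instead. A minimal standalone sketch of the same prefix-stripping step, returning a `&str` rather than an interned `ast::Name` (the function name and the use of `first()`/`get()` instead of direct indexing are simplifications, not the patch's code):

// Strip an `r`/`rb`/`b` literal prefix the way `fix` now does, using
// stable `chars()` in place of `str_char::char_at`.
fn strip_literal_prefix(mut lit: &str) -> &str {
    let prefix: Vec<char> = lit.chars().take(2).collect();
    if prefix.first() == Some(&'r') {
        if prefix.get(1) == Some(&'b') {
            lit = &lit[2..];
        } else {
            lit = &lit[1..];
        }
    } else if prefix.first() == Some(&'b') {
        lit = &lit[1..];
    }
    lit
}

fn main() {
    assert_eq!(strip_literal_prefix(r#"r"raw""#), r#""raw""#);
    assert_eq!(strip_literal_prefix(r#"b"bytes""#), r#""bytes""#);
    assert_eq!(strip_literal_prefix(r#""plain""#), r#""plain""#);
}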
@@ -160,7 +162,8 @@ fn fix(mut lit: &str) -> ast::Name {
 
 /// Assuming a char/byte literal, strip the 'b' prefix and the single quotes.
 fn fixchar(mut lit: &str) -> ast::Name {
-    if lit.char_at(0) == 'b' {
+    let prefix = lit.chars().next().unwrap();
+    if prefix == 'b' {
         lit = &lit[1..];
     }
 
@@ -197,26 +200,25 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
     debug!("What we got: content (`{}`), proto: {:?}", content, proto_tok);
 
     let real_tok = match *proto_tok {
-        token::BinOp(..) => token::BinOp(str_to_binop(content)),
-        token::BinOpEq(..) => token::BinOpEq(str_to_binop(&content[..content.len() - 1])),
-        token::Literal(token::Str_(..), n) => token::Literal(token::Str_(fix(content)), n),
-        token::Literal(token::StrRaw(..), n) => token::Literal(token::StrRaw(fix(content),
+        Token::BinOp(..) => Token::BinOp(str_to_binop(content)),
+        Token::BinOpEq(..) => Token::BinOpEq(str_to_binop(&content[..content.len() - 1])),
+        Token::Literal(Lit::Str_(..), n) => Token::Literal(Lit::Str_(fix(content)), n),
+        Token::Literal(Lit::StrRaw(..), n) => Token::Literal(Lit::StrRaw(fix(content),
                                                                          count(content)), n),
-        token::Literal(token::Char(..), n) => token::Literal(token::Char(fixchar(content)), n),
-        token::Literal(token::Byte(..), n) => token::Literal(token::Byte(fixchar(content)), n),
-        token::DocComment(..) => token::DocComment(nm),
-        token::Literal(token::Integer(..), n) => token::Literal(token::Integer(nm), n),
-        token::Literal(token::Float(..), n) => token::Literal(token::Float(nm), n),
-        token::Literal(token::ByteStr(..), n) => token::Literal(token::ByteStr(nm), n),
-        token::Literal(token::ByteStrRaw(..), n) => token::Literal(token::ByteStrRaw(fix(content),
+        Token::Literal(Lit::Char(..), n) => Token::Literal(Lit::Char(fixchar(content)), n),
+        Token::Literal(Lit::Byte(..), n) => Token::Literal(Lit::Byte(fixchar(content)), n),
+        Token::DocComment(..) => Token::DocComment(nm),
+        Token::Literal(Lit::Integer(..), n) => Token::Literal(Lit::Integer(nm), n),
+        Token::Literal(Lit::Float(..), n) => Token::Literal(Lit::Float(nm), n),
+        Token::Literal(Lit::ByteStr(..), n) => Token::Literal(Lit::ByteStr(nm), n),
+        Token::Literal(Lit::ByteStrRaw(..), n) => Token::Literal(Lit::ByteStrRaw(fix(content),
                                                                          count(content)), n),
-        token::Ident(..) => token::Ident(ast::Ident::with_empty_ctxt(nm),
-                                         token::ModName),
-        token::Lifetime(..) => token::Lifetime(ast::Ident::with_empty_ctxt(nm)),
+        Token::Ident(..) => Token::Ident(ast::Ident::with_empty_ctxt(nm)),
+        Token::Lifetime(..) => Token::Lifetime(ast::Ident::with_empty_ctxt(nm)),
         ref t => t.clone()
     };
 
-    let start_offset = if real_tok == token::Eof {
+    let start_offset = if real_tok == Token::Eof {
         1
     } else {
         0
@@ -245,8 +247,8 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
 
 fn tok_cmp(a: &token::Token, b: &token::Token) -> bool {
     match a {
-        &token::Ident(id, _) => match b {
-            &token::Ident(id2, _) => id == id2,
+        &Token::Ident(id) => match b {
+            &Token::Ident(id2) => id == id2,
             _ => false
         },
         _ => a == b
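`Token::Ident` lost its second `IdentStyle`-style field in this version of the compiler, which is why `id()`, `tok_cmp`, and the `Ident` arm of `parse_antlr_token` now construct and match a single field. A toy enum pair (illustrative stand-ins, not the real `syntax::parse::token` types) showing the shape change:

#[derive(PartialEq, Debug)]
enum OldToken {
    Ident(u32, bool), // (interned name, style flag)
}

#[derive(PartialEq, Debug)]
enum NewToken {
    Ident(u32), // the style field is gone
}

fn old_cmp(a: &OldToken, b: &OldToken) -> bool {
    match (a, b) {
        // The style flag had to be ignored explicitly with `_`.
        (&OldToken::Ident(x, _), &OldToken::Ident(y, _)) => x == y,
    }
}

fn new_cmp(a: &NewToken, b: &NewToken) -> bool {
    match (a, b) {
        (&NewToken::Ident(x), &NewToken::Ident(y)) => x == y,
    }
}

fn main() {
    assert!(old_cmp(&OldToken::Ident(7, true), &OldToken::Ident(7, false)));
    assert!(new_cmp(&NewToken::Ident(7), &NewToken::Ident(7)));
}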
@@ -287,7 +289,7 @@ fn main() {
     debug!("Pairs: {:?}", surrogate_pairs_pos);
 
     let options = config::basic_options();
-    let session = session::build_session(options, None,
+    let session = session::build_session(options, &DepGraph::new(false), None,
                                          syntax::diagnostics::registry::Registry::new(&[]),
                                          Rc::new(DummyCrateStore));
     let filemap = session.parse_sess.codemap().new_filemap(String::from("<n/a>"), code);
@@ -310,7 +312,7 @@ fn main() {
 
     for antlr_tok in antlr_tokens {
         let rustc_tok = next(&mut lexer);
-        if rustc_tok.tok == token::Eof && antlr_tok.tok == token::Eof {
+        if rustc_tok.tok == Token::Eof && antlr_tok.tok == Token::Eof {
             continue
         }
 
@@ -337,19 +339,19 @@ fn main() {
         }
 
         matches!(
-            token::Literal(token::Byte(..), _),
-            token::Literal(token::Char(..), _),
-            token::Literal(token::Integer(..), _),
-            token::Literal(token::Float(..), _),
-            token::Literal(token::Str_(..), _),
-            token::Literal(token::StrRaw(..), _),
-            token::Literal(token::ByteStr(..), _),
-            token::Literal(token::ByteStrRaw(..), _),
-            token::Ident(..),
-            token::Lifetime(..),
-            token::Interpolated(..),
-            token::DocComment(..),
-            token::Shebang(..)
+            Token::Literal(Lit::Byte(..), _),
+            Token::Literal(Lit::Char(..), _),
+            Token::Literal(Lit::Integer(..), _),
+            Token::Literal(Lit::Float(..), _),
+            Token::Literal(Lit::Str_(..), _),
+            Token::Literal(Lit::StrRaw(..), _),
+            Token::Literal(Lit::ByteStr(..), _),
+            Token::Literal(Lit::ByteStrRaw(..), _),
+            Token::Ident(..),
+            Token::Lifetime(..),
+            Token::Interpolated(..),
+            Token::DocComment(..),
+            Token::Shebang(..)
         );
     }
 }
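Most of the remaining churn is the same mechanical rename: enum variants are no longer reachable directly through the `token` module, so they are spelled through their enums (`Token::Eof`, `BinOp(BinOpToken::Plus)`, `Literal(Lit::Str_(..))`, `OpenDelim(DelimToken::Brace)`), and those enums are imported explicitly in the `use` list at the top. A self-contained toy module (names invented, not the real compiler types) showing the before/after idiom:

mod token {
    #[derive(PartialEq, Debug)]
    pub enum BinOpToken { Plus, Minus }

    #[derive(PartialEq, Debug)]
    pub enum Token {
        BinOp(BinOpToken),
        Eof,
    }
    // The old module effectively re-exported its variants (along the lines
    // of `pub use self::Token::*;`), which is what made `token::Eof` and
    // `token::Plus` work; with that gone, callers import the enums and
    // qualify the variants.
}

use self::token::{BinOpToken, Token};

fn main() {
    let t = Token::BinOp(BinOpToken::Plus); // was: token::BinOp(token::Plus)
    assert_ne!(t, Token::Eof);              // was: token::Eof
}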