@@ -3,7 +3,7 @@ use crate::parse::token::{self, Token, TokenKind};
 use crate::symbol::{sym, Symbol};
 use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};

-use errors::{FatalError, Diagnostic, DiagnosticBuilder};
+use errors::{FatalError, DiagnosticBuilder};
 use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION};
 use rustc_lexer::Base;
 use rustc_lexer::unescape;
@@ -39,7 +39,6 @@ pub struct StringReader<'a> {
     pos: BytePos,
     /// Stop reading src at this index.
     end_src_index: usize,
-    fatal_errs: Vec<DiagnosticBuilder<'a>>,
     /// Source text to tokenize.
     src: Lrc<String>,
     override_span: Option<Span>,
@@ -62,7 +61,6 @@ impl<'a> StringReader<'a> {
             pos: source_file.start_pos,
             end_src_index: src.len(),
             src,
-            fatal_errs: Vec::new(),
             override_span,
         }
     }
@@ -89,29 +87,17 @@ impl<'a> StringReader<'a> {
         self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION))
     }

-    fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
-        match res {
-            Ok(tok) => tok,
-            Err(_) => {
-                self.emit_fatal_errors();
-                FatalError.raise();
-            }
-        }
-    }
-
     /// Returns the next token, including trivia like whitespace or comments.
     ///
     /// `Err(())` means that some errors were encountered, which can be
     /// retrieved using `buffer_fatal_errors`.
-    pub fn try_next_token(&mut self) -> Result<Token, ()> {
-        assert!(self.fatal_errs.is_empty());
-
+    pub fn next_token(&mut self) -> Token {
         let start_src_index = self.src_index(self.pos);
         let text: &str = &self.src[start_src_index..self.end_src_index];

         if text.is_empty() {
             let span = self.mk_sp(self.pos, self.pos);
-            return Ok(Token::new(token::Eof, span));
+            return Token::new(token::Eof, span);
         }

         {
@@ -125,7 +111,7 @@ impl<'a> StringReader<'a> {
                     let kind = token::Shebang(sym);

                     let span = self.mk_sp(start, self.pos);
-                    return Ok(Token::new(kind, span));
+                    return Token::new(kind, span);
                 }
             }
         }
@@ -139,39 +125,10 @@ impl<'a> StringReader<'a> {

         // This could use `?`, but that makes code significantly (10-20%) slower.
         // https://github.com/rust-lang/rust/issues/37939
-        let kind = match self.cook_lexer_token(token.kind, start) {
-            Ok(it) => it,
-            Err(err) => return Err(self.fatal_errs.push(err)),
-        };
+        let kind = self.cook_lexer_token(token.kind, start);

         let span = self.mk_sp(start, self.pos);
-        Ok(Token::new(kind, span))
-    }
-
-    /// Returns the next token, including trivia like whitespace or comments.
-    ///
-    /// Aborts in case of an error.
-    pub fn next_token(&mut self) -> Token {
-        let res = self.try_next_token();
-        self.unwrap_or_abort(res)
-    }
-
-    fn emit_fatal_errors(&mut self) {
-        for err in &mut self.fatal_errs {
-            err.emit();
-        }
-
-        self.fatal_errs.clear();
-    }
-
-    pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
-
-        let mut buffer = Vec::new();
-
-        for err in self.fatal_errs.drain(..) {
-            err.buffer(&mut buffer);
-        }
-        buffer
+        Token::new(kind, span)
     }

     /// Report a fatal lexical error with a given span.
@@ -218,8 +175,8 @@ impl<'a> StringReader<'a> {
         &self,
         token: rustc_lexer::TokenKind,
         start: BytePos,
-    ) -> Result<TokenKind, DiagnosticBuilder<'a>> {
-        let kind = match token {
+    ) -> TokenKind {
+        match token {
             rustc_lexer::TokenKind::LineComment => {
                 let string = self.str_from(start);
                 // comments with only more "/"s are not doc comments
@@ -396,16 +353,12 @@ impl<'a> StringReader<'a> {
                 // this should be inside `rustc_lexer`. However, we should first remove compound
                 // tokens like `<<` from `rustc_lexer`, and then add fancier error recovery to it,
                 // as there will be less overall work to do this way.
-                return match unicode_chars::check_for_substitution(self, start, c, &mut err) {
-                    Some(token) => {
-                        err.emit();
-                        Ok(token)
-                    }
-                    None => Err(err),
-                }
+                let token = unicode_chars::check_for_substitution(self, start, c, &mut err)
+                    .unwrap_or(token::Whitespace);
+                err.emit();
+                token
             }
-        };
-        Ok(kind)
+        }
     }

     fn cook_lexer_literal(
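
Below is a minimal, self-contained sketch (plain Rust, not rustc internals) of the recovery pattern this diff adopts: rather than returning a `Result` and buffering fatal errors for the caller to drain, the lexer emits its diagnostic on the spot and always hands back a token, substituting whitespace for an unrecognized character. The `Lexer` type and its one-char-per-token behavior are illustrative assumptions, not the compiler's actual API.

// Sketch of an infallible `next_token`: errors are reported eagerly and the
// lexer recovers with a substitute token, so callers never see a `Result`.

#[derive(Debug, PartialEq)]
enum TokenKind {
    Ident,
    Whitespace, // doubles as the "recovered" token for unknown characters
    Eof,
}

struct Lexer<'a> {
    src: &'a str,
    pos: usize,
}

impl<'a> Lexer<'a> {
    // One char per token: deliberately oversimplified, but infallible,
    // mirroring the shape of the new `StringReader::next_token`.
    fn next_token(&mut self) -> TokenKind {
        let Some(c) = self.src[self.pos..].chars().next() else {
            return TokenKind::Eof;
        };
        self.pos += c.len_utf8();
        match c {
            c if c.is_alphabetic() => TokenKind::Ident,
            c if c.is_whitespace() => TokenKind::Whitespace,
            c => {
                // Report immediately instead of pushing into an error buffer...
                eprintln!("error: unknown start of token: {c:?}");
                // ...and recover with a harmless token, analogous to the
                // diff's `unwrap_or(token::Whitespace)`.
                TokenKind::Whitespace
            }
        }
    }
}

fn main() {
    let mut lexer = Lexer { src: "a £ b", pos: 0 };
    loop {
        let tok = lexer.next_token();
        println!("{tok:?}");
        if tok == TokenKind::Eof {
            break;
        }
    }
}

Driving this loop on "a £ b" prints an error for `£` but still yields a full token stream ending in `Eof`, which is the point of the change: the caller needs no abort/buffer plumbing.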