Commit 8960d0d

lexer: convert Token to be an ES6 class (#2234)
1 parent ec5fbb0
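
In short: the ad-hoc Tok constructor function that previously lived inside lexer.js becomes an exported ES6 Token class in ast.js (with a matching class declaration in ast.d.ts), and the lexer now calls new Token(...) directly. A minimal sketch of constructing tokens the way the Lexer constructor now does, using the relative import paths from inside src/language and made-up offsets/values purely for illustration:

// Sketch only: mirrors the constructor signature introduced by this commit.
import { Token } from './ast';
import { TokenKind } from './tokenKind';

// The <SOF> sentinel token the Lexer creates: zero-width, line 0, column 0, no predecessor.
const sof = new Token(TokenKind.SOF, 0, 0, 0, 0, null);

// A name token spanning offsets 0-5 at line 1, column 1, linked back to <SOF>.
const name = new Token(TokenKind.NAME, 0, 5, 1, 1, sof, 'hello');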

File tree

3 files changed (+71 / -68 lines)


src/language/ast.d.ts

Lines changed: 11 additions & 1 deletion
@@ -38,7 +38,7 @@ export class Location {
  * Represents a range of characters represented by a lexical token
  * within a Source.
  */
-export interface Token {
+export class Token {
   /**
    * The kind of Token.
    */
@@ -76,6 +76,16 @@ export interface Token {
    */
   readonly prev: Token | null;
   readonly next: Token | null;
+
+  constructor(
+    kind: TokenKindEnum,
+    start: number,
+    end: number,
+    line: number,
+    column: number,
+    prev: Token | null,
+    value?: string,
+  );
 }

 /**

src/language/ast.js

Lines changed: 38 additions & 11 deletions
@@ -53,47 +53,74 @@ defineToJSON(Location, function() {
  * Represents a range of characters represented by a lexical token
  * within a Source.
  */
-export type Token = {
+export class Token {
   /**
    * The kind of Token.
    */
-  +kind: TokenKindEnum,
+  +kind: TokenKindEnum;

   /**
    * The character offset at which this Node begins.
    */
-  +start: number,
+  +start: number;

   /**
    * The character offset at which this Node ends.
    */
-  +end: number,
+  +end: number;

   /**
    * The 1-indexed line number on which this Token appears.
    */
-  +line: number,
+  +line: number;

   /**
    * The 1-indexed column number at which this Token begins.
    */
-  +column: number,
+  +column: number;

   /**
    * For non-punctuation tokens, represents the interpreted value of the token.
    */
-  +value: string | void,
+  +value: string | void;

   /**
    * Tokens exist as nodes in a double-linked-list amongst all tokens
    * including ignored tokens. <SOF> is always the first node and <EOF>
    * the last.
    */
-  +prev: Token | null,
-  +next: Token | null,
+  +prev: Token | null;
+  +next: Token | null;
+
+  constructor(
+    kind: TokenKindEnum,
+    start: number,
+    end: number,
+    line: number,
+    column: number,
+    prev: Token | null,
+    value?: string,
+  ) {
+    this.kind = kind;
+    this.start = start;
+    this.end = end;
+    this.line = line;
+    this.column = column;
+    this.value = value;
+    this.prev = prev;
+    this.next = null;
+  }
+}

-  ...
-};
+// Print a simplified form when appearing in JSON/util.inspect.
+defineToJSON(Token, function() {
+  return {
+    kind: this.kind,
+    value: this.value,
+    line: this.line,
+    column: this.column,
+  };
+});

 /**
  * The list of all possible AST node types.
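
Because defineToJSON(Token, ...) is now applied to the class itself, tokens keep printing the same trimmed shape in JSON.stringify and util.inspect that the old Tok helper produced. A rough illustration, reusing the hypothetical name token from the sketch above (the exact kind string depends on the TokenKind values):

// Only kind, value, line and column survive serialization; start, end, prev and next are omitted.
JSON.stringify(name);
// → '{"kind":"Name","value":"hello","line":1,"column":1}'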

src/language/lexer.js

Lines changed: 22 additions & 56 deletions
@@ -1,10 +1,8 @@
 // @flow strict

-import defineToJSON from '../jsutils/defineToJSON';
-
 import { syntaxError } from '../error/syntaxError';

-import { type Token } from './ast';
+import { Token } from './ast';
 import { type Source } from './source';
 import { dedentBlockStringValue } from './blockString';
 import { type TokenKindEnum, TokenKind } from './tokenKind';
@@ -41,7 +39,7 @@ export class Lexer {
   lineStart: number;

   constructor(source: Source) {
-    const startOfFileToken = new Tok(TokenKind.SOF, 0, 0, 0, 0, null);
+    const startOfFileToken = new Token(TokenKind.SOF, 0, 0, 0, 0, null);

     this.source = source;
     this.lastToken = startOfFileToken;
@@ -97,38 +95,6 @@ export function isPunctuatorTokenKind(kind: TokenKindEnum) {
   );
 }

-/**
- * Helper function for constructing the Token object.
- */
-function Tok(
-  kind: TokenKindEnum,
-  start: number,
-  end: number,
-  line: number,
-  column: number,
-  prev: Token | null,
-  value?: string,
-) {
-  this.kind = kind;
-  this.start = start;
-  this.end = end;
-  this.line = line;
-  this.column = column;
-  this.value = value;
-  this.prev = prev;
-  this.next = null;
-}
-
-// Print a simplified form when appearing in JSON/util.inspect.
-defineToJSON(Tok, function() {
-  return {
-    kind: this.kind,
-    value: this.value,
-    line: this.line,
-    column: this.column,
-  };
-});
-
 function printCharCode(code) {
   return (
     // NaN/undefined represents access beyond the end of the file.
@@ -159,7 +125,7 @@ function readToken(lexer: Lexer, prev: Token): Token {
   const col = 1 + pos - lexer.lineStart;

   if (pos >= bodyLength) {
-    return new Tok(TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
+    return new Token(TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
   }

   const code = body.charCodeAt(pos);
@@ -168,52 +134,52 @@ function readToken(lexer: Lexer, prev: Token): Token {
   switch (code) {
     // !
     case 33:
-      return new Tok(TokenKind.BANG, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.BANG, pos, pos + 1, line, col, prev);
     // #
     case 35:
       return readComment(source, pos, line, col, prev);
     // $
     case 36:
-      return new Tok(TokenKind.DOLLAR, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.DOLLAR, pos, pos + 1, line, col, prev);
     // &
     case 38:
-      return new Tok(TokenKind.AMP, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.AMP, pos, pos + 1, line, col, prev);
     // (
     case 40:
-      return new Tok(TokenKind.PAREN_L, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.PAREN_L, pos, pos + 1, line, col, prev);
     // )
     case 41:
-      return new Tok(TokenKind.PAREN_R, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.PAREN_R, pos, pos + 1, line, col, prev);
     // .
     case 46:
       if (body.charCodeAt(pos + 1) === 46 && body.charCodeAt(pos + 2) === 46) {
-        return new Tok(TokenKind.SPREAD, pos, pos + 3, line, col, prev);
+        return new Token(TokenKind.SPREAD, pos, pos + 3, line, col, prev);
       }
       break;
     // :
     case 58:
-      return new Tok(TokenKind.COLON, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.COLON, pos, pos + 1, line, col, prev);
     // =
     case 61:
-      return new Tok(TokenKind.EQUALS, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.EQUALS, pos, pos + 1, line, col, prev);
     // @
     case 64:
-      return new Tok(TokenKind.AT, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.AT, pos, pos + 1, line, col, prev);
     // [
     case 91:
-      return new Tok(TokenKind.BRACKET_L, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.BRACKET_L, pos, pos + 1, line, col, prev);
     // ]
     case 93:
-      return new Tok(TokenKind.BRACKET_R, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.BRACKET_R, pos, pos + 1, line, col, prev);
     // {
     case 123:
-      return new Tok(TokenKind.BRACE_L, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.BRACE_L, pos, pos + 1, line, col, prev);
     // |
     case 124:
-      return new Tok(TokenKind.PIPE, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.PIPE, pos, pos + 1, line, col, prev);
     // }
     case 125:
-      return new Tok(TokenKind.BRACE_R, pos, pos + 1, line, col, prev);
+      return new Token(TokenKind.BRACE_R, pos, pos + 1, line, col, prev);
     // A-Z _ a-z
     case 65:
     case 66:
@@ -364,7 +330,7 @@ function readComment(source, start, line, col, prev): Token {
     (code > 0x001f || code === 0x0009)
   );

-  return new Tok(
+  return new Token(
     TokenKind.COMMENT,
     start,
     position,
@@ -439,7 +405,7 @@ function readNumber(source, start, firstCode, line, col, prev): Token {
     );
   }

-  return new Tok(
+  return new Token(
     isFloat ? TokenKind.FLOAT : TokenKind.INT,
     start,
     position,
@@ -493,7 +459,7 @@ function readString(source, start, line, col, prev): Token {
     // Closing Quote (")
     if (code === 34) {
       value += body.slice(chunkStart, position);
-      return new Tok(
+      return new Token(
         TokenKind.STRING,
         start,
         position + 1,
@@ -600,7 +566,7 @@ function readBlockString(source, start, line, col, prev, lexer): Token {
       body.charCodeAt(position + 2) === 34
     ) {
       rawValue += body.slice(chunkStart, position);
-      return new Tok(
+      return new Token(
         TokenKind.BLOCK_STRING,
         start,
         position + 3,
@@ -711,7 +677,7 @@ function readName(source, start, line, col, prev): Token {
   ) {
     ++position;
   }
-  return new Tok(
+  return new Token(
     TokenKind.NAME,
     start,
     position,
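
From the outside, the lexer's behaviour is unchanged; the only observable difference is that every emitted token is now an instance of the exported Token class, so instanceof checks against it succeed. A small sketch of driving the lexer with a throwaway query, assuming its existing advance() method (not part of this diff) and the same relative imports as above:

import { Lexer } from './lexer';
import { Source } from './source';
import { Token } from './ast';
import { TokenKind } from './tokenKind';

const lexer = new Lexer(new Source('{ hello }'));

// advance() yields each non-ignored token in turn until <EOF> is reached.
let token = lexer.advance();
while (token.kind !== TokenKind.EOF) {
  console.log(token instanceof Token, token.kind, token.value);
  token = lexer.advance();
}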
