Skip to content

Commit 1c51daf

Browse files
Lock around all of tokenizeriter_next
Co-authored-by: Pablo Galindo <[email protected]>
1 parent 975f01a commit 1c51daf

File tree

1 file changed

+12
-27
lines changed

1 file changed

+12
-27
lines changed

Python/Python-tokenize.c

Lines changed: 12 additions & 27 deletions
Original file line number | Diff line number | Diff line change
@@ -85,14 +85,17 @@ tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
8585
}
8686

8787
static int
88-
_tokenizer_error(struct tok_state *tok)
88+
_tokenizer_error(tokenizeriterobject *it)
8989
{
90+
_Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it);
91+
9092
if (PyErr_Occurred()) {
9193
return -1;
9294
}
9395

9496
const char *msg = NULL;
9597
PyObject* errtype = PyExc_SyntaxError;
98+
struct tok_state *tok = it->tok;
9699
switch (tok->done) {
97100
case E_TOKEN:
98101
msg = "invalid token";
@@ -182,7 +185,7 @@ static PyObject *
182185
_get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t size)
183186
{
184187
_Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it);
185-
PyObject *line;
188+
PyObject *line = it->last_line;
186189
if (it->tok->lineno != it->last_lineno) {
187190
// Line has changed since last token, so we fetch the new line and cache it
188191
// in the iter object.
@@ -193,10 +196,6 @@ _get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t si
193196
it->byte_col_offset_diff = 0;
194197
}
195198
}
196-
else {
197-
// Line hasn't changed so we reuse the cached one.
198-
line = it->last_line;
199-
}
200199
return line;
201200
}
202201

@@ -235,30 +234,23 @@ static PyObject *
235234
tokenizeriter_next(tokenizeriterobject *it)
236235
{
237236
PyObject* result = NULL;
238-
struct token token;
239-
_PyToken_Init(&token);
240237

241-
int type;
242238
Py_BEGIN_CRITICAL_SECTION(it);
243-
type = _PyTokenizer_Get(it->tok, &token);
244-
Py_END_CRITICAL_SECTION();
245239

240+
struct token token;
241+
_PyToken_Init(&token);
242+
243+
int type = _PyTokenizer_Get(it->tok, &token);
246244
if (type == ERRORTOKEN) {
247245
if(!PyErr_Occurred()) {
248-
_tokenizer_error(it->tok);
246+
_tokenizer_error(it);
249247
assert(PyErr_Occurred());
250248
}
251249
goto exit;
252250
}
253251
if (it->done || type == ERRORTOKEN) {
254252
PyErr_SetString(PyExc_StopIteration, "EOF");
255-
256-
#ifdef Py_GIL_DISABLED
257-
_Py_atomic_store_int(&it->done, 1);
258-
#else
259253
it->done = 1;
260-
#endif
261-
262254
goto exit;
263255
}
264256
PyObject *str = NULL;
@@ -287,9 +279,7 @@ tokenizeriter_next(tokenizeriterobject *it)
287279
size -= 1;
288280
}
289281

290-
Py_BEGIN_CRITICAL_SECTION(it);
291282
line = _get_current_line(it, line_start, size);
292-
Py_END_CRITICAL_SECTION();
293283
}
294284
if (line == NULL) {
295285
Py_DECREF(str);
@@ -300,10 +290,7 @@ tokenizeriter_next(tokenizeriterobject *it)
300290
Py_ssize_t end_lineno = it->tok->lineno;
301291
Py_ssize_t col_offset = -1;
302292
Py_ssize_t end_col_offset = -1;
303-
304-
Py_BEGIN_CRITICAL_SECTION(it);
305293
_get_col_offsets(it, token, line_start, line, lineno, end_lineno, &col_offset, &end_col_offset);
306-
Py_END_CRITICAL_SECTION();
307294

308295
if (it->tok->tok_extra_tokens) {
309296
if (is_trailing_token) {
@@ -343,12 +330,10 @@ tokenizeriter_next(tokenizeriterobject *it)
343330
exit:
344331
_PyToken_Free(&token);
345332
if (type == ENDMARKER) {
346-
#ifdef Py_GIL_DISABLED
347-
_Py_atomic_store_int(&it->done, 1);
348-
#else
349333
it->done = 1;
350-
#endif
351334
}
335+
336+
Py_END_CRITICAL_SECTION();
352337
return result;
353338
}
354339

0 commit comments

Comments
 (0)