
Commit 20c7cb2

Lock around all of tokenizeriter_next
Co-authored-by: Pablo Galindo <[email protected]>
1 parent 975f01a commit 20c7cb2

File tree

1 file changed: +11 -22 lines changed

Python/Python-tokenize.c

Lines changed: 11 additions & 22 deletions
@@ -85,14 +85,17 @@ tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
 }
 
 static int
-_tokenizer_error(struct tok_state *tok)
+_tokenizer_error(tokenizeriterobject *it)
 {
+    _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it);
+
     if (PyErr_Occurred()) {
         return -1;
     }
 
     const char *msg = NULL;
     PyObject* errtype = PyExc_SyntaxError;
+    struct tok_state *tok = it->tok;
     switch (tok->done) {
         case E_TOKEN:
             msg = "invalid token";
@@ -182,7 +185,7 @@ static PyObject *
 _get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t size)
 {
     _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it);
-    PyObject *line;
+    PyObject *line = it->last_line;
     if (it->tok->lineno != it->last_lineno) {
         // Line has changed since last token, so we fetch the new line and cache it
         // in the iter object.
@@ -193,10 +196,6 @@ _get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t si
             it->byte_col_offset_diff = 0;
         }
     }
-    else {
-        // Line hasn't changed so we reuse the cached one.
-        line = it->last_line;
-    }
     return line;
 }
 
@@ -235,6 +234,9 @@ static PyObject *
 tokenizeriter_next(tokenizeriterobject *it)
 {
     PyObject* result = NULL;
+
+    Py_BEGIN_CRITICAL_SECTION(it);
+
     struct token token;
     _PyToken_Init(&token);
 
@@ -245,20 +247,14 @@ tokenizeriter_next(tokenizeriterobject *it)
 
     if (type == ERRORTOKEN) {
         if(!PyErr_Occurred()) {
-            _tokenizer_error(it->tok);
+            _tokenizer_error(it);
             assert(PyErr_Occurred());
         }
         goto exit;
     }
     if (it->done || type == ERRORTOKEN) {
         PyErr_SetString(PyExc_StopIteration, "EOF");
-
-#ifdef Py_GIL_DISABLED
-        _Py_atomic_store_int(&it->done, 1);
-#else
         it->done = 1;
-#endif
-
         goto exit;
     }
     PyObject *str = NULL;
@@ -287,9 +283,7 @@ tokenizeriter_next(tokenizeriterobject *it)
             size -= 1;
         }
 
-        Py_BEGIN_CRITICAL_SECTION(it);
         line = _get_current_line(it, line_start, size);
-        Py_END_CRITICAL_SECTION();
     }
     if (line == NULL) {
         Py_DECREF(str);
@@ -300,10 +294,7 @@ tokenizeriter_next(tokenizeriterobject *it)
     Py_ssize_t end_lineno = it->tok->lineno;
     Py_ssize_t col_offset = -1;
     Py_ssize_t end_col_offset = -1;
-
-    Py_BEGIN_CRITICAL_SECTION(it);
     _get_col_offsets(it, token, line_start, line, lineno, end_lineno, &col_offset, &end_col_offset);
-    Py_END_CRITICAL_SECTION();
 
     if (it->tok->tok_extra_tokens) {
         if (is_trailing_token) {
@@ -343,12 +334,10 @@ tokenizeriter_next(tokenizeriterobject *it)
 exit:
     _PyToken_Free(&token);
    if (type == ENDMARKER) {
-#ifdef Py_GIL_DISABLED
-        _Py_atomic_store_int(&it->done, 1);
-#else
         it->done = 1;
-#endif
     }
+
+    Py_END_CRITICAL_SECTION();
     return result;
 }

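Note on the pattern this commit applies, for readers unfamiliar with CPython's free-threading API: instead of taking the per-object lock around individual helper calls (and falling back to _Py_atomic_store_int() for it->done under Py_GIL_DISABLED), the whole body of tokenizeriter_next now runs inside a single critical section, so helpers such as _tokenizer_error and _get_current_line can simply assert that the lock is already held. Below is a minimal sketch of that pattern, not the CPython sources: it assumes a core build where the internal pycore_critical_section.h header is available, and demoiterobject and its helper are hypothetical names invented for illustration.

/* Sketch only -- not the CPython sources.  Assumes a core build
 * (Py_BUILD_CORE) so the internal pycore_critical_section.h header is
 * available; "demoiterobject" and its helper are hypothetical names. */
#include "Python.h"
#include "pycore_critical_section.h"

typedef struct {
    PyObject_HEAD
    int done;                     /* guarded by the per-object lock */
} demoiterobject;

/* Helper that relies on the caller already holding the object lock,
 * mirroring _tokenizer_error()/_get_current_line() in the diff above. */
static PyObject *
demoiter_fetch_locked(demoiterobject *it)
{
    _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(it);
    Py_RETURN_NONE;               /* placeholder for the real work */
}

/* One-shot iterator: yields a single value, then raises StopIteration. */
static PyObject *
demoiter_next(demoiterobject *it)
{
    PyObject *result = NULL;

    /* One critical section around the whole body; the "goto exit" path
     * stays inside it and the lock is released exactly once below. */
    Py_BEGIN_CRITICAL_SECTION(it);

    if (it->done) {
        PyErr_SetString(PyExc_StopIteration, "EOF");
        goto exit;
    }

    result = demoiter_fetch_locked(it);

exit:
    /* A plain store suffices: the lock already serializes access, so no
     * Py_GIL_DISABLED / _Py_atomic_store_int() branch is needed. */
    it->done = 1;

    Py_END_CRITICAL_SECTION();
    return result;
}

In the default (GIL) build these macros reduce to a plain block and the lock assertion compiles away, so the same source works in both build modes; that is what allows the old Py_GIL_DISABLED branches to be dropped in this commit.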