@@ -85,14 +85,16 @@ tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
8585}
8686
8787static int
88- _tokenizer_error (struct tok_state * tok )
88+ _tokenizer_error (tokenizeriterobject * it )
8989{
90+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED (it );
9091 if (PyErr_Occurred ()) {
9192 return -1 ;
9293 }
9394
9495 const char * msg = NULL ;
9596 PyObject * errtype = PyExc_SyntaxError ;
97+ struct tok_state * tok = it -> tok ;
9698 switch (tok -> done ) {
9799 case E_TOKEN :
98100 msg = "invalid token" ;
@@ -182,7 +184,7 @@ static PyObject *
182184_get_current_line (tokenizeriterobject * it , const char * line_start , Py_ssize_t size )
183185{
184186 _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED (it );
185- PyObject * line ;
187+ PyObject * line = it -> last_line ;
186188 if (it -> tok -> lineno != it -> last_lineno ) {
187189 // Line has changed since last token, so we fetch the new line and cache it
188190 // in the iter object.
@@ -193,10 +195,6 @@ _get_current_line(tokenizeriterobject *it, const char *line_start, Py_ssize_t si
193195 it -> byte_col_offset_diff = 0 ;
194196 }
195197 }
196- else {
197- // Line hasn't changed so we reuse the cached one.
198- line = it -> last_line ;
199- }
200198 return line ;
201199}
202200
@@ -235,30 +233,23 @@ static PyObject *
235233tokenizeriter_next (tokenizeriterobject * it )
236234{
237235 PyObject * result = NULL ;
238- struct token token ;
239- _PyToken_Init (& token );
240236
241- int type ;
242237 Py_BEGIN_CRITICAL_SECTION (it );
243- type = _PyTokenizer_Get (it -> tok , & token );
244- Py_END_CRITICAL_SECTION ();
245238
239+ struct token token ;
240+ _PyToken_Init (& token );
241+
242+ int type = _PyTokenizer_Get (it -> tok , & token );
246243 if (type == ERRORTOKEN ) {
247244 if (!PyErr_Occurred ()) {
248- _tokenizer_error (it -> tok );
245+ _tokenizer_error (it );
249246 assert (PyErr_Occurred ());
250247 }
251248 goto exit ;
252249 }
253250 if (it -> done || type == ERRORTOKEN ) {
254251 PyErr_SetString (PyExc_StopIteration , "EOF" );
255-
256- #ifdef Py_GIL_DISABLED
257- _Py_atomic_store_int (& it -> done , 1 );
258- #else
259252 it -> done = 1 ;
260- #endif
261-
262253 goto exit ;
263254 }
264255 PyObject * str = NULL ;
@@ -287,9 +278,7 @@ tokenizeriter_next(tokenizeriterobject *it)
287278 size -= 1 ;
288279 }
289280
290- Py_BEGIN_CRITICAL_SECTION (it );
291281 line = _get_current_line (it , line_start , size );
292- Py_END_CRITICAL_SECTION ();
293282 }
294283 if (line == NULL ) {
295284 Py_DECREF (str );
@@ -300,10 +289,7 @@ tokenizeriter_next(tokenizeriterobject *it)
300289 Py_ssize_t end_lineno = it -> tok -> lineno ;
301290 Py_ssize_t col_offset = -1 ;
302291 Py_ssize_t end_col_offset = -1 ;
303-
304- Py_BEGIN_CRITICAL_SECTION (it );
305292 _get_col_offsets (it , token , line_start , line , lineno , end_lineno , & col_offset , & end_col_offset );
306- Py_END_CRITICAL_SECTION ();
307293
308294 if (it -> tok -> tok_extra_tokens ) {
309295 if (is_trailing_token ) {
@@ -343,12 +329,10 @@ tokenizeriter_next(tokenizeriterobject *it)
343329exit :
344330 _PyToken_Free (& token );
345331 if (type == ENDMARKER ) {
346- #ifdef Py_GIL_DISABLED
347- _Py_atomic_store_int (& it -> done , 1 );
348- #else
349332 it -> done = 1 ;
350- #endif
351333 }
334+
335+ Py_END_CRITICAL_SECTION ();
352336 return result ;
353337}
354338