@@ -24,25 +24,6 @@ extern const char *_PyUOpName(int index);
  * ./adaptive.md
  */

-#ifdef Py_GIL_DISABLED
-#define SET_OPCODE_OR_RETURN(instr, opcode)                                 \
-    do {                                                                    \
-        uint8_t old_op = _Py_atomic_load_uint8_relaxed(&(instr)->op.code);  \
-        if (old_op >= MIN_INSTRUMENTED_OPCODE) {                            \
-            /* Lost race with instrumentation */                            \
-            return;                                                         \
-        }                                                                   \
-        if (!_Py_atomic_compare_exchange_uint8(&(instr)->op.code, &old_op,  \
-                                               (opcode))) {                 \
-            /* Lost race with instrumentation */                            \
-            assert(old_op >= MIN_INSTRUMENTED_OPCODE);                      \
-            return;                                                         \
-        }                                                                   \
-    } while (0)
-#else
-#define SET_OPCODE_OR_RETURN(instr, opcode) (instr)->op.code = (opcode)
-#endif
-
 #ifdef Py_STATS
 GCStats _py_gc_stats[NUM_GENERATIONS] = { 0 };
 static PyStats _Py_stats_struct = { .gc_stats = _py_gc_stats };
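Note on the pattern removed here and reintroduced as set_opcode() below: in Py_GIL_DISABLED (free-threaded) builds, another thread's instrumentation can rewrite the opcode byte concurrently, so publishing a specialized opcode must be a check followed by a compare-exchange rather than a plain store. A minimal self-contained sketch of the same idea in plain C11 atomics (the THRESHOLD constant and try_set_opcode name are illustrative, not CPython API):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define THRESHOLD 240  /* illustrative stand-in for MIN_INSTRUMENTED_OPCODE */

/* Publish a new opcode byte unless instrumentation already claimed it. */
static bool
try_set_opcode(_Atomic uint8_t *code, uint8_t opcode)
{
    uint8_t old = atomic_load_explicit(code, memory_order_relaxed);
    if (old >= THRESHOLD) {
        return false;  /* instrumentation got there first */
    }
    /* The CAS fails (and refreshes `old`) if the byte changed after the load. */
    return atomic_compare_exchange_strong_explicit(
        code, &old, opcode, memory_order_relaxed, memory_order_relaxed);
}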
@@ -687,6 +668,73 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts,
 #define SPEC_FAIL_CONTAINS_OP_LIST 11
 #define SPEC_FAIL_CONTAINS_OP_USER_CLASS 12

+static inline int
+set_opcode(_Py_CODEUNIT *instr, uint8_t opcode)
+{
+#ifdef Py_GIL_DISABLED
+    uint8_t old_op = _Py_atomic_load_uint8_relaxed(&instr->op.code);
+    if (old_op >= MIN_INSTRUMENTED_OPCODE) {
+        /* Lost race with instrumentation */
+        return 0;
+    }
+    if (!_Py_atomic_compare_exchange_uint8(&instr->op.code, &old_op, opcode)) {
+        /* Lost race with instrumentation */
+        assert(old_op >= MIN_INSTRUMENTED_OPCODE);
+        return 0;
+    }
+    return 1;
+#else
+    instr->op.code = opcode;
+    return 1;
+#endif
+}
+
+static inline void
+set_counter(_Py_BackoffCounter *counter, _Py_BackoffCounter value)
+{
+    FT_ATOMIC_STORE_UINT16_RELAXED(counter->value_and_backoff,
+                                   value.value_and_backoff);
+}
+
+static inline _Py_BackoffCounter
+load_counter(_Py_BackoffCounter *counter)
+{
+    _Py_BackoffCounter result = {
+        .value_and_backoff =
+            FT_ATOMIC_LOAD_UINT16_RELAXED(counter->value_and_backoff)};
+    return result;
+}
+
+static inline void
+specialize(_Py_CODEUNIT *instr, uint8_t specialized_opcode)
+{
+    assert(!PyErr_Occurred());
+    if (!set_opcode(instr, specialized_opcode)) {
+        STAT_INC(_PyOpcode_Deopt[specialized_opcode], failure);
+        SPECIALIZATION_FAIL(_PyOpcode_Deopt[specialized_opcode],
+                            SPEC_FAIL_OTHER);
+        return;
+    }
+    set_counter((_Py_BackoffCounter *)instr + 1, adaptive_counter_cooldown());
+}
+
+static inline void
+unspecialize(_Py_CODEUNIT *instr, int reason)
+{
+    assert(!PyErr_Occurred());
+    uint8_t opcode = FT_ATOMIC_LOAD_UINT8_RELAXED(instr->op.code);
+    uint8_t generic_opcode = _PyOpcode_Deopt[opcode];
+    STAT_INC(generic_opcode, failure);
+    if (!set_opcode(instr, generic_opcode)) {
+        SPECIALIZATION_FAIL(generic_opcode, SPEC_FAIL_OTHER);
+        return;
+    }
+    SPECIALIZATION_FAIL(generic_opcode, reason);
+    _Py_BackoffCounter *counter = (_Py_BackoffCounter *)instr + 1;
+    _Py_BackoffCounter cur = load_counter(counter);
+    set_counter(counter, adaptive_counter_backoff(cur));
+}
+
 static int function_kind(PyCodeObject *code);
 static bool function_check_args(PyObject *o, int expected_argcount, int opcode);
 static uint32_t function_get_version(PyObject *o, int opcode);
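These helpers bake in two contracts: the opcode write can only fail by losing a race with instrumentation, and the adaptive counter sits in the cache entry immediately after the instruction, hence `(_Py_BackoffCounter *)instr + 1`. A sketch of the calling convention the rest of this commit migrates _Py_Specialize_BinaryOp to (the _Py_Specialize_Foo routine and FOO_SPECIALIZED opcode are hypothetical names for illustration):

/* Hypothetical specialization routine using the new helpers: on a hit,
 * specialize() installs the opcode and arms a cooldown; otherwise a single
 * unspecialize() call restores the generic opcode and backs off. */
void
_Py_Specialize_Foo(PyObject *arg, _Py_CODEUNIT *instr)
{
    if (PyLong_CheckExact(arg)) {
        specialize(instr, FOO_SPECIALIZED);
        return;
    }
    unspecialize(instr, SPEC_FAIL_OTHER);
}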
@@ -2195,7 +2243,6 @@ _Py_Specialize_CallKw(_PyStackRef callable_st, _Py_CODEUNIT *instr, int nargs)
     }
 }

-#ifdef Py_STATS
 static int
 binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs)
 {
@@ -2263,7 +2310,6 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs)
     }
     Py_UNREACHABLE();
 }
-#endif // Py_STATS

 void
 _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr,
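Dropping the #ifdef Py_STATS guard around binary_op_fail_kind() follows from the new tail of _Py_Specialize_BinaryOp below: the failure reason is now an argument to the real function unspecialize(), so it is evaluated in every build, whereas previously it only fed SPECIALIZATION_FAIL(), which (an assumption about the stats machinery, consistent with this file's usage) compiles away in non-stats builds:

/* Assumed non-Py_STATS definition; the macro and its arguments vanish:
 *     #define SPECIALIZATION_FAIL(opcode, kind) ((void)0)
 * With the reason passed to a real function instead, binary_op_fail_kind()
 * must be defined in all builds: */
unspecialize(instr, binary_op_fail_kind(oparg, lhs, rhs));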
@@ -2273,8 +2319,6 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
     PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
     assert(ENABLE_SPECIALIZATION_FT);
     assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP);
-    _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1);
-    uint8_t specialized_op;
     switch (oparg) {
         case NB_ADD:
         case NB_INPLACE_ADD:
@@ -2285,19 +2329,19 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
                 _Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_BINARY_OP + 1];
                 bool to_store = (next.op.code == STORE_FAST);
                 if (to_store && PyStackRef_AsPyObjectBorrow(locals[next.op.arg]) == lhs) {
-                    specialized_op = BINARY_OP_INPLACE_ADD_UNICODE;
-                    goto success;
+                    specialize(instr, BINARY_OP_INPLACE_ADD_UNICODE);
+                    return;
                 }
-                specialized_op = BINARY_OP_ADD_UNICODE;
-                goto success;
+                specialize(instr, BINARY_OP_ADD_UNICODE);
+                return;
             }
             if (PyLong_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_ADD_INT;
-                goto success;
+                specialize(instr, BINARY_OP_ADD_INT);
+                return;
             }
             if (PyFloat_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_ADD_FLOAT;
-                goto success;
+                specialize(instr, BINARY_OP_ADD_FLOAT);
+                return;
             }
             break;
         case NB_MULTIPLY:
@@ -2306,12 +2350,12 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
                 break;
             }
             if (PyLong_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_MULTIPLY_INT;
-                goto success;
+                specialize(instr, BINARY_OP_MULTIPLY_INT);
+                return;
             }
             if (PyFloat_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_MULTIPLY_FLOAT;
-                goto success;
+                specialize(instr, BINARY_OP_MULTIPLY_FLOAT);
+                return;
             }
             break;
         case NB_SUBTRACT:
@@ -2320,24 +2364,16 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
                 break;
             }
             if (PyLong_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_SUBTRACT_INT;
-                goto success;
+                specialize(instr, BINARY_OP_SUBTRACT_INT);
+                return;
             }
             if (PyFloat_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_SUBTRACT_FLOAT;
-                goto success;
+                specialize(instr, BINARY_OP_SUBTRACT_FLOAT);
+                return;
             }
             break;
     }
-    SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs));
-    STAT_INC(BINARY_OP, failure);
-    SET_OPCODE_OR_RETURN(instr, BINARY_OP);
-    cache->counter = adaptive_counter_backoff(cache->counter);
-    return;
-success:
-    STAT_INC(BINARY_OP, success);
-    SET_OPCODE_OR_RETURN(instr, specialized_op);
-    cache->counter = adaptive_counter_cooldown();
+    unspecialize(instr, binary_op_fail_kind(oparg, lhs, rhs));
 }
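For reference, the cooldown/backoff split that specialize() and unspecialize() rely on: the 16-bit counter packs a countdown value together with a backoff exponent, so call sites that repeatedly fail to specialize wait exponentially longer between attempts. A toy model of the failure path (the field split mirrors _Py_BackoffCounter.value_and_backoff in spirit, but the exact bit widths and cap are assumptions):

#include <stdint.h>

#define BACKOFF_BITS 4   /* assumed: low bits hold the backoff exponent */
#define MAX_BACKOFF  12  /* assumed cap; keeps the value within 12 bits */

typedef struct { uint16_t value_and_backoff; } ToyCounter;

static ToyCounter
toy_make(uint16_t value, uint16_t backoff)
{
    ToyCounter c = { (uint16_t)((value << BACKOFF_BITS) | backoff) };
    return c;
}

/* Failure path, as in unspecialize(): bump the exponent and wait roughly
 * 2^backoff more executions before the next specialization attempt. */
static ToyCounter
toy_backoff(ToyCounter c)
{
    uint16_t backoff = c.value_and_backoff & ((1u << BACKOFF_BITS) - 1);
    if (backoff < MAX_BACKOFF) {
        backoff++;
    }
    return toy_make((uint16_t)((1u << backoff) - 1), backoff);
}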