
Commit 9ce4fa0

gh-115999: Introduce helpers for (un)specializing instructions (#126414)
Consolidate the code to specialize/unspecialize instructions into two helper functions and use them in `_Py_Specialize_BinaryOp`. The resulting code is more concise and keeps all of the logic at the point where we decide to specialize/unspecialize an instruction.
1 parent a1c57bc commit 9ce4fa0
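
The commit message describes the shape every specialization site reduces to. A minimal sketch of that shape under the new helpers; `MY_SPECIALIZED_OP` and `SPEC_FAIL_SOME_REASON` are hypothetical placeholders, not names from this commit:

    /* Hypothetical specialization site using the new helpers;
     * MY_SPECIALIZED_OP and SPEC_FAIL_SOME_REASON are placeholders. */
    static void
    specialize_something(_Py_CODEUNIT *instr, int fits_fast_path)
    {
        if (fits_fast_path) {
            /* Swap in the specialized opcode and reset the inline-cache
             * counter to its cooldown value. */
            specialize(instr, MY_SPECIALIZED_OP);
            return;
        }
        /* Revert to the generic opcode, record the failure reason (used
         * on Py_STATS builds) and back off the counter. */
        unspecialize(instr, SPEC_FAIL_SOME_REASON);
    }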

File tree

1 file changed (+84, -48 lines)

Python/specialize.c

Lines changed: 84 additions & 48 deletions
@@ -24,25 +24,6 @@ extern const char *_PyUOpName(int index);
  * ./adaptive.md
  */
 
-#ifdef Py_GIL_DISABLED
-#define SET_OPCODE_OR_RETURN(instr, opcode) \
-    do { \
-        uint8_t old_op = _Py_atomic_load_uint8_relaxed(&(instr)->op.code); \
-        if (old_op >= MIN_INSTRUMENTED_OPCODE) { \
-            /* Lost race with instrumentation */ \
-            return; \
-        } \
-        if (!_Py_atomic_compare_exchange_uint8(&(instr)->op.code, &old_op, \
-                                               (opcode))) { \
-            /* Lost race with instrumentation */ \
-            assert(old_op >= MIN_INSTRUMENTED_OPCODE); \
-            return; \
-        } \
-    } while (0)
-#else
-#define SET_OPCODE_OR_RETURN(instr, opcode) (instr)->op.code = (opcode)
-#endif
-
 #ifdef Py_STATS
 GCStats _py_gc_stats[NUM_GENERATIONS] = { 0 };
 static PyStats _Py_stats_struct = { .gc_stats = _py_gc_stats };
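
One reason the macro had to go: its `return;` executes in whatever function expands it, so a caller that lost the instrumentation race was exited before any failure statistics could be recorded there. A toy illustration of that macro behavior (standalone, not CPython code):

    /* Toy example: the return inside the macro exits the *caller*. */
    #include <stdio.h>

    #define SET_OR_RETURN(ok) do { if (!(ok)) return; } while (0)

    static void caller(int ok)
    {
        SET_OR_RETURN(ok);      /* expands to: if (!ok) return; */
        printf("only reached when ok is nonzero\n");
        /* On failure there is no chance to bump a failure counter here. */
    }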
@@ -687,6 +668,73 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts,
 #define SPEC_FAIL_CONTAINS_OP_LIST 11
 #define SPEC_FAIL_CONTAINS_OP_USER_CLASS 12
 
+static inline int
+set_opcode(_Py_CODEUNIT *instr, uint8_t opcode)
+{
+#ifdef Py_GIL_DISABLED
+    uint8_t old_op = _Py_atomic_load_uint8_relaxed(&instr->op.code);
+    if (old_op >= MIN_INSTRUMENTED_OPCODE) {
+        /* Lost race with instrumentation */
+        return 0;
+    }
+    if (!_Py_atomic_compare_exchange_uint8(&instr->op.code, &old_op, opcode)) {
+        /* Lost race with instrumentation */
+        assert(old_op >= MIN_INSTRUMENTED_OPCODE);
+        return 0;
+    }
+    return 1;
+#else
+    instr->op.code = opcode;
+    return 1;
+#endif
+}
+
+static inline void
+set_counter(_Py_BackoffCounter *counter, _Py_BackoffCounter value)
+{
+    FT_ATOMIC_STORE_UINT16_RELAXED(counter->value_and_backoff,
+                                   value.value_and_backoff);
+}
+
+static inline _Py_BackoffCounter
+load_counter(_Py_BackoffCounter *counter)
+{
+    _Py_BackoffCounter result = {
+        .value_and_backoff =
+            FT_ATOMIC_LOAD_UINT16_RELAXED(counter->value_and_backoff)};
+    return result;
+}
+
+static inline void
+specialize(_Py_CODEUNIT *instr, uint8_t specialized_opcode)
+{
+    assert(!PyErr_Occurred());
+    if (!set_opcode(instr, specialized_opcode)) {
+        STAT_INC(_PyOpcode_Deopt[specialized_opcode], failure);
+        SPECIALIZATION_FAIL(_PyOpcode_Deopt[specialized_opcode],
+                            SPEC_FAIL_OTHER);
+        return;
+    }
+    set_counter((_Py_BackoffCounter *)instr + 1, adaptive_counter_cooldown());
+}
+
+static inline void
+unspecialize(_Py_CODEUNIT *instr, int reason)
+{
+    assert(!PyErr_Occurred());
+    uint8_t opcode = FT_ATOMIC_LOAD_UINT8_RELAXED(instr->op.code);
+    uint8_t generic_opcode = _PyOpcode_Deopt[opcode];
+    STAT_INC(generic_opcode, failure);
+    if (!set_opcode(instr, generic_opcode)) {
+        SPECIALIZATION_FAIL(generic_opcode, SPEC_FAIL_OTHER);
+        return;
+    }
+    SPECIALIZATION_FAIL(generic_opcode, reason);
+    _Py_BackoffCounter *counter = (_Py_BackoffCounter *)instr + 1;
+    _Py_BackoffCounter cur = load_counter(counter);
+    set_counter(counter, adaptive_counter_backoff(cur));
+}
+
 static int function_kind(PyCodeObject *code);
 static bool function_check_args(PyObject *o, int expected_argcount, int opcode);
 static uint32_t function_get_version(PyObject *o, int opcode);
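
In the free-threaded (`Py_GIL_DISABLED`) build another thread may instrument the same instruction concurrently. Instrumented opcodes all compare `>= MIN_INSTRUMENTED_OPCODE`, so `set_opcode` declines if the current opcode is already instrumented and otherwise publishes the new opcode with a compare-and-swap, reporting failure if instrumentation won the race in between. A standalone C11 sketch of the same pattern (illustrative only; CPython uses its own `_Py_atomic_*` wrappers):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Standalone sketch of set_opcode()'s lost-race handling. */
    static bool
    try_set_opcode(_Atomic uint8_t *code, uint8_t new_op, uint8_t min_instrumented)
    {
        uint8_t old_op = atomic_load_explicit(code, memory_order_relaxed);
        if (old_op >= min_instrumented) {
            return false;  /* already instrumented: leave it alone */
        }
        /* The CAS fails if another thread changed *code since the load. */
        return atomic_compare_exchange_strong(code, &old_op, new_op);
    }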
@@ -2195,7 +2243,6 @@ _Py_Specialize_CallKw(_PyStackRef callable_st, _Py_CODEUNIT *instr, int nargs)
     }
 }
 
-#ifdef Py_STATS
 static int
 binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs)
 {
@@ -2263,7 +2310,6 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs)
     }
     Py_UNREACHABLE();
 }
-#endif // Py_STATS
 
 void
 _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr,
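
With its `#ifdef Py_STATS` guard gone, `binary_op_fail_kind` is now compiled on every build, because `unspecialize` takes the failure kind as a plain argument. On non-stats builds the computed value is simply discarded; the stats macros reduce to no-ops along these lines (a sketch of the stubs, not the exact definitions):

    /* Sketch of the non-Py_STATS stubs that make the unguarded call cheap. */
    #ifndef Py_STATS
    #  define STAT_INC(opcode, name)            ((void)0)
    #  define SPECIALIZATION_FAIL(opcode, kind) ((void)0)
    #endif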
@@ -2273,8 +2319,6 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
     PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
     assert(ENABLE_SPECIALIZATION_FT);
     assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP);
-    _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1);
-    uint8_t specialized_op;
     switch (oparg) {
         case NB_ADD:
         case NB_INPLACE_ADD:
@@ -2285,19 +2329,19 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
                 _Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_BINARY_OP + 1];
                 bool to_store = (next.op.code == STORE_FAST);
                 if (to_store && PyStackRef_AsPyObjectBorrow(locals[next.op.arg]) == lhs) {
-                    specialized_op = BINARY_OP_INPLACE_ADD_UNICODE;
-                    goto success;
+                    specialize(instr, BINARY_OP_INPLACE_ADD_UNICODE);
+                    return;
                 }
-                specialized_op = BINARY_OP_ADD_UNICODE;
-                goto success;
+                specialize(instr, BINARY_OP_ADD_UNICODE);
+                return;
             }
             if (PyLong_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_ADD_INT;
-                goto success;
+                specialize(instr, BINARY_OP_ADD_INT);
+                return;
             }
             if (PyFloat_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_ADD_FLOAT;
-                goto success;
+                specialize(instr, BINARY_OP_ADD_FLOAT);
+                return;
             }
             break;
         case NB_MULTIPLY:
@@ -2306,12 +2350,12 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
                 break;
             }
             if (PyLong_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_MULTIPLY_INT;
-                goto success;
+                specialize(instr, BINARY_OP_MULTIPLY_INT);
+                return;
             }
             if (PyFloat_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_MULTIPLY_FLOAT;
-                goto success;
+                specialize(instr, BINARY_OP_MULTIPLY_FLOAT);
+                return;
             }
             break;
         case NB_SUBTRACT:
@@ -2320,24 +2364,16 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
                 break;
             }
             if (PyLong_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_SUBTRACT_INT;
-                goto success;
+                specialize(instr, BINARY_OP_SUBTRACT_INT);
+                return;
             }
             if (PyFloat_CheckExact(lhs)) {
-                specialized_op = BINARY_OP_SUBTRACT_FLOAT;
-                goto success;
+                specialize(instr, BINARY_OP_SUBTRACT_FLOAT);
+                return;
             }
             break;
     }
-    SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs));
-    STAT_INC(BINARY_OP, failure);
-    SET_OPCODE_OR_RETURN(instr, BINARY_OP);
-    cache->counter = adaptive_counter_backoff(cache->counter);
-    return;
-success:
-    STAT_INC(BINARY_OP, success);
-    SET_OPCODE_OR_RETURN(instr, specialized_op);
-    cache->counter = adaptive_counter_cooldown();
+    unspecialize(instr, binary_op_fail_kind(oparg, lhs, rhs));
 }
 
 
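
Note that both helpers find the backoff counter at `(_Py_BackoffCounter *)instr + 1`, i.e. in the first inline-cache code unit after the instruction, the same slot the deleted code reached through `_PyBinaryOpCache`'s `counter` field. A schematic of that layout (assuming, as the deleted code implies, that `counter` is the cache struct's first field):

    /*
     * Bytecode stream around an adaptive BINARY_OP:
     *
     *   instr[0]       opcode + oparg (the code unit itself)
     *   instr[1]       first cache unit: the backoff counter
     *   instr[2] ...   any further inline-cache units
     *
     * so the old and new expressions alias the same 16 bits:
     */
    _Py_BackoffCounter *counter = (_Py_BackoffCounter *)instr + 1;
    _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1);
    /* &cache->counter and counter point at the same cache unit. */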
