@@ -154,8 +154,11 @@ typedef struct basicblock_* jump_target_label;
 struct instr {
     int i_opcode;
     int i_oparg;
-    /* target block (if jump instruction) */
-    jump_target_label i_target;
+    /* target block (if jump instruction) -- we temporarily have both the label
+       and the block in the instr. The label is set by front end, and the block
+       is calculated by backend. */
+    jump_target_label i_target_label;
+    struct basicblock_ *i_target;
     /* target block when exception is raised, should not be set by front-end. */
     struct basicblock_ *i_except;
     struct location i_loc;
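
The new comment describes a two-phase scheme: the front end records only a symbolic label on each jump, and a later backend pass resolves that label to the concrete basic block. Below is a minimal, self-contained sketch of that idea; the names (toy_block, toy_instr, resolve_targets) and the integer-label representation are illustrative assumptions, not the compile.c types.

    /* Hypothetical sketch only -- these names do not exist in compile.c. */
    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    #define MAX_LABELS 16

    typedef struct toy_block {
        int id;                     /* block number, for printing */
    } toy_block;

    typedef struct toy_instr {
        int is_jump;                /* does this instruction jump? */
        int target_label;           /* set by the front end; -1 means "none" */
        toy_block *target;          /* resolved later by the backend */
    } toy_instr;

    /* Backend pass: turn every symbolic label into a block pointer,
       analogous to what calculate_jump_targets does further down. */
    static void
    resolve_targets(toy_instr *instrs, int n, toy_block *label_to_block[MAX_LABELS])
    {
        for (int i = 0; i < n; i++) {
            if (instrs[i].is_jump) {
                assert(instrs[i].target == NULL);
                instrs[i].target = label_to_block[instrs[i].target_label];
                instrs[i].target_label = -1;    /* the label is no longer needed */
                assert(instrs[i].target != NULL);
            }
        }
    }

    int
    main(void)
    {
        toy_block b1 = {1};
        toy_block *label_to_block[MAX_LABELS] = {0};
        label_to_block[3] = &b1;                /* label 3 is bound to block 1 */

        toy_instr code[] = {
            {0, -1, NULL},                      /* ordinary instruction */
            {1,  3, NULL},                      /* jump to label 3 */
        };
        resolve_targets(code, 2, label_to_block);
        printf("jump resolved to block %d\n", code[1].target->id);
        return 0;
    }
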
@@ -1266,7 +1269,7 @@ PyCompile_OpcodeStackEffect(int opcode, int oparg)

 static int
 basicblock_addop(basicblock *b, int opcode, int oparg,
-                 basicblock *target, struct location loc)
+                 jump_target_label target, struct location loc)
 {
     assert(IS_WITHIN_OPCODE_RANGE(opcode));
     assert(!IS_ASSEMBLER_OPCODE(opcode));
@@ -1284,14 +1287,15 @@ basicblock_addop(basicblock *b, int opcode, int oparg,
     struct instr *i = &b->b_instr[off];
     i->i_opcode = opcode;
     i->i_oparg = oparg;
-    i->i_target = target;
+    i->i_target_label = target;
+    i->i_target = NULL;
     i->i_loc = loc;

     return 1;
 }

 static int
-cfg_builder_addop(cfg_builder *g, int opcode, int oparg, basicblock *target,
+cfg_builder_addop(cfg_builder *g, int opcode, int oparg, jump_target_label target,
                   struct location loc)
 {
     struct instr *last = basicblock_last_instr(g->curblock);
@@ -1309,7 +1313,7 @@ static int
 cfg_builder_addop_noarg(cfg_builder *g, int opcode, struct location loc)
 {
     assert(!HAS_ARG(opcode));
-    return cfg_builder_addop(g, opcode, 0, NULL, loc);
+    return cfg_builder_addop(g, opcode, 0, NO_LABEL, loc);
 }

 static Py_ssize_t
@@ -1521,7 +1525,7 @@ cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, struct locatio
        EXTENDED_ARG is used for 16, 24, and 32-bit arguments. */

     int oparg_ = Py_SAFE_DOWNCAST(oparg, Py_ssize_t, int);
-    return cfg_builder_addop(g, opcode, oparg_, NULL, loc);
+    return cfg_builder_addop(g, opcode, oparg_, NO_LABEL, loc);
 }

 static int
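
NO_LABEL replaces NULL as the "no jump target" sentinel at these call sites. Its definition is not part of this diff; the snippet below is an assumption, sketched only to show why the substitution type-checks while jump_target_label is still a pointer typedef (see the first hunk's header context).

    /* Assumed definition, for illustration -- not taken from this diff. */
    #include <assert.h>
    #include <stddef.h>

    typedef struct basicblock_ *jump_target_label;    /* as in the first hunk's context */
    #define NO_LABEL ((jump_target_label)NULL)         /* assumed NULL-like sentinel */

    int main(void)
    {
        jump_target_label start = NO_LABEL;            /* "no label yet" */
        assert(start == NO_LABEL);                     /* plain pointer comparison */
        return 0;
    }
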
@@ -5116,15 +5120,15 @@ compiler_sync_comprehension_generator(struct compiler *c,
             expr_ty elt = asdl_seq_GET(elts, 0);
             if (elt->kind != Starred_kind) {
                 VISIT(c, expr, elt);
-                start = NULL;
+                start = NO_LABEL;
             }
         }
-        if (start) {
+        if (start != NO_LABEL) {
             VISIT(c, expr, gen->iter);
             ADDOP(c, GET_ITER);
         }
     }
-    if (start) {
+    if (start != NO_LABEL) {
         depth++;
         USE_LABEL(c, start);
         ADDOP_JUMP(c, FOR_ITER, anchor);
@@ -5175,7 +5179,7 @@ compiler_sync_comprehension_generator(struct compiler *c,
     }

     USE_LABEL(c, if_cleanup);
-    if (start) {
+    if (start != NO_LABEL) {
         ADDOP_JUMP(c, JUMP, start);

         USE_LABEL(c, anchor);
@@ -5196,10 +5200,6 @@ compiler_async_comprehension_generator(struct compiler *c,
     NEW_JUMP_TARGET_LABEL(c, except);
     NEW_JUMP_TARGET_LABEL(c, if_cleanup);

-    if (start == NULL || if_cleanup == NULL || except == NULL) {
-        return 0;
-    }
-
     gen = (comprehension_ty)asdl_seq_GET(generators, gen_index);

     if (gen_index == 0) {
@@ -7370,10 +7370,15 @@ push_cold_blocks_to_end(cfg_builder *g, int code_flags) {
                 return -1;
             }
             basicblock_addop(explicit_jump, JUMP, 0, b->b_next, NO_LOCATION);
-
             explicit_jump->b_cold = 1;
             explicit_jump->b_next = b->b_next;
             b->b_next = explicit_jump;
+
+            /* calculate target from target_label */
+            /* TODO: formalize an API for adding jumps in the backend */
+            struct instr *last = basicblock_last_instr(explicit_jump);
+            last->i_target = last->i_target_label;
+            last->i_target_label = NULL;
         }
     }

@@ -8210,6 +8215,9 @@ dump_basicblock(const basicblock *b)
 static int
 normalize_basic_block(basicblock *bb);

+static int
+calculate_jump_targets(basicblock *entryblock);
+
 static int
 optimize_cfg(basicblock *entryblock, PyObject *consts, PyObject *const_cache);

@@ -8428,7 +8436,7 @@ static void
 eliminate_empty_basic_blocks(basicblock *entryblock);


-static void
+static int
 remove_redundant_jumps(basicblock *entryblock) {
     /* If a non-empty block ends with a jump instruction, check if the next
      * non-empty block reached through normal flow control is the target
@@ -8442,6 +8450,10 @@ remove_redundant_jumps(basicblock *entryblock) {
             assert(!IS_ASSEMBLER_OPCODE(b_last_instr->i_opcode));
             if (b_last_instr->i_opcode == JUMP ||
                 b_last_instr->i_opcode == JUMP_NO_INTERRUPT) {
+                if (b_last_instr->i_target == NULL) {
+                    PyErr_SetString(PyExc_SystemError, "jump with NULL target");
+                    return -1;
+                }
                 if (b_last_instr->i_target == b->b_next) {
                     assert(b->b_next->b_iused);
                     b_last_instr->i_opcode = NOP;
@@ -8453,6 +8465,7 @@ remove_redundant_jumps(basicblock *entryblock) {
     if (removed) {
         eliminate_empty_basic_blocks(entryblock);
     }
+    return 0;
 }

 static PyCodeObject *
@@ -8530,7 +8543,9 @@ assemble(struct compiler *c, int addNone)
     if (consts == NULL) {
         goto error;
     }
-
+    if (calculate_jump_targets(entryblock)) {
+        goto error;
+    }
     if (optimize_cfg(entryblock, consts, c->c_const_cache)) {
         goto error;
     }
@@ -8558,7 +8573,9 @@ assemble(struct compiler *c, int addNone)
         goto error;
     }

-    remove_redundant_jumps(entryblock);
+    if (remove_redundant_jumps(entryblock) < 0) {
+        goto error;
+    }
     for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
         clean_basic_block(b);
     }
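
Taken together, the assemble() hunks encode two constraints: calculate_jump_targets must run before any pass that reads instr->i_target (optimize_cfg, remove_redundant_jumps), and remove_redundant_jumps now reports failure instead of returning void. A toy sketch of that ordering and error propagation follows, using stand-in functions rather than the real compile.c calls.

    /* Stand-in pipeline illustrating the ordering constraint only. */
    #include <stdio.h>

    static int resolve_jump_targets(void) { puts("resolve labels into i_target"); return 0; }
    static int optimize_graph(void)       { puts("optimize pass (reads i_target)"); return 0; }
    static int drop_redundant_jumps(void) { puts("remove redundant jumps (reads i_target)"); return 0; }

    int main(void)
    {
        /* Resolution must happen first; the later passes assume i_target is set. */
        if (resolve_jump_targets() < 0) { return 1; }
        if (optimize_graph() < 0)       { return 1; }
        if (drop_redundant_jumps() < 0) { return 1; }
        return 0;
    }
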
@@ -9372,6 +9389,25 @@ propagate_line_numbers(basicblock *entryblock) {
     }
 }

+
+/* Calculate the actual jump target from the target_label */
+static int
+calculate_jump_targets(basicblock *entryblock)
+{
+    for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
+        for (int i = 0; i < b->b_iused; i++) {
+            struct instr *instr = &b->b_instr[i];
+            assert(instr->i_target == NULL);
+            instr->i_target = instr->i_target_label;
+            instr->i_target_label = NULL;
+            if (is_jump(instr) || is_block_push(instr)) {
+                assert(instr->i_target != NULL);
+            }
+        }
+    }
+    return 0;
+}
+
 /* Perform optimizations on a control flow graph.
    The consts object should still be in list form to allow new constants
    to be appended.