@@ -2873,13 +2873,14 @@ static int compiler_addcompare(struct compiler *c, location loc,
 
 
 static int
-compiler_jump_if(struct compiler *c, location *ploc,
+compiler_jump_if(struct compiler *c, location loc,
                  expr_ty e, jump_target_label next, int cond)
 {
     switch (e->kind) {
     case UnaryOp_kind:
-        if (e->v.UnaryOp.op == Not)
-            return compiler_jump_if(c, ploc, e->v.UnaryOp.operand, next, !cond);
+        if (e->v.UnaryOp.op == Not) {
+            return compiler_jump_if(c, loc, e->v.UnaryOp.operand, next, !cond);
+        }
         /* fallback to general implementation */
         break;
     case BoolOp_kind: {
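
The UnaryOp/Not branch above leans on the identity that jumping on `not e` under condition `cond` is the same as jumping on `e` under `!cond`. A standalone sketch of that equivalence, in plain C with illustrative names rather than CPython code:

#include <stdbool.h>
#include <stdio.h>

/* "Jump" when the operand's truth value matches the requested condition. */
static bool should_jump(bool value, bool cond)
{
    return value == cond;
}

int main(void)
{
    for (int v = 0; v <= 1; v++) {
        for (int cond = 0; cond <= 1; cond++) {
            bool with_not  = should_jump(!v, cond);   /* jump_if(not e, cond) */
            bool with_flip = should_jump(v, !cond);   /* jump_if(e, !cond)    */
            printf("v=%d cond=%d: %d == %d (%s)\n", v, cond,
                   with_not, with_flip, with_not == with_flip ? "ok" : "mismatch");
        }
    }
    return 0;
}

The two columns agree for every combination, which is why the recursion can simply flip `cond` and recurse on the operand.
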
@@ -2893,11 +2894,13 @@ compiler_jump_if(struct compiler *c, location *ploc,
             next2 = new_next2;
         }
         for (i = 0; i < n; ++i) {
-            if (!compiler_jump_if(c, ploc, (expr_ty)asdl_seq_GET(s, i), next2, cond2))
+            if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2)) {
                 return 0;
+            }
         }
-        if (!compiler_jump_if(c, ploc, (expr_ty)asdl_seq_GET(s, n), next, cond))
+        if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond)) {
             return 0;
+        }
         if (!SAME_LABEL(next2, next)) {
             USE_LABEL(c, next2);
         }
@@ -2906,45 +2909,46 @@ compiler_jump_if(struct compiler *c, location *ploc,
     case IfExp_kind: {
         NEW_JUMP_TARGET_LABEL(c, end);
         NEW_JUMP_TARGET_LABEL(c, next2);
-        if (!compiler_jump_if(c, ploc, e->v.IfExp.test, next2, 0))
+        if (!compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)) {
             return 0;
-        if (!compiler_jump_if(c, ploc, e->v.IfExp.body, next, cond))
+        }
+        if (!compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)) {
             return 0;
+        }
         ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
 
         USE_LABEL(c, next2);
-        if (!compiler_jump_if(c, ploc, e->v.IfExp.orelse, next, cond))
+        if (!compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond)) {
             return 0;
+        }
 
         USE_LABEL(c, end);
         return 1;
     }
     case Compare_kind: {
-        SET_LOC(c, e);
-        *ploc = LOC(e);
-        Py_ssize_t i, n = asdl_seq_LEN(e->v.Compare.ops) - 1;
+        Py_ssize_t n = asdl_seq_LEN(e->v.Compare.ops) - 1;
         if (n > 0) {
             if (!check_compare(c, e)) {
                 return 0;
             }
             NEW_JUMP_TARGET_LABEL(c, cleanup);
             VISIT(c, expr, e->v.Compare.left);
-            for (i = 0; i < n; i++) {
+            for (Py_ssize_t i = 0; i < n; i++) {
                 VISIT(c, expr,
                     (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
-                ADDOP_I(c, *ploc, SWAP, 2);
-                ADDOP_I(c, *ploc, COPY, 2);
-                ADDOP_COMPARE(c, *ploc, asdl_seq_GET(e->v.Compare.ops, i));
-                ADDOP_JUMP(c, *ploc, POP_JUMP_IF_FALSE, cleanup);
+                ADDOP_I(c, LOC(e), SWAP, 2);
+                ADDOP_I(c, LOC(e), COPY, 2);
+                ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, i));
+                ADDOP_JUMP(c, LOC(e), POP_JUMP_IF_FALSE, cleanup);
             }
             VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n));
-            ADDOP_COMPARE(c, *ploc, asdl_seq_GET(e->v.Compare.ops, n));
-            ADDOP_JUMP(c, *ploc, cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
+            ADDOP_COMPARE(c, LOC(e), asdl_seq_GET(e->v.Compare.ops, n));
+            ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
             NEW_JUMP_TARGET_LABEL(c, end);
             ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
 
             USE_LABEL(c, cleanup);
-            ADDOP(c, *ploc, POP_TOP);
+            ADDOP(c, LOC(e), POP_TOP);
             if (!cond) {
                 ADDOP_JUMP(c, NO_LOCATION, JUMP, next);
             }
@@ -2973,8 +2977,7 @@ compiler_ifexp(struct compiler *c, expr_ty e)
     NEW_JUMP_TARGET_LABEL(c, end);
     NEW_JUMP_TARGET_LABEL(c, next);
 
-    location loc = LOC(e);
-    if (!compiler_jump_if(c, &loc, e->v.IfExp.test, next, 0)) {
+    if (!compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)) {
         return 0;
     }
     VISIT(c, expr, e->v.IfExp.body);
@@ -3059,8 +3062,7 @@ compiler_if(struct compiler *c, stmt_ty s)
     else {
         next = end;
     }
-    location loc = LOC(s);
-    if (!compiler_jump_if(c, &loc, s->v.If.test, next, 0)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.If.test, next, 0)) {
         return 0;
     }
     VISIT_SEQ(c, stmt, s->v.If.body);
@@ -3167,25 +3169,22 @@ compiler_async_for(struct compiler *c, stmt_ty s)
 static int
 compiler_while(struct compiler *c, stmt_ty s)
 {
-    location loc = LOC(s);
     NEW_JUMP_TARGET_LABEL(c, loop);
     NEW_JUMP_TARGET_LABEL(c, body);
     NEW_JUMP_TARGET_LABEL(c, end);
     NEW_JUMP_TARGET_LABEL(c, anchor);
 
     USE_LABEL(c, loop);
-    if (!compiler_push_fblock(c, loc, WHILE_LOOP, loop, end, NULL)) {
+    if (!compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL)) {
         return 0;
     }
-    if (!compiler_jump_if(c, &loc, s->v.While.test, anchor, 0)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0)) {
         return 0;
     }
 
     USE_LABEL(c, body);
     VISIT_SEQ(c, stmt, s->v.While.body);
-    SET_LOC(c, s);
-    loc = LOC(s);
-    if (!compiler_jump_if(c, &loc, s->v.While.test, body, 1)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.While.test, body, 1)) {
         return 0;
     }
 
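
For orientation, compiler_while emits the loop test twice: once at the top, jumping past the loop when the condition is false, and once at the bottom, jumping back to the body while it is true. A minimal plain-C sketch of that control-flow shape, with made-up labels mirroring `loop`, `body` and `anchor` (not the actual bytecode):

#include <stdio.h>

/* Mimics the label layout: loop -> conditional exit, body, bottom re-test. */
static void while_shape(int n)
{
    int i = 0;
    /* USE_LABEL(loop): */
    if (!(i < n)) goto anchor;     /* compiler_jump_if(test, anchor, 0) */
body:
    printf("iteration %d\n", i);   /* VISIT_SEQ(stmt, s->v.While.body) */
    i++;
    if (i < n) goto body;          /* compiler_jump_if(test, body, 1) */
anchor:
    return;                        /* USE_LABEL(anchor), then end-of-loop handling */
}

int main(void)
{
    while_shape(3);
    return 0;
}
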
@@ -3986,8 +3985,7 @@ compiler_assert(struct compiler *c, stmt_ty s)
         return 1;
     }
     NEW_JUMP_TARGET_LABEL(c, end);
-    location loc = LOC(s);
-    if (!compiler_jump_if(c, &loc, s->v.Assert.test, end, 1)) {
+    if (!compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1)) {
         return 0;
     }
     ADDOP(c, LOC(s), LOAD_ASSERTION_ERROR);
@@ -4017,18 +4015,13 @@ compiler_stmt_expr(struct compiler *c, location loc, expr_ty value)
     }
 
     VISIT(c, expr, value);
-    /* Mark POP_TOP as artificial */
-    UNSET_LOC(c);
-    ADDOP(c, NO_LOCATION, POP_TOP);
+    ADDOP(c, NO_LOCATION, POP_TOP);  /* artificial */
     return 1;
 }
 
 static int
 compiler_visit_stmt(struct compiler *c, stmt_ty s)
 {
-    Py_ssize_t i, n;
-    /* Always assign a lineno to the next instruction for a stmt. */
-    SET_LOC(c, s);
 
     switch (s->kind) {
     case FunctionDef_kind:
@@ -4042,12 +4035,11 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
         break;
     case Assign_kind:
     {
-        n = asdl_seq_LEN(s->v.Assign.targets);
+        Py_ssize_t n = asdl_seq_LEN(s->v.Assign.targets);
         VISIT(c, expr, s->v.Assign.value);
-        location loc = LOC(s);
-        for (i = 0; i < n; i++) {
+        for (Py_ssize_t i = 0; i < n; i++) {
             if (i < n - 1) {
-                ADDOP_I(c, loc, COPY, 1);
+                ADDOP_I(c, LOC(s), COPY, 1);
             }
             VISIT(c, expr,
                   (expr_ty)asdl_seq_GET(s->v.Assign.targets, i));
@@ -4068,7 +4060,7 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
         return compiler_match(c, s);
     case Raise_kind:
     {
-        n = 0;
+        Py_ssize_t n = 0;
         if (s->v.Raise.exc) {
             VISIT(c, expr, s->v.Raise.exc);
             n++;
@@ -4077,8 +4069,7 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
                 n++;
             }
         }
-        location loc = LOC(s);
-        ADDOP_I(c, loc, RAISE_VARARGS, (int)n);
+        ADDOP_I(c, LOC(s), RAISE_VARARGS, (int)n);
         break;
     }
     case Try_kind:
@@ -4096,24 +4087,20 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
         break;
     case Expr_kind:
     {
-        location loc = LOC(s);
-        return compiler_stmt_expr(c, loc, s->v.Expr.value);
+        return compiler_stmt_expr(c, LOC(s), s->v.Expr.value);
     }
     case Pass_kind:
     {
-        location loc = LOC(s);
-        ADDOP(c, loc, NOP);
+        ADDOP(c, LOC(s), NOP);
         break;
     }
     case Break_kind:
     {
-        location loc = LOC(s);
-        return compiler_break(c, loc);
+        return compiler_break(c, LOC(s));
     }
     case Continue_kind:
     {
-        location loc = LOC(s);
-        return compiler_continue(c, loc);
+        return compiler_continue(c, LOC(s));
     }
     case With_kind:
         return compiler_with(c, s, 0);
@@ -5275,7 +5262,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
     Py_ssize_t n = asdl_seq_LEN(gen->ifs);
     for (Py_ssize_t i = 0; i < n; i++) {
         expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
-        if (!compiler_jump_if(c, &loc, e, if_cleanup, 0)) {
+        if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
             return 0;
         }
     }
@@ -5374,7 +5361,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
     Py_ssize_t n = asdl_seq_LEN(gen->ifs);
     for (Py_ssize_t i = 0; i < n; i++) {
         expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
-        if (!compiler_jump_if(c, &loc, e, if_cleanup, 0)) {
+        if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
             return 0;
         }
     }
@@ -7109,7 +7096,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
         // NOTE: Returning macros are safe again.
         if (m->guard) {
             RETURN_IF_FALSE(ensure_fail_pop(c, pc, 0));
-            RETURN_IF_FALSE(compiler_jump_if(c, &loc, m->guard, pc->fail_pop[0], 0));
+            RETURN_IF_FALSE(compiler_jump_if(c, loc, m->guard, pc->fail_pop[0], 0));
         }
         // Success! Pop the subject off, we're done with it:
         if (i != cases - has_default - 1) {
@@ -7138,7 +7125,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
             ADDOP(c, loc, NOP);
         }
         if (m->guard) {
-            RETURN_IF_FALSE(compiler_jump_if(c, &loc, m->guard, end, 0));
+            RETURN_IF_FALSE(compiler_jump_if(c, loc, m->guard, end, 0));
         }
         VISIT_SEQ(c, stmt, m->body);
     }
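
Overall, the pointer parameter can become a plain value because the write-back it existed for is gone: the old Compare_kind code updated the caller through `*ploc = LOC(e);`, while the new code derives each instruction's location on the spot. With no write-back, the struct is simply copied, and callers pass `LOC(s)` directly instead of declaring a named local. A self-contained sketch of the two calling conventions, using illustrative types and names rather than the real compile.c definitions:

#include <stdio.h>

typedef struct { int lineno; int col_offset; } location;
typedef struct { int lineno; int col_offset; } node;    /* stand-in AST node */

/* Stand-in for compile.c's LOC(x): build a location value from a node. */
#define LOC(n) ((location){ (n)->lineno, (n)->col_offset })

/* Old convention: the callee may update the caller's "current" location,
 * so the caller must own a named, addressable local. */
static void jump_if_old(location *ploc, const node *e)
{
    *ploc = LOC(e);                             /* write-back through the pointer */
    printf("old: emit at %d:%d\n", ploc->lineno, ploc->col_offset);
}

/* New convention: the location flows one way, so it is passed by value. */
static void jump_if_new(location loc, const node *e)
{
    (void)e;
    printf("new: emit at %d:%d\n", loc.lineno, loc.col_offset);
}

int main(void)
{
    node stmt = { 7, 4 };

    location loc = LOC(&stmt);                  /* old style: named local, pass &loc */
    jump_if_old(&loc, &stmt);

    jump_if_new(LOC(&stmt), &stmt);             /* new style: pass LOC(...) directly */
    return 0;
}
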