
Commit 23b4a50

mkustermann authored and commit-bot@chromium.org committed
Reland "[VM] Use IR for code in [CatchEntryInstr]s to populate captured exception/stacktrace variables"
This fixes an issue that occurred when a program was loaded via dill, a function with a try-catch was optimized, and the exception/stacktrace variables were captured.

Change-Id: Icb626965019b248afe3b72a6679c5049ea7b7b00
Reviewed-on: https://dart-review.googlesource.com/55681
Reviewed-by: Vyacheslav Egorov <[email protected]>
Commit-Queue: Martin Kustermann <[email protected]>
1 parent 23c8c4d, commit 23b4a50

21 files changed, +383 / -280 lines changed

runtime/observatory/tests/service/service_kernel.status

Lines changed: 0 additions & 1 deletion
@@ -30,7 +30,6 @@ get_isolate_after_language_error_test: CompileTimeError
 complex_reload_test: RuntimeError

 [ $arch == simdbc64 && $compiler == dartk ]
-coverage_optimized_function_test: Crash # Please triage
 get_cpu_profile_timeline_rpc_test: Pass, RuntimeError # http://dartbug.com/31794
 pause_on_unhandled_async_exceptions_test: RuntimeError, Timeout # Issue 31765

runtime/platform/growable_array.h

Lines changed: 10 additions & 0 deletions
@@ -70,6 +70,16 @@ class BaseGrowableArray : public B {
     }
   }

+  void EnsureLength(intptr_t new_length, const T& default_value) {
+    const intptr_t old_length = length_;
+    if (old_length < new_length) {
+      Resize(new_length);
+      for (intptr_t i = old_length; i < new_length; ++i) {
+        (*this)[i] = default_value;
+      }
+    }
+  }
+
   const T& At(intptr_t index) const { return operator[](index); }

   T& Last() const {
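
The new EnsureLength helper grows the array to at least new_length and assigns default_value only to the newly exposed slots; existing elements are untouched and the array is never shrunk. A minimal standalone sketch of the same grow-and-fill pattern on std::vector (ensure_length and the example values are illustrative, not part of the VM API):

#include <cstdio>
#include <vector>

// Grow |v| to at least |new_length|, assigning |default_value| to every slot
// that did not exist before; never shrinks and never touches old elements.
template <typename T>
void ensure_length(std::vector<T>* v, size_t new_length, const T& default_value) {
  const size_t old_length = v->size();
  if (old_length < new_length) {
    v->resize(new_length);
    for (size_t i = old_length; i < new_length; ++i) {
      (*v)[i] = default_value;
    }
  }
}

int main() {
  std::vector<int> values = {3, 1, 4};
  ensure_length(&values, 6, -1);  // values becomes 3 1 4 -1 -1 -1
  ensure_length(&values, 2, -1);  // no-op: already long enough
  for (int x : values) std::printf("%d ", x);
  std::printf("\n");
  return 0;
}

The explicit fill loop mirrors the VM code above, which assigns default_value itself after Resize rather than relying on Resize to initialize the new slots.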

runtime/vm/compiler/backend/flow_graph.cc

Lines changed: 50 additions & 2 deletions
@@ -1081,10 +1081,34 @@ void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry,
         }
       }
     }
-  } else if (block_entry->IsCatchBlockEntry()) {
+  } else if (CatchBlockEntryInstr* catch_entry =
+                 block_entry->AsCatchBlockEntry()) {
+    const intptr_t raw_exception_var_envindex =
+        catch_entry->raw_exception_var() != nullptr
+            ? catch_entry->raw_exception_var()->BitIndexIn(
+                  num_direct_parameters_)
+            : -1;
+    const intptr_t raw_stacktrace_var_envindex =
+        catch_entry->raw_stacktrace_var() != nullptr
+            ? catch_entry->raw_stacktrace_var()->BitIndexIn(
+                  num_direct_parameters_)
+            : -1;
+
     // Add real definitions for all locals and parameters.
     for (intptr_t i = 0; i < env->length(); ++i) {
-      ParameterInstr* param = new (zone()) ParameterInstr(i, block_entry);
+      // Replace usages of the raw exception/stacktrace variables with
+      // [SpecialParameterInstr]s.
+      Definition* param = nullptr;
+      if (raw_exception_var_envindex == i) {
+        param = new SpecialParameterInstr(SpecialParameterInstr::kException,
+                                          Thread::kNoDeoptId);
+      } else if (raw_stacktrace_var_envindex == i) {
+        param = new SpecialParameterInstr(SpecialParameterInstr::kStackTrace,
+                                          Thread::kNoDeoptId);
+      } else {
+        param = new (zone()) ParameterInstr(i, block_entry);
+      }
+
       param->set_ssa_temp_index(alloc_ssa_temp_index());  // New SSA temp.
       (*env)[i] = param;
       block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param);
@@ -1106,6 +1130,30 @@ void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry,
   // Attach environment to the block entry.
   AttachEnvironment(block_entry, env);

+#if defined(TARGET_ARCH_DBC)
+  // On DBC the exception/stacktrace variables are in special registers when
+  // entering the catch block. The only usage of those special registers is
+  // within the catch block. A possible lazy-deopt at the beginning of the
+  // catch does not need to move those around, since the registers will be
+  // up-to-date when arriving in the unoptimized code and unoptimized code will
+  // take care of moving them to appropriate slots.
+  if (CatchBlockEntryInstr* catch_entry = block_entry->AsCatchBlockEntry()) {
+    Environment* deopt_env = catch_entry->env();
+    const LocalVariable* raw_exception_var = catch_entry->raw_exception_var();
+    const LocalVariable* raw_stacktrace_var = catch_entry->raw_stacktrace_var();
+    if (raw_exception_var != nullptr) {
+      Value* value = deopt_env->ValueAt(
+          raw_exception_var->BitIndexIn(num_direct_parameters_));
+      value->BindToEnvironment(constant_null());
+    }
+    if (raw_stacktrace_var != nullptr) {
+      Value* value = deopt_env->ValueAt(
+          raw_stacktrace_var->BitIndexIn(num_direct_parameters_));
+      value->BindToEnvironment(constant_null());
+    }
+  }
+#endif  // defined(TARGET_ARCH_DBC)
+
   // 2. Process normal instructions.
   for (ForwardInstructionIterator it(block_entry); !it.Done(); it.Advance()) {
     Instruction* current = it.Current();
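
The renaming change above decides, per environment slot of a catch entry, whether to materialize an ordinary ParameterInstr or a SpecialParameterInstr for the raw exception/stacktrace variables. A simplified standalone model of that per-slot decision, assuming a toy Definition record and invented names (BuildCatchEntryDefinitions, the slot indices in main):

#include <cstdio>
#include <string>
#include <vector>

// Toy stand-in for an IR definition placed in the catch entry's environment.
struct Definition {
  std::string kind;  // "Parameter", "SpecialParameter(kException)", ...
  int env_index;
};

// Build initial definitions for a catch-entry environment with |env_length|
// slots. A slot whose index equals the raw exception/stacktrace variable's
// environment index becomes a special parameter; pass -1 for "no raw variable"
// (e.g. the variable only exists as a captured context slot).
std::vector<Definition> BuildCatchEntryDefinitions(int env_length,
                                                   int raw_exception_envindex,
                                                   int raw_stacktrace_envindex) {
  std::vector<Definition> defs;
  defs.reserve(env_length);
  for (int i = 0; i < env_length; ++i) {
    if (i == raw_exception_envindex) {
      defs.push_back({"SpecialParameter(kException)", i});
    } else if (i == raw_stacktrace_envindex) {
      defs.push_back({"SpecialParameter(kStackTrace)", i});
    } else {
      defs.push_back({"Parameter", i});
    }
  }
  return defs;
}

int main() {
  // Five environment slots; the raw exception lives in slot 3, the raw
  // stack trace in slot 4 (purely illustrative indices).
  for (const Definition& d : BuildCatchEntryDefinitions(5, 3, 4)) {
    std::printf("env[%d] = %s\n", d.env_index, d.kind.c_str());
  }
  return 0;
}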

runtime/vm/compiler/backend/flow_graph_compiler_dbc.cc

Lines changed: 7 additions & 0 deletions
@@ -17,6 +17,7 @@
 #include "vm/instructions.h"
 #include "vm/object_store.h"
 #include "vm/parser.h"
+#include "vm/simulator.h"
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
@@ -383,6 +384,12 @@ void ParallelMoveResolver::EmitMove(int index) {
   } else if (source.IsArgsDescRegister()) {
     ASSERT(destination.IsRegister());
     __ LoadArgDescriptorOpt(destination.reg());
+  } else if (source.IsExceptionRegister()) {
+    ASSERT(destination.IsRegister());
+    __ MoveSpecial(destination.reg(), Simulator::kExceptionSpecialIndex);
+  } else if (source.IsStackTraceRegister()) {
+    ASSERT(destination.IsRegister());
+    __ MoveSpecial(destination.reg(), Simulator::kStackTraceSpecialIndex);
   } else if (source.IsConstant() && destination.IsRegister()) {
     if (source.constant_instruction()->representation() == kUnboxedDouble) {
       const Register result = destination.reg();
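
The two new EmitMove branches route the exception and stack-trace source locations to MoveSpecial, so a parallel move can copy the pending exception/stack trace out of the simulator's special registers into an ordinary DBC register. A rough standalone sketch of that dispatch shape, with an invented SourceKind enum and printed pseudo-bytecodes standing in for the real Location predicates and assembler calls:

#include <cstdio>

// Invented source kinds; the real code queries Location predicates such as
// source.IsExceptionRegister() and source.IsStackTraceRegister().
enum class SourceKind {
  kArgsDescRegister,
  kExceptionRegister,
  kStackTraceRegister,
  kConstant
};

// Invented indices standing in for Simulator::kExceptionSpecialIndex and
// Simulator::kStackTraceSpecialIndex.
constexpr int kExceptionSpecialIndex = 0;
constexpr int kStackTraceSpecialIndex = 1;

void EmitMove(SourceKind source, int destination_reg) {
  switch (source) {
    case SourceKind::kArgsDescRegister:
      std::printf("LoadArgDescriptorOpt r%d\n", destination_reg);
      break;
    case SourceKind::kExceptionRegister:
      // New case: copy the pending exception out of its special register.
      std::printf("MoveSpecial r%d, special[%d]\n", destination_reg,
                  kExceptionSpecialIndex);
      break;
    case SourceKind::kStackTraceRegister:
      // New case: copy the pending stack trace out of its special register.
      std::printf("MoveSpecial r%d, special[%d]\n", destination_reg,
                  kStackTraceSpecialIndex);
      break;
    case SourceKind::kConstant:
      std::printf("LoadConstant r%d\n", destination_reg);
      break;
  }
}

int main() {
  EmitMove(SourceKind::kExceptionRegister, 2);
  EmitMove(SourceKind::kStackTraceRegister, 3);
  return 0;
}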

runtime/vm/compiler/backend/il.h

Lines changed: 22 additions & 10 deletions
@@ -1641,16 +1641,18 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
                        const LocalVariable& stacktrace_var,
                        bool needs_stacktrace,
                        intptr_t deopt_id,
-                       bool should_restore_closure_context = false)
+                       const LocalVariable* raw_exception_var,
+                       const LocalVariable* raw_stacktrace_var)
       : BlockEntryInstr(block_id, try_index, deopt_id),
         graph_entry_(graph_entry),
         predecessor_(NULL),
         catch_handler_types_(Array::ZoneHandle(handler_types.raw())),
         catch_try_index_(catch_try_index),
         exception_var_(exception_var),
         stacktrace_var_(stacktrace_var),
+        raw_exception_var_(raw_exception_var),
+        raw_stacktrace_var_(raw_stacktrace_var),
         needs_stacktrace_(needs_stacktrace),
-        should_restore_closure_context_(should_restore_closure_context),
         handler_token_pos_(handler_token_pos),
         is_generated_(is_generated) {}

@@ -1669,6 +1671,11 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
   const LocalVariable& exception_var() const { return exception_var_; }
   const LocalVariable& stacktrace_var() const { return stacktrace_var_; }

+  const LocalVariable* raw_exception_var() const { return raw_exception_var_; }
+  const LocalVariable* raw_stacktrace_var() const {
+    return raw_stacktrace_var_;
+  }
+
   bool needs_stacktrace() const { return needs_stacktrace_; }

   bool is_generated() const { return is_generated_; }
@@ -1692,21 +1699,16 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
     predecessor_ = predecessor;
   }

-  bool should_restore_closure_context() const {
-    ASSERT(exception_var_.is_captured() == stacktrace_var_.is_captured());
-    ASSERT(!exception_var_.is_captured() || should_restore_closure_context_);
-    return should_restore_closure_context_;
-  }
-
   GraphEntryInstr* graph_entry_;
   BlockEntryInstr* predecessor_;
   const Array& catch_handler_types_;
   const intptr_t catch_try_index_;
   GrowableArray<Definition*> initial_definitions_;
   const LocalVariable& exception_var_;
   const LocalVariable& stacktrace_var_;
+  const LocalVariable* raw_exception_var_;
+  const LocalVariable* raw_stacktrace_var_;
   const bool needs_stacktrace_;
-  const bool should_restore_closure_context_;
   TokenPosition handler_token_pos_;
   bool is_generated_;

@@ -2998,7 +3000,13 @@ class AssertBooleanInstr : public TemplateDefinition<1, Throws, Pure> {
 // the type arguments of a generic function or an arguments descriptor.
 class SpecialParameterInstr : public TemplateDefinition<0, NoThrow> {
  public:
-  enum SpecialParameterKind { kContext, kTypeArgs, kArgDescriptor };
+  enum SpecialParameterKind {
+    kContext,
+    kTypeArgs,
+    kArgDescriptor,
+    kException,
+    kStackTrace
+  };

   SpecialParameterInstr(SpecialParameterKind kind, intptr_t deopt_id)
       : TemplateDefinition(deopt_id), kind_(kind) {}
@@ -3027,6 +3035,10 @@ class SpecialParameterInstr : public TemplateDefinition<0, NoThrow> {
         return "kTypeArgs";
       case kArgDescriptor:
         return "kArgDescriptor";
+      case kException:
+        return "kException";
+      case kStackTrace:
+        return "kStackTrace";
     }
     UNREACHABLE();
     return NULL;
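
SpecialParameterKind gains kException and kStackTrace, and the kind-to-string switch is extended so the new kinds print correctly. A standalone copy of that exhaustive-switch pattern (the free function below is a sketch; in the VM, KindToCString is a member of SpecialParameterInstr):

#include <cstdio>

// Standalone copy of the extended kind enum; in the VM this is nested in
// SpecialParameterInstr.
enum SpecialParameterKind {
  kContext,
  kTypeArgs,
  kArgDescriptor,
  kException,
  kStackTrace
};

const char* KindToCString(SpecialParameterKind kind) {
  switch (kind) {
    case kContext:
      return "kContext";
    case kTypeArgs:
      return "kTypeArgs";
    case kArgDescriptor:
      return "kArgDescriptor";
    case kException:
      return "kException";
    case kStackTrace:
      return "kStackTrace";
  }
  return nullptr;  // Unreachable while the switch stays exhaustive.
}

int main() {
  std::printf("%s %s\n", KindToCString(kException), KindToCString(kStackTrace));
  return 0;
}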

runtime/vm/compiler/backend/il_arm.cc

Lines changed: 9 additions & 32 deletions
@@ -2888,38 +2888,15 @@ void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(fp_sp_dist <= 0);
   __ AddImmediate(SP, FP, fp_sp_dist);

-  // Auxiliary variables introduced by the try catch can be captured if we are
-  // inside a function with yield/resume points. In this case we first need
-  // to restore the context to match the context at entry into the closure.
-  if (should_restore_closure_context()) {
-    const ParsedFunction& parsed_function = compiler->parsed_function();
-    ASSERT(parsed_function.function().IsClosureFunction());
-    LocalScope* scope = parsed_function.node_sequence()->scope();
-
-    LocalVariable* closure_parameter = scope->VariableAt(0);
-    ASSERT(!closure_parameter->is_captured());
-    __ LoadFromOffset(kWord, R6, FP, closure_parameter->index() * kWordSize);
-    __ LoadFieldFromOffset(kWord, R6, R6, Closure::context_offset());
-
-    const intptr_t context_index =
-        parsed_function.current_context_var()->index();
-    __ StoreToOffset(kWord, R6, FP, context_index * kWordSize);
-  }
-
-  // Initialize exception and stack trace variables.
-  if (exception_var().is_captured()) {
-    ASSERT(stacktrace_var().is_captured());
-    __ StoreIntoObjectOffset(R6,
-                             Context::variable_offset(exception_var().index()),
-                             kExceptionObjectReg);
-    __ StoreIntoObjectOffset(R6,
-                             Context::variable_offset(stacktrace_var().index()),
-                             kStackTraceObjectReg);
-  } else {
-    __ StoreToOffset(kWord, kExceptionObjectReg, FP,
-                     exception_var().index() * kWordSize);
-    __ StoreToOffset(kWord, kStackTraceObjectReg, FP,
-                     stacktrace_var().index() * kWordSize);
+  if (!compiler->is_optimizing()) {
+    if (raw_exception_var_ != nullptr) {
+      __ str(kExceptionObjectReg,
+             Address(FP, raw_exception_var_->index() * kWordSize));
+    }
+    if (raw_stacktrace_var_ != nullptr) {
+      __ str(kStackTraceObjectReg,
+             Address(FP, raw_stacktrace_var_->index() * kWordSize));
+    }
   }
 }

runtime/vm/compiler/backend/il_arm64.cc

Lines changed: 9 additions & 34 deletions
@@ -2637,40 +2637,15 @@ void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(fp_sp_dist <= 0);
   __ AddImmediate(SP, FP, fp_sp_dist);

-  // Auxiliary variables introduced by the try catch can be captured if we are
-  // inside a function with yield/resume points. In this case we first need
-  // to restore the context to match the context at entry into the closure.
-  if (should_restore_closure_context()) {
-    const ParsedFunction& parsed_function = compiler->parsed_function();
-    ASSERT(parsed_function.function().IsClosureFunction());
-    LocalScope* scope = parsed_function.node_sequence()->scope();
-
-    LocalVariable* closure_parameter = scope->VariableAt(0);
-    ASSERT(!closure_parameter->is_captured());
-    __ LoadFromOffset(R28, FP, closure_parameter->index() * kWordSize);
-    __ LoadFieldFromOffset(R28, R28, Closure::context_offset());
-
-    const intptr_t context_index =
-        parsed_function.current_context_var()->index();
-    __ StoreToOffset(R28, FP, context_index * kWordSize);
-  }
-
-  // Initialize exception and stack trace variables.
-  if (exception_var().is_captured()) {
-    ASSERT(stacktrace_var().is_captured());
-    __ StoreIntoObjectOffset(R28,
-                             Context::variable_offset(exception_var().index()),
-                             kExceptionObjectReg);
-    __ StoreIntoObjectOffset(R28,
-                             Context::variable_offset(stacktrace_var().index()),
-                             kStackTraceObjectReg);
-  } else {
-    // Restore stack and initialize the two exception variables:
-    // exception and stack trace variables.
-    __ StoreToOffset(kExceptionObjectReg, FP,
-                     exception_var().index() * kWordSize);
-    __ StoreToOffset(kStackTraceObjectReg, FP,
-                     stacktrace_var().index() * kWordSize);
+  if (!compiler->is_optimizing()) {
+    if (raw_exception_var_ != nullptr) {
+      __ str(kExceptionObjectReg,
+             Address(FP, raw_exception_var_->index() * kWordSize));
+    }
+    if (raw_stacktrace_var_ != nullptr) {
+      __ str(kStackTraceObjectReg,
+             Address(FP, raw_stacktrace_var_->index() * kWordSize));
+    }
   }
 }

runtime/vm/compiler/backend/il_dbc.cc

Lines changed: 7 additions & 58 deletions
@@ -1184,69 +1184,18 @@ EMIT_NATIVE_CODE(CatchBlockEntry, 0) {
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
   }
+  __ SetFrame(compiler->StackSize());

-  Register context_reg = kNoRegister;
-
-  // Auxiliary variables introduced by the try catch can be captured if we are
-  // inside a function with yield/resume points. In this case we first need
-  // to restore the context to match the context at entry into the closure.
-  if (should_restore_closure_context()) {
-    const ParsedFunction& parsed_function = compiler->parsed_function();
-
-    ASSERT(parsed_function.function().IsClosureFunction());
-    LocalScope* scope = parsed_function.node_sequence()->scope();
-
-    LocalVariable* closure_parameter = scope->VariableAt(0);
-    ASSERT(!closure_parameter->is_captured());
-
-    const LocalVariable& current_context_var =
-        *parsed_function.current_context_var();
-
-    context_reg = compiler->is_optimizing()
-                      ? compiler->CatchEntryRegForVariable(current_context_var)
-                      : LocalVarIndex(0, current_context_var.index());
-
-    Register closure_reg;
-    if (closure_parameter->index() > 0) {
-      __ Move(context_reg, LocalVarIndex(0, closure_parameter->index()));
-      closure_reg = context_reg;
-    } else {
-      closure_reg = LocalVarIndex(0, closure_parameter->index());
-    }
-
-    __ LoadField(context_reg, closure_reg,
-                 Closure::context_offset() / kWordSize);
-  }
-
-  if (exception_var().is_captured()) {
-    ASSERT(stacktrace_var().is_captured());
-    ASSERT(context_reg != kNoRegister);
-    // This will be SP[1] register so we are free to use it as a temporary.
-    const Register temp = compiler->StackSize();
-    __ MoveSpecial(temp, Simulator::kExceptionSpecialIndex);
-    __ StoreField(context_reg,
-                  Context::variable_offset(exception_var().index()) / kWordSize,
-                  temp);
-    __ MoveSpecial(temp, Simulator::kStackTraceSpecialIndex);
-    __ StoreField(
-        context_reg,
-        Context::variable_offset(stacktrace_var().index()) / kWordSize, temp);
-  } else {
-    if (compiler->is_optimizing()) {
-      const intptr_t exception_reg =
-          compiler->CatchEntryRegForVariable(exception_var());
-      const intptr_t stacktrace_reg =
-          compiler->CatchEntryRegForVariable(stacktrace_var());
-      __ MoveSpecial(exception_reg, Simulator::kExceptionSpecialIndex);
-      __ MoveSpecial(stacktrace_reg, Simulator::kStackTraceSpecialIndex);
-    } else {
-      __ MoveSpecial(LocalVarIndex(0, exception_var().index()),
+  if (!compiler->is_optimizing()) {
+    if (raw_exception_var_ != nullptr) {
+      __ MoveSpecial(LocalVarIndex(0, raw_exception_var_->index()),
                      Simulator::kExceptionSpecialIndex);
-      __ MoveSpecial(LocalVarIndex(0, stacktrace_var().index()),
+    }
+    if (raw_stacktrace_var_ != nullptr) {
+      __ MoveSpecial(LocalVarIndex(0, raw_stacktrace_var_->index()),
                      Simulator::kStackTraceSpecialIndex);
     }
   }
-  __ SetFrame(compiler->StackSize());
 }

 EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
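
Across the ARM, ARM64, and DBC backends the catch-entry prologue now reduces to the same pattern: only in unoptimized code, and only when a raw variable exists, store the incoming exception/stack trace into that variable's frame slot; optimized code instead populates the (possibly captured) variables through ordinary IR seeded by the new SpecialParameterInstrs. A hedged standalone model of that pattern, using a plain vector as the "frame" (Frame, RawVar, EmitCatchEntryStores, and the slot indices are invented for the sketch):

#include <cstdio>
#include <vector>

// Stand-in for the machine frame: one slot per FP-relative variable index.
struct Frame {
  std::vector<long long> slots;
};

// Invented descriptor: index < 0 means "no raw variable", mirroring
// raw_exception_var_ / raw_stacktrace_var_ being nullptr in the backends.
struct RawVar {
  int index;
};

// In unoptimized code, store the incoming exception/stack trace values into
// the raw variables' frame slots; optimized code skips this entirely because
// the IR performs the stores.
void EmitCatchEntryStores(bool is_optimizing, Frame* frame,
                          RawVar raw_exception_var, RawVar raw_stacktrace_var,
                          long long exception_value, long long stacktrace_value) {
  if (is_optimizing) return;
  if (raw_exception_var.index >= 0) {
    frame->slots[raw_exception_var.index] = exception_value;
  }
  if (raw_stacktrace_var.index >= 0) {
    frame->slots[raw_stacktrace_var.index] = stacktrace_value;
  }
}

int main() {
  Frame frame{std::vector<long long>(4, 0)};
  EmitCatchEntryStores(/*is_optimizing=*/false, &frame, RawVar{1}, RawVar{2},
                       /*exception_value=*/0xE, /*stacktrace_value=*/0x5);
  for (size_t i = 0; i < frame.slots.size(); ++i) {
    std::printf("slot[%zu] = %lld\n", i, frame.slots[i]);
  }
  return 0;
}

Keeping only this minimal unoptimized-code path in each backend is what lets the closure-context restoration and captured-variable stores move out of hand-written assembly and into the IR.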
