Commit 5f03197

Version 3.5.0-61.0.dev
Merge 7eed880 into dev
Dart CI committed Apr 15, 2024
2 parents (3f161bb + 7eed880) · commit 5f03197
Showing 41 changed files with 3,077 additions and 3,571 deletions.
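
(Read from the diffs below: this merge changes how lazily-deoptimized frames deliver exceptions. The architecture backends stop registering lazy-deopt descriptors at catch-block entries; instead, the lazy-deopt-from-throw stub deoptimizes the frame and then rethrows the exception through a second runtime call. That call passes a new third argument to kReThrowRuntimeEntry telling the rethrow machinery to bypass the debugger, which was already notified when the exception was first thrown.)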
6,274 changes: 2,847 additions & 3,427 deletions runtime/bin/ffi_test/ffi_test_functions_generated.cc

Large diffs are not rendered by default.

11 changes: 0 additions & 11 deletions runtime/vm/compiler/backend/il_arm.cc
@@ -3292,17 +3292,6 @@ LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(this);
-  if (!FLAG_precompiled_mode) {
-    // On lazy deoptimization we patch the optimized code here to enter the
-    // deoptimization stub.
-    const intptr_t deopt_id = DeoptId::ToDeoptAfter(GetDeoptId());
-    if (compiler->is_optimizing()) {
-      compiler->AddDeoptIndexAtCall(deopt_id, env());
-    } else {
-      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
-                                     InstructionSource());
-    }
-  }
if (HasParallelMove()) {
parallel_move()->EmitNativeCode(compiler);
}
11 changes: 0 additions & 11 deletions runtime/vm/compiler/backend/il_arm64.cc
@@ -2854,17 +2854,6 @@ LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(this);
-  if (!FLAG_precompiled_mode) {
-    // On lazy deoptimization we patch the optimized code here to enter the
-    // deoptimization stub.
-    const intptr_t deopt_id = DeoptId::ToDeoptAfter(GetDeoptId());
-    if (compiler->is_optimizing()) {
-      compiler->AddDeoptIndexAtCall(deopt_id, env());
-    } else {
-      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
-                                     InstructionSource());
-    }
-  }
if (HasParallelMove()) {
parallel_move()->EmitNativeCode(compiler);
}
11 changes: 0 additions & 11 deletions runtime/vm/compiler/backend/il_ia32.cc
@@ -2492,17 +2492,6 @@ LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(this);
-  if (!FLAG_precompiled_mode) {
-    // On lazy deoptimization we patch the optimized code here to enter the
-    // deoptimization stub.
-    const intptr_t deopt_id = DeoptId::ToDeoptAfter(GetDeoptId());
-    if (compiler->is_optimizing()) {
-      compiler->AddDeoptIndexAtCall(deopt_id, env());
-    } else {
-      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
-                                     InstructionSource());
-    }
-  }
if (HasParallelMove()) {
parallel_move()->EmitNativeCode(compiler);
}
11 changes: 0 additions & 11 deletions runtime/vm/compiler/backend/il_riscv.cc
@@ -3100,17 +3100,6 @@ LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(this);
-  if (!FLAG_precompiled_mode) {
-    // On lazy deoptimization we patch the optimized code here to enter the
-    // deoptimization stub.
-    const intptr_t deopt_id = DeoptId::ToDeoptAfter(GetDeoptId());
-    if (compiler->is_optimizing()) {
-      compiler->AddDeoptIndexAtCall(deopt_id, env());
-    } else {
-      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
-                                     InstructionSource());
-    }
-  }
if (HasParallelMove()) {
parallel_move()->EmitNativeCode(compiler);
}
11 changes: 0 additions & 11 deletions runtime/vm/compiler/backend/il_x64.cc
@@ -2892,17 +2892,6 @@ LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(this);
-  if (!FLAG_precompiled_mode) {
-    // On lazy deoptimization we patch the optimized code here to enter the
-    // deoptimization stub.
-    const intptr_t deopt_id = DeoptId::ToDeoptAfter(GetDeoptId());
-    if (compiler->is_optimizing()) {
-      compiler->AddDeoptIndexAtCall(deopt_id, env());
-    } else {
-      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id,
-                                     InstructionSource());
-    }
-  }
if (HasParallelMove()) {
parallel_move()->EmitNativeCode(compiler);
}
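
(The same eleven-line block is deleted from all five backends: ARM, ARM64, IA32, RISC-V, and X64. Catch-block entries no longer record lazy-deopt descriptors; as the stub changes below show, the deoptimization stub itself now re-dispatches the exception to the right handler.)
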
6 changes: 4 additions & 2 deletions runtime/vm/compiler/stub_code_compiler.cc
@@ -215,7 +215,8 @@ void StubCodeCompiler::GenerateReThrowStub() {
__ PushObject(NullObject()); // Make room for (unused) result.
__ PushRegistersInOrder(
{ReThrowABI::kExceptionReg, ReThrowABI::kStackTraceReg});
-  __ CallRuntime(kReThrowRuntimeEntry, /*argument_count=*/2);
+  __ PushImmediate(Smi::RawValue(0));  // Do not bypass debugger.
+  __ CallRuntime(kReThrowRuntimeEntry, /*argument_count=*/3);
__ Breakpoint();
}

@@ -2490,7 +2491,8 @@ void StubCodeCompiler::GenerateAsyncExceptionHandlerStub() {
__ EnterStubFrame();
__ PushObject(NullObject()); // Make room for (unused) result.
__ PushRegistersInOrder({kExceptionObjectReg, kStackTraceObjectReg});
-  __ CallRuntime(kReThrowRuntimeEntry, /*argument_count=*/2);
+  __ PushImmediate(Smi::RawValue(0));  // Do not bypass debugger.
+  __ CallRuntime(kReThrowRuntimeEntry, /*argument_count=*/3);
__ Breakpoint();
}

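Both call sites now push a Smi flag after the exception and stacktrace and invoke the entry with argument_count 3. For orientation, here is a sketch of how the ReThrow runtime entry might consume that third argument; the entry's actual definition is not part of this diff, so the handle types and variable names below are assumptions:

  // Sketch only: argument order matches what the stubs push above.
  DEFINE_RUNTIME_ENTRY(ReThrow, 3) {
    const Instance& exception =
        Instance::CheckedHandle(zone, arguments.ArgAt(0));
    const Instance& stacktrace =
        Instance::CheckedHandle(zone, arguments.ArgAt(1));
    const Smi& bypass_debugger = Smi::CheckedHandle(zone, arguments.ArgAt(2));
    Exceptions::ReThrow(thread, exception, stacktrace,
                        bypass_debugger.Value() != 0);
  }
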
17 changes: 17 additions & 0 deletions runtime/vm/compiler/stub_code_compiler_arm.cc
@@ -918,6 +918,8 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
if (kind == kLazyDeoptFromReturn) {
__ Push(R1); // Preserve result, it will be GC-d here.
} else if (kind == kLazyDeoptFromThrow) {
+    // Preserve CODE_REG for one more runtime call.
+    __ Push(CODE_REG);
__ Push(R1); // Preserve exception, it will be GC-d here.
__ Push(R2); // Preserve stacktrace, it will be GC-d here.
}
@@ -931,11 +933,26 @@
} else if (kind == kLazyDeoptFromThrow) {
__ Pop(R1); // Restore stacktrace.
__ Pop(R0); // Restore exception.
+    __ Pop(CODE_REG);
}
__ LeaveStubFrame();
// Remove materialization arguments.
__ add(SP, SP, Operand(R2, ASR, kSmiTagSize));
// The caller is responsible for emitting the return instruction.

+  if (kind == kLazyDeoptFromThrow) {
+    // Unoptimized frame is now ready to accept the exception. Rethrow it to
+    // find the right handler. Ask the rethrow machinery to bypass the
+    // debugger, as it was already notified about this exception.
+    __ EnterStubFrame();
+    __ PushImmediate(
+        target::ToRawSmi(0));  // Space for the return value (unused).
+    __ Push(R0);  // Exception
+    __ Push(R1);  // Stacktrace
+    __ PushImmediate(target::ToRawSmi(1));  // Bypass debugger
+    __ CallRuntime(kReThrowRuntimeEntry, 3);
+    __ LeaveStubFrame();
+  }
}

// R0: result, must be preserved
16 changes: 16 additions & 0 deletions runtime/vm/compiler/stub_code_compiler_arm64.cc
@@ -1168,6 +1168,8 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
if (kind == kLazyDeoptFromReturn) {
__ Push(R1); // Preserve result, it will be GC-d here.
} else if (kind == kLazyDeoptFromThrow) {
+    // Preserve CODE_REG for one more runtime call.
+    __ Push(CODE_REG);
__ Push(R1); // Preserve exception, it will be GC-d here.
__ Push(R2); // Preserve stacktrace, it will be GC-d here.
}
@@ -1183,11 +1185,25 @@
} else if (kind == kLazyDeoptFromThrow) {
__ Pop(R1); // Restore stacktrace.
__ Pop(R0); // Restore exception.
+    __ Pop(CODE_REG);
}
__ LeaveStubFrame();
// Remove materialization arguments.
__ add(SP, SP, Operand(R2));
// The caller is responsible for emitting the return instruction.

+  if (kind == kLazyDeoptFromThrow) {
+    // Unoptimized frame is now ready to accept the exception. Rethrow it to
+    // find the right handler. Ask the rethrow machinery to bypass the
+    // debugger, as it was already notified about this exception.
+    __ EnterStubFrame();
+    __ Push(ZR);  // Space for the return value (unused).
+    __ Push(R0);  // Exception
+    __ Push(R1);  // Stacktrace
+    __ PushImmediate(target::ToRawSmi(1));  // Bypass debugger
+    __ CallRuntime(kReThrowRuntimeEntry, 3);
+    __ LeaveStubFrame();
+  }
}

// R0: result, must be preserved
20 changes: 18 additions & 2 deletions runtime/vm/compiler/stub_code_compiler_ia32.cc
@@ -777,6 +777,8 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
if (kind == kLazyDeoptFromReturn) {
__ pushl(EBX); // Preserve result, it will be GC-d here.
} else if (kind == kLazyDeoptFromThrow) {
+    // Preserve CODE_REG for one more runtime call.
+    __ pushl(CODE_REG);
__ pushl(EBX); // Preserve exception, it will be GC-d here.
__ pushl(ECX); // Preserve stacktrace, it will be GC-d here.
}
@@ -789,15 +791,29 @@
if (kind == kLazyDeoptFromReturn) {
__ popl(EAX); // Restore result.
} else if (kind == kLazyDeoptFromThrow) {
-    __ popl(EDX);  // Restore exception.
-    __ popl(EAX);  // Restore stacktrace.
+    __ popl(EDX);  // Restore stacktrace.
+    __ popl(EAX);  // Restore exception.
+    __ popl(CODE_REG);
}
__ LeaveStubFrame();

__ popl(ECX); // Pop return address.
__ addl(ESP, EBX); // Remove materialization arguments.
__ pushl(ECX); // Push return address.
// The caller is responsible for emitting the return instruction.

+  if (kind == kLazyDeoptFromThrow) {
+    // Unoptimized frame is now ready to accept the exception. Rethrow it to
+    // find the right handler. Ask the rethrow machinery to bypass the
+    // debugger, as it was already notified about this exception.
+    __ EnterStubFrame();
+    __ pushl(Immediate(target::ToRawSmi(0)));  // Space for the result.
+    __ pushl(EAX);  // Exception
+    __ pushl(EDX);  // Stacktrace
+    __ pushl(Immediate(target::ToRawSmi(1)));  // Bypass debugger.
+    __ CallRuntime(kReThrowRuntimeEntry, 3);
+    __ LeaveStubFrame();
+  }
}

// EAX: result, must be preserved
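
(Note that the two pop instructions in the IA32 hunk are unchanged; only their comments are swapped into the correct order. The stub pushes the exception before the stacktrace, so the first pop restores the stacktrace into EDX and the second restores the exception into EAX, which is what the rethrow block added below them expects.)
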
16 changes: 16 additions & 0 deletions runtime/vm/compiler/stub_code_compiler_riscv.cc
@@ -1012,6 +1012,8 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
if (kind == kLazyDeoptFromReturn) {
__ PushRegister(T1); // Preserve result, it will be GC-d here.
} else if (kind == kLazyDeoptFromThrow) {
+    // Preserve CODE_REG for one more runtime call.
+    __ PushRegister(CODE_REG);
// Preserve exception, it will be GC-d here.
// Preserve stacktrace, it will be GC-d here.
__ PushRegistersInOrder({T1, T2});
@@ -1028,11 +1030,25 @@
} else if (kind == kLazyDeoptFromThrow) {
__ PopRegister(A1); // Restore stacktrace.
__ PopRegister(A0); // Restore exception.
+    __ PopRegister(CODE_REG);
}
__ LeaveStubFrame();
// Remove materialization arguments.
__ add(SP, SP, T2);
// The caller is responsible for emitting the return instruction.

+  if (kind == kLazyDeoptFromThrow) {
+    // Unoptimized frame is now ready to accept the exception. Rethrow it to
+    // find the right handler. Ask the rethrow machinery to bypass the
+    // debugger, as it was already notified about this exception.
+    __ EnterStubFrame();
+    __ PushRegister(ZR);  // Space for the result value (unused).
+    __ PushRegister(A0);  // Exception
+    __ PushRegister(A1);  // Stacktrace
+    __ PushImmediate(target::ToRawSmi(1));  // Bypass debugger.
+    __ CallRuntime(kReThrowRuntimeEntry, 3);
+    __ LeaveStubFrame();
+  }
}

// A0: result, must be preserved
15 changes: 15 additions & 0 deletions runtime/vm/compiler/stub_code_compiler_x64.cc
@@ -1096,6 +1096,8 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
if (kind == kLazyDeoptFromReturn) {
__ pushq(RBX); // Preserve result, it will be GC-d here.
} else if (kind == kLazyDeoptFromThrow) {
+    // Preserve CODE_REG for one more runtime call.
+    __ pushq(CODE_REG);
__ pushq(RBX); // Preserve exception.
__ pushq(RDX); // Preserve stacktrace.
}
@@ -1110,13 +1112,26 @@
} else if (kind == kLazyDeoptFromThrow) {
__ popq(RDX); // Restore stacktrace.
__ popq(RAX); // Restore exception.
+    __ popq(CODE_REG);
}
__ LeaveStubFrame();

__ popq(RCX); // Pop return address.
__ addq(RSP, RBX); // Remove materialization arguments.
__ pushq(RCX); // Push return address.
// The caller is responsible for emitting the return instruction.

+  if (kind == kLazyDeoptFromThrow) {
+    // Unoptimized frame is now ready to accept the exception. Rethrow it to
+    // find the right handler.
+    __ EnterStubFrame();
+    __ pushq(Immediate(target::ToRawSmi(0)));  // Space for the result.
+    __ pushq(RAX);  // Exception
+    __ pushq(RDX);  // Stacktrace
+    __ pushq(Immediate(target::ToRawSmi(1)));  // Bypass debugger.
+    __ CallRuntime(kReThrowRuntimeEntry, 3);
+    __ LeaveStubFrame();
+  }
}

// RAX: result, must be preserved
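
(Net effect across all five stubs, as far as these diffs show: after a lazy deoptimization triggered by a throw, the stub restores the exception and stacktrace registers, removes the materialization arguments, and then immediately re-enters the runtime to rethrow, with the bypass-debugger flag set so the debugger is not paused a second time. CODE_REG is saved and restored so the stub can make that second runtime call after leaving the first stub frame.)
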
29 changes: 18 additions & 11 deletions runtime/vm/exceptions.cc
@@ -730,7 +730,8 @@ DART_NORETURN
static void ThrowExceptionHelper(Thread* thread,
const Instance& incoming_exception,
const Instance& existing_stacktrace,
-                                 const bool is_rethrow) {
+                                 const bool is_rethrow,
+                                 const bool bypass_debugger) {
// SuspendLongJumpScope during Dart entry ensures that if a longjmp base is
  // available, it is the innermost error handler. If one is available, we
  // should jump there instead.
@@ -739,12 +740,14 @@ static void ThrowExceptionHelper(Thread* thread,
auto object_store = thread->isolate_group()->object_store();
Isolate* isolate = thread->isolate();
#if !defined(PRODUCT)
-  // Do not notify debugger on stack overflow and out of memory exceptions.
-  // The VM would crash when the debugger calls back into the VM to
-  // get values of variables.
-  if (incoming_exception.ptr() != object_store->out_of_memory() &&
-      incoming_exception.ptr() != object_store->stack_overflow()) {
-    isolate->debugger()->PauseException(incoming_exception);
+  if (!bypass_debugger) {
+    // Do not notify debugger on stack overflow and out of memory exceptions.
+    // The VM would crash when the debugger calls back into the VM to
+    // get values of variables.
+    if (incoming_exception.ptr() != object_store->out_of_memory() &&
+        incoming_exception.ptr() != object_store->stack_overflow()) {
+      isolate->debugger()->PauseException(incoming_exception);
+    }
}
#endif
bool use_preallocated_stacktrace = false;
@@ -975,21 +978,25 @@ void Exceptions::CreateAndThrowTypeError(TokenPosition location,
void Exceptions::Throw(Thread* thread, const Instance& exception) {
// Null object is a valid exception object.
ThrowExceptionHelper(thread, exception, StackTrace::Handle(thread->zone()),
-                       false);
+                       /*is_rethrow=*/false,
+                       /*bypass_debugger=*/false);
}

void Exceptions::ReThrow(Thread* thread,
const Instance& exception,
-                         const Instance& stacktrace) {
+                         const Instance& stacktrace,
+                         bool bypass_debugger /* = false */) {
// Null object is a valid exception object.
-  ThrowExceptionHelper(thread, exception, stacktrace, true);
+  ThrowExceptionHelper(thread, exception, stacktrace, /*is_rethrow=*/true,
+                       bypass_debugger);
}

void Exceptions::ThrowWithStackTrace(Thread* thread,
const Instance& exception,
const Instance& stacktrace) {
// Null object is a valid exception object.
-  ThrowExceptionHelper(thread, exception, stacktrace, false);
+  ThrowExceptionHelper(thread, exception, stacktrace, /*is_rethrow=*/false,
+                       /*bypass_debugger=*/false);
}

void Exceptions::PropagateError(const Error& error) {
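
The new flag feeds the check above: when the deoptimization stubs pass 1, PauseException is skipped entirely. Below is a minimal standalone model of that behavior, simplified far beyond the VM's actual machinery, with every name invented for illustration:

  #include <cstdio>

  // Toy stand-ins for the VM's debugger and throw helper.
  struct Debugger {
    void PauseException(const char* exception) {
      std::printf("debugger paused on: %s\n", exception);
    }
  };

  void ThrowExceptionHelperModel(Debugger* debugger, const char* exception,
                                 bool bypass_debugger) {
    if (!bypass_debugger) {
      debugger->PauseException(exception);  // Notify the debugger once.
    }
    // ... locate the handler and unwind (omitted) ...
  }

  int main() {
    Debugger debugger;
    // Original throw: the debugger pauses.
    ThrowExceptionHelperModel(&debugger, "StateError", false);
    // Internal rethrow from the lazy-deopt stub: the debugger has already
    // seen this exception, so the flag suppresses a second pause.
    ThrowExceptionHelperModel(&debugger, "StateError", true);
    return 0;
  }
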
3 changes: 2 additions & 1 deletion runtime/vm/exceptions.h
@@ -31,7 +31,8 @@ class Exceptions : AllStatic {
DART_NORETURN static void Throw(Thread* thread, const Instance& exception);
DART_NORETURN static void ReThrow(Thread* thread,
const Instance& exception,
-                                    const Instance& stacktrace);
+                                    const Instance& stacktrace,
+                                    bool bypass_debugger = false);
DART_NORETURN static void ThrowWithStackTrace(Thread* thread,
const Instance& exception,
const Instance& stacktrace);