From 275bfa1b612da851b45870ce2077cf339ff6e9fe Mon Sep 17 00:00:00 2001
From: "wingo@igalia.com"
Date: Mon, 5 May 2014 14:31:51 +0000
Subject: [PATCH] Relocate suspended generator activations when enabling debug
 mode

R=yangguo@chromium.org
BUG=v8:3289
LOG=N

Review URL: https://codereview.chromium.org/264973014

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21145 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/debug.cc                                  | 239 ++++++++++++++++++--------
 src/debug.h                                   |   4 +
 src/objects-inl.h                             |   5 +
 src/objects.h                                 |   1 +
 test/mjsunit/harmony/generators-relocation.js |  61 +++++++
 5 files changed, 238 insertions(+), 72 deletions(-)
 create mode 100644 test/mjsunit/harmony/generators-relocation.js

diff --git a/src/debug.cc b/src/debug.cc
index 530cf30..3ecf8ba 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -1881,6 +1881,59 @@ static void CollectActiveFunctionsFromThread(
 }
 
 
+// Figure out how many bytes of "pc_offset" correspond to actual code by
+// subtracting off the bytes that correspond to constant/veneer pools. See
+// Assembler::CheckConstPool() and Assembler::CheckVeneerPool(). Note that this
+// is only useful for architectures using constant pools or veneer pools.
+static int ComputeCodeOffsetFromPcOffset(Code *code, int pc_offset) {
+  ASSERT_EQ(code->kind(), Code::FUNCTION);
+  ASSERT(!code->has_debug_break_slots());
+  ASSERT_LE(0, pc_offset);
+  ASSERT_LT(pc_offset, code->instruction_end() - code->instruction_start());
+
+  int mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  byte *pc = code->instruction_start() + pc_offset;
+  int code_offset = pc_offset;
+  for (RelocIterator it(code, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    if (info->pc() >= pc) break;
+    ASSERT(RelocInfo::IsConstPool(info->rmode()));
+    code_offset -= static_cast<int>(info->data());
+    ASSERT_LE(0, code_offset);
+  }
+
+  return code_offset;
+}
+
+
+// The inverse of ComputeCodeOffsetFromPcOffset.
+static int ComputePcOffsetFromCodeOffset(Code *code, int code_offset) {
+  ASSERT_EQ(code->kind(), Code::FUNCTION);
+
+  int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
+             RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
+             RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  int reloc = 0;
+  for (RelocIterator it(code, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    if (info->pc() - code->instruction_start() - reloc >= code_offset) break;
+    if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
+      reloc += Assembler::kDebugBreakSlotLength;
+    } else {
+      ASSERT(RelocInfo::IsConstPool(info->rmode()));
+      reloc += static_cast<int>(info->data());
+    }
+  }
+
+  int pc_offset = code_offset + reloc;
+
+  ASSERT_LT(code->instruction_start() + pc_offset, code->instruction_end());
+
+  return pc_offset;
+}
+
+
 static void RedirectActivationsToRecompiledCodeOnThread(
     Isolate* isolate,
     ThreadLocalTop* top) {
@@ -1902,51 +1955,13 @@ static void RedirectActivationsToRecompiledCodeOnThread(
       continue;
    }
 
-    // Iterate over the RelocInfo in the original code to compute the sum of the
-    // constant pools and veneer pools sizes. (See Assembler::CheckConstPool()
-    // and Assembler::CheckVeneerPool())
-    // Note that this is only useful for architectures using constant pools or
-    // veneer pools.
-    int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-        RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-    int frame_pool_size = 0;
-    for (RelocIterator it(*frame_code, pool_mask); !it.done(); it.next()) {
-      RelocInfo* info = it.rinfo();
-      if (info->pc() >= frame->pc()) break;
-      frame_pool_size += static_cast<int>(info->data());
-    }
-    intptr_t frame_offset =
-        frame->pc() - frame_code->instruction_start() - frame_pool_size;
-
-    // Iterate over the RelocInfo for new code to find the number of bytes
-    // generated for debug slots and constant pools.
-    int debug_break_slot_bytes = 0;
-    int new_code_pool_size = 0;
-    int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
-        RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
-        RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-    for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
-      // Check if the pc in the new code with debug break
-      // slots is before this slot.
-      RelocInfo* info = it.rinfo();
-      intptr_t new_offset = info->pc() - new_code->instruction_start() -
-          new_code_pool_size - debug_break_slot_bytes;
-      if (new_offset >= frame_offset) {
-        break;
-      }
-
-      if (RelocInfo::IsDebugBreakSlot(info->rmode())) {
-        debug_break_slot_bytes += Assembler::kDebugBreakSlotLength;
-      } else {
-        ASSERT(RelocInfo::IsConstPool(info->rmode()));
-        // The size of the pools is encoded in the data.
-        new_code_pool_size += static_cast<int>(info->data());
-      }
-    }
+    int old_pc_offset =
+        static_cast<int>(frame->pc() - frame_code->instruction_start());
+    int code_offset = ComputeCodeOffsetFromPcOffset(*frame_code, old_pc_offset);
+    int new_pc_offset = ComputePcOffsetFromCodeOffset(*new_code, code_offset);
 
     // Compute the equivalent pc in the new code.
-    byte* new_pc = new_code->instruction_start() + frame_offset +
-        debug_break_slot_bytes + new_code_pool_size;
+    byte* new_pc = new_code->instruction_start() + new_pc_offset;
 
     if (FLAG_trace_deopt) {
       PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
@@ -2002,6 +2017,55 @@ class ActiveFunctionsRedirector : public ThreadVisitor {
 };
 
 
+class ForceDebuggerActive {
+ public:
+  explicit ForceDebuggerActive(Isolate *isolate) {
+    isolate_ = isolate;
+    old_state_ = isolate->debugger()->force_debugger_active();
+    isolate_->debugger()->set_force_debugger_active(true);
+  }
+
+  ~ForceDebuggerActive() {
+    isolate_->debugger()->set_force_debugger_active(old_state_);
+  }
+
+ private:
+  Isolate *isolate_;
+  bool old_state_;
+
+  DISALLOW_COPY_AND_ASSIGN(ForceDebuggerActive);
+};
+
+
+void Debug::MaybeRecompileFunctionForDebugging(Handle<JSFunction> function) {
+  ASSERT_EQ(Code::FUNCTION, function->code()->kind());
+  ASSERT_EQ(function->code(), function->shared()->code());
+
+  if (function->code()->has_debug_break_slots()) return;
+
+  ForceDebuggerActive force_debugger_active(isolate_);
+  MaybeHandle<Code> code = Compiler::GetCodeForDebugging(function);
+  // Recompilation can fail. In that case leave the code as it was.
+  if (!code.is_null())
+    function->ReplaceCode(*code.ToHandleChecked());
+  ASSERT_EQ(function->code(), function->shared()->code());
+}
+
+
+void Debug::RecompileAndRelocateSuspendedGenerators(
+    const List<Handle<JSGeneratorObject> > &generators) {
+  for (int i = 0; i < generators.length(); i++) {
+    Handle<JSFunction> fun(generators[i]->function());
+
+    MaybeRecompileFunctionForDebugging(fun);
+
+    int code_offset = generators[i]->continuation();
+    int pc_offset = ComputePcOffsetFromCodeOffset(fun->code(), code_offset);
+    generators[i]->set_continuation(pc_offset);
+  }
+}
+
+
 void Debug::PrepareForBreakPoints() {
   // If preparing for the first break point make sure to deoptimize all
   // functions as debugging does not work with optimized code.
@@ -2021,6 +2085,21 @@ void Debug::PrepareForBreakPoints() {
   // is used both in GC and non-GC code.
   List<Handle<JSFunction> > active_functions(100);
 
+  // A list of all suspended generators.
+  List<Handle<JSGeneratorObject> > suspended_generators;
+
+  // A list of all generator functions. We need to recompile all functions,
+  // but we don't know until after visiting the whole heap which generator
+  // functions have suspended activations and which do not. As in the case of
+  // functions with activations on the stack, we need to be careful with
+  // generator functions with suspended activations because although they
+  // should be recompiled, recompilation can fail, and we need to avoid
+  // leaving the heap in an inconsistent state.
+  //
+  // We could perhaps avoid this list and instead re-use the GC metadata
+  // links.
+  List<Handle<JSFunction> > generator_functions;
+
   {
     // We are going to iterate heap to find all functions without
     // debug break slots.
@@ -2058,6 +2137,11 @@ void Debug::PrepareForBreakPoints() {
         if (function->IsBuiltin()) continue;
         if (shared->code()->gc_metadata() == active_code_marker) continue;
 
+        if (shared->is_generator()) {
+          generator_functions.Add(Handle<JSFunction>(function, isolate_));
+          continue;
+        }
+
         Code::Kind kind = function->code()->kind();
         if (kind == Code::FUNCTION &&
             !function->code()->has_debug_break_slots()) {
@@ -2077,6 +2161,24 @@ void Debug::PrepareForBreakPoints() {
             function->shared()->set_code(*lazy_compile);
           }
         }
+      } else if (obj->IsJSGeneratorObject()) {
+        JSGeneratorObject* gen = JSGeneratorObject::cast(obj);
+        if (!gen->is_suspended()) continue;
+
+        JSFunction* fun = gen->function();
+        ASSERT_EQ(fun->code()->kind(), Code::FUNCTION);
+        if (fun->code()->has_debug_break_slots()) continue;
+
+        int pc_offset = gen->continuation();
+        ASSERT_LT(0, pc_offset);
+
+        int code_offset =
+            ComputeCodeOffsetFromPcOffset(fun->code(), pc_offset);
+
+        // This will be fixed after we recompile the functions.
+        gen->set_continuation(code_offset);
+
+        suspended_generators.Add(Handle<JSGeneratorObject>(gen, isolate_));
       }
     }
 
@@ -2087,42 +2189,35 @@ void Debug::PrepareForBreakPoints() {
     }
   }
 
+  // Recompile generator functions that have suspended activations, and
+  // relocate those activations.
+  RecompileAndRelocateSuspendedGenerators(suspended_generators);
+
+  // Mark generator functions that didn't have suspended activations for lazy
+  // recompilation. Note that this set does not include any active functions.
+  for (int i = 0; i < generator_functions.length(); i++) {
+    Handle<JSFunction> &function = generator_functions[i];
+    if (function->code()->kind() != Code::FUNCTION) continue;
+    if (function->code()->has_debug_break_slots()) continue;
+    function->set_code(*lazy_compile);
+    function->shared()->set_code(*lazy_compile);
+  }
+
   // Now recompile all functions with activation frames and and
-  // patch the return address to run in the new compiled code.
+  // patch the return address to run in the new compiled code. It could be
+  // that some active functions were recompiled already by the suspended
+  // generator recompilation pass above; a generator with suspended
+  // activations could also have active activations. That's fine.
   for (int i = 0; i < active_functions.length(); i++) {
     Handle<JSFunction> function = active_functions[i];
     Handle<SharedFunctionInfo> shared(function->shared());
 
-    if (function->code()->kind() == Code::FUNCTION &&
-        function->code()->has_debug_break_slots()) {
-      // Nothing to do. Function code already had debug break slots.
-      continue;
-    }
-
     // If recompilation is not possible just skip it.
-    if (shared->is_toplevel() ||
-        !shared->allows_lazy_compilation() ||
-        shared->code()->kind() == Code::BUILTIN) {
-      continue;
-    }
-
-    // Make sure that the shared full code is compiled with debug
-    // break slots.
-    if (!shared->code()->has_debug_break_slots()) {
-      // Try to compile the full code with debug break slots. If it
-      // fails just keep the current code.
-      bool prev_force_debugger_active =
-          isolate_->debugger()->force_debugger_active();
-      isolate_->debugger()->set_force_debugger_active(true);
-      Handle<Code> code = Compiler::GetCodeForDebugging(
-          function).ToHandleChecked();
-      function->ReplaceCode(*code);
-      isolate_->debugger()->set_force_debugger_active(
-          prev_force_debugger_active);
-    }
+    if (shared->is_toplevel()) continue;
+    if (!shared->allows_lazy_compilation()) continue;
+    if (shared->code()->kind() == Code::BUILTIN) continue;
 
-    // Keep function code in sync with shared function info.
-    function->set_code(shared->code());
+    MaybeRecompileFunctionForDebugging(function);
   }
 
   RedirectActivationsToRecompiledCodeOnThread(isolate_,
diff --git a/src/debug.h b/src/debug.h
index 2f5c9bf..457a5fa 100644
--- a/src/debug.h
+++ b/src/debug.h
@@ -511,6 +511,10 @@ class Debug {
   Handle<Object> CheckBreakPoints(Handle<Object> break_point);
   bool CheckBreakPoint(Handle<Object> break_point_object);
 
+  void MaybeRecompileFunctionForDebugging(Handle<JSFunction> function);
+  void RecompileAndRelocateSuspendedGenerators(
+      const List<Handle<JSGeneratorObject> > &suspended_generators);
+
   // Global handle to debug context where all the debugger JavaScript code is
   // loaded.
   Handle<Context> debug_context_;
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 33b1bc9..e4ccd75 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -5711,6 +5711,11 @@ SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
 SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
 
+bool JSGeneratorObject::is_suspended() {
+  ASSERT_LT(kGeneratorExecuting, kGeneratorClosed);
+  ASSERT_EQ(kGeneratorClosed, 0);
+  return continuation() > 0;
+}
 
 JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
   ASSERT(obj->IsJSGeneratorObject());
diff --git a/src/objects.h b/src/objects.h
index 4f7702d..d607b04 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -7461,6 +7461,7 @@ class JSGeneratorObject: public JSObject {
   // cannot be resumed.
   inline int continuation();
   inline void set_continuation(int continuation);
+  inline bool is_suspended();
 
   // [operand_stack]: Saved operand stack.
   DECL_ACCESSORS(operand_stack, FixedArray)
diff --git a/test/mjsunit/harmony/generators-relocation.js b/test/mjsunit/harmony/generators-relocation.js
new file mode 100644
index 0000000..4074235
--- /dev/null
+++ b/test/mjsunit/harmony/generators-relocation.js
@@ -0,0 +1,61 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-debug-as debug --harmony-generators
+
+var Debug = debug.Debug;
+
+function assertIteratorResult(value, done, result) {
+  assertEquals({value: value, done: done}, result);
+}
+
+function RunTest(formals_and_body, args, value1, value2) {
+  // A null listener. It isn't important what the listener does.
+  function listener(event, exec_state, event_data, data) {
+  }
+
+  // Create the generator function outside a debugging context. It will probably
+  // be lazily compiled.
+  var gen = (function*(){}).constructor.apply(null, formals_and_body);
+
+  // Instantiate the generator object.
+  var obj = gen.apply(null, args);
+
+  // Advance to the first yield.
+  assertIteratorResult(value1, false, obj.next());
+
+  // Add a breakpoint on line 3 (the second yield).
+  var bp = Debug.setBreakPoint(gen, 3);
+
+  // Enable the debugger, which should force recompilation of the generator
+  // function and relocation of the suspended generator activation.
+  Debug.setListener(listener);
+
+  // Check that the generator resumes and suspends properly.
+  assertIteratorResult(value2, false, obj.next());
+
+  // Disable debugger -- should not force recompilation.
+  Debug.clearBreakPoint(bp);
+  Debug.setListener(null);
+
+  // Run to completion.
+  assertIteratorResult(undefined, true, obj.next());
+}
+
+function prog(a, b, c) {
+  return a + ';\n' + 'yield ' + b + ';\n' + 'yield ' + c;
+}
+
+// Simple empty local scope.
+RunTest([prog('', '1', '2')], [], 1, 2);
+
+RunTest([prog('for (;;) break', '1', '2')], [], 1, 2);
+
+RunTest([prog('while (0) foo()', '1', '2')], [], 1, 2);
+
+RunTest(['a', prog('var x = 3', 'a', 'x')], [1], 1, 3);
+
+RunTest(['a', prog('', '1', '2')], [42], 1, 2);
+
+RunTest(['a', prog('for (;;) break', '1', '2')], [42], 1, 2);
-- 
2.7.4
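A standalone sketch of the offset arithmetic that ComputeCodeOffsetFromPcOffset and ComputePcOffsetFromCodeOffset implement, for readers who want to see the relocation step in isolation. This is a toy model, not V8's RelocIterator API: pools are represented as hypothetical (pc_offset, size) pairs, and the same pool layout is reused in both directions purely to show the mappings invert each other; in the patch the second walk runs over the recompiled code's RelocInfo and also counts debug break slots.

// Toy model of constant/veneer pools: entries recorded in ascending pc order.
#include <cassert>
#include <vector>

struct PoolEntry { int pc_offset; int size; };

// Analogue of ComputeCodeOffsetFromPcOffset: drop the bytes of every pool
// that was emitted before the given pc offset.
int CodeOffsetFromPcOffset(const std::vector<PoolEntry>& pools, int pc_offset) {
  int code_offset = pc_offset;
  for (const PoolEntry& p : pools) {
    if (p.pc_offset >= pc_offset) break;
    code_offset -= p.size;
  }
  return code_offset;
}

// Analogue of ComputePcOffsetFromCodeOffset: add back the bytes of every
// pool whose pool-free position precedes the given code offset.
int PcOffsetFromCodeOffset(const std::vector<PoolEntry>& pools, int code_offset) {
  int reloc = 0;
  for (const PoolEntry& p : pools) {
    if (p.pc_offset - reloc >= code_offset) break;
    reloc += p.size;
  }
  return code_offset + reloc;
}

int main() {
  // Two pools: 16 bytes at pc offset 40 and 8 bytes at pc offset 100.
  std::vector<PoolEntry> pools = {{40, 16}, {100, 8}};
  int pc = 120;                                  // e.g. a suspended continuation
  int code = CodeOffsetFromPcOffset(pools, pc);  // 120 - 16 - 8 = 96
  assert(code == 96);
  assert(PcOffsetFromCodeOffset(pools, code) == pc);  // the mappings round-trip
  return 0;
}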