1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/accessors.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/disasm.h"
11 #include "src/full-codegen.h"
12 #include "src/global-handles.h"
13 #include "src/macro-assembler.h"
14 #include "src/prettyprinter.h"
// Allocates the raw memory chunk that backs one deoptimization-entry code
// table; sized by Deoptimizer::GetMaxDeoptTableSize() and aligned to the OS
// commit-page size.
// NOTE(review): SOURCE is elided here (the call's trailing arguments and the
// function's closing lines are not visible) — confirm against the full file.
20 static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
21 return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
22 base::OS::CommitPageSize(),
23 #if defined(__native_client__)
24 // The Native Client port of V8 uses an interpreter,
25 // so code pages don't need PROT_EXEC.
// Eagerly allocates one code chunk per bailout type (EAGER/LAZY/SOFT);
// -1 in deopt_entry_code_entries_ marks "no entries generated yet" for
// that table.
// NOTE(review): SOURCE is elided here (initializer-list tail and closing
// braces are not visible).
34 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
35 : allocator_(allocator),
36 deoptimized_frame_info_(NULL),
38 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
39 deopt_entry_code_entries_[i] = -1;
40 deopt_entry_code_[i] = AllocateCodeChunk(allocator);
// Releases the per-bailout-type deopt-entry code chunks allocated in the
// constructor and nulls the slots.
// NOTE(review): closing braces are elided from this view of SOURCE.
45 DeoptimizerData::~DeoptimizerData() {
46 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
47 allocator_->Free(deopt_entry_code_[i]);
48 deopt_entry_code_[i] = NULL;
// GC support: visits the heap references held by the debugger-inspectable
// frame info, if one is currently registered.
53 void DeoptimizerData::Iterate(ObjectVisitor* v) {
54 if (deoptimized_frame_info_ != NULL) {
55 deoptimized_frame_info_->Iterate(v);
// Walks the native context's deoptimized-code list looking for the Code
// object whose instructions contain |addr|.
// NOTE(review): the not-found return path is elided from this view of SOURCE.
60 Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
61 if (function_->IsHeapObject()) {
62 // Search all deoptimizing code in the native context of the function.
63 Context* native_context = function_->context()->native_context();
64 Object* element = native_context->DeoptimizedCodeListHead();
65 while (!element->IsUndefined()) {
66 Code* code = Code::cast(element);
67 CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
68 if (code->contains(addr)) return code;
69 element = code->next_code_link();
// Creates a Deoptimizer and installs it as the isolate's single "current"
// deoptimizer (the CHECK enforces there can only be one at a time).
// NOTE(review): parameter list, constructor arguments, and the return are
// elided from this view of SOURCE.
76 // We rely on this function not causing a GC. It is called from generated code
77 // without having a real stack frame in place.
78 Deoptimizer* Deoptimizer::New(JSFunction* function,
84 Deoptimizer* deoptimizer = new Deoptimizer(isolate,
91 CHECK(isolate->deoptimizer_data()->current_ == NULL);
92 isolate->deoptimizer_data()->current_ = deoptimizer;
// Upper bound on the epilogue code appended after the deopt entry table;
// used below when sizing the table's memory chunk.
97 // No larger than 2K on all platforms
98 static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
// Computes the chunk size needed for the largest possible deopt-entry table
// plus epilogue, rounded up to whole commit pages (the "+ 1" page covers the
// division remainder).
// NOTE(review): the declaration line for entries_size is elided from this
// view of SOURCE.
101 size_t Deoptimizer::GetMaxDeoptTableSize() {
103 Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
104 int commit_page_size = static_cast<int>(base::OS::CommitPageSize());
105 int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
106 commit_page_size) + 1;
107 return static_cast<size_t>(commit_page_size * page_count);
// Takes ownership of the isolate's current deoptimizer (set by New), frees
// its GC-unsafe frame descriptions, and clears the current_ slot.
// NOTE(review): the return statement is elided from this view of SOURCE.
111 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
112 Deoptimizer* result = isolate->deoptimizer_data()->current_;
113 CHECK_NE(result, NULL);
114 result->DeleteFrameDescriptions();
115 isolate->deoptimizer_data()->current_ = NULL;
// Maps a JavaScript-frame index to an index into output_ by scanning frames
// and counting only JAVA_SCRIPT ones (skipping e.g. adaptor frames).
// NOTE(review): the loop body is partially elided from this view of SOURCE
// (frame_index declaration/increment not visible).
120 int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
121 if (jsframe_index == 0) return 0;
124 while (jsframe_index >= 0) {
125 FrameDescription* frame = output_[frame_index];
126 if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
132 return frame_index - 1;
// Builds a GC-safe snapshot of one optimized frame for the debugger: runs a
// DEBUGGER-type deoptimization over the frame, copies out the requested
// frame's parameters/expressions, then discards the GC-unsafe frame
// descriptions and materializes heap numbers into the snapshot.
// NOTE(review): several argument lines of the multi-line calls below are
// elided from this view of SOURCE.
136 DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
137 JavaScriptFrame* frame,
140 CHECK(frame->is_optimized());
141 CHECK(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
143 // Get the function and code from the frame.
144 JSFunction* function = frame->function();
145 Code* code = frame->LookupCode();
147 // Locate the deoptimization point in the code. As we are at a call the
148 // return address must be at a place in the code with deoptimization support.
149 SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
150 int deoptimization_index = safepoint_entry.deoptimization_index();
151 CHECK_NE(deoptimization_index, Safepoint::kNoDeoptimizationIndex);
153 // Always use the actual stack slots when calculating the fp to sp
154 // delta adding two for the function and context.
155 unsigned stack_slots = code->stack_slots();
156 unsigned fp_to_sp_delta = (stack_slots * kPointerSize) +
157 StandardFrameConstants::kFixedFrameSizeFromFp;
159 Deoptimizer* deoptimizer = new Deoptimizer(isolate,
161 Deoptimizer::DEBUGGER,
162 deoptimization_index,
166 Address tos = frame->fp() - fp_to_sp_delta;
167 deoptimizer->FillInputFrame(tos, frame);
169 // Calculate the output frames.
170 Deoptimizer::ComputeOutputFrames(deoptimizer);
172 // Create the GC safe output frame information and register it for GC
174 CHECK_LT(jsframe_index, deoptimizer->jsframe_count());
176 // Convert JS frame index into frame index.
177 int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);
179 bool has_arguments_adaptor =
181 deoptimizer->output_[frame_index - 1]->GetFrameType() ==
182 StackFrame::ARGUMENTS_ADAPTOR;
184 int construct_offset = has_arguments_adaptor ? 2 : 1;
185 bool has_construct_stub =
186 frame_index >= construct_offset &&
187 deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
188 StackFrame::CONSTRUCT;
190 DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
192 has_arguments_adaptor,
194 isolate->deoptimizer_data()->deoptimized_frame_info_ = info;
196 // Get the "simulated" top and size for the requested frame.
197 FrameDescription* parameters_frame =
198 deoptimizer->output_[
199 has_arguments_adaptor ? (frame_index - 1) : frame_index];
201 uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
202 Address parameters_top = reinterpret_cast<Address>(
203 parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
206 uint32_t expressions_size = info->expression_count() * kPointerSize;
207 Address expressions_top = reinterpret_cast<Address>(
208 deoptimizer->output_[frame_index]->GetTop());
210 // Done with the GC-unsafe frame descriptions. This re-enables allocation.
211 deoptimizer->DeleteFrameDescriptions();
213 // Allocate a heap number for the doubles belonging to this frame.
214 deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
215 parameters_top, parameters_size, expressions_top, expressions_size, info);
217 // Finished using the deoptimizer instance.
// Unregisters (and presumably frees — the deletion line is elided from this
// view) the debugger-inspectable frame info created by
// DebuggerInspectableFrame above.
224 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
226 CHECK_EQ(isolate->deoptimizer_data()->deoptimized_frame_info_, info);
228 isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
// Emits |count| deopt-table entries of the given bailout type into |masm|
// via the platform's TableEntryGenerator.
232 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
235 TableEntryGenerator generator(masm, type, count);
236 generator.Generate();
// Applies |visitor| to every function on the context's optimized-function
// list, unlinking entries whose code is (or becomes, after the visit) no
// longer optimized. Allocation is disallowed so the list stays stable.
// NOTE(review): branch/else lines of the unlink logic are elided from this
// view of SOURCE — the prev==NULL vs prev!=NULL split is only partially
// visible.
240 void Deoptimizer::VisitAllOptimizedFunctionsForContext(
241 Context* context, OptimizedFunctionVisitor* visitor) {
242 DisallowHeapAllocation no_allocation;
244 CHECK(context->IsNativeContext());
246 visitor->EnterContext(context);
248 // Visit the list of optimized functions, removing elements that
249 // no longer refer to optimized code.
250 JSFunction* prev = NULL;
251 Object* element = context->OptimizedFunctionsListHead();
252 while (!element->IsUndefined()) {
253 JSFunction* function = JSFunction::cast(element);
254 Object* next = function->next_function_link();
255 if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
256 (visitor->VisitFunction(function),
257 function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
258 // The function no longer refers to optimized code, or the visitor
259 // changed the code to which it refers to no longer be optimized code.
260 // Remove the function from this list.
262 prev->set_next_function_link(next);
264 context->SetOptimizedFunctionsListHead(next);
266 // The visitor should not alter the link directly.
267 CHECK_EQ(function->next_function_link(), next);
268 // Set the next function link to undefined to indicate it is no longer
269 // in the optimized functions list.
270 function->set_next_function_link(context->GetHeap()->undefined_value());
272 // The visitor should not alter the link directly.
273 CHECK_EQ(function->next_function_link(), next);
274 // preserve this element.
280 visitor->LeaveContext(context);
// Applies |visitor| to the optimized functions of every native context by
// following the heap's native-context linked list.
284 void Deoptimizer::VisitAllOptimizedFunctions(
286 OptimizedFunctionVisitor* visitor) {
287 DisallowHeapAllocation no_allocation;
289 // Run through the list of all native contexts.
290 Object* context = isolate->heap()->native_contexts_list();
291 while (!context->IsUndefined()) {
292 VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
293 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
// Core of the deoptimization pipeline for one native context, in three
// phases: (1) unlink functions whose code is marked for deoptimization,
// (2) move marked code objects from the optimized-code list to the
// deoptimized-code list, (3) patch each collected code object (or the stack,
// for Turbofan code) so activations lazily deopt.
// NOTE(review): multiple interior lines are elided from this view of SOURCE
// (e.g. the safe_to_deopt assignment head, Zone construction, several
// if/else lines) — confirm against the full file.
298 // Unlink functions referring to code marked for deoptimization, then move
299 // marked code from the optimized code list to the deoptimized code list,
300 // and patch code for lazy deopt.
301 void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
302 DisallowHeapAllocation no_allocation;
304 // A "closure" that unlinks optimized code that is going to be
305 // deoptimized from the functions that refer to it.
306 class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
308 virtual void EnterContext(Context* context) { } // Don't care.
309 virtual void LeaveContext(Context* context) { } // Don't care.
310 virtual void VisitFunction(JSFunction* function) {
311 Code* code = function->code();
312 if (!code->marked_for_deoptimization()) return;
314 // Unlink this function and evict from optimized code map.
315 SharedFunctionInfo* shared = function->shared();
316 function->set_code(shared->code());
318 if (FLAG_trace_deopt) {
319 CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
320 PrintF(scope.file(), "[deoptimizer unlinked: ");
321 function->PrintName(scope.file());
323 " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
328 // Unlink all functions that refer to marked code.
329 SelectedCodeUnlinker unlinker;
330 VisitAllOptimizedFunctionsForContext(context, &unlinker);
332 Isolate* isolate = context->GetHeap()->isolate();
334 Code* topmost_optimized_code = NULL;
335 bool safe_to_deopt_topmost_optimized_code = false;
336 // Make sure all activations of optimized code can deopt at their current PC.
337 // The topmost optimized code has special handling because it cannot be
338 // deoptimized due to weak object dependency.
339 for (StackFrameIterator it(isolate, isolate->thread_local_top());
340 !it.done(); it.Advance()) {
341 StackFrame::Type type = it.frame()->type();
342 if (type == StackFrame::OPTIMIZED) {
343 Code* code = it.frame()->LookupCode();
344 if (FLAG_trace_deopt) {
345 JSFunction* function =
346 static_cast<OptimizedFrame*>(it.frame())->function();
347 CodeTracer::Scope scope(isolate->GetCodeTracer());
348 PrintF(scope.file(), "[deoptimizer found activation of function: ");
349 function->PrintName(scope.file());
351 " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
353 SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
354 int deopt_index = safepoint.deoptimization_index();
355 // Turbofan deopt is checked when we are patching addresses on stack.
356 bool turbofanned = code->is_turbofanned();
358 deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
359 CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned);
360 if (topmost_optimized_code == NULL) {
361 topmost_optimized_code = code;
362 safe_to_deopt_topmost_optimized_code = safe_to_deopt;
368 // Move marked code from the optimized code list to the deoptimized
369 // code list, collecting them into a ZoneList.
371 ZoneList<Code*> codes(10, &zone);
373 // Walk over all optimized code objects in this native context.
375 Object* element = context->OptimizedCodeListHead();
376 while (!element->IsUndefined()) {
377 Code* code = Code::cast(element);
378 CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
379 Object* next = code->next_code_link();
381 if (code->marked_for_deoptimization()) {
382 // Put the code into the list for later patching.
383 codes.Add(code, &zone);
386 // Skip this code in the optimized code list.
387 prev->set_next_code_link(next);
389 // There was no previous node, the next node is the new head.
390 context->SetOptimizedCodeListHead(next);
393 // Move the code to the _deoptimized_ code list.
394 code->set_next_code_link(context->DeoptimizedCodeListHead());
395 context->SetDeoptimizedCodeListHead(code);
397 // Not marked; preserve this element.
403 if (FLAG_turbo_deoptimization) {
404 PatchStackForMarkedCode(isolate);
407 // TODO(titzer): we need a handle scope only because of the macro assembler,
408 // which is only used in EnsureCodeForDeoptimizationEntry.
409 HandleScope scope(isolate);
411 // Now patch all the codes for deoptimization.
412 for (int i = 0; i < codes.length(); i++) {
414 if (codes[i] == topmost_optimized_code) {
415 DCHECK(safe_to_deopt_topmost_optimized_code);
418 // It is finally time to die, code object.
420 // Remove the code from optimized code map.
421 DeoptimizationInputData* deopt_data =
422 DeoptimizationInputData::cast(codes[i]->deoptimization_data());
423 SharedFunctionInfo* shared =
424 SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo());
425 shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");
427 // Do platform-specific patching to force any activations to lazy deopt.
429 // We skip patching Turbofan code - we patch return addresses on stack.
430 // TODO(jarin) We should still zap the code object (but we have to
431 // be careful not to zap the deoptimization block).
432 if (!codes[i]->is_turbofanned()) {
433 PatchCodeForDeoptimization(isolate, codes[i]);
435 // We might be in the middle of incremental marking with compaction.
436 // Tell collector to treat this code object in a special way and
437 // ignore all slots that might have been recorded on it.
438 isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
// Looks up, in the code's deoptimization input data, the patched-address pc
// corresponding to a return-address pc (matching either the original return
// pc or an already-patched pc, so the operation is idempotent).
// NOTE(review): the matched-case return and the fallthrough (no-match) path
// are elided from this view of SOURCE.
444 static int FindPatchAddressForReturnAddress(Code* code, int pc) {
445 DeoptimizationInputData* input_data =
446 DeoptimizationInputData::cast(code->deoptimization_data());
447 int patch_count = input_data->ReturnAddressPatchCount();
448 for (int i = 0; i < patch_count; i++) {
449 int return_pc = input_data->ReturnAddressPc(i)->value();
450 int patch_pc = input_data->PatchedAddressPc(i)->value();
451 // If the supplied pc matches the return pc or if the address
452 // has been already patched, return the patch pc.
453 if (pc == return_pc || pc == patch_pc) {
// Walks all stack frames and, for each activation of marked Turbofan code,
// rewrites the on-stack return address to point at the code's
// deoptimization block (looked up via FindPatchAddressForReturnAddress).
// NOTE(review): some interior lines (iterator advance, pc_offset declaration
// head, PrintF argument tail) are elided from this view of SOURCE.
461 // For all marked Turbofanned code on stack, change the return address to go
462 // to the deoptimization block.
463 void Deoptimizer::PatchStackForMarkedCode(Isolate* isolate) {
464 // TODO(jarin) We should tolerate missing patch entry for the topmost frame.
465 for (StackFrameIterator it(isolate, isolate->thread_local_top()); !it.done();
467 StackFrame::Type type = it.frame()->type();
468 if (type == StackFrame::OPTIMIZED) {
469 Code* code = it.frame()->LookupCode();
470 if (code->is_turbofanned() && code->marked_for_deoptimization()) {
471 JSFunction* function =
472 static_cast<OptimizedFrame*>(it.frame())->function();
473 Address* pc_address = it.frame()->pc_address();
475 static_cast<int>(*pc_address - code->instruction_start());
476 int new_pc_offset = FindPatchAddressForReturnAddress(code, pc_offset);
478 if (FLAG_trace_deopt) {
479 CodeTracer::Scope scope(isolate->GetCodeTracer());
480 PrintF(scope.file(), "[patching stack address for function: ");
481 function->PrintName(scope.file());
482 PrintF(scope.file(), " (Pc offset %i -> %i)]\n", pc_offset,
486 CHECK_LE(0, new_pc_offset);
487 *pc_address += new_pc_offset - pc_offset;
// Deoptimizes everything: for every native context, first marks all its
// optimized code, then runs the mark-and-patch pipeline on it.
494 void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
495 if (FLAG_trace_deopt) {
496 CodeTracer::Scope scope(isolate->GetCodeTracer());
497 PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
499 DisallowHeapAllocation no_allocation;
500 // For all contexts, mark all code, then deoptimize.
501 Object* context = isolate->heap()->native_contexts_list();
502 while (!context->IsUndefined()) {
503 Context* native_context = Context::cast(context);
504 MarkAllCodeForContext(native_context);
505 DeoptimizeMarkedCodeForContext(native_context);
506 context = native_context->get(Context::NEXT_CONTEXT_LINK);
// Like DeoptimizeAll, but only processes code that callers have already
// marked for deoptimization (no marking pass of its own).
511 void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
512 if (FLAG_trace_deopt) {
513 CodeTracer::Scope scope(isolate->GetCodeTracer());
514 PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
516 DisallowHeapAllocation no_allocation;
517 // For all contexts, deoptimize code already marked.
518 Object* context = isolate->heap()->native_contexts_list();
519 while (!context->IsUndefined()) {
520 Context* native_context = Context::cast(context);
521 DeoptimizeMarkedCodeForContext(native_context);
522 context = native_context->get(Context::NEXT_CONTEXT_LINK);
// Deoptimizes all code in the native context owning the given global object,
// resolving through a JSGlobalProxy to its underlying global when needed.
527 void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
528 if (FLAG_trace_deopt) {
529 CodeTracer::Scope scope(object->GetHeap()->isolate()->GetCodeTracer());
530 PrintF(scope.file(), "[deoptimize global object @ 0x%08" V8PRIxPTR "]\n",
531 reinterpret_cast<intptr_t>(object));
533 if (object->IsJSGlobalProxy()) {
534 PrototypeIterator iter(object->GetIsolate(), object);
535 // TODO(verwaest): This CHECK will be hit if the global proxy is detached.
536 CHECK(iter.GetCurrent()->IsJSGlobalObject());
537 Context* native_context =
538 GlobalObject::cast(iter.GetCurrent())->native_context();
539 MarkAllCodeForContext(native_context);
540 DeoptimizeMarkedCodeForContext(native_context);
541 } else if (object->IsGlobalObject()) {
542 Context* native_context = GlobalObject::cast(object)->native_context();
543 MarkAllCodeForContext(native_context);
544 DeoptimizeMarkedCodeForContext(native_context);
// Marks every code object on the context's optimized-code list for
// deoptimization; actual unlinking/patching happens later in
// DeoptimizeMarkedCodeForContext.
549 void Deoptimizer::MarkAllCodeForContext(Context* context) {
550 Object* element = context->OptimizedCodeListHead();
551 while (!element->IsUndefined()) {
552 Code* code = Code::cast(element);
553 CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
554 code->set_marked_for_deoptimization(true);
555 element = code->next_code_link();
// Deoptimizes a single function: marks only its current optimized code, then
// runs the context-level pipeline, which also unlinks any other functions
// sharing that code.
560 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
561 Code* code = function->code();
562 if (code->kind() == Code::OPTIMIZED_FUNCTION) {
563 // Mark the code for deoptimization and unlink any functions that also
564 // refer to that code. The code cannot be shared across native contexts,
565 // so we only need to search one.
566 code->set_marked_for_deoptimization(true);
567 DeoptimizeMarkedCodeForContext(function->context()->native_context());
// Thin static wrapper used by generated code / callers that only hold a
// Deoptimizer pointer.
572 void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
573 deoptimizer->DoComputeOutputFrames();
// Decides whether deopt tracing is on for this bailout/frame combination;
// stub frames consult FLAG_trace_stub_failures, others (per the elided
// branch) presumably FLAG_trace_deopt — confirm against the full file.
// NOTE(review): the case labels and the other ternary arm are elided from
// this view of SOURCE.
577 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
578 StackFrame::Type frame_type) {
579 switch (deopt_type) {
584 return (frame_type == StackFrame::STUB)
585 ? FLAG_trace_stub_failures
588 FATAL("Unsupported deopt type");
// Human-readable name for a bailout type, used in trace output.
593 const char* Deoptimizer::MessageFor(BailoutType type) {
595 case EAGER: return "eager";
596 case SOFT: return "soft";
597 case LAZY: return "lazy";
598 case DEBUGGER: return "debugger";
600 FATAL("Unsupported deopt type");
// Constructor: records bailout parameters, bumps the function's deopt count
// (undoing the opt-count bump for SOFT deopts so they don't disable future
// optimization), locates the compiled code being deoptimized, sets up
// tracing, forbids GC for the lifetime of the frame descriptions, and
// allocates the input FrameDescription.
// NOTE(review): many initializer-list entries and several statement lines
// are elided from this view of SOURCE.
605 Deoptimizer::Deoptimizer(Isolate* isolate,
606 JSFunction* function,
611 Code* optimized_code)
614 bailout_id_(bailout_id),
617 fp_to_sp_delta_(fp_to_sp_delta),
618 has_alignment_padding_(0),
623 deferred_objects_tagged_values_(0),
624 deferred_objects_double_values_(0),
625 deferred_objects_(0),
626 deferred_heap_numbers_(0),
627 jsframe_functions_(0),
628 jsframe_has_adapted_arguments_(0),
629 materialized_values_(NULL),
630 materialized_objects_(NULL),
631 materialization_value_index_(0),
632 materialization_object_index_(0),
634 // For COMPILED_STUBs called from builtins, the function pointer is a SMI
635 // indicating an internal frame.
636 if (function->IsSmi()) {
639 DCHECK(from != NULL);
640 if (function != NULL && function->IsOptimized()) {
641 function->shared()->increment_deopt_count();
642 if (bailout_type_ == Deoptimizer::SOFT) {
643 isolate->counters()->soft_deopts_executed()->Increment();
644 // Soft deopts shouldn't count against the overall re-optimization count
645 // that can eventually lead to disabling optimization for a function.
646 int opt_count = function->shared()->opt_count();
647 if (opt_count > 0) opt_count--;
648 function->shared()->set_opt_count(opt_count);
651 compiled_code_ = FindOptimizedCode(function, optimized_code);
654 DCHECK(compiled_code_ != NULL);
655 if (type == EAGER || type == SOFT || type == LAZY) {
656 DCHECK(compiled_code_->kind() != Code::FUNCTION);
660 StackFrame::Type frame_type = function == NULL
662 : StackFrame::JAVA_SCRIPT;
663 trace_scope_ = TraceEnabledFor(type, frame_type) ?
664 new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL;
666 CHECK(AllowHeapAllocation::IsAllowed());
667 disallow_heap_allocation_ = new DisallowHeapAllocation();
669 unsigned size = ComputeInputFrameSize();
670 input_ = new(size) FrameDescription(size, function);
671 input_->SetFrameType(frame_type);
// Resolves the Code object being deoptimized: for SOFT/EAGER/LAZY it
// searches the deoptimized-code list by the from_ address (falling back to
// the isolate-wide code lookup); for DEBUGGER the caller supplies the code.
// NOTE(review): a line of the ternary and the closing of the switch are
// elided from this view of SOURCE.
675 Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
676 Code* optimized_code) {
677 switch (bailout_type_) {
678 case Deoptimizer::SOFT:
679 case Deoptimizer::EAGER:
680 case Deoptimizer::LAZY: {
681 Code* compiled_code = FindDeoptimizingCode(from_);
682 return (compiled_code == NULL)
683 ? static_cast<Code*>(isolate_->FindCodeObject(from_))
686 case Deoptimizer::DEBUGGER:
687 DCHECK(optimized_code->contains(from_));
688 return optimized_code;
690 FATAL("Could not find code for optimized function");
// Trace helper: prints the function's name, or the compiled code's kind for
// non-JSFunction cases (e.g. stubs). Assumes trace_scope_ is non-NULL.
695 void Deoptimizer::PrintFunctionName() {
696 if (function_->IsJSFunction()) {
697 function_->PrintName(trace_scope_->file());
699 PrintF(trace_scope_->file(),
700 "%s", Code::Kind2String(compiled_code_->kind()));
// Destructor: asserts that DeleteFrameDescriptions() already ran (frames
// freed, GC re-enabled) before the deoptimizer is destroyed.
705 Deoptimizer::~Deoptimizer() {
706 DCHECK(input_ == NULL && output_ == NULL);
707 DCHECK(disallow_heap_allocation_ == NULL);
// Frees the input and output FrameDescriptions (skipping the input when it
// aliases an output slot) and releases the DisallowHeapAllocation guard,
// re-enabling GC.
// NOTE(review): the deletion/nulling of input_/output_ themselves is elided
// from this view of SOURCE.
712 void Deoptimizer::DeleteFrameDescriptions() {
714 for (int i = 0; i < output_count_; ++i) {
715 if (output_[i] != input_) delete output_[i];
721 CHECK(!AllowHeapAllocation::IsAllowed());
722 CHECK(disallow_heap_allocation_ != NULL);
723 delete disallow_heap_allocation_;
724 disallow_heap_allocation_ = NULL;
// Returns the address of deopt-table entry |id| for the given bailout type,
// optionally generating the table first (ENSURE_ENTRY_CODE); NULL for
// out-of-range ids.
// NOTE(review): the parameter lines between the first one and the body are
// elided from this view of SOURCE.
729 Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
734 if (id >= kMaxNumberOfEntries) return NULL;
735 if (mode == ENSURE_ENTRY_CODE) {
736 EnsureCodeForDeoptimizationEntry(isolate, type, id);
738 CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS);
740 DeoptimizerData* data = isolate->deoptimizer_data();
741 CHECK_LT(type, kBailoutTypesWithCodeEntry);
742 MemoryChunk* base = data->deopt_entry_code_[type];
743 return base->area_start() + (id * table_entry_size_);
// Inverse of GetDeoptimizationEntry: maps an address back to its entry id,
// or kNotDeoptimizationEntry when outside the table. The elided line before
// the division is presumably a DCHECK that the offset is entry-aligned.
// NOTE(review): parameter lines and part of the range check are elided from
// this view of SOURCE.
747 int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
750 DeoptimizerData* data = isolate->deoptimizer_data();
751 MemoryChunk* base = data->deopt_entry_code_[type];
752 Address start = base->area_start();
755 addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
756 return kNotDeoptimizationEntry;
759 static_cast<int>(addr - start) % table_entry_size_);
760 return static_cast<int>(addr - start) / table_entry_size_;
// Linear-searches the deoptimization output data for the pc/state packed
// value associated with AST node |id|; fatals (after printing diagnostics)
// when no entry matches.
// NOTE(review): the |id| parameter line and the OFStream setup before the
// diagnostic output are elided from this view of SOURCE.
764 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
766 SharedFunctionInfo* shared) {
767 // TODO(kasperl): For now, we do a simple linear search for the PC
768 // offset associated with the given node id. This should probably be
769 // changed to a binary search.
770 int length = data->DeoptPoints();
771 for (int i = 0; i < length; i++) {
772 if (data->AstId(i) == id) {
773 return data->PcAndState(i)->value();
777 os << "[couldn't find pc offset for node=" << id.ToInt() << "]\n"
778 << "[method: " << shared->DebugName()->ToCString().get() << "]\n"
779 << "[source:\n" << SourceCodeOf(shared) << "\n]" << endl;
781 FATAL("unable to find pc offset during deoptimization");
// Counts the code objects on every native context's deoptimized-code list.
// NOTE(review): the counter declaration, its increment, and the return are
// elided from this view of SOURCE.
786 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
788 // Count all entries in the deoptimizing code list of every context.
789 Object* context = isolate->heap()->native_contexts_list();
790 while (!context->IsUndefined()) {
791 Context* native_context = Context::cast(context);
792 Object* element = native_context->DeoptimizedCodeListHead();
793 while (!element->IsUndefined()) {
794 Code* code = Code::cast(element);
795 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
797 element = code->next_code_link();
799 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
// Drives the input-frame -> output-frames translation: reads the
// DeoptimizationInputData for this bailout, iterates the translation byte
// stream (which must start with BEGIN), allocates the output_ array, and
// dispatches each frame's opcode to the matching DoCompute*Frame helper;
// value opcodes are invalid at this level and hit FATAL. Emits begin/end
// trace lines when tracing is enabled.
// NOTE(review): numerous interior lines (timer start, PrintFunctionName
// calls, per-case break statements, the output_ slot initialization, and
// several PrintF argument lines) are elided from this view of SOURCE.
805 // We rely on this function not causing a GC. It is called from generated code
806 // without having a real stack frame in place.
807 void Deoptimizer::DoComputeOutputFrames() {
808 // Print some helpful diagnostic information.
809 if (FLAG_log_timer_events &&
810 compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
811 LOG(isolate(), CodeDeoptEvent(compiled_code_));
813 base::ElapsedTimer timer;
815 // Determine basic deoptimization information. The optimized frame is
816 // described by the input data.
817 DeoptimizationInputData* input_data =
818 DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
820 if (trace_scope_ != NULL) {
822 PrintF(trace_scope_->file(),
823 "[deoptimizing (DEOPT %s): begin 0x%08" V8PRIxPTR " ",
824 MessageFor(bailout_type_),
825 reinterpret_cast<intptr_t>(function_));
827 PrintF(trace_scope_->file(),
828 " (opt #%d) @%d, FP to SP delta: %d]\n",
829 input_data->OptimizationId()->value(),
832 if (bailout_type_ == EAGER || bailout_type_ == SOFT ||
833 (compiled_code_->is_hydrogen_stub())) {
834 compiled_code_->PrintDeoptLocation(trace_scope_->file(), bailout_id_);
838 BailoutId node_id = input_data->AstId(bailout_id_);
839 ByteArray* translations = input_data->TranslationByteArray();
840 unsigned translation_index =
841 input_data->TranslationIndex(bailout_id_)->value();
843 // Do the input frame to output frame(s) translation.
844 TranslationIterator iterator(translations, translation_index);
845 Translation::Opcode opcode =
846 static_cast<Translation::Opcode>(iterator.Next());
847 DCHECK(Translation::BEGIN == opcode);
849 // Read the number of output frames and allocate an array for their
851 int count = iterator.Next();
852 iterator.Next(); // Drop JS frames count.
853 DCHECK(output_ == NULL);
854 output_ = new FrameDescription*[count];
855 for (int i = 0; i < count; ++i) {
858 output_count_ = count;
860 Register fp_reg = JavaScriptFrame::fp_register();
861 stack_fp_ = reinterpret_cast<Address>(
862 input_->GetRegister(fp_reg.code()) +
863 has_alignment_padding_ * kPointerSize);
865 // Translate each output frame.
866 for (int i = 0; i < count; ++i) {
867 // Read the ast node id, function, and frame height for this output frame.
868 Translation::Opcode opcode =
869 static_cast<Translation::Opcode>(iterator.Next());
871 case Translation::JS_FRAME:
872 DoComputeJSFrame(&iterator, i);
875 case Translation::ARGUMENTS_ADAPTOR_FRAME:
876 DoComputeArgumentsAdaptorFrame(&iterator, i);
878 case Translation::CONSTRUCT_STUB_FRAME:
879 DoComputeConstructStubFrame(&iterator, i);
881 case Translation::GETTER_STUB_FRAME:
882 DoComputeAccessorStubFrame(&iterator, i, false);
884 case Translation::SETTER_STUB_FRAME:
885 DoComputeAccessorStubFrame(&iterator, i, true);
887 case Translation::COMPILED_STUB_FRAME:
888 DoComputeCompiledStubFrame(&iterator, i);
890 case Translation::BEGIN:
891 case Translation::REGISTER:
892 case Translation::INT32_REGISTER:
893 case Translation::UINT32_REGISTER:
894 case Translation::DOUBLE_REGISTER:
895 case Translation::STACK_SLOT:
896 case Translation::INT32_STACK_SLOT:
897 case Translation::UINT32_STACK_SLOT:
898 case Translation::DOUBLE_STACK_SLOT:
899 case Translation::LITERAL:
900 case Translation::ARGUMENTS_OBJECT:
902 FATAL("Unsupported translation")
907 // Print some helpful diagnostic information.
908 if (trace_scope_ != NULL) {
909 double ms = timer.Elapsed().InMillisecondsF();
910 int index = output_count_ - 1; // Index of the topmost frame.
911 JSFunction* function = output_[index]->GetFunction();
912 PrintF(trace_scope_->file(),
913 "[deoptimizing (%s): end 0x%08" V8PRIxPTR " ",
914 MessageFor(bailout_type_),
915 reinterpret_cast<intptr_t>(function));
917 PrintF(trace_scope_->file(),
918 " @%d => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
922 output_[index]->GetPc(),
923 FullCodeGenerator::State2String(
924 static_cast<FullCodeGenerator::State>(
925 output_[index]->GetState()->value())),
926 has_alignment_padding_ ? "with padding" : "no padding",
932 void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
934 BailoutId node_id = BailoutId(iterator->Next());
935 JSFunction* function;
936 if (frame_index != 0) {
937 function = JSFunction::cast(ComputeLiteral(iterator->Next()));
939 int closure_id = iterator->Next();
941 CHECK_EQ(Translation::kSelfLiteralId, closure_id);
942 function = function_;
944 unsigned height = iterator->Next();
945 unsigned height_in_bytes = height * kPointerSize;
946 if (trace_scope_ != NULL) {
947 PrintF(trace_scope_->file(), " translating ");
948 function->PrintName(trace_scope_->file());
949 PrintF(trace_scope_->file(),
950 " => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
953 // The 'fixed' part of the frame consists of the incoming parameters and
954 // the part described by JavaScriptFrameConstants.
955 unsigned fixed_frame_size = ComputeFixedSize(function);
956 unsigned input_frame_size = input_->GetFrameSize();
957 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
959 // Allocate and store the output frame description.
960 FrameDescription* output_frame =
961 new(output_frame_size) FrameDescription(output_frame_size, function);
962 output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
964 bool is_bottommost = (0 == frame_index);
965 bool is_topmost = (output_count_ - 1 == frame_index);
966 CHECK(frame_index >= 0 && frame_index < output_count_);
967 CHECK_EQ(output_[frame_index], NULL);
968 output_[frame_index] = output_frame;
970 // The top address for the bottommost output frame can be computed from
971 // the input frame pointer and the output frame's height. For all
972 // subsequent output frames, it can be computed from the previous one's
973 // top address and the current frame's size.
974 Register fp_reg = JavaScriptFrame::fp_register();
975 intptr_t top_address;
977 // Determine whether the input frame contains alignment padding.
978 has_alignment_padding_ =
979 (!compiled_code_->is_turbofanned() && HasAlignmentPadding(function))
982 // 2 = context and function in the frame.
983 // If the optimized frame had alignment padding, adjust the frame pointer
984 // to point to the new position of the old frame pointer after padding
985 // is removed. Subtract 2 * kPointerSize for the context and function slots.
986 top_address = input_->GetRegister(fp_reg.code()) -
987 StandardFrameConstants::kFixedFrameSizeFromFp -
988 height_in_bytes + has_alignment_padding_ * kPointerSize;
990 top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
992 output_frame->SetTop(top_address);
994 // Compute the incoming parameter translation.
995 int parameter_count = function->shared()->formal_parameter_count() + 1;
996 unsigned output_offset = output_frame_size;
997 unsigned input_offset = input_frame_size;
998 for (int i = 0; i < parameter_count; ++i) {
999 output_offset -= kPointerSize;
1000 DoTranslateCommand(iterator, frame_index, output_offset);
1002 input_offset -= (parameter_count * kPointerSize);
1004 // There are no translation commands for the caller's pc and fp, the
1005 // context, and the function. Synthesize their values and set them up
1008 // The caller's pc for the bottommost output frame is the same as in the
1009 // input frame. For all subsequent output frames, it can be read from the
1010 // previous one. This frame's pc can be computed from the non-optimized
1011 // function code and AST id of the bailout.
1012 output_offset -= kPCOnStackSize;
1013 input_offset -= kPCOnStackSize;
1015 if (is_bottommost) {
1016 value = input_->GetFrameSlot(input_offset);
1018 value = output_[frame_index - 1]->GetPc();
1020 output_frame->SetCallerPc(output_offset, value);
1021 if (trace_scope_ != NULL) {
1022 PrintF(trace_scope_->file(),
1023 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1024 V8PRIxPTR " ; caller's pc\n",
1025 top_address + output_offset, output_offset, value);
1028 // The caller's frame pointer for the bottommost output frame is the same
1029 // as in the input frame. For all subsequent output frames, it can be
1030 // read from the previous one. Also compute and set this frame's frame
1032 output_offset -= kFPOnStackSize;
1033 input_offset -= kFPOnStackSize;
1034 if (is_bottommost) {
1035 value = input_->GetFrameSlot(input_offset);
1037 value = output_[frame_index - 1]->GetFp();
1039 output_frame->SetCallerFp(output_offset, value);
1040 intptr_t fp_value = top_address + output_offset;
1041 DCHECK(!is_bottommost || (input_->GetRegister(fp_reg.code()) +
1042 has_alignment_padding_ * kPointerSize) == fp_value);
1043 output_frame->SetFp(fp_value);
1044 if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
1045 if (trace_scope_ != NULL) {
1046 PrintF(trace_scope_->file(),
1047 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1048 V8PRIxPTR " ; caller's fp\n",
1049 fp_value, output_offset, value);
1051 DCHECK(!is_bottommost || !has_alignment_padding_ ||
1052 (fp_value & kPointerSize) != 0);
1054 if (FLAG_enable_ool_constant_pool) {
1055 // For the bottommost output frame the constant pool pointer can be gotten
1056 // from the input frame. For subsequent output frames, it can be read from
1057 // the previous frame.
1058 output_offset -= kPointerSize;
1059 input_offset -= kPointerSize;
1060 if (is_bottommost) {
1061 value = input_->GetFrameSlot(input_offset);
1063 value = output_[frame_index - 1]->GetConstantPool();
1065 output_frame->SetCallerConstantPool(output_offset, value);
1067 PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1068 V8PRIxPTR "; caller's constant_pool\n",
1069 top_address + output_offset, output_offset, value);
1073 // For the bottommost output frame the context can be gotten from the input
1074 // frame. For all subsequent output frames it can be gotten from the function
1075 // so long as we don't inline functions that need local contexts.
1076 Register context_reg = JavaScriptFrame::context_register();
1077 output_offset -= kPointerSize;
1078 input_offset -= kPointerSize;
1079 if (is_bottommost) {
1080 value = input_->GetFrameSlot(input_offset);
1082 value = reinterpret_cast<intptr_t>(function->context());
1084 output_frame->SetFrameSlot(output_offset, value);
1085 output_frame->SetContext(value);
1086 if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
1087 if (trace_scope_ != NULL) {
1088 PrintF(trace_scope_->file(),
1089 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1090 V8PRIxPTR "; context\n",
1091 top_address + output_offset, output_offset, value);
1094 // The function was mentioned explicitly in the BEGIN_FRAME.
1095 output_offset -= kPointerSize;
1096 input_offset -= kPointerSize;
1097 value = reinterpret_cast<intptr_t>(function);
1098 // The function for the bottommost output frame should also agree with the
1100 DCHECK(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
1101 output_frame->SetFrameSlot(output_offset, value);
1102 if (trace_scope_ != NULL) {
1103 PrintF(trace_scope_->file(),
1104 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1105 V8PRIxPTR "; function\n",
1106 top_address + output_offset, output_offset, value);
1109 // Translate the rest of the frame.
1110 for (unsigned i = 0; i < height; ++i) {
1111 output_offset -= kPointerSize;
1112 DoTranslateCommand(iterator, frame_index, output_offset);
1114 CHECK_EQ(0, output_offset);
1116 // Compute this frame's PC, state, and continuation.
1117 Code* non_optimized_code = function->shared()->code();
1118 FixedArray* raw_data = non_optimized_code->deoptimization_data();
1119 DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
1120 Address start = non_optimized_code->instruction_start();
1121 unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
1122 unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
1123 intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
1124 output_frame->SetPc(pc_value);
1126 // Update constant pool.
1127 if (FLAG_enable_ool_constant_pool) {
1128 intptr_t constant_pool_value =
1129 reinterpret_cast<intptr_t>(non_optimized_code->constant_pool());
1130 output_frame->SetConstantPool(constant_pool_value);
1132 Register constant_pool_reg =
1133 JavaScriptFrame::constant_pool_pointer_register();
1134 output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1138 FullCodeGenerator::State state =
1139 FullCodeGenerator::StateField::decode(pc_and_state);
1140 output_frame->SetState(Smi::FromInt(state));
1142 // Set the continuation for the topmost frame.
1143 if (is_topmost && bailout_type_ != DEBUGGER) {
1144 Builtins* builtins = isolate_->builtins();
1145 Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
1146 if (bailout_type_ == LAZY) {
1147 continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
1148 } else if (bailout_type_ == SOFT) {
1149 continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
1151 CHECK_EQ(bailout_type_, EAGER);
1153 output_frame->SetContinuation(
1154 reinterpret_cast<intptr_t>(continuation->entry()));
// Materializes an arguments-adaptor frame in the output frame array.
// Reads the adapted function and the frame height (in slots) from the
// translation stream, lays out the frame top-down (translated parameters,
// caller pc, caller fp, optional caller constant pool, adaptor sentinel,
// function, argc), and finally points this frame's pc at the recorded
// deopt offset inside the ArgumentsAdaptorTrampoline builtin.
void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating arguments adaptor => height=%d\n", height_in_bytes);
  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
  // Allocate and store the output frame description.
  // FrameDescription uses a placement-style operator new sized by the frame.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
  // Arguments adaptor can not be topmost or bottommost.
  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
  CHECK(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;
  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);
  // Compute the incoming parameter translation.
  // Each iteration consumes one translation command and fills one slot,
  // walking output_offset downward from the frame size toward zero.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  if (FLAG_enable_ool_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR "; caller's constant_pool\n",
           top_address + output_offset, output_offset, value);
  // A marker value is used in place of the context.
  output_offset -= kPointerSize;
  intptr_t context = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  output_frame->SetFrameSlot(output_offset, context);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context (adaptor sentinel)\n",
           top_address + output_offset, output_offset, context);
  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function\n",
           top_address + output_offset, output_offset, value);
  // Number of incoming arguments.
  // argc is stored as height - 1; presumably height counts the receiver
  // slot as well — TODO(review): confirm against the translation encoder.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  // Every slot must have been filled exactly once.
  DCHECK(0 == output_offset);
  // Resume execution inside the adaptor trampoline at the recorded
  // deoptimization pc offset.
  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = reinterpret_cast<intptr_t>(
      adaptor_trampoline->instruction_start() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
  if (FLAG_enable_ool_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
// Materializes a construct-stub frame mirroring the layout produced by the
// JSConstructStubGeneric builtin. Reads the constructor function and frame
// height from the translation stream, fills the frame top-down (translated
// parameters, caller pc/fp, optional constant pool, context, CONSTRUCT
// sentinel, code object, argc, optional constructor slot, allocated
// receiver), and points the pc at the stub's recorded deopt offset.
void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating construct stub => height=%d\n", height_in_bytes);
  unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);
  // Construct stub can not be topmost or bottommost.
  DCHECK(frame_index > 0 && frame_index < output_count_ - 1);
  DCHECK(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;
  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);
  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    // Remember the deferred-object count before translating so we can tell
    // whether this command deferred a captured-object materialization.
    int deferred_object_index = deferred_objects_.length();
    DoTranslateCommand(iterator, frame_index, output_offset);
    // The allocated receiver of a construct stub frame is passed as the
    // receiver parameter through the translation. It might be encoding
    // a captured object, patch the slot address for a captured object.
    if (i == 0 && deferred_objects_.length() > deferred_object_index) {
      CHECK(!deferred_objects_[deferred_object_index].is_arguments());
      deferred_objects_[deferred_object_index].patch_slot_address(top_address);
  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  if (FLAG_enable_ool_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's constant pool\n",
           top_address + output_offset, output_offset, value);
  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context\n",
           top_address + output_offset, output_offset, value);
  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (construct sentinel)\n",
           top_address + output_offset, output_offset, value);
  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; code object\n",
           top_address + output_offset, output_offset, value);
  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  // Constructor function being invoked by the stub (only present on some
  // architectures, indicated by kConstructorOffset).
  if (ConstructFrameConstants::kConstructorOffset != kMinInt) {
    output_offset -= kPointerSize;
    value = reinterpret_cast<intptr_t>(function);
    output_frame->SetFrameSlot(output_offset, value);
    if (trace_scope_ != NULL) {
      PrintF(trace_scope_->file(),
             " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR " ; constructor function\n",
             top_address + output_offset, output_offset, value);
  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  // Duplicate it from the receiver slot (the highest-addressed parameter
  // slot filled in the loop above) into the lowest frame slot.
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; allocated receiver\n",
           top_address + output_offset, output_offset, value);
  // Every slot must have been filled exactly once.
  CHECK_EQ(0, output_offset);
  // Resume inside the construct stub at its recorded deopt pc offset.
  intptr_t pc = reinterpret_cast<intptr_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
  if (FLAG_enable_ool_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(construct_stub->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
// Materializes a StackFrame::INTERNAL frame for an inlined property
// accessor (getter or setter). The frame reflects what
// MacroAssembler::EnterFrame builds for the LoadIC_Getter_ForDeopt /
// StoreIC_Setter_ForDeopt builtins; the receiver (and, for setters, the
// value being stored) travel in registers, so the dynamic height is zero.
void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
    bool is_setter_stub_frame) {
  JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
  // The receiver (and the implicit return value, if any) are expected in
  // registers by the LoadIC/StoreIC, so they don't belong to the output stack
  // frame. This means that we have to use a height of 0.
  unsigned height = 0;
  unsigned height_in_bytes = height * kPointerSize;
  // Used only for trace output below.
  const char* kind = is_setter_stub_frame ? "setter" : "getter";
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating %s stub => height=%u\n", kind, height_in_bytes);
  // We need 1 stack entry for the return address and enough entries for the
  // StackFrame::INTERNAL (FP, context, frame type, code object and constant
  // pool (if FLAG_enable_ool_constant_pool)- see MacroAssembler::EnterFrame).
  // For a setter stub frame we need one additional entry for the implicit
  // return value, see StoreStubCompiler::CompileStoreViaSetter.
  unsigned fixed_frame_entries =
      (StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
      (is_setter_stub_frame ? 1 : 0);
  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, accessor);
  output_frame->SetFrameType(StackFrame::INTERNAL);
  // A frame for an accessor stub can not be the topmost or bottommost one.
  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
  CHECK_EQ(output_[frame_index], NULL);
  output_[frame_index] = output_frame;
  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);
  unsigned output_offset = output_frame_size;
  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           top_address + output_offset, output_offset, callers_pc);
  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           fp_value, output_offset, value);
  if (FLAG_enable_ool_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's constant pool\n",
           top_address + output_offset, output_offset, value);
  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           top_address + output_offset, output_offset, value);
  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           " ; function (%s sentinel)\n",
           top_address + output_offset, output_offset, value, kind);
  // Get Code object from accessor stub.
  output_offset -= kPointerSize;
  Builtins::Name name = is_setter_stub_frame ?
      Builtins::kStoreIC_Setter_ForDeopt :
      Builtins::kLoadIC_Getter_ForDeopt;
  Code* accessor_stub = isolate_->builtins()->builtin(name);
  value = reinterpret_cast<intptr_t>(accessor_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
           top_address + output_offset, output_offset, value);
  // Consume (and discard) the receiver's translation entry; the receiver
  // itself lives in a register, not in this frame.
  DoTranslateObjectAndSkip(iterator);
  if (is_setter_stub_frame) {
    // The implicit return value was part of the artificial setter stub
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  // Every slot must have been filled exactly once.
  CHECK_EQ(output_offset, 0);
  // Resume inside the accessor stub at its recorded deopt pc offset.
  Smi* offset = is_setter_stub_frame ?
      isolate_->heap()->setter_stub_deopt_pc_offset() :
      isolate_->heap()->getter_stub_deopt_pc_offset();
  intptr_t pc = reinterpret_cast<intptr_t>(
      accessor_stub->instruction_start() + offset->value());
  output_frame->SetPc(pc);
  if (FLAG_enable_ool_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(accessor_stub->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
// Materializes the single STUB_FAILURE_TRAMPOLINE output frame used when a
// hydrogen code stub deoptimizes. The frame packages the stub's register
// parameters plus an Arguments object (pointer / length / arguments) so the
// C++ stub-failure handler can access the caller's stack parameters, then
// resumes in the StubFailureTrampolineStub. When the stub's stack parameter
// count lives in a register (arg_count_known == false), args.length and
// args.arguments are first written as the-hole and patched after the
// register parameters have been translated. See the diagram below.
void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
  //                         | .... |          | .... |
  //                         +-------------------------+          +-------------------------+
  //                         | JSFunction continuation |          | JSFunction continuation |
  //                         +-------------------------+          +-------------------------+
  // |                       | saved frame (FP)        |          | saved frame (FP)        |
  // |                       +=========================+<-fpreg   +=========================+<-fpreg
  // |                       |constant pool (if ool_cp)|          |constant pool (if ool_cp)|
  // |                       +-------------------------+          +-------------------------|
  // |                       | JSFunction context      |          | JSFunction context      |
  // v                       +-------------------------+          +-------------------------|
  //                         | COMPILED_STUB marker    |          | STUB_FAILURE marker     |
  //                         +-------------------------+          +-------------------------+
  //                         |                         |          | caller args.arguments_  |
  //                         | ...                     |          +-------------------------+
  //                         |                         |          | caller args.length_     |
  //                         |-------------------------|<-spreg   +-------------------------+
  //                                                              | caller args pointer     |
  //                                                              +-------------------------+
  //                                                              | caller stack param 1    |
  //                         parameters in registers              +-------------------------+
  //                         and spilled to stack                 | ....                    |
  //                                                              +-------------------------+
  //                                                              | caller stack param n    |
  //                                                              +-------------------------+<-spreg
  //                         reg = number of parameters
  //                         reg = failure handler address
  //                         reg = saved frame
  //                         reg = JSFunction context
  CHECK(compiled_code_->is_hydrogen_stub());
  int major_key = CodeStub::GetMajorKey(compiled_code_);
  CodeStubInterfaceDescriptor* descriptor =
      isolate_->code_stub_interface_descriptor(major_key);
  // Check that there is a matching descriptor to the major key.
  // This will fail if there has not been one installed to the isolate.
  DCHECK_EQ(descriptor->MajorKey(), major_key);
  // The output frame must have room for all pushed register parameters
  // and the standard stack frame slots. Include space for an argument
  // object to the callee and optionally the space to pass the argument
  // object to the stub failure handler.
  int param_count = descriptor->GetEnvironmentParameterCount();
  CHECK_GE(param_count, 0);
  int height_in_bytes = kPointerSize * param_count + sizeof(Arguments) +
  int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
  int input_frame_size = input_->GetFrameSize();
  int output_frame_size = height_in_bytes + fixed_frame_size;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " translating %s => StubFailureTrampolineStub, height=%d\n",
           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
  // The stub failure trampoline is a single frame.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, NULL);
  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
  // A stub failure is always the bottommost (and only) output frame.
  CHECK_EQ(frame_index, 0);
  output_[frame_index] = output_frame;
  // The top address for the output frame can be computed from the input
  // frame pointer and the output frame's height. Subtract space for the
  // context and function slots.
  Register fp_reg = StubFailureTrampolineFrame::fp_register();
  intptr_t top_address = input_->GetRegister(fp_reg.code()) -
      StandardFrameConstants::kFixedFrameSizeFromFp - height_in_bytes;
  output_frame->SetTop(top_address);
  // Read caller's PC (JSFunction continuation) from the input frame.
  unsigned input_frame_offset = input_frame_size - kPCOnStackSize;
  unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
  intptr_t value = input_->GetFrameSlot(input_frame_offset);
  output_frame->SetCallerPc(output_frame_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_frame_offset, output_frame_offset, value);
  // Read caller's FP from the input frame, and set this frame's FP.
  input_frame_offset -= kFPOnStackSize;
  value = input_->GetFrameSlot(input_frame_offset);
  output_frame_offset -= kFPOnStackSize;
  output_frame->SetCallerFp(output_frame_offset, value);
  // Reuse the input frame's fp: the trampoline runs in place of the stub.
  intptr_t frame_ptr = input_->GetRegister(fp_reg.code());
  output_frame->SetRegister(fp_reg.code(), frame_ptr);
  output_frame->SetFp(frame_ptr);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           top_address + output_frame_offset, output_frame_offset, value);
  if (FLAG_enable_ool_constant_pool) {
    // Read the caller's constant pool from the input frame.
    input_frame_offset -= kPointerSize;
    value = input_->GetFrameSlot(input_frame_offset);
    output_frame_offset -= kPointerSize;
    output_frame->SetCallerConstantPool(output_frame_offset, value);
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's constant_pool\n",
           top_address + output_frame_offset, output_frame_offset, value);
  // The context can be gotten from the input frame.
  Register context_reg = StubFailureTrampolineFrame::context_register();
  input_frame_offset -= kPointerSize;
  value = input_->GetFrameSlot(input_frame_offset);
  output_frame->SetRegister(context_reg.code(), value);
  output_frame_offset -= kPointerSize;
  output_frame->SetFrameSlot(output_frame_offset, value);
  // The slot read from the input frame must hold a real Context here.
  CHECK(reinterpret_cast<Object*>(value)->IsContext());
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context\n",
           top_address + output_frame_offset, output_frame_offset, value);
  // A marker value is used in place of the function.
  output_frame_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
  output_frame->SetFrameSlot(output_frame_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (stub failure sentinel)\n",
           top_address + output_frame_offset, output_frame_offset, value);
  intptr_t caller_arg_count = 0;
  // arg_count_known is false when the stack parameter count arrives in a
  // register; in that case the Arguments fields are patched further below.
  bool arg_count_known = !descriptor->stack_parameter_count().is_valid();
  // Build the Arguments object for the caller's parameters and a pointer to it.
  output_frame_offset -= kPointerSize;
  int args_arguments_offset = output_frame_offset;
  intptr_t the_hole = reinterpret_cast<intptr_t>(
      isolate_->heap()->the_hole_value());
  if (arg_count_known) {
    // Address of the last caller stack parameter (Arguments::arguments_).
    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
        (caller_arg_count - 1) * kPointerSize;
  output_frame->SetFrameSlot(args_arguments_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; args.arguments %s\n",
           top_address + args_arguments_offset, args_arguments_offset, value,
           arg_count_known ? "" : "(the hole)");
  output_frame_offset -= kPointerSize;
  int length_frame_offset = output_frame_offset;
  value = arg_count_known ? caller_arg_count : the_hole;
  output_frame->SetFrameSlot(length_frame_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; args.length %s\n",
           top_address + length_frame_offset, length_frame_offset, value,
           arg_count_known ? "" : "(the hole)");
  // Pointer to the Arguments object just written (args*).
  output_frame_offset -= kPointerSize;
  value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
      (output_frame_size - output_frame_offset) + kPointerSize;
  output_frame->SetFrameSlot(output_frame_offset, value);
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; args*\n",
           top_address + output_frame_offset, output_frame_offset, value);
  // Copy the register parameters to the failure frame.
  int arguments_length_offset = -1;
  for (int i = 0; i < param_count; ++i) {
    output_frame_offset -= kPointerSize;
    DoTranslateCommand(iterator, 0, output_frame_offset);
    // Remember where the parameter-count register landed so the Arguments
    // fields can be patched once its (smi) value is known.
    if (!arg_count_known &&
        descriptor->IsEnvironmentParameterCountRegister(i)) {
      arguments_length_offset = output_frame_offset;
  CHECK_EQ(output_frame_offset, 0);
  if (!arg_count_known) {
    CHECK_GE(arguments_length_offset, 0);
    // We know it's a smi because 1) the code stub guarantees the stack
    // parameter count is in smi range, and 2) the DoTranslateCommand in the
    // parameter loop above translated that to a tagged value.
    Smi* smi_caller_arg_count = reinterpret_cast<Smi*>(
        output_frame->GetFrameSlot(arguments_length_offset));
    caller_arg_count = smi_caller_arg_count->value();
    // Patch the the-hole placeholders written earlier with the real
    // args.length and args.arguments values.
    output_frame->SetFrameSlot(length_frame_offset, caller_arg_count);
    if (trace_scope_ != NULL) {
      PrintF(trace_scope_->file(),
             " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR " ; args.length\n",
             top_address + length_frame_offset, length_frame_offset,
    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
        (caller_arg_count - 1) * kPointerSize;
    output_frame->SetFrameSlot(args_arguments_offset, value);
    if (trace_scope_ != NULL) {
      PrintF(trace_scope_->file(),
             " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
             V8PRIxPTR " ; args.arguments\n",
             top_address + args_arguments_offset, args_arguments_offset,
  // Copy the double registers from the input into the output frame.
  CopyDoubleRegisters(output_frame);
  // Fill registers containing handler and number of parameters.
  SetPlatformCompiledStubRegisters(output_frame, descriptor);
  // Compute this frame's PC, state, and continuation.
  Code* trampoline = NULL;
  StubFunctionMode function_mode = descriptor->function_mode();
  StubFailureTrampolineStub(isolate_,
                            function_mode).FindCodeInCache(&trampoline);
  DCHECK(trampoline != NULL);
  output_frame->SetPc(reinterpret_cast<intptr_t>(
      trampoline->instruction_start()));
  if (FLAG_enable_ool_constant_pool) {
    Register constant_pool_reg =
        StubFailureTrampolineFrame::constant_pool_pointer_register();
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
  output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
  // After the trampoline runs, execution continues in the builtin that
  // notifies the runtime of the stub failure (doubles are preserved).
  Code* notify_failure =
      isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
  output_frame->SetContinuation(
      reinterpret_cast<intptr_t>(notify_failure->entry()));
// Materializes the next deferred object described by the translation:
// re-uses a de-duplicated object, builds an (adapted) arguments object,
// or allocates a fresh heap object dispatched on the instance type of
// the materialized map (heap number, JS object, JS array).
// NOTE(review): this listing appears to be missing lines (else-branches
// and closing braces between the numbered lines); the comments below
// describe only what is visible here.
1854 Handle<Object> Deoptimizer::MaterializeNextHeapObject() {
1855 int object_index = materialization_object_index_++;
1856 ObjectMaterializationDescriptor desc = deferred_objects_[object_index];
1857 const int length = desc.object_length();
1859 if (desc.duplicate_object() >= 0) {
1860 // Found a previously materialized object by de-duplication.
1861 object_index = desc.duplicate_object();
1862 materialized_objects_->Add(Handle<Object>());
1863 } else if (desc.is_arguments() && ArgumentsObjectIsAdapted(object_index)) {
1864 // Use the arguments adapter frame we just built to materialize the
1865 // arguments object. FunctionGetArguments can't throw an exception.
1866 Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1867 Handle<JSObject> arguments = Handle<JSObject>::cast(
1868 Accessors::FunctionGetArguments(function));
1869 materialized_objects_->Add(arguments);
1870 // To keep consistent object counters, we still materialize the
1871 // nested values (but we throw them away).
1872 for (int i = 0; i < length; ++i) {
1873 MaterializeNextValue();
1875 } else if (desc.is_arguments()) {
1876 // Construct an arguments object and copy the parameters to a newly
1877 // allocated arguments object backing store.
1878 Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1879 Handle<JSObject> arguments =
1880 isolate_->factory()->NewArgumentsObject(function, length);
1881 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
1882 DCHECK_EQ(array->length(), length);
1883 arguments->set_elements(*array);
1884 materialized_objects_->Add(arguments);
1885 for (int i = 0; i < length; ++i) {
1886 Handle<Object> value = MaterializeNextValue();
1887 array->set(i, *value);
1890 // Dispatch on the instance type of the object to be materialized.
1891 // We also need to make sure that the representation of all fields
1892 // in the given object are general enough to hold a tagged value.
1893 Handle<Map> map = Map::GeneralizeAllFieldRepresentations(
1894 Handle<Map>::cast(MaterializeNextValue()));
1895 switch (map->instance_type()) {
1896 case MUTABLE_HEAP_NUMBER_TYPE:
1897 case HEAP_NUMBER_TYPE: {
1898 // Reuse the HeapNumber value directly as it is already properly
1899 // tagged and skip materializing the HeapNumber explicitly. Turn mutable
1900 // heap numbers immutable.
1901 Handle<Object> object = MaterializeNextValue();
1902 if (object_index < prev_materialized_count_) {
1903 materialized_objects_->Add(Handle<Object>(
1904 previously_materialized_objects_->get(object_index), isolate_));
1906 materialized_objects_->Add(object);
// A double spans kDoubleSize bytes; skip the extra value-stream slots
// it consumed so the value index stays in sync.
1908 materialization_value_index_ += kDoubleSize / kPointerSize - 1;
1911 case JS_OBJECT_TYPE: {
1912 Handle<JSObject> object =
1913 isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
1914 if (object_index < prev_materialized_count_) {
1915 materialized_objects_->Add(Handle<Object>(
1916 previously_materialized_objects_->get(object_index), isolate_));
1918 materialized_objects_->Add(object);
1920 Handle<Object> properties = MaterializeNextValue();
1921 Handle<Object> elements = MaterializeNextValue();
1922 object->set_properties(FixedArray::cast(*properties));
1923 object->set_elements(FixedArrayBase::cast(*elements));
// length - 3 fast in-object fields remain: map, properties and
// elements were consumed above.
1924 for (int i = 0; i < length - 3; ++i) {
1925 Handle<Object> value = MaterializeNextValue();
1926 FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
1927 object->FastPropertyAtPut(index, *value);
1931 case JS_ARRAY_TYPE: {
1932 Handle<JSArray> object =
1933 isolate_->factory()->NewJSArray(0, map->elements_kind());
1934 if (object_index < prev_materialized_count_) {
1935 materialized_objects_->Add(Handle<Object>(
1936 previously_materialized_objects_->get(object_index), isolate_));
1938 materialized_objects_->Add(object);
1940 Handle<Object> properties = MaterializeNextValue();
1941 Handle<Object> elements = MaterializeNextValue();
1942 Handle<Object> length = MaterializeNextValue();
1943 object->set_properties(FixedArray::cast(*properties));
1944 object->set_elements(FixedArrayBase::cast(*elements));
1945 object->set_length(*length);
// Any other instance type aborts the process.
1950 "[couldn't handle instance type %d]\n", map->instance_type());
1951 FATAL("Unsupported instance type");
1955 return materialized_objects_->at(object_index);
1959 Handle<Object> Deoptimizer::MaterializeNextValue() {
1960 int value_index = materialization_value_index_++;
1961 Handle<Object> value = materialized_values_->at(value_index);
1962 if (value->IsMutableHeapNumber()) {
1963 HeapNumber::cast(*value)->set_map(isolate_->heap()->heap_number_map());
1965 if (*value == isolate_->heap()->arguments_marker()) {
1966 value = MaterializeNextHeapObject();
// Materializes all deferred heap numbers and arguments/captured objects
// for a non-debugger deopt, writing them into the output frame slots.
// NOTE(review): this listing appears to be missing lines (closing braces
// and some PrintF arguments); comments describe only what is visible.
1972 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
1973 DCHECK_NE(DEBUGGER, bailout_type_);
// Fetch objects materialized on a previous deopt at the same frame, so
// they can be re-used instead of re-created.
1975 MaterializedObjectStore* materialized_store =
1976 isolate_->materialized_object_store();
1977 previously_materialized_objects_ = materialized_store->Get(stack_fp_);
1978 prev_materialized_count_ = previously_materialized_objects_.is_null() ?
1979 0 : previously_materialized_objects_->length();
1981 // Walk all JavaScript output frames with the given frame iterator.
1982 for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
1983 if (frame_index != 0) it->Advance();
1984 JavaScriptFrame* frame = it->frame();
1985 jsframe_functions_.Add(handle(frame->function(), isolate_));
1986 jsframe_has_adapted_arguments_.Add(frame->has_adapted_arguments());
1989 // Handlify all tagged object values before triggering any allocation.
1990 List<Handle<Object> > values(deferred_objects_tagged_values_.length());
1991 for (int i = 0; i < deferred_objects_tagged_values_.length(); ++i) {
1992 values.Add(Handle<Object>(deferred_objects_tagged_values_[i], isolate_));
1995 // Play it safe and clear all unhandlified values before we continue.
1996 deferred_objects_tagged_values_.Clear();
1998 // Materialize all heap numbers before looking at arguments because when the
1999 // output frames are used to materialize arguments objects later on they need
2000 // to already contain valid heap numbers.
2001 for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
2002 HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
2003 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
2004 if (trace_scope_ != NULL) {
2005 PrintF(trace_scope_->file(),
2006 "Materialized a new heap number %p [%e] in slot %p\n",
2007 reinterpret_cast<void*>(*num),
// Write the boxed number directly into the destination frame slot.
2011 Memory::Object_at(d.destination()) = *num;
2014 // Materialize all heap numbers required for arguments/captured objects.
2015 for (int i = 0; i < deferred_objects_double_values_.length(); i++) {
2016 HeapNumberMaterializationDescriptor<int> d =
2017 deferred_objects_double_values_[i];
2018 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
2019 if (trace_scope_ != NULL) {
2020 PrintF(trace_scope_->file(),
2021 "Materialized a new heap number %p [%e] for object at %d\n",
2022 reinterpret_cast<void*>(*num),
// Here d.destination() is an index into |values|, not an address.
2026 DCHECK(values.at(d.destination())->IsTheHole());
2027 values.Set(d.destination(), num);
2030 // Play it safe and clear all object double values before we continue.
2031 deferred_objects_double_values_.Clear();
2033 // Materialize arguments/captured objects.
2034 if (!deferred_objects_.is_empty()) {
2035 List<Handle<Object> > materialized_objects(deferred_objects_.length());
2036 materialized_objects_ = &materialized_objects;
2037 materialized_values_ = &values;
2039 while (materialization_object_index_ < deferred_objects_.length()) {
2040 int object_index = materialization_object_index_;
2041 ObjectMaterializationDescriptor descriptor =
2042 deferred_objects_.at(object_index);
2044 // Find a previously materialized object by de-duplication or
2045 // materialize a new instance of the object if necessary. Store
2046 // the materialized object into the frame slot.
2047 Handle<Object> object = MaterializeNextHeapObject();
2048 if (descriptor.slot_address() != NULL) {
2049 Memory::Object_at(descriptor.slot_address()) = *object;
2051 if (trace_scope_ != NULL) {
2052 if (descriptor.is_arguments()) {
2053 PrintF(trace_scope_->file(),
2054 "Materialized %sarguments object of length %d for %p: ",
2055 ArgumentsObjectIsAdapted(object_index) ? "(adapted) " : "",
2056 Handle<JSObject>::cast(object)->elements()->length(),
2057 reinterpret_cast<void*>(descriptor.slot_address()));
2059 PrintF(trace_scope_->file(),
2060 "Materialized captured object of size %d for %p: ",
2061 Handle<HeapObject>::cast(object)->Size(),
2062 reinterpret_cast<void*>(descriptor.slot_address()));
2064 object->ShortPrint(trace_scope_->file());
2065 PrintF(trace_scope_->file(), "\n");
// Sanity: every deferred object and every deferred value was consumed.
2069 CHECK_EQ(materialization_object_index_, materialized_objects_->length());
2070 CHECK_EQ(materialization_value_index_, materialized_values_->length());
// Drop stale previously-materialized objects for this frame pointer.
2073 if (prev_materialized_count_ > 0) {
2074 materialized_store->Remove(stack_fp_);
// Debugger-only variant: materializes deferred heap numbers into a
// DeoptimizedFrameInfo instead of into real frame memory. Each deferred
// slot address is classified as a parameter or an expression-stack slot
// by range-checking it against the two [top, bottom) windows.
// NOTE(review): listing appears truncated (some PrintF arguments and
// closing braces are not visible).
2079 void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
2080 Address parameters_top,
2081 uint32_t parameters_size,
2082 Address expressions_top,
2083 uint32_t expressions_size,
2084 DeoptimizedFrameInfo* info) {
2085 CHECK_EQ(DEBUGGER, bailout_type_);
2086 Address parameters_bottom = parameters_top + parameters_size;
2087 Address expressions_bottom = expressions_top + expressions_size;
2088 for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
2089 HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
2091 // Check whether the heap number to materialize actually belongs to
// the frame being extracted.
2093 Address slot = d.destination();
2094 if (parameters_top <= slot && slot < parameters_bottom) {
2095 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
// Slots grow downwards, so the highest address is parameter #0.
2097 int index = (info->parameters_count() - 1) -
2098 static_cast<int>(slot - parameters_top) / kPointerSize;
2100 if (trace_scope_ != NULL) {
2101 PrintF(trace_scope_->file(),
2102 "Materializing a new heap number %p [%e] in slot %p"
2103 "for parameter slot #%d\n",
2104 reinterpret_cast<void*>(*num),
2110 info->SetParameter(index, *num);
2111 } else if (expressions_top <= slot && slot < expressions_bottom) {
2112 Handle<Object> num = isolate_->factory()->NewNumber(d.value());
2114 int index = info->expression_count() - 1 -
2115 static_cast<int>(slot - expressions_top) / kPointerSize;
2117 if (trace_scope_ != NULL) {
2118 PrintF(trace_scope_->file(),
2119 "Materializing a new heap number %p [%e] in slot %p"
2120 "for expression slot #%d\n",
2121 reinterpret_cast<void*>(*num),
2127 info->SetExpression(index, *num);
// Returns a human-readable tag used by the deopt tracing output to
// describe how a translated value will be encoded: "smi" when it fits
// in a small integer, otherwise "heap number".
//
// The visible listing dropped the is_smi branch, making the parameter
// dead and every value trace as "heap number"; restore it.
static const char* TraceValueType(bool is_smi) {
  if (is_smi) {
    return "smi";
  }
  return "heap number";
}
// Consumes one translation command whose value will never be used
// (e.g. part of a frame that is not being rebuilt): plain values are
// skipped outright, while object commands still record bookkeeping
// entries (with slot 0) and recurse over their fields so that object
// and value indices stay consistent with the full translation.
// NOTE(review): listing appears truncated (switch header, break
// statements and closing braces are not visible).
2142 void Deoptimizer::DoTranslateObjectAndSkip(TranslationIterator* iterator) {
2143 Translation::Opcode opcode =
2144 static_cast<Translation::Opcode>(iterator->Next());
// Frame-start opcodes must not appear inside an object description.
2147 case Translation::BEGIN:
2148 case Translation::JS_FRAME:
2149 case Translation::ARGUMENTS_ADAPTOR_FRAME:
2150 case Translation::CONSTRUCT_STUB_FRAME:
2151 case Translation::GETTER_STUB_FRAME:
2152 case Translation::SETTER_STUB_FRAME:
2153 case Translation::COMPILED_STUB_FRAME: {
2154 FATAL("Unexpected frame start translation opcode");
2158 case Translation::REGISTER:
2159 case Translation::INT32_REGISTER:
2160 case Translation::UINT32_REGISTER:
2161 case Translation::DOUBLE_REGISTER:
2162 case Translation::STACK_SLOT:
2163 case Translation::INT32_STACK_SLOT:
2164 case Translation::UINT32_STACK_SLOT:
2165 case Translation::DOUBLE_STACK_SLOT:
2166 case Translation::LITERAL: {
2167 // The value is not part of any materialized object, so we can ignore it.
2168 iterator->Skip(Translation::NumberOfOperandsFor(opcode));
2172 case Translation::DUPLICATED_OBJECT: {
2173 int object_index = iterator->Next();
2174 if (trace_scope_ != NULL) {
2175 PrintF(trace_scope_->file(), "  skipping object ");
2176 PrintF(trace_scope_->file(),
2177 "  ; duplicate of object #%d\n", object_index);
// Slot 0 marks "no destination": the object is tracked but not stored.
2179 AddObjectDuplication(0, object_index);
2183 case Translation::ARGUMENTS_OBJECT:
2184 case Translation::CAPTURED_OBJECT: {
2185 int length = iterator->Next();
2186 bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2187 if (trace_scope_ != NULL) {
2188 PrintF(trace_scope_->file(), "  skipping object ");
2189 PrintF(trace_scope_->file(),
2190 "  ; object (length = %d, is_args = %d)\n", length, is_args);
2193 AddObjectStart(0, length, is_args);
2195 // We save the object values on the side and materialize the actual
2196 // object after the deoptimized frame is built.
2197 int object_index = deferred_objects_.length() - 1;
2198 for (int i = 0; i < length; i++) {
2199 DoTranslateObject(iterator, object_index, i);
2205 FATAL("Unexpected translation opcode");
// Translates one field of the deferred object at |object_index| from
// the translation stream: tagged values are recorded via
// AddObjectTaggedValue, untaggable numbers via AddObjectDoubleValue,
// and nested objects are recorded with the arguments-marker sentinel
// and recursed into.
// NOTE(review): listing appears truncated — the switch header, the
// else-branches pairing each smi/double alternative, break statements
// and closing braces are not visible; comments describe visible code.
2209 void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
2212 disasm::NameConverter converter;
2213 Address object_slot = deferred_objects_[object_index].slot_address();
2215 Translation::Opcode opcode =
2216 static_cast<Translation::Opcode>(iterator->Next());
2219 case Translation::BEGIN:
2220 case Translation::JS_FRAME:
2221 case Translation::ARGUMENTS_ADAPTOR_FRAME:
2222 case Translation::CONSTRUCT_STUB_FRAME:
2223 case Translation::GETTER_STUB_FRAME:
2224 case Translation::SETTER_STUB_FRAME:
2225 case Translation::COMPILED_STUB_FRAME:
2226 FATAL("Unexpected frame start translation opcode");
2229 case Translation::REGISTER: {
2230 int input_reg = iterator->Next();
2231 intptr_t input_value = input_->GetRegister(input_reg);
2232 if (trace_scope_ != NULL) {
2233 PrintF(trace_scope_->file(),
2234 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2235 reinterpret_cast<intptr_t>(object_slot),
2237 PrintF(trace_scope_->file(),
2238 "0x%08" V8PRIxPTR " ; %s ", input_value,
2239 converter.NameOfCPURegister(input_reg));
2240 reinterpret_cast<Object*>(input_value)->ShortPrint(
2241 trace_scope_->file());
2242 PrintF(trace_scope_->file(),
2245 AddObjectTaggedValue(input_value);
2249 case Translation::INT32_REGISTER: {
2250 int input_reg = iterator->Next();
2251 intptr_t value = input_->GetRegister(input_reg);
// Smi-range ints can be tagged in place; others become heap numbers.
2252 bool is_smi = Smi::IsValid(value);
2253 if (trace_scope_ != NULL) {
2254 PrintF(trace_scope_->file(),
2255 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2256 reinterpret_cast<intptr_t>(object_slot),
2258 PrintF(trace_scope_->file(),
2259 "%" V8PRIdPTR " ; %s (%s)\n", value,
2260 converter.NameOfCPURegister(input_reg),
2261 TraceValueType(is_smi));
2264 intptr_t tagged_value =
2265 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2266 AddObjectTaggedValue(tagged_value);
2268 double double_value = static_cast<double>(static_cast<int32_t>(value));
2269 AddObjectDoubleValue(double_value);
2274 case Translation::UINT32_REGISTER: {
2275 int input_reg = iterator->Next();
2276 uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
2277 bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2278 if (trace_scope_ != NULL) {
2279 PrintF(trace_scope_->file(),
2280 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2281 reinterpret_cast<intptr_t>(object_slot),
2283 PrintF(trace_scope_->file(),
2284 "%" V8PRIdPTR " ; uint %s (%s)\n", value,
2285 converter.NameOfCPURegister(input_reg),
2286 TraceValueType(is_smi));
2289 intptr_t tagged_value =
2290 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2291 AddObjectTaggedValue(tagged_value);
2293 double double_value = static_cast<double>(static_cast<uint32_t>(value));
2294 AddObjectDoubleValue(double_value);
2299 case Translation::DOUBLE_REGISTER: {
2300 int input_reg = iterator->Next();
2301 double value = input_->GetDoubleRegister(input_reg);
2302 if (trace_scope_ != NULL) {
2303 PrintF(trace_scope_->file(),
2304 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2305 reinterpret_cast<intptr_t>(object_slot),
2307 PrintF(trace_scope_->file(),
2309 DoubleRegister::AllocationIndexToString(input_reg));
2311 AddObjectDoubleValue(value);
2315 case Translation::STACK_SLOT: {
2316 int input_slot_index = iterator->Next();
2317 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2318 intptr_t input_value = input_->GetFrameSlot(input_offset);
2319 if (trace_scope_ != NULL) {
2320 PrintF(trace_scope_->file(),
2321 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2322 reinterpret_cast<intptr_t>(object_slot),
2324 PrintF(trace_scope_->file(),
2325 "0x%08" V8PRIxPTR " ; [sp + %d] ", input_value, input_offset);
2326 reinterpret_cast<Object*>(input_value)->ShortPrint(
2327 trace_scope_->file());
2328 PrintF(trace_scope_->file(),
2331 AddObjectTaggedValue(input_value);
2335 case Translation::INT32_STACK_SLOT: {
2336 int input_slot_index = iterator->Next();
2337 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2338 intptr_t value = input_->GetFrameSlot(input_offset);
2339 bool is_smi = Smi::IsValid(value);
2340 if (trace_scope_ != NULL) {
2341 PrintF(trace_scope_->file(),
2342 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2343 reinterpret_cast<intptr_t>(object_slot),
2345 PrintF(trace_scope_->file(),
2346 "%" V8PRIdPTR " ; [sp + %d] (%s)\n",
2347 value, input_offset, TraceValueType(is_smi));
2350 intptr_t tagged_value =
2351 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2352 AddObjectTaggedValue(tagged_value);
2354 double double_value = static_cast<double>(static_cast<int32_t>(value));
2355 AddObjectDoubleValue(double_value);
2360 case Translation::UINT32_STACK_SLOT: {
2361 int input_slot_index = iterator->Next();
2362 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2364 static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
2365 bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2366 if (trace_scope_ != NULL) {
2367 PrintF(trace_scope_->file(),
2368 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2369 reinterpret_cast<intptr_t>(object_slot),
2371 PrintF(trace_scope_->file(),
2372 "%" V8PRIdPTR " ; [sp + %d] (uint %s)\n",
2373 value, input_offset, TraceValueType(is_smi));
2376 intptr_t tagged_value =
2377 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2378 AddObjectTaggedValue(tagged_value);
2380 double double_value = static_cast<double>(static_cast<uint32_t>(value));
2381 AddObjectDoubleValue(double_value);
2386 case Translation::DOUBLE_STACK_SLOT: {
2387 int input_slot_index = iterator->Next();
2388 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2389 double value = input_->GetDoubleFrameSlot(input_offset);
2390 if (trace_scope_ != NULL) {
2391 PrintF(trace_scope_->file(),
2392 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2393 reinterpret_cast<intptr_t>(object_slot),
2395 PrintF(trace_scope_->file(),
2396 "%e ; [sp + %d]\n", value, input_offset);
2398 AddObjectDoubleValue(value);
2402 case Translation::LITERAL: {
2403 Object* literal = ComputeLiteral(iterator->Next());
2404 if (trace_scope_ != NULL) {
2405 PrintF(trace_scope_->file(),
2406 " object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2407 reinterpret_cast<intptr_t>(object_slot),
2409 literal->ShortPrint(trace_scope_->file());
2410 PrintF(trace_scope_->file(),
2413 intptr_t value = reinterpret_cast<intptr_t>(literal);
2414 AddObjectTaggedValue(value);
2418 case Translation::DUPLICATED_OBJECT: {
2419 int object_index = iterator->Next();
2420 if (trace_scope_ != NULL) {
2421 PrintF(trace_scope_->file(),
2422 " nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2423 reinterpret_cast<intptr_t>(object_slot),
2425 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2426 PrintF(trace_scope_->file(),
2427 " ; duplicate of object #%d\n", object_index);
2429 // Use the materialization marker value as a sentinel and fill in
2430 // the object after the deoptimized frame is built.
2431 intptr_t value = reinterpret_cast<intptr_t>(
2432 isolate_->heap()->arguments_marker());
2433 AddObjectDuplication(0, object_index);
2434 AddObjectTaggedValue(value);
2438 case Translation::ARGUMENTS_OBJECT:
2439 case Translation::CAPTURED_OBJECT: {
2440 int length = iterator->Next();
2441 bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2442 if (trace_scope_ != NULL) {
2443 PrintF(trace_scope_->file(),
2444 " nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2445 reinterpret_cast<intptr_t>(object_slot),
2447 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2448 PrintF(trace_scope_->file(),
2449 " ; object (length = %d, is_args = %d)\n", length, is_args);
2451 // Use the materialization marker value as a sentinel and fill in
2452 // the object after the deoptimized frame is built.
2453 intptr_t value = reinterpret_cast<intptr_t>(
2454 isolate_->heap()->arguments_marker());
2455 AddObjectStart(0, length, is_args);
2456 AddObjectTaggedValue(value);
2457 // We save the object values on the side and materialize the actual
2458 // object after the deoptimized frame is built.
2459 int object_index = deferred_objects_.length() - 1;
2460 for (int i = 0; i < length; i++) {
2461 DoTranslateObject(iterator, object_index, i);
2467 FATAL("Unexpected translation opcode");
// Translates one command into an output frame slot at |output_offset|:
// taggable values are written directly, non-smi numbers are deferred
// via AddDoubleValue with a GC-safe Smi placeholder in the slot, and
// objects are recorded for later materialization with the
// arguments-marker sentinel written into the slot.
// NOTE(review): listing appears truncated — the switch header, the
// else-branches pairing each smi/double alternative, break statements
// and closing braces are not visible; comments describe visible code.
2471 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
2473 unsigned output_offset) {
2474 disasm::NameConverter converter;
2475 // A GC-safe temporary placeholder that we can put in the output frame.
2476 const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0));
2478 Translation::Opcode opcode =
2479 static_cast<Translation::Opcode>(iterator->Next());
// Frame-start opcodes are handled by the frame builders, never here.
2482 case Translation::BEGIN:
2483 case Translation::JS_FRAME:
2484 case Translation::ARGUMENTS_ADAPTOR_FRAME:
2485 case Translation::CONSTRUCT_STUB_FRAME:
2486 case Translation::GETTER_STUB_FRAME:
2487 case Translation::SETTER_STUB_FRAME:
2488 case Translation::COMPILED_STUB_FRAME:
2489 FATAL("Unexpected translation opcode");
2492 case Translation::REGISTER: {
2493 int input_reg = iterator->Next();
2494 intptr_t input_value = input_->GetRegister(input_reg);
2495 if (trace_scope_ != NULL) {
2497 trace_scope_->file(),
2498 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ",
2499 output_[frame_index]->GetTop() + output_offset,
2502 converter.NameOfCPURegister(input_reg));
2503 reinterpret_cast<Object*>(input_value)->ShortPrint(
2504 trace_scope_->file());
2505 PrintF(trace_scope_->file(), "\n");
2507 output_[frame_index]->SetFrameSlot(output_offset, input_value);
2511 case Translation::INT32_REGISTER: {
2512 int input_reg = iterator->Next();
2513 intptr_t value = input_->GetRegister(input_reg);
// Smi-range ints are tagged in place; others defer to a heap number.
2514 bool is_smi = Smi::IsValid(value);
2515 if (trace_scope_ != NULL) {
2517 trace_scope_->file(),
2518 " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n",
2519 output_[frame_index]->GetTop() + output_offset,
2522 converter.NameOfCPURegister(input_reg),
2523 TraceValueType(is_smi));
2526 intptr_t tagged_value =
2527 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2528 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2530 // We save the untagged value on the side and store a GC-safe
2531 // temporary placeholder in the frame.
2532 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2533 static_cast<double>(static_cast<int32_t>(value)));
2534 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2539 case Translation::UINT32_REGISTER: {
2540 int input_reg = iterator->Next();
2541 uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
2542 bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue);
2543 if (trace_scope_ != NULL) {
2545 trace_scope_->file(),
2546 " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR
2547 " ; uint %s (%s)\n",
2548 output_[frame_index]->GetTop() + output_offset,
2551 converter.NameOfCPURegister(input_reg),
2552 TraceValueType(is_smi));
2555 intptr_t tagged_value =
2556 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2557 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2559 // We save the untagged value on the side and store a GC-safe
2560 // temporary placeholder in the frame.
2561 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2562 static_cast<double>(static_cast<uint32_t>(value)));
2563 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2568 case Translation::DOUBLE_REGISTER: {
2569 int input_reg = iterator->Next();
2570 double value = input_->GetDoubleRegister(input_reg);
2571 if (trace_scope_ != NULL) {
2572 PrintF(trace_scope_->file(),
2573 " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n",
2574 output_[frame_index]->GetTop() + output_offset,
2577 DoubleRegister::AllocationIndexToString(input_reg));
2579 // We save the untagged value on the side and store a GC-safe
2580 // temporary placeholder in the frame.
2581 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2582 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2586 case Translation::STACK_SLOT: {
2587 int input_slot_index = iterator->Next();
2588 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2589 intptr_t input_value = input_->GetFrameSlot(input_offset);
2590 if (trace_scope_ != NULL) {
2591 PrintF(trace_scope_->file(),
2592 " 0x%08" V8PRIxPTR ": ",
2593 output_[frame_index]->GetTop() + output_offset);
2594 PrintF(trace_scope_->file(),
2595 "[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
2599 reinterpret_cast<Object*>(input_value)->ShortPrint(
2600 trace_scope_->file());
2601 PrintF(trace_scope_->file(), "\n");
2603 output_[frame_index]->SetFrameSlot(output_offset, input_value);
2607 case Translation::INT32_STACK_SLOT: {
2608 int input_slot_index = iterator->Next();
2609 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2610 intptr_t value = input_->GetFrameSlot(input_offset);
2611 bool is_smi = Smi::IsValid(value);
2612 if (trace_scope_ != NULL) {
2613 PrintF(trace_scope_->file(),
2614 " 0x%08" V8PRIxPTR ": ",
2615 output_[frame_index]->GetTop() + output_offset);
2616 PrintF(trace_scope_->file(),
2617 "[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
2621 TraceValueType(is_smi));
2624 intptr_t tagged_value =
2625 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2626 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2628 // We save the untagged value on the side and store a GC-safe
2629 // temporary placeholder in the frame.
2630 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2631 static_cast<double>(static_cast<int32_t>(value)));
2632 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2637 case Translation::UINT32_STACK_SLOT: {
2638 int input_slot_index = iterator->Next();
2639 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2641 static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
2642 bool is_smi = value <= static_cast<uintptr_t>(Smi::kMaxValue);
2643 if (trace_scope_ != NULL) {
2644 PrintF(trace_scope_->file(),
2645 " 0x%08" V8PRIxPTR ": ",
2646 output_[frame_index]->GetTop() + output_offset);
2647 PrintF(trace_scope_->file(),
2648 "[top + %d] <- %" V8PRIuPTR " ; [sp + %d] (uint32 %s)\n",
2652 TraceValueType(is_smi));
2655 intptr_t tagged_value =
2656 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2657 output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2659 // We save the untagged value on the side and store a GC-safe
2660 // temporary placeholder in the frame.
2661 AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2662 static_cast<double>(static_cast<uint32_t>(value)));
2663 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2668 case Translation::DOUBLE_STACK_SLOT: {
2669 int input_slot_index = iterator->Next();
2670 unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2671 double value = input_->GetDoubleFrameSlot(input_offset);
2672 if (trace_scope_ != NULL) {
2673 PrintF(trace_scope_->file(),
2674 " 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
2675 output_[frame_index]->GetTop() + output_offset,
2680 // We save the untagged value on the side and store a GC-safe
2681 // temporary placeholder in the frame.
2682 AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2683 output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2687 case Translation::LITERAL: {
2688 Object* literal = ComputeLiteral(iterator->Next());
2689 if (trace_scope_ != NULL) {
2690 PrintF(trace_scope_->file(),
2691 " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2692 output_[frame_index]->GetTop() + output_offset,
2694 literal->ShortPrint(trace_scope_->file());
2695 PrintF(trace_scope_->file(), " ; literal\n");
2697 intptr_t value = reinterpret_cast<intptr_t>(literal);
2698 output_[frame_index]->SetFrameSlot(output_offset, value);
2702 case Translation::DUPLICATED_OBJECT: {
2703 int object_index = iterator->Next();
2704 if (trace_scope_ != NULL) {
2705 PrintF(trace_scope_->file(),
2706 " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2707 output_[frame_index]->GetTop() + output_offset,
2709 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2710 PrintF(trace_scope_->file(),
2711 " ; duplicate of object #%d\n", object_index);
2713 // Use the materialization marker value as a sentinel and fill in
2714 // the object after the deoptimized frame is built.
2715 intptr_t value = reinterpret_cast<intptr_t>(
2716 isolate_->heap()->arguments_marker());
2717 AddObjectDuplication(output_[frame_index]->GetTop() + output_offset,
2719 output_[frame_index]->SetFrameSlot(output_offset, value);
2723 case Translation::ARGUMENTS_OBJECT:
2724 case Translation::CAPTURED_OBJECT: {
2725 int length = iterator->Next();
2726 bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2727 if (trace_scope_ != NULL) {
2728 PrintF(trace_scope_->file(),
2729 " 0x%08" V8PRIxPTR ": [top + %d] <- ",
2730 output_[frame_index]->GetTop() + output_offset,
2732 isolate_->heap()->arguments_marker()->ShortPrint(trace_scope_->file());
2733 PrintF(trace_scope_->file(),
2734 " ; object (length = %d, is_args = %d)\n", length, is_args);
2736 // Use the materialization marker value as a sentinel and fill in
2737 // the object after the deoptimized frame is built.
2738 intptr_t value = reinterpret_cast<intptr_t>(
2739 isolate_->heap()->arguments_marker());
2740 AddObjectStart(output_[frame_index]->GetTop() + output_offset,
2742 output_[frame_index]->SetFrameSlot(output_offset, value);
2743 // We save the object values on the side and materialize the actual
2744 // object after the deoptimized frame is built.
2745 int object_index = deferred_objects_.length() - 1;
2746 for (int i = 0; i < length; i++) {
2747 DoTranslateObject(iterator, object_index, i);
// Computes the size in bytes of the input (optimized) frame from the
// fixed frame part plus the recorded fp-to-sp delta, cross-checked
// against the code object's stack-slot count for optimized functions.
// NOTE(review): the `return result;` and closing braces are not
// visible in this listing — presumably truncated, verify in the
// original file.
2755 unsigned Deoptimizer::ComputeInputFrameSize() const {
2756 unsigned fixed_size = ComputeFixedSize(function_);
2757 // The fp-to-sp delta already takes the context, constant pool pointer and the
2758 // function into account so we have to avoid double counting them.
2759 unsigned result = fixed_size + fp_to_sp_delta_ -
2760 StandardFrameConstants::kFixedFrameSizeFromFp;
2761 if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
2762 unsigned stack_slots = compiled_code_->stack_slots();
2763 unsigned outgoing_size = ComputeOutgoingArgumentSize();
2764 CHECK(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
2770 unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
2771 // The fixed part of the frame consists of the return address, frame
2772 // pointer, function, context, and all the incoming arguments.
2773 return ComputeIncomingArgumentSize(function) +
2774 StandardFrameConstants::kFixedFrameSize;
2778 unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
2779 // The incoming arguments is the values for formal parameters and
2780 // the receiver. Every slot contains a pointer.
// A Smi in place of the function marks a STUB frame, which carries no
// incoming arguments of its own.
2781 if (function->IsSmi()) {
2782 CHECK_EQ(Smi::cast(function), Smi::FromInt(StackFrame::STUB))
// The "+ 1" accounts for the receiver slot.
2785 unsigned arguments = function->shared()->formal_parameter_count() + 1;
2786 return arguments * kPointerSize;
2790 unsigned Deoptimizer::ComputeOutgoingArgumentSize() const {
2791 DeoptimizationInputData* data = DeoptimizationInputData::cast(
2792 compiled_code_->deoptimization_data());
2793 unsigned height = data->ArgumentsStackHeight(bailout_id_)->value();
2794 return height * kPointerSize;
2798 Object* Deoptimizer::ComputeLiteral(int index) const {
2799 DeoptimizationInputData* data = DeoptimizationInputData::cast(
2800 compiled_code_->deoptimization_data());
2801 FixedArray* literals = data->LiteralArray();
2802 return literals->get(index);
2806 void Deoptimizer::AddObjectStart(intptr_t slot, int length, bool is_args) {
2807 ObjectMaterializationDescriptor object_desc(
2808 reinterpret_cast<Address>(slot), jsframe_count_, length, -1, is_args);
2809 deferred_objects_.Add(object_desc);
2813 void Deoptimizer::AddObjectDuplication(intptr_t slot, int object_index) {
2814 ObjectMaterializationDescriptor object_desc(
2815 reinterpret_cast<Address>(slot), jsframe_count_, -1, object_index, false);
2816 deferred_objects_.Add(object_desc);
2820 void Deoptimizer::AddObjectTaggedValue(intptr_t value) {
2821 deferred_objects_tagged_values_.Add(reinterpret_cast<Object*>(value));
2825 void Deoptimizer::AddObjectDoubleValue(double value) {
2826 deferred_objects_tagged_values_.Add(isolate()->heap()->the_hole_value());
2827 HeapNumberMaterializationDescriptor<int> value_desc(
2828 deferred_objects_tagged_values_.length() - 1, value);
2829 deferred_objects_double_values_.Add(value_desc);
2833 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) {
2834 HeapNumberMaterializationDescriptor<Address> value_desc(
2835 reinterpret_cast<Address>(slot_address), value);
2836 deferred_heap_numbers_.Add(value_desc);
// Ensures the pre-generated deoptimization entry table for the given bailout
// type covers at least max_entry_id + 1 entries, regenerating the table into
// its committed code chunk when it needs to grow.
2840 void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
2843 // We cannot run this if the serializer is enabled because this will
2844 // cause us to emit relocation information for the external
2845 // references. This is fine because the deoptimizer's code section
2846 // isn't meant to be serialized at all.
2847 CHECK(type == EAGER || type == SOFT || type == LAZY);
2848 DeoptimizerData* data = isolate->deoptimizer_data();
2849 int entry_count = data->deopt_entry_code_entries_[type];
// Fast path: the table already covers the requested entry.
2850 if (max_entry_id < entry_count) return;
// Grow by doubling, starting from the configured minimum table size.
2851 entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
2852 while (max_entry_id >= entry_count) entry_count *= 2;
2853 CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);
2855 MacroAssembler masm(isolate, NULL, 16 * KB);
2856 masm.set_emit_debug_code(false);
2857 GenerateDeoptimizationEntries(&masm, entry_count, type);
2859 masm.GetCode(&desc);
2860 DCHECK(!RelocInfo::RequiresRelocation(desc));
// Copy the generated entries into the preallocated chunk and flush the
// instruction cache so the new code is visible to execution.
2862 MemoryChunk* chunk = data->deopt_entry_code_[type];
2863 CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
2865 chunk->CommitArea(desc.instr_size);
2866 CopyBytes(chunk->area_start(), desc.buffer,
2867 static_cast<size_t>(desc.instr_size));
2868 CpuFeatures::FlushICache(chunk->area_start(), desc.instr_size);
2870 data->deopt_entry_code_entries_[type] = entry_count;
// Constructs a frame description of the given size; all registers and frame
// slots are filled with the zap value so stale data is recognizable.
2874 FrameDescription::FrameDescription(uint32_t frame_size,
2875 JSFunction* function)
2876 : frame_size_(frame_size),
2877 function_(function),
2881 context_(kZapUint32),
2882 constant_pool_(kZapUint32) {
2883 // Zap all the registers.
2884 for (int r = 0; r < Register::kNumRegisters; r++) {
2885 // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
2886 // isn't used before the next safepoint, the GC will try to scan it as a
2887 // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
2888 SetRegister(r, kZapUint32);
2891 // Zap all the slots.
2892 for (unsigned o = 0; o < frame_size; o += kPointerSize) {
2893 SetFrameSlot(o, kZapUint32);
2898 int FrameDescription::ComputeFixedSize() {
2899 return StandardFrameConstants::kFixedFrameSize +
2900 (ComputeParametersCount() + 1) * kPointerSize;
// Translates a slot index into a byte offset from the top of the frame.
// Non-negative indexes address locals/spill slots; negative indexes address
// incoming parameters.
2904 unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
2905 if (slot_index >= 0) {
2906 // Local or spill slots. Skip the fixed part of the frame
2907 // including all arguments.
2908 unsigned base = GetFrameSize() - ComputeFixedSize();
2909 return base - ((slot_index + 1) * kPointerSize);
2911 // Incoming parameter.
2912 int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
2913 unsigned base = GetFrameSize() - arg_size;
2914 return base - ((slot_index + 1) * kPointerSize);
// Returns the number of parameters of this frame, excluding the receiver;
// the answer depends on the frame type.
2919 int FrameDescription::ComputeParametersCount() {
2921 case StackFrame::JAVA_SCRIPT:
2922 return function_->shared()->formal_parameter_count();
2923 case StackFrame::ARGUMENTS_ADAPTOR: {
2924 // Last slot contains number of incoming arguments as a smi.
2925 // Can't use GetExpression(0) because it would cause infinite recursion.
2926 return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
2928 case StackFrame::STUB:
2929 return -1; // Minus receiver.
2931 FATAL("Unexpected stack frame type");
// Reads the index-th incoming parameter from the frame contents.
2937 Object* FrameDescription::GetParameter(int index) {
2939 CHECK_LT(index, ComputeParametersCount());
2940 // The slot indexes for incoming arguments are negative.
2941 unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
2942 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2946 unsigned FrameDescription::GetExpressionCount() {
2947 CHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
2948 unsigned size = GetFrameSize() - ComputeFixedSize();
2949 return size / kPointerSize;
2953 Object* FrameDescription::GetExpression(int index) {
2954 DCHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
2955 unsigned offset = GetOffsetFromSlotIndex(index);
2956 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2960 void TranslationBuffer::Add(int32_t value, Zone* zone) {
2961 // Encode the sign bit in the least significant bit.
2962 bool is_negative = (value < 0);
2963 uint32_t bits = ((is_negative ? -value : value) << 1) |
2964 static_cast<int32_t>(is_negative);
2965 // Encode the individual bytes using the least significant bit of
2966 // each byte to indicate whether or not more bytes follow.
2968 uint32_t next = bits >> 7;
2969 contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
2971 } while (bits != 0);
2975 int32_t TranslationIterator::Next() {
2976 // Run through the bytes until we reach one with a least significant
2977 // bit of zero (marks the end).
2979 for (int i = 0; true; i += 7) {
2981 uint8_t next = buffer_->get(index_++);
2982 bits |= (next >> 1) << i;
2983 if ((next & 1) == 0) break;
2985 // The bits encode the sign in the least significant bit.
2986 bool is_negative = (bits & 1) == 1;
2987 int32_t result = bits >> 1;
2988 return is_negative ? -result : result;
2992 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
2993 int length = contents_.length();
2994 Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
2995 MemCopy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
3000 void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
3001 buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
3002 buffer_->Add(literal_id, zone());
3003 buffer_->Add(height, zone());
3007 void Translation::BeginGetterStubFrame(int literal_id) {
3008 buffer_->Add(GETTER_STUB_FRAME, zone());
3009 buffer_->Add(literal_id, zone());
3013 void Translation::BeginSetterStubFrame(int literal_id) {
3014 buffer_->Add(SETTER_STUB_FRAME, zone());
3015 buffer_->Add(literal_id, zone());
3019 void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
3020 buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
3021 buffer_->Add(literal_id, zone());
3022 buffer_->Add(height, zone());
3026 void Translation::BeginJSFrame(BailoutId node_id,
3029 buffer_->Add(JS_FRAME, zone());
3030 buffer_->Add(node_id.ToInt(), zone());
3031 buffer_->Add(literal_id, zone());
3032 buffer_->Add(height, zone());
3036 void Translation::BeginCompiledStubFrame() {
3037 buffer_->Add(COMPILED_STUB_FRAME, zone());
3041 void Translation::BeginArgumentsObject(int args_length) {
3042 buffer_->Add(ARGUMENTS_OBJECT, zone());
3043 buffer_->Add(args_length, zone());
3047 void Translation::BeginCapturedObject(int length) {
3048 buffer_->Add(CAPTURED_OBJECT, zone());
3049 buffer_->Add(length, zone());
3053 void Translation::DuplicateObject(int object_index) {
3054 buffer_->Add(DUPLICATED_OBJECT, zone());
3055 buffer_->Add(object_index, zone());
3059 void Translation::StoreRegister(Register reg) {
3060 buffer_->Add(REGISTER, zone());
3061 buffer_->Add(reg.code(), zone());
3065 void Translation::StoreInt32Register(Register reg) {
3066 buffer_->Add(INT32_REGISTER, zone());
3067 buffer_->Add(reg.code(), zone());
3071 void Translation::StoreUint32Register(Register reg) {
3072 buffer_->Add(UINT32_REGISTER, zone());
3073 buffer_->Add(reg.code(), zone());
3077 void Translation::StoreDoubleRegister(DoubleRegister reg) {
3078 buffer_->Add(DOUBLE_REGISTER, zone());
3079 buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
3083 void Translation::StoreStackSlot(int index) {
3084 buffer_->Add(STACK_SLOT, zone());
3085 buffer_->Add(index, zone());
3089 void Translation::StoreInt32StackSlot(int index) {
3090 buffer_->Add(INT32_STACK_SLOT, zone());
3091 buffer_->Add(index, zone());
3095 void Translation::StoreUint32StackSlot(int index) {
3096 buffer_->Add(UINT32_STACK_SLOT, zone());
3097 buffer_->Add(index, zone());
3101 void Translation::StoreDoubleStackSlot(int index) {
3102 buffer_->Add(DOUBLE_STACK_SLOT, zone());
3103 buffer_->Add(index, zone());
3107 void Translation::StoreLiteral(int literal_id) {
3108 buffer_->Add(LITERAL, zone());
3109 buffer_->Add(literal_id, zone());
3113 void Translation::StoreArgumentsObject(bool args_known,
3116 buffer_->Add(ARGUMENTS_OBJECT, zone());
3117 buffer_->Add(args_known, zone());
3118 buffer_->Add(args_index, zone());
3119 buffer_->Add(args_length, zone());
// Returns how many operands follow the given opcode in the translation byte
// stream; used to skip over commands without interpreting them.
3123 int Translation::NumberOfOperandsFor(Opcode opcode) {
3125 case GETTER_STUB_FRAME:
3126 case SETTER_STUB_FRAME:
3127 case DUPLICATED_OBJECT:
3128 case ARGUMENTS_OBJECT:
3129 case CAPTURED_OBJECT:
3131 case INT32_REGISTER:
3132 case UINT32_REGISTER:
3133 case DOUBLE_REGISTER:
3135 case INT32_STACK_SLOT:
3136 case UINT32_STACK_SLOT:
3137 case DOUBLE_STACK_SLOT:
3139 case COMPILED_STUB_FRAME:
3142 case ARGUMENTS_ADAPTOR_FRAME:
3143 case CONSTRUCT_STUB_FRAME:
3148 FATAL("Unexpected translation type");
3153 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
// Maps a translation opcode to its textual name; compiled only for printing
// and disassembly builds.
3155 const char* Translation::StringFor(Opcode opcode) {
3156 #define TRANSLATION_OPCODE_CASE(item) case item: return #item;
3158 TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
3160 #undef TRANSLATION_OPCODE_CASE
3168 // We can't intermix stack decoding and allocations because
3169 // deoptimization infrastracture is not GC safe.
3170 // Thus we build a temporary structure in malloced space.
// Decodes one translation command into a SlotRef describing where the
// corresponding argument value lives (stack slot, literal, or deferred
// object).
3171 SlotRef SlotRefValueBuilder::ComputeSlotForNextArgument(
3172 Translation::Opcode opcode,
3173 TranslationIterator* iterator,
3174 DeoptimizationInputData* data,
3175 JavaScriptFrame* frame) {
3177 case Translation::BEGIN:
3178 case Translation::JS_FRAME:
3179 case Translation::ARGUMENTS_ADAPTOR_FRAME:
3180 case Translation::CONSTRUCT_STUB_FRAME:
3181 case Translation::GETTER_STUB_FRAME:
3182 case Translation::SETTER_STUB_FRAME:
3183 // Peeled off before getting here.
3186 case Translation::DUPLICATED_OBJECT: {
3187 return SlotRef::NewDuplicateObject(iterator->Next());
3190 case Translation::ARGUMENTS_OBJECT:
3191 return SlotRef::NewArgumentsObject(iterator->Next());
3193 case Translation::CAPTURED_OBJECT: {
3194 return SlotRef::NewDeferredObject(iterator->Next());
3197 case Translation::REGISTER:
3198 case Translation::INT32_REGISTER:
3199 case Translation::UINT32_REGISTER:
3200 case Translation::DOUBLE_REGISTER:
3201 // We are at safepoint which corresponds to call. All registers are
3202 // saved by caller so there would be no live registers at this
3203 // point. Thus these translation commands should not be used.
3206 case Translation::STACK_SLOT: {
3207 int slot_index = iterator->Next();
3208 Address slot_addr = SlotAddress(frame, slot_index);
3209 return SlotRef(slot_addr, SlotRef::TAGGED);
3212 case Translation::INT32_STACK_SLOT: {
3213 int slot_index = iterator->Next();
3214 Address slot_addr = SlotAddress(frame, slot_index);
3215 return SlotRef(slot_addr, SlotRef::INT32);
3218 case Translation::UINT32_STACK_SLOT: {
3219 int slot_index = iterator->Next();
3220 Address slot_addr = SlotAddress(frame, slot_index);
3221 return SlotRef(slot_addr, SlotRef::UINT32);
3224 case Translation::DOUBLE_STACK_SLOT: {
3225 int slot_index = iterator->Next();
3226 Address slot_addr = SlotAddress(frame, slot_index);
3227 return SlotRef(slot_addr, SlotRef::DOUBLE);
3230 case Translation::LITERAL: {
3231 int literal_index = iterator->Next();
3232 return SlotRef(data->GetIsolate(),
3233 data->LiteralArray()->get(literal_index));
3236 case Translation::COMPILED_STUB_FRAME:
3241 FATAL("We should never get here - unexpected deopt info.");
// Walks the translation data of the given optimized frame and collects
// SlotRefs for the arguments of the requested inlined frame. If a deferred
// or duplicated object is encountered, the function is deoptimized at the
// end so the values can be materialized safely.
3246 SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame,
3247 int inlined_jsframe_index,
3248 int formal_parameter_count)
3249 : current_slot_(0), args_length_(-1), first_slot_index_(-1) {
3250 DisallowHeapAllocation no_gc;
3252 int deopt_index = Safepoint::kNoDeoptimizationIndex;
3253 DeoptimizationInputData* data =
3254 static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
3255 TranslationIterator it(data->TranslationByteArray(),
3256 data->TranslationIndex(deopt_index)->value());
3257 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
3258 CHECK_EQ(opcode, Translation::BEGIN);
3259 it.Next(); // Drop frame count.
3261 stack_frame_id_ = frame->fp();
3263 int jsframe_count = it.Next();
3264 CHECK_GT(jsframe_count, inlined_jsframe_index);
3265 int jsframes_to_skip = inlined_jsframe_index;
3266 int number_of_slots = -1; // Number of slots inside our frame (yet unknown)
3267 bool should_deopt = false;
// Scan the translation commands until all slots of the target frame have
// been processed (number_of_slots counts down to zero once found).
3268 while (number_of_slots != 0) {
3269 opcode = static_cast<Translation::Opcode>(it.Next());
3270 bool processed = false;
3271 if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
3272 if (jsframes_to_skip == 0) {
3273 CHECK_EQ(Translation::NumberOfOperandsFor(opcode), 2);
3275 it.Skip(1); // literal id
3276 int height = it.Next();
3278 // Skip the translation command for the receiver.
3279 it.Skip(Translation::NumberOfOperandsFor(
3280 static_cast<Translation::Opcode>(it.Next())));
3282 // We reached the arguments adaptor frame corresponding to the
3283 // inlined function in question. Number of arguments is height - 1.
3284 first_slot_index_ = slot_refs_.length();
3285 args_length_ = height - 1;
3286 number_of_slots = height - 1;
3289 } else if (opcode == Translation::JS_FRAME) {
3290 if (jsframes_to_skip == 0) {
3291 // Skip over operands to advance to the next opcode.
3292 it.Skip(Translation::NumberOfOperandsFor(opcode));
3294 // Skip the translation command for the receiver.
3295 it.Skip(Translation::NumberOfOperandsFor(
3296 static_cast<Translation::Opcode>(it.Next())));
3298 // We reached the frame corresponding to the inlined function
3299 // in question. Process the translation commands for the
3300 // arguments. Number of arguments is equal to the number of
3301 // format parameter count.
3302 first_slot_index_ = slot_refs_.length();
3303 args_length_ = formal_parameter_count;
3304 number_of_slots = formal_parameter_count;
3308 } else if (opcode != Translation::BEGIN &&
3309 opcode != Translation::CONSTRUCT_STUB_FRAME &&
3310 opcode != Translation::GETTER_STUB_FRAME &&
3311 opcode != Translation::SETTER_STUB_FRAME &&
3312 opcode != Translation::COMPILED_STUB_FRAME) {
3313 slot_refs_.Add(ComputeSlotForNextArgument(opcode, &it, data, frame));
3315 if (first_slot_index_ >= 0) {
3316 // We have found the beginning of our frame -> make sure we count
3317 // the nested slots of captured objects
3319 SlotRef& slot = slot_refs_.last();
3320 CHECK_NE(slot.Representation(), SlotRef::ARGUMENTS_OBJECT);
3321 number_of_slots += slot.GetChildrenCount();
3322 if (slot.Representation() == SlotRef::DEFERRED_OBJECT ||
3323 slot.Representation() == SlotRef::DUPLICATE_OBJECT) {
3324 should_deopt = true;
3331 // Skip over operands to advance to the next opcode.
3332 it.Skip(Translation::NumberOfOperandsFor(opcode));
// Deoptimize the function so deferred/duplicated objects can be
// materialized on the unoptimized frame.
3336 List<JSFunction*> functions(2);
3337 frame->GetFunctions(&functions);
3338 Deoptimizer::DeoptimizeFunction(functions[0]);
// Converts the referenced raw slot contents into a proper heap value,
// allocating a new number object when the value does not fit a Smi.
3343 Handle<Object> SlotRef::GetValue(Isolate* isolate) {
3344 switch (representation_) {
3346 return Handle<Object>(Memory::Object_at(addr_), isolate);
// On big-endian 64-bit targets the 32-bit payload lives in the upper half
// of the slot, hence the kIntSize adjustment.
3349 #if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
3350 int value = Memory::int32_at(addr_ + kIntSize);
3352 int value = Memory::int32_at(addr_);
3354 if (Smi::IsValid(value)) {
3355 return Handle<Object>(Smi::FromInt(value), isolate);
3357 return isolate->factory()->NewNumberFromInt(value);
3362 #if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
3363 uint32_t value = Memory::uint32_at(addr_ + kIntSize);
3365 uint32_t value = Memory::uint32_at(addr_);
3367 if (value <= static_cast<uint32_t>(Smi::kMaxValue)) {
3368 return Handle<Object>(Smi::FromInt(static_cast<int>(value)), isolate);
3370 return isolate->factory()->NewNumber(static_cast<double>(value));
3375 double value = read_double_value(addr_);
3376 return isolate->factory()->NewNumber(value);
3383 FATAL("We should never get here - unexpected deopt info.");
3384 return Handle<Object>::null();
// Loads any previously materialized objects for this stack frame and
// advances past the slots belonging to inlined "parent" frames.
3389 void SlotRefValueBuilder::Prepare(Isolate* isolate) {
3390 MaterializedObjectStore* materialized_store =
3391 isolate->materialized_object_store();
3392 previously_materialized_objects_ = materialized_store->Get(stack_frame_id_);
3393 prev_materialized_count_ = previously_materialized_objects_.is_null()
3394 ? 0 : previously_materialized_objects_->length();
3396 // Skip any materialized objects of the inlined "parent" frames.
3397 // (Note that we still need to materialize them because they might be
3398 // referred to as duplicated objects.)
3399 while (current_slot_ < first_slot_index_) {
3400 GetNext(isolate, 0);
3402 CHECK_EQ(current_slot_, first_slot_index_);
// Returns the object already materialized for the current slot and records
// it (and its nested deferred/duplicate objects) in materialized_objects_
// so indexes stay consistent with the materialization store.
3406 Handle<Object> SlotRefValueBuilder::GetPreviouslyMaterialized(
3407 Isolate* isolate, int length) {
3408 int object_index = materialized_objects_.length();
3409 Handle<Object> return_value = Handle<Object>(
3410 previously_materialized_objects_->get(object_index), isolate);
3411 materialized_objects_.Add(return_value);
3413 // Now need to skip all the nested objects (and possibly read them from
3414 // the materialization store, too).
3415 for (int i = 0; i < length; i++) {
3416 SlotRef& slot = slot_refs_[current_slot_];
3419 // We need to read all the nested objects - add them to the
3420 // number of objects we need to process.
3421 length += slot.GetChildrenCount();
3423 // Put the nested deferred/duplicate objects into our materialization
3425 if (slot.Representation() == SlotRef::DEFERRED_OBJECT ||
3426 slot.Representation() == SlotRef::DUPLICATE_OBJECT) {
3427 int nested_object_index = materialized_objects_.length();
3428 Handle<Object> nested_object = Handle<Object>(
3429 previously_materialized_objects_->get(nested_object_index),
3431 materialized_objects_.Add(nested_object);
3435 return return_value;
// Consumes the next SlotRef and produces its value, recursively
// materializing deferred objects (maps, properties, elements, fields) and
// resolving duplicates from the already-materialized list.
3439 Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) {
3440 SlotRef& slot = slot_refs_[current_slot_];
3442 switch (slot.Representation()) {
3443 case SlotRef::TAGGED:
3444 case SlotRef::INT32:
3445 case SlotRef::UINT32:
3446 case SlotRef::DOUBLE:
3447 case SlotRef::LITERAL: {
3448 return slot.GetValue(isolate);
3450 case SlotRef::ARGUMENTS_OBJECT: {
3451 // We should never need to materialize an arguments object,
3452 // but we still need to put something into the array
3453 // so that the indexing is consistent.
3454 materialized_objects_.Add(isolate->factory()->undefined_value());
3455 int length = slot.GetChildrenCount();
3456 for (int i = 0; i < length; ++i) {
3457 // We don't need the argument, just ignore it
3458 GetNext(isolate, lvl + 1);
3460 return isolate->factory()->undefined_value();
3462 case SlotRef::DEFERRED_OBJECT: {
3463 int length = slot.GetChildrenCount();
3464 CHECK(slot_refs_[current_slot_].Representation() == SlotRef::LITERAL ||
3465 slot_refs_[current_slot_].Representation() == SlotRef::TAGGED);
// Reuse a previously materialized object when the store already has one
// for this index.
3467 int object_index = materialized_objects_.length();
3468 if (object_index < prev_materialized_count_) {
3469 return GetPreviouslyMaterialized(isolate, length);
3472 Handle<Object> map_object = slot_refs_[current_slot_].GetValue(isolate);
3473 Handle<Map> map = Map::GeneralizeAllFieldRepresentations(
3474 Handle<Map>::cast(map_object));
3476 // TODO(jarin) this should be unified with the code in
3477 // Deoptimizer::MaterializeNextHeapObject()
3478 switch (map->instance_type()) {
3479 case MUTABLE_HEAP_NUMBER_TYPE:
3480 case HEAP_NUMBER_TYPE: {
3481 // Reuse the HeapNumber value directly as it is already properly
3482 // tagged and skip materializing the HeapNumber explicitly.
3483 Handle<Object> object = GetNext(isolate, lvl + 1);
3484 materialized_objects_.Add(object);
3485 // On 32-bit architectures, there is an extra slot there because
3486 // the escape analysis calculates the number of slots as
3487 // object-size/pointer-size. To account for this, we read out
3489 for (int i = 0; i < length - 2; i++) {
3490 GetNext(isolate, lvl + 1);
3494 case JS_OBJECT_TYPE: {
3495 Handle<JSObject> object =
3496 isolate->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
3497 materialized_objects_.Add(object);
3498 Handle<Object> properties = GetNext(isolate, lvl + 1);
3499 Handle<Object> elements = GetNext(isolate, lvl + 1);
3500 object->set_properties(FixedArray::cast(*properties));
3501 object->set_elements(FixedArrayBase::cast(*elements));
// length - 3: children minus the map, properties and elements slots.
3502 for (int i = 0; i < length - 3; ++i) {
3503 Handle<Object> value = GetNext(isolate, lvl + 1);
3504 FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
3505 object->FastPropertyAtPut(index, *value);
3509 case JS_ARRAY_TYPE: {
3510 Handle<JSArray> object =
3511 isolate->factory()->NewJSArray(0, map->elements_kind());
3512 materialized_objects_.Add(object);
3513 Handle<Object> properties = GetNext(isolate, lvl + 1);
3514 Handle<Object> elements = GetNext(isolate, lvl + 1);
3515 Handle<Object> length = GetNext(isolate, lvl + 1);
3516 object->set_properties(FixedArray::cast(*properties));
3517 object->set_elements(FixedArrayBase::cast(*elements));
3518 object->set_length(*length);
3523 "[couldn't handle instance type %d]\n", map->instance_type());
3531 case SlotRef::DUPLICATE_OBJECT: {
3532 int object_index = slot.DuplicateObjectId();
3533 Handle<Object> object = materialized_objects_[object_index];
3534 materialized_objects_.Add(object);
3542 FATAL("We should never get here - unexpected deopt slot kind.");
3543 return Handle<Object>::null();
// Verifies all slots were consumed and, if new objects were materialized,
// stores them back so a later pass does not materialize them twice.
3547 void SlotRefValueBuilder::Finish(Isolate* isolate) {
3548 // We should have processed all the slots
3549 CHECK_EQ(slot_refs_.length(), current_slot_);
3551 if (materialized_objects_.length() > prev_materialized_count_) {
3552 // We have materialized some new objects, so we have to store them
3553 // to prevent duplicate materialization
3554 Handle<FixedArray> array = isolate->factory()->NewFixedArray(
3555 materialized_objects_.length());
3556 for (int i = 0; i < materialized_objects_.length(); i++) {
3557 array->set(i, *(materialized_objects_.at(i)));
3559 isolate->materialized_object_store()->Set(stack_frame_id_, array);
// Returns the materialized-objects array recorded for the given frame
// pointer, or a null handle if none has been stored.
3564 Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
3565 int index = StackIdToIndex(fp);
3567 return Handle<FixedArray>::null();
3569 Handle<FixedArray> array = GetStackEntries();
3570 CHECK_GT(array->length(), index);
3571 return Handle<FixedArray>::cast(Handle<Object>(array->get(index),
// Associates the materialized objects with the given frame pointer,
// registering a new entry when the frame is not yet known.
3576 void MaterializedObjectStore::Set(Address fp,
3577 Handle<FixedArray> materialized_objects) {
3578 int index = StackIdToIndex(fp);
3580 index = frame_fps_.length();
3584 Handle<FixedArray> array = EnsureStackEntries(index + 1);
3585 array->set(index, *materialized_objects);
// Removes the entry for the given frame pointer, compacting the remaining
// entries and clearing the vacated slot.
3589 void MaterializedObjectStore::Remove(Address fp) {
3590 int index = StackIdToIndex(fp);
3593 frame_fps_.Remove(index);
3594 Handle<FixedArray> array = GetStackEntries();
3595 CHECK_LT(index, array->length());
// Shift the following entries down one position to close the gap.
3596 for (int i = index; i < frame_fps_.length(); i++) {
3597 array->set(i, array->get(i + 1));
3599 array->set(frame_fps_.length(), isolate()->heap()->undefined_value());
// Linear search mapping a frame pointer to its index in frame_fps_.
3603 int MaterializedObjectStore::StackIdToIndex(Address fp) {
3604 for (int i = 0; i < frame_fps_.length(); i++) {
3605 if (frame_fps_[i] == fp) {
3613 Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
3614 return Handle<FixedArray>(isolate()->heap()->materialized_objects());
// Grows the materialized-objects array to at least the requested length
// (doubling, with a minimum of 10), copying existing entries and padding new
// slots with undefined.
3618 Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
3619 Handle<FixedArray> array = GetStackEntries();
3620 if (array->length() >= length) {
3624 int new_length = length > 10 ? length : 10;
3625 if (new_length < 2 * array->length()) {
3626 new_length = 2 * array->length();
3629 Handle<FixedArray> new_array =
3630 isolate()->factory()->NewFixedArray(new_length, TENURED);
3631 for (int i = 0; i < array->length(); i++) {
3632 new_array->set(i, array->get(i));
3634 for (int i = array->length(); i < length; i++) {
3635 new_array->set(i, isolate()->heap()->undefined_value());
// Publish the new backing array on the heap.
3637 isolate()->heap()->public_set_materialized_objects(*new_array);
// Snapshots the contents of a deoptimized output frame (function, source
// position, expression stack, and parameters) for the debugger.
3642 DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
3644 bool has_arguments_adaptor,
3645 bool has_construct_stub) {
3646 FrameDescription* output_frame = deoptimizer->output_[frame_index];
3647 function_ = output_frame->GetFunction();
3648 has_construct_stub_ = has_construct_stub;
3649 expression_count_ = output_frame->GetExpressionCount();
3650 expression_stack_ = new Object*[expression_count_];
3651 // Get the source position using the unoptimized code.
3652 Address pc = reinterpret_cast<Address>(output_frame->GetPc());
3653 Code* code = Code::cast(deoptimizer->isolate()->FindCodeObject(pc));
3654 source_position_ = code->SourcePosition(pc);
3656 for (int i = 0; i < expression_count_; i++) {
3657 SetExpression(i, output_frame->GetExpression(i));
// With an adaptor frame present, the parameters live in the frame below.
3660 if (has_arguments_adaptor) {
3661 output_frame = deoptimizer->output_[frame_index - 1];
3662 CHECK_EQ(output_frame->GetFrameType(), StackFrame::ARGUMENTS_ADAPTOR);
3665 parameters_count_ = output_frame->ComputeParametersCount();
3666 parameters_ = new Object*[parameters_count_];
3667 for (int i = 0; i < parameters_count_; i++) {
3668 SetParameter(i, output_frame->GetParameter(i));
3673 DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
3674 delete[] expression_stack_;
3675 delete[] parameters_;
// GC support: reports the function, parameters, and expression stack as
// strong roots so they are kept alive and updated by the collector.
3679 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
3680 v->VisitPointer(BitCast<Object**>(&function_));
3681 v->VisitPointers(parameters_, parameters_ + parameters_count_);
3682 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
3685 } } // namespace v8::internal