1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
10 #include "allocation.h"
11 #include "macro-assembler.h"
// Reads a double from |p|, which may not be 8-byte aligned. On hosts that
// cannot perform unaligned reads, the value is assembled from two 32-bit
// loads to avoid emitting a faulting load-double instruction.
19 static inline double read_double_value(Address p) {
20 #ifdef V8_HOST_CAN_READ_UNALIGNED
21 return Memory::double_at(p);
22 #else  // V8_HOST_CAN_READ_UNALIGNED
23 // Prevent gcc from using load-double (mips ldc1) on (possibly)
24 // non-64-bit aligned address.
// NOTE(review): the declaration of |c| is not visible in this chunk;
// presumably a union of { double d; uint32_t u[2]; } — confirm upstream.
29 c.u[0] = *reinterpret_cast<uint32_t*>(p);
30 c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
32 #endif  // V8_HOST_CAN_READ_UNALIGNED
// Reads a 16-byte SIMD128 value directly from |p|.
// NOTE(review): unlike read_double_value above, no unaligned-access guard
// is visible here — presumably |p| is suitably aligned on all hosts; confirm.
35 static inline simd128_value_t read_simd128_value(Address p) {
36 return *reinterpret_cast<simd128_value_t*>(p);
39 class FrameDescription;
40 class TranslationIterator;
41 class DeoptimizedFrameInfo;
// Records a double value that must later be materialized as a HeapNumber.
// T is the destination's designation (e.g. a slot Address or an index into
// a deferred-values list, per the uses in class Deoptimizer below).
44 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
46 HeapNumberMaterializationDescriptor(T destination, double value)
47 : destination_(destination), value_(value) { }
// Where the materialized HeapNumber should be stored.
49 T destination() const { return destination_; }
// The raw double value to materialize.
50 double value() const { return value_; }
// Records a simd128 value that must later be materialized on the heap.
// T is the destination's designation (slot Address or list index), mirroring
// HeapNumberMaterializationDescriptor above.
59 class SIMD128MaterializationDescriptor BASE_EMBEDDED {
61 SIMD128MaterializationDescriptor(T destination, simd128_value_t value)
62 : destination_(destination), value_(value) { }
// Where the materialized value should be stored.
64 T destination() const { return destination_; }
// The raw 128-bit value to materialize.
65 simd128_value_t value() const { return value_; }
// Backing storage for value().
69 simd128_value_t value_;
// Describes an object (possibly an arguments object, possibly a duplicate of
// a previously recorded object) that must be materialized on the heap after
// deoptimization, together with the frame slot that should receive it.
73 class ObjectMaterializationDescriptor BASE_EMBEDDED {
75 ObjectMaterializationDescriptor(
76 Address slot_address, int frame, int length, int duplicate, bool is_args)
77 : slot_address_(slot_address),
78 jsframe_index_(frame),
79 object_length_(length),
80 duplicate_object_(duplicate),
81 is_arguments_(is_args) { }
// Frame slot that receives the materialized object.
83 Address slot_address() const { return slot_address_; }
// Index of the JS frame this object belongs to.
84 int jsframe_index() const { return jsframe_index_; }
// Number of fields/elements captured for this object.
85 int object_length() const { return object_length_; }
// Index of the original object if this is a duplicate, else a sentinel.
86 int duplicate_object() const { return duplicate_object_; }
// True if the object is a (dematerialized) arguments object.
87 bool is_arguments() const { return is_arguments_; }
89 // Only used for allocated receivers in DoComputeConstructStubFrame.
90 void patch_slot_address(intptr_t slot) {
91 slot_address_ = reinterpret_cast<Address>(slot);
95 Address slot_address_;
98 int duplicate_object_;
// Visitor interface used to iterate over the optimized JSFunctions of a
// native context (see Deoptimizer::VisitAllOptimizedFunctions*).
103 class OptimizedFunctionVisitor BASE_EMBEDDED {
105 virtual ~OptimizedFunctionVisitor() {}
107 // Function which is called before iteration of any optimized functions
108 // from given native context.
109 virtual void EnterContext(Context* context) = 0;
// Called once for each optimized function in the context.
111 virtual void VisitFunction(JSFunction* function) = 0;
113 // Function which is called after iteration of all optimized functions
114 // from given native context.
115 virtual void LeaveContext(Context* context) = 0;
// Orchestrates deoptimization: translates an optimized frame into the
// equivalent unoptimized frame(s), materializes deferred heap values, and
// manages the generated deoptimization entry code.
119 class Deoptimizer : public Malloced {
125 // This last bailout type is not really a bailout, but used by the
126 // debugger to deoptimize stack frames to allow inspection.
130 static const int kBailoutTypesWithCodeEntry = SOFT + 1;
// One entry of the lazy-deopt jump table: the entry address, the bailout
// kind, and whether a frame needs to be built before jumping.
132 struct JumpTableEntry : public ZoneObject {
133 inline JumpTableEntry(Address entry,
134 Deoptimizer::BailoutType type,
139 needs_frame(frame) { }
142 Deoptimizer::BailoutType bailout_type;
// Whether tracing is enabled for this bailout/frame type combination.
146 static bool TraceEnabledFor(BailoutType deopt_type,
147 StackFrame::Type frame_type);
// Human-readable name for a bailout type (for tracing).
148 static const char* MessageFor(BailoutType type);
150 int output_count() const { return output_count_; }
152 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
153 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
154 BailoutType bailout_type() const { return bailout_type_; }
156 // Number of created JS frames. Not all created frames are necessarily JS.
157 int jsframe_count() const { return jsframe_count_; }
// New() creates and stores the per-isolate Deoptimizer instance;
// Grab() retrieves and detaches it.
159 static Deoptimizer* New(JSFunction* function,
165 static Deoptimizer* Grab(Isolate* isolate);
167 // The returned object with information on the optimized frame needs to be
168 // freed before another one can be generated.
169 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
172 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
175 // Makes sure that there is enough room in the relocation
176 // information of a code object to perform lazy deoptimization
177 // patching. If there is not enough room a new relocation
178 // information object is allocated and comments are added until it
180 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
182 // Deoptimize the function now. Its current optimized code will never be run
183 // again and any activations of the optimized code will get deoptimized when
184 // execution returns.
185 static void DeoptimizeFunction(JSFunction* function);
187 // Deoptimize all code in the given isolate.
188 static void DeoptimizeAll(Isolate* isolate);
190 // Deoptimize code associated with the given global object.
191 static void DeoptimizeGlobalObject(JSObject* object);
193 // Deoptimizes all optimized code that has been previously marked
194 // (via code->set_marked_for_deoptimization) and unlinks all functions that
195 // refer to that code.
196 static void DeoptimizeMarkedCode(Isolate* isolate);
198 // Visit all the known optimized functions in a given isolate.
199 static void VisitAllOptimizedFunctions(
200 Isolate* isolate, OptimizedFunctionVisitor* visitor);
202 // The size in bytes of the code required at a lazy deopt patch site.
203 static int patch_size();
// Materializes heap objects/numbers that were deferred during frame
// translation, writing them into the output frames.
207 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
209 void MaterializeHeapNumbersForDebuggerInspectableFrame(
210 Address parameters_top,
211 uint32_t parameters_size,
212 Address expressions_top,
213 uint32_t expressions_size,
214 DeoptimizedFrameInfo* info);
// Entry point called (from generated code) to compute the output frames.
216 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
220 CALCULATE_ENTRY_ADDRESS,
// Address of the deoptimization entry code; behavior regarding entry-code
// generation is controlled by |mode|.
225 static Address GetDeoptimizationEntry(
229 GetEntryMode mode = ENSURE_ENTRY_CODE);
230 static int GetDeoptimizationId(Isolate* isolate,
233 static int GetOutputInfo(DeoptimizationOutputData* data,
235 SharedFunctionInfo* shared);
237 // Code generation support.
238 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
239 static int output_count_offset() {
240 return OFFSET_OF(Deoptimizer, output_count_);
242 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
244 static int has_alignment_padding_offset() {
245 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
248 static int GetDeoptimizedCodeCount(Isolate* isolate);
// Sentinel returned when an address is not a deoptimization entry.
250 static const int kNotDeoptimizationEntry = -1;
252 // Generators for the deoptimization entry code.
253 class EntryGenerator BASE_EMBEDDED {
255 EntryGenerator(MacroAssembler* masm, BailoutType type)
256 : masm_(masm), type_(type) { }
257 virtual ~EntryGenerator() { }
262 MacroAssembler* masm() const { return masm_; }
263 BailoutType type() const { return type_; }
264 Isolate* isolate() const { return masm_->isolate(); }
266 virtual void GeneratePrologue() { }
269 MacroAssembler* masm_;
270 Deoptimizer::BailoutType type_;
// Generator for a table of |count| consecutive deoptimization entries.
273 class TableEntryGenerator : public EntryGenerator {
275 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
276 : EntryGenerator(masm, type), count_(count) { }
279 virtual void GeneratePrologue();
282 int count() const { return count_; }
287 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
// Upper bound on the size of the generated deoptimization table.
289 static size_t GetMaxDeoptTableSize();
291 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
295 Isolate* isolate() const { return isolate_; }
// Bounds for the number of generated deoptimization table entries.
298 static const int kMinNumberOfEntries = 64;
299 static const int kMaxNumberOfEntries = 16384;
301 Deoptimizer(Isolate* isolate,
302 JSFunction* function,
307 Code* optimized_code);
308 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
309 void PrintFunctionName();
310 void DeleteFrameDescriptions();
// Frame translation: builds the unoptimized output frame(s) from the
// optimized input frame; one DoCompute* routine per frame kind.
312 void DoComputeOutputFrames();
313 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
314 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
316 void DoComputeConstructStubFrame(TranslationIterator* iterator,
318 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
320 bool is_setter_stub_frame);
321 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
324 // Translate object, store the result into an auxiliary array
325 // (deferred_objects_tagged_values_).
326 void DoTranslateObject(TranslationIterator* iterator,
330 // Translate value, store the result into the given frame slot.
331 void DoTranslateCommand(TranslationIterator* iterator,
333 unsigned output_offset);
335 // Translate object, do not store the result anywhere (but do update
336 // the deferred materialization array).
337 void DoTranslateObjectAndSkip(TranslationIterator* iterator);
// Frame size computation helpers (all sizes in bytes).
339 unsigned ComputeInputFrameSize() const;
340 unsigned ComputeFixedSize(JSFunction* function) const;
342 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
343 unsigned ComputeOutgoingArgumentSize() const;
345 Object* ComputeLiteral(int index) const;
// Record values that must be materialized on the heap after the frames
// have been built (objects, doubles and SIMD128 values).
347 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
348 void AddObjectDuplication(intptr_t slot, int object_index);
349 void AddObjectTaggedValue(intptr_t value);
350 void AddObjectDoubleValue(double value);
351 void AddObjectSIMD128Value(simd128_value_t value, int translation_opcode);
352 void AddDoubleValue(intptr_t slot_address, double value);
353 void AddSIMD128Value(intptr_t slot_address, simd128_value_t value,
354 int translation_opcode);
// Whether the frame owning the given arguments object had its arguments
// adapted. Note jsframe indices are recorded innermost-first, hence the
// reversal against jsframe_count_.
356 bool ArgumentsObjectIsAdapted(int object_index) {
357 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
358 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
359 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
// The JSFunction of the frame owning the given arguments object.
362 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
363 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
364 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
365 return jsframe_functions_[reverse_jsframe_index];
368 // Helper function for heap object materialization.
369 Handle<Object> MaterializeNextHeapObject();
370 Handle<Object> MaterializeNextValue();
372 static void GenerateDeoptimizationEntries(
373 MacroAssembler* masm, int count, BailoutType type);
375 // Marks all the code in the given context for deoptimization.
376 static void MarkAllCodeForContext(Context* native_context);
378 // Visit all the known optimized functions in a given context.
379 static void VisitAllOptimizedFunctionsForContext(
380 Context* context, OptimizedFunctionVisitor* visitor);
382 // Deoptimizes all code marked in the given context.
383 static void DeoptimizeMarkedCodeForContext(Context* native_context);
385 // Patch the given code so that it will deoptimize itself.
386 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
388 // Searches the list of known deoptimizing code for a Code object
389 // containing the given address (which is supposedly faster than
390 // searching all code objects).
391 Code* FindDeoptimizingCode(Address addr);
393 // Fill the input from a JavaScript frame. This is used when
394 // the debugger needs to inspect an optimized frame. For normal
395 // deoptimizations the input frame is filled in generated code.
396 void FillInputFrame(Address tos, JavaScriptFrame* frame);
398 // Fill the given output frame's registers to contain the failure handler
399 // address and the number of parameters for a stub failure trampoline.
400 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
401 CodeStubInterfaceDescriptor* desc);
403 // Fill the given output frame's simd128 registers with the original values
404 // from the input frame's simd128 registers.
405 void CopySIMD128Registers(FrameDescription* output_frame);
407 // Determines whether the input frame contains alignment padding by looking
408 // at the dynamic alignment state slot inside the frame.
409 bool HasAlignmentPadding(JSFunction* function);
411 // Select the version of NotifyStubFailure builtin that either saves or
412 // doesn't save the double registers depending on CPU features.
413 Code* NotifyStubFailureBuiltin();
416 JSFunction* function_;
417 Code* compiled_code_;
418 unsigned bailout_id_;
419 BailoutType bailout_type_;
422 int has_alignment_padding_;
424 // Input frame description.
425 FrameDescription* input_;
426 // Number of output frames.
428 // Number of output js frames.
430 // Array of output frame descriptions.
431 FrameDescription** output_;
433 // Deferred values to be materialized.
434 List<Object*> deferred_objects_tagged_values_;
435 List<HeapNumberMaterializationDescriptor<int> >
436 deferred_objects_double_values_;
437 List<SIMD128MaterializationDescriptor<int> >
438 deferred_objects_float32x4_values_;
439 List<SIMD128MaterializationDescriptor<int> >
440 deferred_objects_float64x2_values_;
441 List<SIMD128MaterializationDescriptor<int> >
442 deferred_objects_int32x4_values_;
443 List<ObjectMaterializationDescriptor> deferred_objects_;
444 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
445 List<SIMD128MaterializationDescriptor<Address> > deferred_float32x4s_;
446 List<SIMD128MaterializationDescriptor<Address> > deferred_float64x2s_;
447 List<SIMD128MaterializationDescriptor<Address> > deferred_int32x4s_;
449 // Key for lookup of previously materialized objects
451 Handle<FixedArray> previously_materialized_objects_;
452 int prev_materialized_count_;
454 // Output frame information. Only used during heap object materialization.
455 List<Handle<JSFunction> > jsframe_functions_;
456 List<bool> jsframe_has_adapted_arguments_;
458 // Materialized objects. Only used during heap object materialization.
459 List<Handle<Object> >* materialized_values_;
460 List<Handle<Object> >* materialized_objects_;
461 int materialization_value_index_;
462 int materialization_object_index_;
465 DisallowHeapAllocation* disallow_heap_allocation_;
468 CodeTracer::Scope* trace_scope_;
// Size of one deoptimization table entry; defined per platform.
470 static const int table_entry_size_;
472 friend class FrameDescription;
473 friend class DeoptimizedFrameInfo;
// Describes one (input or output) stack frame during deoptimization.
// Instances are over-allocated so that the trailing frame_content_ array
// covers the whole frame's slots. Not GC safe (see DeoptimizedFrameInfo).
477 class FrameDescription {
479 FrameDescription(uint32_t frame_size,
480 JSFunction* function);
// Placement-style new: over-allocates by frame_size bytes so that
// frame_content_[] extends to hold all frame slots.
482 void* operator new(size_t size, uint32_t frame_size) {
483 // Subtracts kPointerSize, as the member frame_content_ already supplies
484 // the first element of the area to store the frame.
485 return malloc(size + frame_size - kPointerSize);
488 void operator delete(void* pointer, uint32_t frame_size) {
492 void operator delete(void* description) {
496 uint32_t GetFrameSize() const {
497 ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
498 return static_cast<uint32_t>(frame_size_);
501 JSFunction* GetFunction() const { return function_; }
503 unsigned GetOffsetFromSlotIndex(int slot_index);
505 intptr_t GetFrameSlot(unsigned offset) {
506 return *GetFrameSlotPointer(offset);
// Reads a double from a frame slot; goes through read_double_value since
// the slot may not be 8-byte aligned on 32-bit targets.
509 double GetDoubleFrameSlot(unsigned offset) {
510 intptr_t* ptr = GetFrameSlotPointer(offset);
511 return read_double_value(reinterpret_cast<Address>(ptr));
514 simd128_value_t GetSIMD128FrameSlot(unsigned offset) {
515 intptr_t* ptr = GetFrameSlotPointer(offset);
516 return read_simd128_value(reinterpret_cast<Address>(ptr));
519 void SetFrameSlot(unsigned offset, intptr_t value) {
520 *GetFrameSlotPointer(offset) = value;
// Platform-specific setters for the caller-saved frame fields.
523 void SetCallerPc(unsigned offset, intptr_t value);
525 void SetCallerFp(unsigned offset, intptr_t value);
527 void SetCallerConstantPool(unsigned offset, intptr_t value);
529 intptr_t GetRegister(unsigned n) const {
531 // This convoluted ASSERT is needed to work around a gcc problem that
532 // improperly detects an array bounds overflow in optimized debug builds
533 // when using a plain ASSERT.
534 if (n >= ARRAY_SIZE(registers_)) {
539 return registers_[n];
542 double GetDoubleRegister(unsigned n) const;
544 simd128_value_t GetSIMD128Register(unsigned n) const {
545 ASSERT(n < ARRAY_SIZE(simd128_registers_));
546 return simd128_registers_[n];
549 void SetRegister(unsigned n, intptr_t value) {
550 ASSERT(n < ARRAY_SIZE(registers_));
551 registers_[n] = value;
554 void SetDoubleRegister(unsigned n, double value);
556 void SetSIMD128Register(unsigned n, simd128_value_t value) {
557 ASSERT(n < ARRAY_SIZE(simd128_registers_));
558 simd128_registers_[n] = value;
// Accessors for the frame's top-of-stack, pc, fp, context and constant
// pool fields.
561 intptr_t GetTop() const { return top_; }
562 void SetTop(intptr_t top) { top_ = top; }
564 intptr_t GetPc() const { return pc_; }
565 void SetPc(intptr_t pc) { pc_ = pc; }
567 intptr_t GetFp() const { return fp_; }
568 void SetFp(intptr_t fp) { fp_ = fp; }
570 intptr_t GetContext() const { return context_; }
571 void SetContext(intptr_t context) { context_ = context; }
573 intptr_t GetConstantPool() const { return constant_pool_; }
574 void SetConstantPool(intptr_t constant_pool) {
575 constant_pool_ = constant_pool;
578 Smi* GetState() const { return state_; }
579 void SetState(Smi* state) { state_ = state; }
581 void SetContinuation(intptr_t pc) { continuation_ = pc; }
583 StackFrame::Type GetFrameType() const { return type_; }
584 void SetFrameType(StackFrame::Type type) { type_ = type; }
586 // Get the incoming arguments count.
587 int ComputeParametersCount();
589 // Get a parameter value for an unoptimized frame.
590 Object* GetParameter(int index);
592 // Get the expression stack height for a unoptimized frame.
593 unsigned GetExpressionCount();
595 // Get the expression stack value for an unoptimized frame.
596 Object* GetExpression(int index);
// Field offsets used by generated code to address this struct directly.
598 static int registers_offset() {
599 return OFFSET_OF(FrameDescription, registers_);
602 static int simd128_registers_offset() {
603 return OFFSET_OF(FrameDescription, simd128_registers_);
606 static int frame_size_offset() {
607 return OFFSET_OF(FrameDescription, frame_size_);
610 static int pc_offset() {
611 return OFFSET_OF(FrameDescription, pc_);
614 static int state_offset() {
615 return OFFSET_OF(FrameDescription, state_);
618 static int continuation_offset() {
619 return OFFSET_OF(FrameDescription, continuation_);
622 static int frame_content_offset() {
623 return OFFSET_OF(FrameDescription, frame_content_);
// Zap value used to mark uninitialized frame content.
627 static const uint32_t kZapUint32 = 0xbeeddead;
629 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
630 // keep the variable-size array frame_content_ of type intptr_t at
631 // the end of the structure aligned.
632 uintptr_t frame_size_; // Number of bytes.
633 JSFunction* function_;
634 intptr_t registers_[Register::kNumRegisters];
635 simd128_value_t simd128_registers_[SIMD128Register::kMaxNumRegisters];
640 intptr_t constant_pool_;
641 StackFrame::Type type_;
644 // Continuation is the PC where the execution continues after
646 intptr_t continuation_;
648 // This must be at the end of the object as the object is allocated larger
649 // than its definition indicates, to extend this array.
650 intptr_t frame_content_[1];
652 intptr_t* GetFrameSlotPointer(unsigned offset) {
653 ASSERT(offset < frame_size_);
654 return reinterpret_cast<intptr_t*>(
655 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
658 int ComputeFixedSize();
// Per-isolate storage for the generated deoptimization entry code and the
// currently active Deoptimizer / debugger-inspectable frame state.
662 class DeoptimizerData {
664 explicit DeoptimizerData(MemoryAllocator* allocator);
// GC support: visits object pointers held on behalf of the debugger.
667 void Iterate(ObjectVisitor* v);
670 MemoryAllocator* allocator_;
// Entry count and code chunk per bailout type (indexed by BailoutType).
671 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
672 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
674 DeoptimizedFrameInfo* deoptimized_frame_info_;
// The Deoptimizer created by New() and pending retrieval via Grab().
676 Deoptimizer* current_;
678 friend class Deoptimizer;
680 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Zone-allocated byte buffer into which Translation serializes its commands.
684 class TranslationBuffer BASE_EMBEDDED {
686 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
// Current write position; recorded by Translation as its start index.
688 int CurrentIndex() const { return contents_.length(); }
689 void Add(int32_t value, Zone* zone);
// Copies the accumulated bytes into a heap-allocated ByteArray.
691 Handle<ByteArray> CreateByteArray(Factory* factory);
694 ZoneList<uint8_t> contents_;
// Sequentially decodes values from a serialized translation ByteArray,
// starting at the given index.
698 class TranslationIterator BASE_EMBEDDED {
700 TranslationIterator(ByteArray* buffer, int index)
701 : buffer_(buffer), index_(index) {
702 ASSERT(index >= 0 && index < buffer->length());
707 bool HasNext() const { return index_ < buffer_->length(); }
// Skips the next n decoded values.
710 for (int i = 0; i < n; i++) Next();
// Opcodes of the serialized translation format. Each V(...) entry becomes a
// Translation::Opcode enum value: frame kinds, object markers, and the
// register/stack-slot locations of values. (No comments inside the macro —
// the backslash continuations must stay unbroken.)
719 #define TRANSLATION_OPCODE_LIST(V) \
722 V(CONSTRUCT_STUB_FRAME) \
723 V(GETTER_STUB_FRAME) \
724 V(SETTER_STUB_FRAME) \
725 V(ARGUMENTS_ADAPTOR_FRAME) \
726 V(COMPILED_STUB_FRAME) \
727 V(DUPLICATED_OBJECT) \
728 V(ARGUMENTS_OBJECT) \
734 V(FLOAT32x4_REGISTER) \
735 V(FLOAT64x2_REGISTER) \
736 V(INT32x4_REGISTER) \
738 V(INT32_STACK_SLOT) \
739 V(UINT32_STACK_SLOT) \
740 V(DOUBLE_STACK_SLOT) \
741 V(FLOAT32x4_STACK_SLOT) \
742 V(FLOAT64x2_STACK_SLOT) \
743 V(INT32x4_STACK_SLOT) \
// Builder that serializes the layout of the unoptimized frames corresponding
// to one deopt point into a TranslationBuffer.
747 class Translation BASE_EMBEDDED {
749 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
751 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
754 #undef DECLARE_TRANSLATION_OPCODE_ENUM
756 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
759 index_(buffer->CurrentIndex()),
// Every translation starts with BEGIN followed by the frame counts.
761 buffer_->Add(BEGIN, zone);
762 buffer_->Add(frame_count, zone);
763 buffer_->Add(jsframe_count, zone);
// Index of this translation's first byte in the underlying buffer.
766 int index() const { return index_; }
// Commands: begin a frame/object, then store each of its values.
769 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
770 void BeginCompiledStubFrame();
771 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
772 void BeginConstructStubFrame(int literal_id, unsigned height);
773 void BeginGetterStubFrame(int literal_id);
774 void BeginSetterStubFrame(int literal_id);
775 void BeginArgumentsObject(int args_length);
776 void BeginCapturedObject(int length);
777 void DuplicateObject(int object_index);
778 void StoreRegister(Register reg);
779 void StoreInt32Register(Register reg);
780 void StoreUint32Register(Register reg);
781 void StoreDoubleRegister(DoubleRegister reg);
782 void StoreSIMD128Register(SIMD128Register reg, Opcode opcode);
783 void StoreStackSlot(int index);
784 void StoreInt32StackSlot(int index);
785 void StoreUint32StackSlot(int index);
786 void StoreDoubleStackSlot(int index);
787 void StoreSIMD128StackSlot(int index, Opcode opcode);
788 void StoreLiteral(int literal_id);
789 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
791 Zone* zone() const { return zone_; }
// Number of operand values that follow the given opcode in the stream.
793 static int NumberOfOperandsFor(Opcode opcode);
795 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
796 static const char* StringFor(Opcode opcode);
799 // A literal id which refers to the JSFunction itself.
800 static const int kSelfLiteralId = -239;
803 TranslationBuffer* buffer_;
// Reference to one value slot of an optimized frame, used when the debugger
// reconstructs the contents of the equivalent unoptimized frame.
809 class SlotRef BASE_EMBEDDED {
811 enum SlotRepresentation {
821 DEFERRED_OBJECT, // Object captured by the escape analysis.
822 // The number of nested objects can be obtained
823 // with the DeferredObjectLength() method
824 // (the SlotRefs of the nested objects follow
825 // this SlotRef in the depth-first order.)
826 DUPLICATE_OBJECT, // Duplicated object of a deferred object.
827 ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
828 // in sync, it should not be materialized.
// Default: unknown slot with no address.
832 : addr_(NULL), representation_(UNKNOWN) { }
834 SlotRef(Address addr, SlotRepresentation representation)
835 : addr_(addr), representation_(representation) { }
// A slot holding a literal value rather than a frame location.
837 SlotRef(Isolate* isolate, Object* literal)
838 : literal_(literal, isolate), representation_(LITERAL) { }
// Factory helpers for the object-valued slot kinds.
840 static SlotRef NewArgumentsObject(int length) {
842 slot.representation_ = ARGUMENTS_OBJECT;
843 slot.deferred_object_length_ = length;
847 static SlotRef NewDeferredObject(int length) {
849 slot.representation_ = DEFERRED_OBJECT;
850 slot.deferred_object_length_ = length;
854 SlotRepresentation Representation() { return representation_; }
856 static SlotRef NewDuplicateObject(int id) {
858 slot.representation_ = DUPLICATE_OBJECT;
859 slot.duplicate_object_id_ = id;
// Number of nested SlotRefs belonging to this deferred/arguments object;
// plain value slots have no children.
863 int GetChildrenCount() {
864 if (representation_ == DEFERRED_OBJECT ||
865 representation_ == ARGUMENTS_OBJECT) {
866 return deferred_object_length_;
872 int DuplicateObjectId() { return duplicate_object_id_; }
// Resolves the slot to a heap value.
874 Handle<Object> GetValue(Isolate* isolate);
878 Handle<Object> literal_;
879 SlotRepresentation representation_;
880 int deferred_object_length_;
881 int duplicate_object_id_;
// Builds the values of one inlined frame from SlotRefs, materializing
// captured objects as needed (debugger support).
884 class SlotRefValueBuilder BASE_EMBEDDED {
887 JavaScriptFrame* frame,
888 int inlined_frame_index,
889 int formal_parameter_count);
// Usage protocol: Prepare(), then GetNext() once per value, then Finish().
891 void Prepare(Isolate* isolate);
892 Handle<Object> GetNext(Isolate* isolate, int level);
893 void Finish(Isolate* isolate);
895 int args_length() { return args_length_; }
898 List<Handle<Object> > materialized_objects_;
899 Handle<FixedArray> previously_materialized_objects_;
900 int prev_materialized_count_;
901 Address stack_frame_id_;
902 List<SlotRef> slot_refs_;
905 int first_slot_index_;
// Decodes the next translation command into a SlotRef for one argument.
907 static SlotRef ComputeSlotForNextArgument(
908 Translation::Opcode opcode,
909 TranslationIterator* iterator,
910 DeoptimizationInputData* data,
911 JavaScriptFrame* frame);
913 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
// Maps a translation slot index to a frame address: non-negative indices
// address locals (at/below kLocal0Offset), negative indices address
// incoming parameters.
915 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
916 if (slot_index >= 0) {
917 const int offset = JavaScriptFrameConstants::kLocal0Offset;
918 return frame->fp() + offset - (slot_index * kPointerSize);
920 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
921 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
925 Handle<Object> GetDeferredObject(Isolate* isolate);
// Per-isolate store of materialized object arrays, keyed by stack frame
// pointer, so repeated inspections of the same frame observe the same
// object identities.
928 class MaterializedObjectStore {
930 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
933 Handle<FixedArray> Get(Address fp);
934 void Set(Address fp, Handle<FixedArray> materialized_objects);
935 void Remove(Address fp);
938 Isolate* isolate() { return isolate_; }
939 Handle<FixedArray> GetStackEntries();
940 Handle<FixedArray> EnsureStackEntries(int size);
// Index of the entry for the given frame pointer in frame_fps_.
942 int StackIdToIndex(Address fp);
945 List<Address> frame_fps_;
949 // Class used to represent an unoptimized frame when the debugger
950 // needs to inspect a frame that is part of an optimized frame. The
951 // internally used FrameDescription objects are not GC safe so for use
952 // by the debugger frame information is copied to an object of this type.
953 // Represents parameters in unadapted form so their number might mismatch
954 // formal parameter count.
955 class DeoptimizedFrameInfo : public Malloced {
957 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
959 bool has_arguments_adaptor,
960 bool has_construct_stub);
961 virtual ~DeoptimizedFrameInfo();
// GC support: visits the copied parameter and expression-stack pointers.
964 void Iterate(ObjectVisitor* v);
966 // Return the number of incoming arguments.
967 int parameters_count() { return parameters_count_; }
969 // Return the height of the expression stack.
970 int expression_count() { return expression_count_; }
972 // Get the frame function.
973 JSFunction* GetFunction() {
977 // Check if this frame is preceded by construct stub frame. The bottom-most
978 // inlined frame might still be called by an uninlined construct stub.
979 bool HasConstructStub() {
980 return has_construct_stub_;
983 // Get an incoming argument.
984 Object* GetParameter(int index) {
985 ASSERT(0 <= index && index < parameters_count());
986 return parameters_[index];
989 // Get an expression from the expression stack.
990 Object* GetExpression(int index) {
991 ASSERT(0 <= index && index < expression_count());
992 return expression_stack_[index];
// Source position corresponding to the deoptimization point.
995 int GetSourcePosition() {
996 return source_position_;
1000 // Set an incoming argument.
1001 void SetParameter(int index, Object* obj) {
1002 ASSERT(0 <= index && index < parameters_count());
1003 parameters_[index] = obj;
1006 // Set an expression on the expression stack.
1007 void SetExpression(int index, Object* obj) {
1008 ASSERT(0 <= index && index < expression_count());
1009 expression_stack_[index] = obj;
1012 JSFunction* function_;
1013 bool has_construct_stub_;
1014 int parameters_count_;
1015 int expression_count_;
// Heap-allocated copies of the frame's parameters and expression stack.
1016 Object** parameters_;
1017 Object** expression_stack_;
1018 int source_position_;
1020 friend class Deoptimizer;
1023 } } // namespace v8::internal
1025 #endif // V8_DEOPTIMIZER_H_