1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
10 #include "src/allocation.h"
11 #include "src/macro-assembler.h"
12 #include "src/zone-inl.h"
19 static inline double read_double_value(Address p) {
20 #ifdef V8_HOST_CAN_READ_UNALIGNED
21 return Memory::double_at(p);
22 #else // V8_HOST_CAN_READ_UNALIGNED
23 // Prevent gcc from using load-double (mips ldc1) on (possibly)
24 // non-64-bit aligned address.
29 c.u[0] = *reinterpret_cast<uint32_t*>(p);
30 c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
32 #endif // V8_HOST_CAN_READ_UNALIGNED
35 static inline simd128_value_t read_simd128_value(Address p) {
36 return *reinterpret_cast<simd128_value_t*>(p);
// Forward declarations for types defined later in this header.
39 class FrameDescription;
40 class TranslationIterator;
41 class DeoptimizedFrameInfo;
44 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
46 HeapNumberMaterializationDescriptor(T destination, double value)
47 : destination_(destination), value_(value) { }
49 T destination() const { return destination_; }
50 double value() const { return value_; }
59 class SIMD128MaterializationDescriptor BASE_EMBEDDED {
61 SIMD128MaterializationDescriptor(T destination, simd128_value_t value)
62 : destination_(destination), value_(value) { }
64 T destination() const { return destination_; }
65 simd128_value_t value() const { return value_; }
69 simd128_value_t value_;
73 class ObjectMaterializationDescriptor BASE_EMBEDDED {
75 ObjectMaterializationDescriptor(
76 Address slot_address, int frame, int length, int duplicate, bool is_args)
77 : slot_address_(slot_address),
78 jsframe_index_(frame),
79 object_length_(length),
80 duplicate_object_(duplicate),
81 is_arguments_(is_args) { }
83 Address slot_address() const { return slot_address_; }
84 int jsframe_index() const { return jsframe_index_; }
85 int object_length() const { return object_length_; }
86 int duplicate_object() const { return duplicate_object_; }
87 bool is_arguments() const { return is_arguments_; }
89 // Only used for allocated receivers in DoComputeConstructStubFrame.
90 void patch_slot_address(intptr_t slot) {
91 slot_address_ = reinterpret_cast<Address>(slot);
95 Address slot_address_;
98 int duplicate_object_;
103 class OptimizedFunctionVisitor BASE_EMBEDDED {
105 virtual ~OptimizedFunctionVisitor() {}
107 // Function which is called before iteration of any optimized functions
108 // from given native context.
109 virtual void EnterContext(Context* context) = 0;
111 virtual void VisitFunction(JSFunction* function) = 0;
113 // Function which is called after iteration of all optimized functions
114 // from given native context.
115 virtual void LeaveContext(Context* context) = 0;
// Deoptimizer: given optimized code and a bailout point, reads the recorded
// translation and builds the equivalent unoptimized frame(s) plus any heap
// objects (numbers, SIMD values, escape-analyzed objects) that must be
// re-materialized.
// NOTE(review): this extraction is missing interior lines (the embedded
// numbering is discontinuous) — e.g. the BailoutType enum body, parts of
// several parameter lists, and various closing braces are not visible here.
119 class Deoptimizer : public Malloced {
125 // This last bailout type is not really a bailout, but used by the
126 // debugger to deoptimize stack frames to allow inspection.
130 static const int kBailoutTypesWithCodeEntry = SOFT + 1;
// One entry of the lazy-deoptimization jump table built by the back end.
132 struct JumpTableEntry : public ZoneObject {
133 inline JumpTableEntry(Address entry,
134 Deoptimizer::BailoutType type,
139 needs_frame(frame) { }
142 Deoptimizer::BailoutType bailout_type;
146 static bool TraceEnabledFor(BailoutType deopt_type,
147 StackFrame::Type frame_type);
148 static const char* MessageFor(BailoutType type);
150 int output_count() const { return output_count_; }
152 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
153 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
154 BailoutType bailout_type() const { return bailout_type_; }
156 // Number of created JS frames. Not all created frames are necessarily JS.
157 int jsframe_count() const { return jsframe_count_; }
// New/Grab pair: New creates the singleton Deoptimizer for the isolate,
// Grab hands it over to the caller (who takes ownership).
159 static Deoptimizer* New(JSFunction* function,
165 static Deoptimizer* Grab(Isolate* isolate);
167 // The returned object with information on the optimized frame needs to be
168 // freed before another one can be generated.
169 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
172 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
175 // Makes sure that there is enough room in the relocation
176 // information of a code object to perform lazy deoptimization
177 // patching. If there is not enough room a new relocation
178 // information object is allocated and comments are added until it
180 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
182 // Deoptimize the function now. Its current optimized code will never be run
183 // again and any activations of the optimized code will get deoptimized when
184 // execution returns.
185 static void DeoptimizeFunction(JSFunction* function);
187 // Deoptimize all code in the given isolate.
188 static void DeoptimizeAll(Isolate* isolate);
190 // Deoptimize code associated with the given global object.
191 static void DeoptimizeGlobalObject(JSObject* object);
193 // Deoptimizes all optimized code that has been previously marked
194 // (via code->set_marked_for_deoptimization) and unlinks all functions that
195 // refer to that code.
196 static void DeoptimizeMarkedCode(Isolate* isolate);
198 // Visit all the known optimized functions in a given isolate.
199 static void VisitAllOptimizedFunctions(
200 Isolate* isolate, OptimizedFunctionVisitor* visitor);
202 // The size in bytes of the code required at a lazy deopt patch site.
203 static int patch_size();
207 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
209 void MaterializeHeapNumbersForDebuggerInspectableFrame(
210 Address parameters_top,
211 uint32_t parameters_size,
212 Address expressions_top,
213 uint32_t expressions_size,
214 DeoptimizedFrameInfo* info);
216 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
// NOTE(review): the GetEntryMode enum head is missing from this extraction;
// ENSURE_ENTRY_CODE (used as a default below) is one of its members.
220 CALCULATE_ENTRY_ADDRESS,
225 static Address GetDeoptimizationEntry(
229 GetEntryMode mode = ENSURE_ENTRY_CODE);
230 static int GetDeoptimizationId(Isolate* isolate,
233 static int GetOutputInfo(DeoptimizationOutputData* data,
235 SharedFunctionInfo* shared);
237 // Code generation support.
238 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
239 static int output_count_offset() {
240 return OFFSET_OF(Deoptimizer, output_count_);
242 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
244 static int has_alignment_padding_offset() {
245 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
248 static int GetDeoptimizedCodeCount(Isolate* isolate);
250 static const int kNotDeoptimizationEntry = -1;
252 // Generators for the deoptimization entry code.
253 class EntryGenerator BASE_EMBEDDED {
255 EntryGenerator(MacroAssembler* masm, BailoutType type)
256 : masm_(masm), type_(type) { }
257 virtual ~EntryGenerator() { }
262 MacroAssembler* masm() const { return masm_; }
263 BailoutType type() const { return type_; }
264 Isolate* isolate() const { return masm_->isolate(); }
266 virtual void GeneratePrologue() { }
269 MacroAssembler* masm_;
270 Deoptimizer::BailoutType type_;
// Table-driven variant: emits |count| deopt entries in one code object.
273 class TableEntryGenerator : public EntryGenerator {
275 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
276 : EntryGenerator(masm, type), count_(count) { }
279 virtual void GeneratePrologue();
282 int count() const { return count_; }
287 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
289 static size_t GetMaxDeoptTableSize();
291 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
295 Isolate* isolate() const { return isolate_; }
298 static const int kMinNumberOfEntries = 64;
299 static const int kMaxNumberOfEntries = 16384;
301 Deoptimizer(Isolate* isolate,
302 JSFunction* function,
307 Code* optimized_code);
308 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
309 void PrintFunctionName();
310 void DeleteFrameDescriptions();
// DoCompute* helpers: one per output-frame flavor, driven by the
// translation stream.
312 void DoComputeOutputFrames();
313 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
314 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
316 void DoComputeConstructStubFrame(TranslationIterator* iterator,
318 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
320 bool is_setter_stub_frame);
321 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
324 // Translate object, store the result into an auxiliary array
325 // (deferred_objects_tagged_values_).
326 void DoTranslateObject(TranslationIterator* iterator,
330 // Translate value, store the result into the given frame slot.
331 void DoTranslateCommand(TranslationIterator* iterator,
333 unsigned output_offset);
335 // Translate object, do not store the result anywhere (but do update
336 // the deferred materialization array).
337 void DoTranslateObjectAndSkip(TranslationIterator* iterator);
339 unsigned ComputeInputFrameSize() const;
340 unsigned ComputeFixedSize(JSFunction* function) const;
342 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
343 unsigned ComputeOutgoingArgumentSize() const;
345 Object* ComputeLiteral(int index) const;
// Add* helpers append to the deferred-materialization lists declared below.
347 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
348 void AddObjectDuplication(intptr_t slot, int object_index);
349 void AddObjectTaggedValue(intptr_t value);
350 void AddObjectDoubleValue(double value);
351 void AddObjectSIMD128Value(simd128_value_t value, int translation_opcode);
352 void AddDoubleValue(intptr_t slot_address, double value);
353 void AddSIMD128Value(intptr_t slot_address, simd128_value_t value,
354 int translation_opcode);
356 bool ArgumentsObjectIsAdapted(int object_index) {
357 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
// Frames are recorded outermost-first, so index from the end.
358 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
359 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
362 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
363 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
364 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
365 return jsframe_functions_[reverse_jsframe_index];
368 // Helper function for heap object materialization.
369 Handle<Object> MaterializeNextHeapObject();
370 Handle<Object> MaterializeNextValue();
372 static void GenerateDeoptimizationEntries(
373 MacroAssembler* masm, int count, BailoutType type);
375 // Marks all the code in the given context for deoptimization.
376 static void MarkAllCodeForContext(Context* native_context);
378 // Visit all the known optimized functions in a given context.
379 static void VisitAllOptimizedFunctionsForContext(
380 Context* context, OptimizedFunctionVisitor* visitor);
382 // Deoptimizes all code marked in the given context.
383 static void DeoptimizeMarkedCodeForContext(Context* native_context);
385 // Patch the given code so that it will deoptimize itself.
386 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
388 // Searches the list of known deoptimizing code for a Code object
389 // containing the given address (which is supposedly faster than
390 // searching all code objects).
391 Code* FindDeoptimizingCode(Address addr);
393 // Fill the input from a JavaScript frame. This is used when
394 // the debugger needs to inspect an optimized frame. For normal
395 // deoptimizations the input frame is filled in generated code.
396 void FillInputFrame(Address tos, JavaScriptFrame* frame);
398 // Fill the given output frame's registers to contain the failure handler
399 // address and the number of parameters for a stub failure trampoline.
400 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
401 CodeStubInterfaceDescriptor* desc);
403 // Fill the given output frame's simd128 registers with the original values
404 // from the input frame's simd128 registers.
405 void CopySIMD128Registers(FrameDescription* output_frame);
407 // Determines whether the input frame contains alignment padding by looking
408 // at the dynamic alignment state slot inside the frame.
409 bool HasAlignmentPadding(JSFunction* function);
412 JSFunction* function_;
413 Code* compiled_code_;
414 unsigned bailout_id_;
415 BailoutType bailout_type_;
418 int has_alignment_padding_;
420 // Input frame description.
421 FrameDescription* input_;
422 // Number of output frames.
424 // Number of output js frames.
426 // Array of output frame descriptions.
427 FrameDescription** output_;
429 // Deferred values to be materialized.
430 List<Object*> deferred_objects_tagged_values_;
431 List<HeapNumberMaterializationDescriptor<int> >
432 deferred_objects_double_values_;
433 List<SIMD128MaterializationDescriptor<int> >
434 deferred_objects_float32x4_values_;
435 List<SIMD128MaterializationDescriptor<int> >
436 deferred_objects_float64x2_values_;
437 List<SIMD128MaterializationDescriptor<int> >
438 deferred_objects_int32x4_values_;
439 List<ObjectMaterializationDescriptor> deferred_objects_;
440 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
441 List<SIMD128MaterializationDescriptor<Address> > deferred_float32x4s_;
442 List<SIMD128MaterializationDescriptor<Address> > deferred_float64x2s_;
443 List<SIMD128MaterializationDescriptor<Address> > deferred_int32x4s_;
445 // Key for lookup of previously materialized objects
447 Handle<FixedArray> previously_materialized_objects_;
448 int prev_materialized_count_;
450 // Output frame information. Only used during heap object materialization.
451 List<Handle<JSFunction> > jsframe_functions_;
452 List<bool> jsframe_has_adapted_arguments_;
454 // Materialized objects. Only used during heap object materialization.
455 List<Handle<Object> >* materialized_values_;
456 List<Handle<Object> >* materialized_objects_;
457 int materialization_value_index_;
458 int materialization_object_index_;
461 DisallowHeapAllocation* disallow_heap_allocation_;
464 CodeTracer::Scope* trace_scope_;
466 static const int table_entry_size_;
468 friend class FrameDescription;
469 friend class DeoptimizedFrameInfo;
// FrameDescription: raw, GC-unsafe image of a single (input or output)
// stack frame used while deoptimizing. The object is over-allocated so that
// frame_content_[...] holds the frame's slots inline.
// NOTE(review): interior lines are missing from this extraction (operator
// delete bodies, top_/pc_/fp_/context_/state_ members, closing braces).
473 class FrameDescription {
475 FrameDescription(uint32_t frame_size,
476 JSFunction* function);
478 void* operator new(size_t size, uint32_t frame_size) {
479 // Subtracts kPointerSize, as the member frame_content_ already supplies
480 // the first element of the area to store the frame.
// NOTE(review): the malloc result is not checked here — confirm upstream
// whether an OOM check (e.g. malloc'd wrapper) was elided.
481 return malloc(size + frame_size - kPointerSize);
484 void operator delete(void* pointer, uint32_t frame_size) {
488 void operator delete(void* description) {
492 uint32_t GetFrameSize() const {
// frame_size_ is stored as uintptr_t purely for alignment; assert it
// still fits in 32 bits before narrowing.
493 ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
494 return static_cast<uint32_t>(frame_size_);
497 JSFunction* GetFunction() const { return function_; }
499 unsigned GetOffsetFromSlotIndex(int slot_index);
501 intptr_t GetFrameSlot(unsigned offset) {
502 return *GetFrameSlotPointer(offset);
// Double/SIMD slot readers go through the alignment-safe helpers above.
505 double GetDoubleFrameSlot(unsigned offset) {
506 intptr_t* ptr = GetFrameSlotPointer(offset);
507 return read_double_value(reinterpret_cast<Address>(ptr));
510 simd128_value_t GetSIMD128FrameSlot(unsigned offset) {
511 intptr_t* ptr = GetFrameSlotPointer(offset);
512 return read_simd128_value(reinterpret_cast<Address>(ptr));
515 void SetFrameSlot(unsigned offset, intptr_t value) {
516 *GetFrameSlotPointer(offset) = value;
519 void SetCallerPc(unsigned offset, intptr_t value);
521 void SetCallerFp(unsigned offset, intptr_t value);
523 void SetCallerConstantPool(unsigned offset, intptr_t value);
525 intptr_t GetRegister(unsigned n) const {
527 // This convoluted ASSERT is needed to work around a gcc problem that
528 // improperly detects an array bounds overflow in optimized debug builds
529 // when using a plain ASSERT.
530 if (n >= ARRAY_SIZE(registers_)) {
535 return registers_[n];
538 double GetDoubleRegister(unsigned n) const;
540 simd128_value_t GetSIMD128Register(unsigned n) const {
541 ASSERT(n < ARRAY_SIZE(simd128_registers_));
542 return simd128_registers_[n];
545 void SetRegister(unsigned n, intptr_t value) {
546 ASSERT(n < ARRAY_SIZE(registers_));
547 registers_[n] = value;
550 void SetDoubleRegister(unsigned n, double value);
552 void SetSIMD128Register(unsigned n, simd128_value_t value) {
553 ASSERT(n < ARRAY_SIZE(simd128_registers_));
554 simd128_registers_[n] = value;
557 intptr_t GetTop() const { return top_; }
558 void SetTop(intptr_t top) { top_ = top; }
560 intptr_t GetPc() const { return pc_; }
561 void SetPc(intptr_t pc) { pc_ = pc; }
563 intptr_t GetFp() const { return fp_; }
564 void SetFp(intptr_t fp) { fp_ = fp; }
566 intptr_t GetContext() const { return context_; }
567 void SetContext(intptr_t context) { context_ = context; }
569 intptr_t GetConstantPool() const { return constant_pool_; }
570 void SetConstantPool(intptr_t constant_pool) {
571 constant_pool_ = constant_pool;
574 Smi* GetState() const { return state_; }
575 void SetState(Smi* state) { state_ = state; }
577 void SetContinuation(intptr_t pc) { continuation_ = pc; }
579 StackFrame::Type GetFrameType() const { return type_; }
580 void SetFrameType(StackFrame::Type type) { type_ = type; }
582 // Get the incoming arguments count.
583 int ComputeParametersCount();
585 // Get a parameter value for an unoptimized frame.
586 Object* GetParameter(int index);
588 // Get the expression stack height for a unoptimized frame.
589 unsigned GetExpressionCount();
591 // Get the expression stack value for an unoptimized frame.
592 Object* GetExpression(int index);
// Offsets used by generated code to address members directly.
594 static int registers_offset() {
595 return OFFSET_OF(FrameDescription, registers_);
598 static int simd128_registers_offset() {
599 return OFFSET_OF(FrameDescription, simd128_registers_);
602 static int frame_size_offset() {
603 return OFFSET_OF(FrameDescription, frame_size_);
606 static int pc_offset() {
607 return OFFSET_OF(FrameDescription, pc_);
610 static int state_offset() {
611 return OFFSET_OF(FrameDescription, state_);
614 static int continuation_offset() {
615 return OFFSET_OF(FrameDescription, continuation_);
618 static int frame_content_offset() {
619 return OFFSET_OF(FrameDescription, frame_content_);
// Filler pattern for uninitialized slots (recognizable in crash dumps).
623 static const uint32_t kZapUint32 = 0xbeeddead;
625 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
626 // keep the variable-size array frame_content_ of type intptr_t at
627 // the end of the structure aligned.
628 uintptr_t frame_size_; // Number of bytes.
629 JSFunction* function_;
630 intptr_t registers_[Register::kNumRegisters];
631 simd128_value_t simd128_registers_[SIMD128Register::kMaxNumRegisters];
636 intptr_t constant_pool_;
637 StackFrame::Type type_;
640 // Continuation is the PC where the execution continues after
642 intptr_t continuation_;
644 // This must be at the end of the object as the object is allocated larger
645 // than its definition indicates to extend this array.
646 intptr_t frame_content_[1];
648 intptr_t* GetFrameSlotPointer(unsigned offset) {
649 ASSERT(offset < frame_size_);
650 return reinterpret_cast<intptr_t*>(
651 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
654 int ComputeFixedSize();
658 class DeoptimizerData {
660 explicit DeoptimizerData(MemoryAllocator* allocator);
663 void Iterate(ObjectVisitor* v);
666 MemoryAllocator* allocator_;
667 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
668 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
670 DeoptimizedFrameInfo* deoptimized_frame_info_;
672 Deoptimizer* current_;
674 friend class Deoptimizer;
676 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
680 class TranslationBuffer BASE_EMBEDDED {
682 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
684 int CurrentIndex() const { return contents_.length(); }
685 void Add(int32_t value, Zone* zone);
687 Handle<ByteArray> CreateByteArray(Factory* factory);
690 ZoneList<uint8_t> contents_;
694 class TranslationIterator BASE_EMBEDDED {
696 TranslationIterator(ByteArray* buffer, int index)
697 : buffer_(buffer), index_(index) {
698 ASSERT(index >= 0 && index < buffer->length());
703 bool HasNext() const { return index_ < buffer_->length(); }
706 for (int i = 0; i < n; i++) Next();
// X-macro listing every translation opcode; expanded into the Translation
// Opcode enum and its string table.
// NOTE(review): several entries (BEGIN, JS_FRAME, CAPTURED_OBJECT, the
// plain REGISTER/STACK_SLOT families, LITERAL) were missing from this
// extraction and have been restored — each restored name is referenced by a
// visible Translation method or constructor below; confirm against the
// upstream file.
#define TRANSLATION_OPCODE_LIST(V)  \
  V(BEGIN)                          \
  V(JS_FRAME)                       \
  V(CONSTRUCT_STUB_FRAME)           \
  V(GETTER_STUB_FRAME)              \
  V(SETTER_STUB_FRAME)              \
  V(ARGUMENTS_ADAPTOR_FRAME)        \
  V(COMPILED_STUB_FRAME)            \
  V(DUPLICATED_OBJECT)              \
  V(ARGUMENTS_OBJECT)               \
  V(CAPTURED_OBJECT)                \
  V(REGISTER)                       \
  V(INT32_REGISTER)                 \
  V(UINT32_REGISTER)                \
  V(DOUBLE_REGISTER)                \
  V(FLOAT32x4_REGISTER)             \
  V(FLOAT64x2_REGISTER)             \
  V(INT32x4_REGISTER)               \
  V(STACK_SLOT)                     \
  V(INT32_STACK_SLOT)               \
  V(UINT32_STACK_SLOT)              \
  V(DOUBLE_STACK_SLOT)              \
  V(FLOAT32x4_STACK_SLOT)           \
  V(FLOAT64x2_STACK_SLOT)           \
  V(INT32x4_STACK_SLOT)             \
  V(LITERAL)
// Translation: writer side of the deopt translation stream. The compiler
// records, per deopt point, how to rebuild each unoptimized frame (Begin*
// methods open a frame; Store* methods record where each slot's value
// lives in the optimized frame).
// NOTE(review): the enum head (`enum Opcode {`), `public:` labels and some
// constructor lines are missing from this extraction.
743 class Translation BASE_EMBEDDED {
745 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
747 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
750 #undef DECLARE_TRANSLATION_OPCODE_ENUM
// The constructor writes the stream header: BEGIN, total frame count, and
// JS frame count.
752 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
755 index_(buffer->CurrentIndex()),
757 buffer_->Add(BEGIN, zone);
758 buffer_->Add(frame_count, zone);
759 buffer_->Add(jsframe_count, zone);
762 int index() const { return index_; }
765 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
766 void BeginCompiledStubFrame();
767 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
768 void BeginConstructStubFrame(int literal_id, unsigned height);
769 void BeginGetterStubFrame(int literal_id);
770 void BeginSetterStubFrame(int literal_id);
771 void BeginArgumentsObject(int args_length);
772 void BeginCapturedObject(int length);
773 void DuplicateObject(int object_index);
774 void StoreRegister(Register reg);
775 void StoreInt32Register(Register reg);
776 void StoreUint32Register(Register reg);
777 void StoreDoubleRegister(DoubleRegister reg);
778 void StoreSIMD128Register(SIMD128Register reg, Opcode opcode);
779 void StoreStackSlot(int index);
780 void StoreInt32StackSlot(int index);
781 void StoreUint32StackSlot(int index);
782 void StoreDoubleStackSlot(int index);
783 void StoreSIMD128StackSlot(int index, Opcode opcode);
784 void StoreLiteral(int literal_id);
785 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
787 Zone* zone() const { return zone_; }
789 static int NumberOfOperandsFor(Opcode opcode);
791 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
792 static const char* StringFor(Opcode opcode);
795 // A literal id which refers to the JSFunction itself.
796 static const int kSelfLiteralId = -239;
799 TranslationBuffer* buffer_;
// SlotRef: reader-side description of one value of an optimized frame —
// either a raw slot address plus how to interpret it, a literal, or a
// marker for an escape-analyzed/arguments object.
// NOTE(review): the head of SlotRepresentation (UNKNOWN, TAGGED, INT32,
// etc., per the constructors below) and several `return slot;` / `public:`
// lines are missing from this extraction.
805 class SlotRef BASE_EMBEDDED {
807 enum SlotRepresentation {
817 DEFERRED_OBJECT, // Object captured by the escape analysis.
818 // The number of nested objects can be obtained
819 // with the DeferredObjectLength() method
820 // (the SlotRefs of the nested objects follow
821 // this SlotRef in the depth-first order.)
822 DUPLICATE_OBJECT, // Duplicated object of a deferred object.
823 ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
824 // in sync, it should not be materialized.
828 : addr_(NULL), representation_(UNKNOWN) { }
830 SlotRef(Address addr, SlotRepresentation representation)
831 : addr_(addr), representation_(representation) { }
833 SlotRef(Isolate* isolate, Object* literal)
834 : literal_(literal, isolate), representation_(LITERAL) { }
// Factory helpers for the object-materialization markers.
836 static SlotRef NewArgumentsObject(int length) {
838 slot.representation_ = ARGUMENTS_OBJECT;
839 slot.deferred_object_length_ = length;
843 static SlotRef NewDeferredObject(int length) {
845 slot.representation_ = DEFERRED_OBJECT;
846 slot.deferred_object_length_ = length;
850 SlotRepresentation Representation() { return representation_; }
852 static SlotRef NewDuplicateObject(int id) {
854 slot.representation_ = DUPLICATE_OBJECT;
855 slot.duplicate_object_id_ = id;
// Number of nested SlotRefs that belong to this object (0 for plain
// values).
859 int GetChildrenCount() {
860 if (representation_ == DEFERRED_OBJECT ||
861 representation_ == ARGUMENTS_OBJECT) {
862 return deferred_object_length_;
868 int DuplicateObjectId() { return duplicate_object_id_; }
870 Handle<Object> GetValue(Isolate* isolate);
874 Handle<Object> literal_;
875 SlotRepresentation representation_;
876 int deferred_object_length_;
877 int duplicate_object_id_;
// SlotRefValueBuilder: walks the SlotRefs of one inlined frame and
// materializes their values, reusing objects recorded for this stack frame
// in the MaterializedObjectStore. Usage: Prepare, repeated GetNext, Finish.
// NOTE(review): the constructor's first line and some members are missing
// from this extraction.
880 class SlotRefValueBuilder BASE_EMBEDDED {
883 JavaScriptFrame* frame,
884 int inlined_frame_index,
885 int formal_parameter_count);
887 void Prepare(Isolate* isolate);
888 Handle<Object> GetNext(Isolate* isolate, int level);
889 void Finish(Isolate* isolate);
891 int args_length() { return args_length_; }
894 List<Handle<Object> > materialized_objects_;
895 Handle<FixedArray> previously_materialized_objects_;
896 int prev_materialized_count_;
897 Address stack_frame_id_;
898 List<SlotRef> slot_refs_;
901 int first_slot_index_;
903 static SlotRef ComputeSlotForNextArgument(
904 Translation::Opcode opcode,
905 TranslationIterator* iterator,
906 DeoptimizationInputData* data,
907 JavaScriptFrame* frame);
909 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
// Maps a translation slot index to a stack address: non-negative indices
// address locals below fp, negative indices address incoming parameters.
911 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
912 if (slot_index >= 0) {
913 const int offset = JavaScriptFrameConstants::kLocal0Offset;
914 return frame->fp() + offset - (slot_index * kPointerSize);
916 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
917 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
921 Handle<Object> GetDeferredObject(Isolate* isolate);
924 class MaterializedObjectStore {
926 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
929 Handle<FixedArray> Get(Address fp);
930 void Set(Address fp, Handle<FixedArray> materialized_objects);
931 void Remove(Address fp);
934 Isolate* isolate() { return isolate_; }
935 Handle<FixedArray> GetStackEntries();
936 Handle<FixedArray> EnsureStackEntries(int size);
938 int StackIdToIndex(Address fp);
941 List<Address> frame_fps_;
945 // Class used to represent an unoptimized frame when the debugger
946 // needs to inspect a frame that is part of an optimized frame. The
947 // internally used FrameDescription objects are not GC safe so for use
948 // by the debugger frame information is copied to an object of this type.
949 // Represents parameters in unadapted form so their number might mismatch
950 // formal parameter count.
// GC-safe copy of one unoptimized frame's parameters and expression stack,
// built for debugger inspection (see class comment above).
// NOTE(review): the constructor's middle parameters and the GetFunction
// body are missing from this extraction.
951 class DeoptimizedFrameInfo : public Malloced {
953 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
955 bool has_arguments_adaptor,
956 bool has_construct_stub);
957 virtual ~DeoptimizedFrameInfo();
// GC support: visits the raw Object** arrays held below.
960 void Iterate(ObjectVisitor* v);
962 // Return the number of incoming arguments.
963 int parameters_count() { return parameters_count_; }
965 // Return the height of the expression stack.
966 int expression_count() { return expression_count_; }
968 // Get the frame function.
969 JSFunction* GetFunction() {
973 // Check if this frame is preceded by construct stub frame. The bottom-most
974 // inlined frame might still be called by an uninlined construct stub.
975 bool HasConstructStub() {
976 return has_construct_stub_;
979 // Get an incoming argument.
980 Object* GetParameter(int index) {
981 ASSERT(0 <= index && index < parameters_count());
982 return parameters_[index];
985 // Get an expression from the expression stack.
986 Object* GetExpression(int index) {
987 ASSERT(0 <= index && index < expression_count());
988 return expression_stack_[index];
991 int GetSourcePosition() {
992 return source_position_;
996 // Set an incoming argument.
997 void SetParameter(int index, Object* obj) {
998 ASSERT(0 <= index && index < parameters_count());
999 parameters_[index] = obj;
1002 // Set an expression on the expression stack.
1003 void SetExpression(int index, Object* obj) {
1004 ASSERT(0 <= index && index < expression_count());
1005 expression_stack_[index] = obj;
1008 JSFunction* function_;
1009 bool has_construct_stub_;
1010 int parameters_count_;
1011 int expression_count_;
// Raw pointers (not Handles) — kept alive via Iterate(); not GC safe
// outside that protocol.
1012 Object** parameters_;
1013 Object** expression_stack_;
1014 int source_position_;
1016 friend class Deoptimizer;
1019 } } // namespace v8::internal
1021 #endif // V8_DEOPTIMIZER_H_