1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
10 #include "src/allocation.h"
11 #include "src/macro-assembler.h"
12 #include "src/zone-inl.h"
// Reads a 64-bit IEEE double from an arbitrary (possibly unaligned) address.
// On hosts that tolerate unaligned loads this is a direct read; otherwise the
// value is assembled from two aligned 32-bit loads.
// NOTE(review): the non-aligned branch uses a local `c` (a double/uint32[2]
// union) whose declaration and the final `return c.d;` are elided from this
// view — confirm against the full header.
19 static inline double read_double_value(Address p) {
20 #ifdef V8_HOST_CAN_READ_UNALIGNED
21 return Memory::double_at(p);
22 #else // V8_HOST_CAN_READ_UNALIGNED
23 // Prevent gcc from using load-double (mips ldc1) on (possibly)
24 // non-64-bit aligned address.
// Low and high 32-bit halves are read separately so the compiler cannot
// fuse them into a single (alignment-sensitive) 64-bit load.
29 c.u[0] = *reinterpret_cast<uint32_t*>(p);
30 c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
32 #endif // V8_HOST_CAN_READ_UNALIGNED
// Reads a 128-bit SIMD value from `p` via a direct pointer cast.
// NOTE(review): unlike read_double_value above, there is no unaligned-access
// guard here — presumably callers guarantee suitable alignment; confirm.
35 static inline simd128_value_t read_simd128_value(Address p) {
36 return *reinterpret_cast<simd128_value_t*>(p);
// Forward declarations for types defined later in this header.
39 class FrameDescription;
40 class TranslationIterator;
41 class DeoptimizedFrameInfo;
// Records a heap number that must be materialized after deoptimization:
// a destination (templated — used elsewhere in this file with `int` for
// object-field indices and `Address` for frame slots) plus the double value
// to box. NOTE(review): the `template <typename T>` header and the private
// members are elided from this view of the file.
44 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
46 HeapNumberMaterializationDescriptor(T destination, double value)
47 : destination_(destination), value_(value) { }
// Where the materialized heap number must be written back.
49 T destination() const { return destination_; }
// The raw double value to materialize.
50 double value() const { return value_; }
// SIMD128 analogue of HeapNumberMaterializationDescriptor: pairs a templated
// destination (int index or Address slot, per the uses later in this file)
// with the 128-bit value to materialize after deoptimization.
// NOTE(review): template header and parts of the private section are elided
// from this view.
59 class SIMD128MaterializationDescriptor BASE_EMBEDDED {
61 SIMD128MaterializationDescriptor(T destination, simd128_value_t value)
62 : destination_(destination), value_(value) { }
64 T destination() const { return destination_; }
65 simd128_value_t value() const { return value_; }
// Stored by value; simd128_value_t is a plain 128-bit POD.
69 simd128_value_t value_;
// Describes an object captured by escape analysis that must be re-created
// ("materialized") on deoptimization: where the resulting reference goes
// (slot_address), which JS frame it belongs to, how many fields it has, and
// whether it duplicates an earlier object or is an arguments object.
73 class ObjectMaterializationDescriptor BASE_EMBEDDED {
75 ObjectMaterializationDescriptor(
76 Address slot_address, int frame, int length, int duplicate, bool is_args)
77 : slot_address_(slot_address),
78 jsframe_index_(frame),
79 object_length_(length),
80 duplicate_object_(duplicate),
81 is_arguments_(is_args) { }
// Frame slot that receives the materialized object reference.
83 Address slot_address() const { return slot_address_; }
// Index of the (inlined) JS frame this object belongs to.
84 int jsframe_index() const { return jsframe_index_; }
// Number of captured fields in the object.
85 int object_length() const { return object_length_; }
// Index of an earlier descriptor this one duplicates (sentinel if none —
// exact sentinel value not visible in this view).
86 int duplicate_object() const { return duplicate_object_; }
87 bool is_arguments() const { return is_arguments_; }
89 // Only used for allocated receivers in DoComputeConstructStubFrame.
90 void patch_slot_address(intptr_t slot) {
91 slot_address_ = reinterpret_cast<Address>(slot);
95 Address slot_address_;
98 int duplicate_object_;
// Callback interface used by Deoptimizer::VisitAllOptimizedFunctions* to
// walk every optimized JSFunction, grouped by native context. The visitor
// gets EnterContext / VisitFunction* / LeaveContext in that order per
// context.
103 class OptimizedFunctionVisitor BASE_EMBEDDED {
105 virtual ~OptimizedFunctionVisitor() {}
107 // Function which is called before iteration of any optimized functions
108 // from given native context.
109 virtual void EnterContext(Context* context) = 0;
// Called once for each optimized function in the context.
111 virtual void VisitFunction(JSFunction* function) = 0;
113 // Function which is called after iteration of all optimized functions
114 // from given native context.
115 virtual void LeaveContext(Context* context) = 0;
// Core deoptimization machinery: translates an optimized (Crankshaft) frame
// back into one or more unoptimized frames, materializes escape-analyzed
// objects and unboxed numbers, and manages the table of deoptimization entry
// stubs. Heap-allocated via Malloced; instances are created by New() and
// handed over via Grab(). NOTE(review): the BailoutType enum and several
// declarations are elided from this view of the file.
119 class Deoptimizer : public Malloced {
125 // This last bailout type is not really a bailout, but used by the
126 // debugger to deoptimize stack frames to allow inspection.
// Number of bailout types that get their own entry-code table (SOFT is the
// last such type in the elided enum above).
130 static const int kBailoutTypesWithCodeEntry = SOFT + 1;
// One row of the lazy-deopt jump table emitted at the end of optimized
// code: target entry address, bailout kind, and whether a frame must be
// built before jumping.
132 struct JumpTableEntry : public ZoneObject {
133 inline JumpTableEntry(Address entry,
134 Deoptimizer::BailoutType type,
139 needs_frame(frame) { }
142 Deoptimizer::BailoutType bailout_type;
// Whether tracing is enabled for this deopt/frame type combination.
146 static bool TraceEnabledFor(BailoutType deopt_type,
147 StackFrame::Type frame_type);
// Human-readable name for a bailout type (for trace output).
148 static const char* MessageFor(BailoutType type);
150 int output_count() const { return output_count_; }
152 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
153 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
154 BailoutType bailout_type() const { return bailout_type_; }
156 // Number of created JS frames. Not all created frames are necessarily JS.
157 int jsframe_count() const { return jsframe_count_; }
// Creates a Deoptimizer and parks it on the isolate; generated code then
// retrieves it with Grab(). (Remaining parameters elided from this view.)
159 static Deoptimizer* New(JSFunction* function,
165 static Deoptimizer* Grab(Isolate* isolate);
167 // The returned object with information on the optimized frame needs to be
168 // freed before another one can be generated.
169 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
172 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
175 // Makes sure that there is enough room in the relocation
176 // information of a code object to perform lazy deoptimization
177 // patching. If there is not enough room a new relocation
178 // information object is allocated and comments are added until it
180 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
182 // Deoptimize the function now. Its current optimized code will never be run
183 // again and any activations of the optimized code will get deoptimized when
184 // execution returns.
185 static void DeoptimizeFunction(JSFunction* function);
187 // Deoptimize all code in the given isolate.
188 static void DeoptimizeAll(Isolate* isolate);
190 // Deoptimize code associated with the given global object.
191 static void DeoptimizeGlobalObject(JSObject* object);
193 // Deoptimizes all optimized code that has been previously marked
194 // (via code->set_marked_for_deoptimization) and unlinks all functions that
195 // refer to that code.
196 static void DeoptimizeMarkedCode(Isolate* isolate);
// Patches return addresses on the stack that point into marked code.
198 static void PatchStackForMarkedCode(Isolate* isolate);
200 // Visit all the known optimized functions in a given isolate.
201 static void VisitAllOptimizedFunctions(
202 Isolate* isolate, OptimizedFunctionVisitor* visitor);
204 // The size in bytes of the code required at a lazy deopt patch site.
205 static int patch_size();
// Materializes all deferred heap numbers/SIMD values/objects recorded
// during translation, writing them into the output frames.
209 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
// Debugger variant: materializes heap numbers into the copied-out
// parameter and expression-stack areas of a DeoptimizedFrameInfo.
211 void MaterializeHeapNumbersForDebuggerInspectableFrame(
212 Address parameters_top,
213 uint32_t parameters_size,
214 Address expressions_top,
215 uint32_t expressions_size,
216 DeoptimizedFrameInfo* info);
// Entry point called from generated code to build the output frames.
218 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
// GetEntryMode value: compute the entry address without ensuring the
// entry code exists (enum declaration partially elided from this view).
222 CALCULATE_ENTRY_ADDRESS,
// Returns the address of the deopt-table entry for a given bailout id.
227 static Address GetDeoptimizationEntry(
231 GetEntryMode mode = ENSURE_ENTRY_CODE);
// Reverse mapping: entry address back to deoptimization id.
232 static int GetDeoptimizationId(Isolate* isolate,
235 static int GetOutputInfo(DeoptimizationOutputData* data,
237 SharedFunctionInfo* shared);
239 // Code generation support.
240 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
241 static int output_count_offset() {
242 return OFFSET_OF(Deoptimizer, output_count_);
244 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
246 static int has_alignment_padding_offset() {
247 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
250 static int GetDeoptimizedCodeCount(Isolate* isolate);
252 static const int kNotDeoptimizationEntry = -1;
254 // Generators for the deoptimization entry code.
255 class EntryGenerator BASE_EMBEDDED {
257 EntryGenerator(MacroAssembler* masm, BailoutType type)
258 : masm_(masm), type_(type) { }
259 virtual ~EntryGenerator() { }
264 MacroAssembler* masm() const { return masm_; }
265 BailoutType type() const { return type_; }
266 Isolate* isolate() const { return masm_->isolate(); }
268 virtual void GeneratePrologue() { }
271 MacroAssembler* masm_;
272 Deoptimizer::BailoutType type_;
// Variant that emits `count` table entries (one per bailout id).
275 class TableEntryGenerator : public EntryGenerator {
277 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
278 : EntryGenerator(masm, type), count_(count) { }
281 virtual void GeneratePrologue();
284 int count() const { return count_; }
// Maps a JS frame index to an index over all output frames (JS frames may
// be interleaved with adaptor/construct-stub frames).
289 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
291 static size_t GetMaxDeoptTableSize();
293 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
297 Isolate* isolate() const { return isolate_; }
// Bounds for the lazily-grown deoptimization entry table.
300 static const int kMinNumberOfEntries = 64;
301 static const int kMaxNumberOfEntries = 16384;
// Private constructor — use New()/Grab(). (Middle parameters elided from
// this view.)
303 Deoptimizer(Isolate* isolate,
304 JSFunction* function,
309 Code* optimized_code);
310 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
311 void PrintFunctionName();
312 void DeleteFrameDescriptions();
// Frame-building workers, one per translated frame kind.
314 void DoComputeOutputFrames();
315 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
316 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
318 void DoComputeConstructStubFrame(TranslationIterator* iterator,
320 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
322 bool is_setter_stub_frame);
323 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
326 // Translate object, store the result into an auxiliary array
327 // (deferred_objects_tagged_values_).
328 void DoTranslateObject(TranslationIterator* iterator,
332 // Translate value, store the result into the given frame slot.
333 void DoTranslateCommand(TranslationIterator* iterator,
335 unsigned output_offset);
337 // Translate object, do not store the result anywhere (but do update
338 // the deferred materialization array).
339 void DoTranslateObjectAndSkip(TranslationIterator* iterator);
// Frame-size bookkeeping helpers (sizes in bytes).
341 unsigned ComputeInputFrameSize() const;
342 unsigned ComputeFixedSize(JSFunction* function) const;
344 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
345 unsigned ComputeOutgoingArgumentSize() const;
347 Object* ComputeLiteral(int index) const;
// Recording helpers that append to the deferred materialization lists.
349 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
350 void AddObjectDuplication(intptr_t slot, int object_index);
351 void AddObjectTaggedValue(intptr_t value);
352 void AddObjectDoubleValue(double value);
353 void AddObjectSIMD128Value(simd128_value_t value, int translation_opcode);
354 void AddDoubleValue(intptr_t slot_address, double value);
355 void AddSIMD128Value(intptr_t slot_address, simd128_value_t value,
356 int translation_opcode);
// True if the JS frame owning this arguments object went through an
// arguments adaptor. jsframe_* lists are indexed bottom-up, hence the
// reversal against jsframe_count_.
358 bool ArgumentsObjectIsAdapted(int object_index) {
359 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
360 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
361 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
// Function owning the JS frame of the given arguments object (same
// reverse-indexing scheme as above).
364 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
365 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
366 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
367 return jsframe_functions_[reverse_jsframe_index];
370 // Helper function for heap object materialization.
371 Handle<Object> MaterializeNextHeapObject();
372 Handle<Object> MaterializeNextValue();
374 static void GenerateDeoptimizationEntries(
375 MacroAssembler* masm, int count, BailoutType type);
377 // Marks all the code in the given context for deoptimization.
378 static void MarkAllCodeForContext(Context* native_context);
380 // Visit all the known optimized functions in a given context.
381 static void VisitAllOptimizedFunctionsForContext(
382 Context* context, OptimizedFunctionVisitor* visitor);
384 // Deoptimizes all code marked in the given context.
385 static void DeoptimizeMarkedCodeForContext(Context* native_context);
387 // Patch the given code so that it will deoptimize itself.
388 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
390 // Searches the list of known deoptimizing code for a Code object
391 // containing the given address (which is supposedly faster than
392 // searching all code objects).
393 Code* FindDeoptimizingCode(Address addr);
395 // Fill the input from from a JavaScript frame. This is used when
396 // the debugger needs to inspect an optimized frame. For normal
397 // deoptimizations the input frame is filled in generated code.
398 void FillInputFrame(Address tos, JavaScriptFrame* frame);
400 // Fill the given output frame's registers to contain the failure handler
401 // address and the number of parameters for a stub failure trampoline.
402 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
403 CodeStubInterfaceDescriptor* desc);
405 // Fill the given output frame's simd128 registers with the original values
406 // from the input frame's simd128 registers.
407 void CopySIMD128Registers(FrameDescription* output_frame);
409 // Determines whether the input frame contains alignment padding by looking
410 // at the dynamic alignment state slot inside the frame.
411 bool HasAlignmentPadding(JSFunction* function);
// --- Instance state (several members elided from this view) ---
414 JSFunction* function_;
415 Code* compiled_code_;
416 unsigned bailout_id_;
417 BailoutType bailout_type_;
// Stored as int (not bool) so generated code can read it via
// has_alignment_padding_offset().
420 int has_alignment_padding_;
422 // Input frame description.
423 FrameDescription* input_;
424 // Number of output frames.
426 // Number of output js frames.
428 // Array of output frame descriptions.
429 FrameDescription** output_;
431 // Deferred values to be materialized.
432 List<Object*> deferred_objects_tagged_values_;
433 List<HeapNumberMaterializationDescriptor<int> >
434 deferred_objects_double_values_;
435 List<SIMD128MaterializationDescriptor<int> >
436 deferred_objects_float32x4_values_;
437 List<SIMD128MaterializationDescriptor<int> >
438 deferred_objects_float64x2_values_;
439 List<SIMD128MaterializationDescriptor<int> >
440 deferred_objects_int32x4_values_;
441 List<ObjectMaterializationDescriptor> deferred_objects_;
442 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
443 List<SIMD128MaterializationDescriptor<Address> > deferred_float32x4s_;
444 List<SIMD128MaterializationDescriptor<Address> > deferred_float64x2s_;
445 List<SIMD128MaterializationDescriptor<Address> > deferred_int32x4s_;
447 // Key for lookup of previously materialized objects
449 Handle<FixedArray> previously_materialized_objects_;
450 int prev_materialized_count_;
452 // Output frame information. Only used during heap object materialization.
453 List<Handle<JSFunction> > jsframe_functions_;
454 List<bool> jsframe_has_adapted_arguments_;
456 // Materialized objects. Only used during heap object materialization.
457 List<Handle<Object> >* materialized_values_;
458 List<Handle<Object> >* materialized_objects_;
459 int materialization_value_index_;
460 int materialization_object_index_;
// Guards against GC while raw pointers into frames are live.
463 DisallowHeapAllocation* disallow_heap_allocation_;
466 CodeTracer::Scope* trace_scope_;
// Size of one deopt-table entry; defined per architecture.
468 static const int table_entry_size_;
470 friend class FrameDescription;
471 friend class DeoptimizedFrameInfo;
// In-memory description of a single stack frame (input or output of
// deoptimization): register snapshot, pc/fp/context/state, and a
// variable-length slot area (frame_content_) allocated past the end of the
// object via the custom operator new. Not GC-safe — holds raw pointers.
475 class FrameDescription {
477 FrameDescription(uint32_t frame_size,
478 JSFunction* function);
// Over-allocates so frame_content_ can hold frame_size bytes of slots.
480 void* operator new(size_t size, uint32_t frame_size) {
481 // Subtracts kPointerSize, as the member frame_content_ already supplies
482 // the first element of the area to store the frame.
483 return malloc(size + frame_size - kPointerSize);
// Placement-delete counterpart, called if the constructor throws.
486 void operator delete(void* pointer, uint32_t frame_size) {
490 void operator delete(void* description) {
494 uint32_t GetFrameSize() const {
// frame_size_ is stored as uintptr_t purely for alignment (see comment at
// the member); assert it still fits in 32 bits.
495 DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
496 return static_cast<uint32_t>(frame_size_);
499 JSFunction* GetFunction() const { return function_; }
501 unsigned GetOffsetFromSlotIndex(int slot_index);
// Slot accessors; `offset` is a byte offset into frame_content_.
503 intptr_t GetFrameSlot(unsigned offset) {
504 return *GetFrameSlotPointer(offset);
507 double GetDoubleFrameSlot(unsigned offset) {
508 intptr_t* ptr = GetFrameSlotPointer(offset);
// Unaligned-safe read (see read_double_value at the top of this header).
509 return read_double_value(reinterpret_cast<Address>(ptr));
512 simd128_value_t GetSIMD128FrameSlot(unsigned offset) {
513 intptr_t* ptr = GetFrameSlotPointer(offset);
514 return read_simd128_value(reinterpret_cast<Address>(ptr));
517 void SetFrameSlot(unsigned offset, intptr_t value) {
518 *GetFrameSlotPointer(offset) = value;
// Architecture-specific setters for the caller-saved frame header fields.
521 void SetCallerPc(unsigned offset, intptr_t value);
523 void SetCallerFp(unsigned offset, intptr_t value);
525 void SetCallerConstantPool(unsigned offset, intptr_t value);
527 intptr_t GetRegister(unsigned n) const {
529 // This convoluted DCHECK is needed to work around a gcc problem that
530 // improperly detects an array bounds overflow in optimized debug builds
531 // when using a plain DCHECK.
532 if (n >= ARRAY_SIZE(registers_)) {
537 return registers_[n];
540 double GetDoubleRegister(unsigned n) const;
542 simd128_value_t GetSIMD128Register(unsigned n) const {
543 DCHECK(n < ARRAY_SIZE(simd128_registers_));
544 return simd128_registers_[n];
547 void SetRegister(unsigned n, intptr_t value) {
548 DCHECK(n < ARRAY_SIZE(registers_));
549 registers_[n] = value;
552 void SetDoubleRegister(unsigned n, double value);
554 void SetSIMD128Register(unsigned n, simd128_value_t value) {
555 DCHECK(n < ARRAY_SIZE(simd128_registers_));
556 simd128_registers_[n] = value;
// Trivial accessors for the frame header fields.
559 intptr_t GetTop() const { return top_; }
560 void SetTop(intptr_t top) { top_ = top; }
562 intptr_t GetPc() const { return pc_; }
563 void SetPc(intptr_t pc) { pc_ = pc; }
565 intptr_t GetFp() const { return fp_; }
566 void SetFp(intptr_t fp) { fp_ = fp; }
568 intptr_t GetContext() const { return context_; }
569 void SetContext(intptr_t context) { context_ = context; }
571 intptr_t GetConstantPool() const { return constant_pool_; }
572 void SetConstantPool(intptr_t constant_pool) {
573 constant_pool_ = constant_pool;
576 Smi* GetState() const { return state_; }
577 void SetState(Smi* state) { state_ = state; }
579 void SetContinuation(intptr_t pc) { continuation_ = pc; }
581 StackFrame::Type GetFrameType() const { return type_; }
582 void SetFrameType(StackFrame::Type type) { type_ = type; }
584 // Get the incoming arguments count.
585 int ComputeParametersCount();
587 // Get a parameter value for an unoptimized frame.
588 Object* GetParameter(int index);
590 // Get the expression stack height for a unoptimized frame.
591 unsigned GetExpressionCount();
593 // Get the expression stack value for an unoptimized frame.
594 Object* GetExpression(int index);
// Member offsets used by generated code (see OFFSET_OF uses).
596 static int registers_offset() {
597 return OFFSET_OF(FrameDescription, registers_);
600 static int simd128_registers_offset() {
601 return OFFSET_OF(FrameDescription, simd128_registers_);
604 static int frame_size_offset() {
605 return OFFSET_OF(FrameDescription, frame_size_);
608 static int pc_offset() {
609 return OFFSET_OF(FrameDescription, pc_);
612 static int state_offset() {
613 return OFFSET_OF(FrameDescription, state_);
616 static int continuation_offset() {
617 return OFFSET_OF(FrameDescription, continuation_);
620 static int frame_content_offset() {
621 return OFFSET_OF(FrameDescription, frame_content_);
// Zap pattern used to mark uninitialized frame contents in debug mode.
625 static const uint32_t kZapUint32 = 0xbeeddead;
627 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
628 // keep the variable-size array frame_content_ of type intptr_t at
629 // the end of the structure aligned.
630 uintptr_t frame_size_; // Number of bytes.
631 JSFunction* function_;
632 intptr_t registers_[Register::kNumRegisters];
633 simd128_value_t simd128_registers_[SIMD128Register::kMaxNumRegisters];
638 intptr_t constant_pool_;
639 StackFrame::Type type_;
642 // Continuation is the PC where the execution continues after
644 intptr_t continuation_;
646 // This must be at the end of the object as the object is allocated larger
647 // than it's definition indicate to extend this array.
648 intptr_t frame_content_[1];
// Byte offset -> pointer into the over-allocated slot area.
650 intptr_t* GetFrameSlotPointer(unsigned offset) {
651 DCHECK(offset < frame_size_);
652 return reinterpret_cast<intptr_t*>(
653 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
656 int ComputeFixedSize();
// Per-isolate storage for the deoptimizer: the lazily-built entry-code
// chunks (one per bailout type with a code entry), the currently active
// Deoptimizer, and the frame info handed to the debugger.
660 class DeoptimizerData {
662 explicit DeoptimizerData(MemoryAllocator* allocator);
// GC support: visits the object pointers held by deoptimized_frame_info_.
665 void Iterate(ObjectVisitor* v);
668 MemoryAllocator* allocator_;
// Number of generated entries / backing chunk, indexed by bailout type.
669 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
670 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
672 DeoptimizedFrameInfo* deoptimized_frame_info_;
// The Deoptimizer parked between New() and Grab().
674 Deoptimizer* current_;
676 friend class Deoptimizer;
678 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Growable byte buffer into which Translation records are encoded; later
// frozen into a ByteArray that is attached to the optimized code's
// deoptimization data.
682 class TranslationBuffer BASE_EMBEDDED {
684 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
// Current write position; used as the start index of the next Translation.
686 int CurrentIndex() const { return contents_.length(); }
// Appends one value (encoding elided from this view — presumably a
// variable-length integer encoding; confirm in deoptimizer.cc).
687 void Add(int32_t value, Zone* zone);
689 Handle<ByteArray> CreateByteArray(Factory* factory);
692 ZoneList<uint8_t> contents_;
// Sequential reader over an encoded translation stored in a ByteArray,
// starting at `index`. Next()/Skip() decode successive values (Next()'s
// declaration is elided from this view).
696 class TranslationIterator BASE_EMBEDDED {
698 TranslationIterator(ByteArray* buffer, int index)
699 : buffer_(buffer), index_(index) {
700 DCHECK(index >= 0 && index < buffer->length());
705 bool HasNext() const { return index_ < buffer_->length(); }
// Skips the next n encoded values.
708 for (int i = 0; i < n; i++) Next();
// X-macro listing every translation opcode; expanded in the Translation
// class to build the Opcode enum (and its StringFor names). Several entries
// (e.g. BEGIN, JS_FRAME, the plain REGISTER/STACK_SLOT/LITERAL opcodes) are
// elided from this view of the file. Note: no comments may be placed inside
// the backslash-continued definition itself.
717 #define TRANSLATION_OPCODE_LIST(V) \
720 V(CONSTRUCT_STUB_FRAME) \
721 V(GETTER_STUB_FRAME) \
722 V(SETTER_STUB_FRAME) \
723 V(ARGUMENTS_ADAPTOR_FRAME) \
724 V(COMPILED_STUB_FRAME) \
725 V(DUPLICATED_OBJECT) \
726 V(ARGUMENTS_OBJECT) \
732 V(FLOAT32x4_REGISTER) \
733 V(FLOAT64x2_REGISTER) \
734 V(INT32x4_REGISTER) \
736 V(INT32_STACK_SLOT) \
737 V(UINT32_STACK_SLOT) \
738 V(DOUBLE_STACK_SLOT) \
739 V(FLOAT32x4_STACK_SLOT) \
740 V(FLOAT64x2_STACK_SLOT) \
741 V(INT32x4_STACK_SLOT) \
// Writer API used by the optimizing compiler to encode, per deopt point,
// how to rebuild the unoptimized frames: a BEGIN header followed by
// Begin*Frame records and Store* records describing where each value lives
// (register, stack slot, or literal).
745 class Translation BASE_EMBEDDED {
747 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
// Expands TRANSLATION_OPCODE_LIST into the Opcode enum members.
749 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
752 #undef DECLARE_TRANSLATION_OPCODE_ENUM
// Writes the BEGIN header (frame counts) into `buffer` and remembers the
// start index so the deoptimizer can find this translation later.
754 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
757 index_(buffer->CurrentIndex()),
759 buffer_->Add(BEGIN, zone);
760 buffer_->Add(frame_count, zone);
761 buffer_->Add(jsframe_count, zone);
// Start index of this translation within the buffer.
764 int index() const { return index_; }
// Commands — one per frame kind being rebuilt.
767 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
768 void BeginCompiledStubFrame();
769 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
770 void BeginConstructStubFrame(int literal_id, unsigned height);
771 void BeginGetterStubFrame(int literal_id);
772 void BeginSetterStubFrame(int literal_id);
773 void BeginArgumentsObject(int args_length);
774 void BeginCapturedObject(int length);
775 void DuplicateObject(int object_index);
// Value-location records: where each rebuilt value currently lives.
776 void StoreRegister(Register reg);
777 void StoreInt32Register(Register reg);
778 void StoreUint32Register(Register reg);
779 void StoreDoubleRegister(DoubleRegister reg);
780 void StoreSIMD128Register(SIMD128Register reg, Opcode opcode);
781 void StoreStackSlot(int index);
782 void StoreInt32StackSlot(int index);
783 void StoreUint32StackSlot(int index);
784 void StoreDoubleStackSlot(int index);
785 void StoreSIMD128StackSlot(int index, Opcode opcode);
786 void StoreLiteral(int literal_id);
787 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
789 Zone* zone() const { return zone_; }
// Operand count per opcode; used by readers to skip records.
791 static int NumberOfOperandsFor(Opcode opcode);
793 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
794 static const char* StringFor(Opcode opcode);
797 // A literal id which refers to the JSFunction itself.
798 static const int kSelfLiteralId = -239;
801 TranslationBuffer* buffer_;
// Lightweight reference to a value in an (optimized) frame, tagged with how
// it is represented; used when the runtime needs to recover inlined-frame
// arguments. Union-like: addr_ / literal_ / lengths are interpreted
// according to representation_. (Several SlotRepresentation enumerators are
// elided from this view.)
807 class SlotRef BASE_EMBEDDED {
809 enum SlotRepresentation {
819 DEFERRED_OBJECT, // Object captured by the escape analysis.
820 // The number of nested objects can be obtained
821 // with the DeferredObjectLength() method
822 // (the SlotRefs of the nested objects follow
823 // this SlotRef in the depth-first order.)
824 DUPLICATE_OBJECT, // Duplicated object of a deferred object.
825 ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
826 // in sync, it should not be materialized.
// Default constructor: an unknown, address-less slot.
830 : addr_(NULL), representation_(UNKNOWN) { }
832 SlotRef(Address addr, SlotRepresentation representation)
833 : addr_(addr), representation_(representation) { }
// Literal constant held directly (handle-ified against the isolate).
835 SlotRef(Isolate* isolate, Object* literal)
836 : literal_(literal, isolate), representation_(LITERAL) { }
// Factory helpers for the synthetic (non-address) representations.
838 static SlotRef NewArgumentsObject(int length) {
840 slot.representation_ = ARGUMENTS_OBJECT;
841 slot.deferred_object_length_ = length;
845 static SlotRef NewDeferredObject(int length) {
847 slot.representation_ = DEFERRED_OBJECT;
848 slot.deferred_object_length_ = length;
852 SlotRepresentation Representation() { return representation_; }
854 static SlotRef NewDuplicateObject(int id) {
856 slot.representation_ = DUPLICATE_OBJECT;
857 slot.duplicate_object_id_ = id;
// Number of nested SlotRefs that follow this one (depth-first order);
// zero for plain value slots (else-branch elided from this view).
861 int GetChildrenCount() {
862 if (representation_ == DEFERRED_OBJECT ||
863 representation_ == ARGUMENTS_OBJECT) {
864 return deferred_object_length_;
870 int DuplicateObjectId() { return duplicate_object_id_; }
// Decodes the slot into a real heap object.
872 Handle<Object> GetValue(Isolate* isolate);
876 Handle<Object> literal_;
877 SlotRepresentation representation_;
878 int deferred_object_length_;
879 int duplicate_object_id_;
// Walks the SlotRefs of one inlined frame and materializes their values,
// reusing objects already materialized for the same stack frame (keyed by
// stack_frame_id_). Usage: Prepare(), repeated GetNext(), then Finish().
// (Constructor name line is elided from this view.)
882 class SlotRefValueBuilder BASE_EMBEDDED {
885 JavaScriptFrame* frame,
886 int inlined_frame_index,
887 int formal_parameter_count);
// Sets up materialized-object bookkeeping before reading values.
889 void Prepare(Isolate* isolate);
// Returns the next value; `level` tracks object-nesting depth.
890 Handle<Object> GetNext(Isolate* isolate, int level);
// Stores newly materialized objects back for later reuse.
891 void Finish(Isolate* isolate);
893 int args_length() { return args_length_; }
896 List<Handle<Object> > materialized_objects_;
897 Handle<FixedArray> previously_materialized_objects_;
898 int prev_materialized_count_;
// Identifies the physical frame; key for previously-materialized lookup.
899 Address stack_frame_id_;
900 List<SlotRef> slot_refs_;
903 int first_slot_index_;
// Decodes one translation record into a SlotRef.
905 static SlotRef ComputeSlotForNextArgument(
906 Translation::Opcode opcode,
907 TranslationIterator* iterator,
908 DeoptimizationInputData* data,
909 JavaScriptFrame* frame);
911 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
// Maps a translation slot index to a stack address: non-negative indices
// address locals below fp, negative ones address incoming parameters.
913 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
914 if (slot_index >= 0) {
915 const int offset = JavaScriptFrameConstants::kLocal0Offset;
916 return frame->fp() + offset - (slot_index * kPointerSize);
918 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
919 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
923 Handle<Object> GetDeferredObject(Isolate* isolate);
// Per-isolate map from a frame pointer (fp) to the FixedArray of objects
// already materialized for that frame, so repeated deopt/debug inspection
// of the same frame reuses identical objects.
926 class MaterializedObjectStore {
928 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
931 Handle<FixedArray> Get(Address fp);
932 void Set(Address fp, Handle<FixedArray> materialized_objects);
933 void Remove(Address fp);
936 Isolate* isolate() { return isolate_; }
// Backing storage lives on the heap; these manage its size.
937 Handle<FixedArray> GetStackEntries();
938 Handle<FixedArray> EnsureStackEntries(int size);
// Index of fp in frame_fps_, or a not-found sentinel (value elided from
// this view).
940 int StackIdToIndex(Address fp);
// Registered frame pointers, parallel to the stack-entries array.
943 List<Address> frame_fps_;
947 // Class used to represent an unoptimized frame when the debugger
948 // needs to inspect a frame that is part of an optimized frame. The
949 // internally used FrameDescription objects are not GC safe so for use
950 // by the debugger frame information is copied to an object of this type.
951 // Represents parameters in unadapted form so their number might mismatch
952 // formal parameter count.
953 class DeoptimizedFrameInfo : public Malloced {
955 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
957 bool has_arguments_adaptor,
958 bool has_construct_stub);
959 virtual ~DeoptimizedFrameInfo();
// GC support: visits the copied-out parameter/expression object pointers.
962 void Iterate(ObjectVisitor* v);
964 // Return the number of incoming arguments.
965 int parameters_count() { return parameters_count_; }
967 // Return the height of the expression stack.
968 int expression_count() { return expression_count_; }
970 // Get the frame function.
971 JSFunction* GetFunction() {
975 // Check if this frame is preceded by construct stub frame. The bottom-most
976 // inlined frame might still be called by an uninlined construct stub.
977 bool HasConstructStub() {
978 return has_construct_stub_;
981 // Get an incoming argument.
982 Object* GetParameter(int index) {
983 DCHECK(0 <= index && index < parameters_count());
984 return parameters_[index];
987 // Get an expression from the expression stack.
988 Object* GetExpression(int index) {
989 DCHECK(0 <= index && index < expression_count());
990 return expression_stack_[index];
993 int GetSourcePosition() {
994 return source_position_;
998 // Set an incoming argument.
999 void SetParameter(int index, Object* obj) {
1000 DCHECK(0 <= index && index < parameters_count());
1001 parameters_[index] = obj;
1004 // Set an expression on the expression stack.
1005 void SetExpression(int index, Object* obj) {
1006 DCHECK(0 <= index && index < expression_count());
1007 expression_stack_[index] = obj;
// Copied-out frame state; arrays are owned by this object (freed in the
// destructor — definition not visible in this view; confirm).
1010 JSFunction* function_;
1011 bool has_construct_stub_;
1012 int parameters_count_;
1013 int expression_count_;
1014 Object** parameters_;
1015 Object** expression_stack_;
1016 int source_position_;
1018 friend class Deoptimizer;
1021 } } // namespace v8::internal
1023 #endif // V8_DEOPTIMIZER_H_