1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
10 #include "src/allocation.h"
11 #include "src/macro-assembler.h"
12 #include "src/zone-inl.h"
// Reads a double from an address that may not be 8-byte aligned. On hosts
// that tolerate unaligned loads this is a single direct read; otherwise the
// value is assembled from two aligned 32-bit reads through a local union
// (the union declaration and the final return are not visible in this
// excerpt — word order presumably matches host endianness, TODO confirm
// for big-endian targets).
19 static inline double read_double_value(Address p) {
20 #ifdef V8_HOST_CAN_READ_UNALIGNED
21 return Memory::double_at(p);
22 #else  // V8_HOST_CAN_READ_UNALIGNED
23 // Prevent gcc from using load-double (mips ldc1) on (possibly)
24 // non-64-bit aligned address.
29 c.u[0] = *reinterpret_cast<uint32_t*>(p);
30 c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
32 #endif  // V8_HOST_CAN_READ_UNALIGNED
// Forward declarations for types referenced before their definitions below.
36 class FrameDescription;
37 class TranslationIterator;
38 class DeoptimizedFrameInfo;
// Records a double value that must be boxed as a HeapNumber after
// deoptimization, paired with the destination it should be written to.
// T is the destination type — in this file it is instantiated with
// Address (a frame slot) and with int (an index into the deferred
// object values); see the Deoptimizer member lists below.
41 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
43 HeapNumberMaterializationDescriptor(T destination, double value)
44 : destination_(destination), value_(value) { }
46 T destination() const { return destination_; }
47 double value() const { return value_; }
// Describes an object that must be materialized on the heap after
// deoptimization: where to write the result (slot_address), which JS
// frame it belongs to, how many fields it has, whether it duplicates a
// previously described object, and whether it is an arguments object.
55 class ObjectMaterializationDescriptor BASE_EMBEDDED {
57 ObjectMaterializationDescriptor(
58 Address slot_address, int frame, int length, int duplicate, bool is_args)
59 : slot_address_(slot_address),
60 jsframe_index_(frame),
61 object_length_(length),
62 duplicate_object_(duplicate),
63 is_arguments_(is_args) { }
65 Address slot_address() const { return slot_address_; }
66 int jsframe_index() const { return jsframe_index_; }
67 int object_length() const { return object_length_; }
68 int duplicate_object() const { return duplicate_object_; }
69 bool is_arguments() const { return is_arguments_; }
71 // Only used for allocated receivers in DoComputeConstructStubFrame.
72 void patch_slot_address(intptr_t slot) {
73 slot_address_ = reinterpret_cast<Address>(slot);
77 Address slot_address_;
80 int duplicate_object_;
// Visitor interface used by Deoptimizer::VisitAllOptimizedFunctions (and
// the per-context variant) to iterate over optimized JSFunctions, grouped
// by native context.
85 class OptimizedFunctionVisitor BASE_EMBEDDED {
87 virtual ~OptimizedFunctionVisitor() {}
89 // Function which is called before iteration of any optimized functions
90 // from given native context.
91 virtual void EnterContext(Context* context) = 0;
93 virtual void VisitFunction(JSFunction* function) = 0;
95 // Function which is called after iteration of all optimized functions
96 // from given native context.
97 virtual void LeaveContext(Context* context) = 0;
// The Deoptimizer turns one optimized frame (input_) into one or more
// unoptimized frame descriptions (output_), driven by the translation
// records produced at compile time, and afterwards materializes any heap
// numbers/objects the optimized code had kept unboxed or virtualized.
// Instances are heap-allocated (Malloced) and handed between the
// generated deopt entry code and the runtime via New()/Grab().
101 class Deoptimizer : public Malloced {
107 // This last bailout type is not really a bailout, but used by the
108 // debugger to deoptimize stack frames to allow inspection.
112 static const int kBailoutTypesWithCodeEntry = SOFT + 1;
// One row of the lazy-deoptimization jump table: the deopt entry address,
// the bailout type, and whether a frame still needs to be built.
114 struct JumpTableEntry : public ZoneObject {
115 inline JumpTableEntry(Address entry,
116 Deoptimizer::BailoutType type,
121 needs_frame(frame) { }
124 Deoptimizer::BailoutType bailout_type;
128 static bool TraceEnabledFor(BailoutType deopt_type,
129 StackFrame::Type frame_type);
130 static const char* MessageFor(BailoutType type);
132 int output_count() const { return output_count_; }
134 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
135 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
136 BailoutType bailout_type() const { return bailout_type_; }
138 // Number of created JS frames. Not all created frames are necessarily JS.
139 int jsframe_count() const { return jsframe_count_; }
141 static Deoptimizer* New(JSFunction* function,
147 static Deoptimizer* Grab(Isolate* isolate);
149 // The returned object with information on the optimized frame needs to be
150 // freed before another one can be generated.
151 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
154 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
157 // Makes sure that there is enough room in the relocation
158 // information of a code object to perform lazy deoptimization
159 // patching. If there is not enough room a new relocation
160 // information object is allocated and comments are added until it
// is big enough.
162 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
164 // Deoptimize the function now. Its current optimized code will never be run
165 // again and any activations of the optimized code will get deoptimized when
166 // execution returns.
167 static void DeoptimizeFunction(JSFunction* function);
169 // Deoptimize all code in the given isolate.
170 static void DeoptimizeAll(Isolate* isolate);
172 // Deoptimize code associated with the given global object.
173 static void DeoptimizeGlobalObject(JSObject* object);
175 // Deoptimizes all optimized code that has been previously marked
176 // (via code->set_marked_for_deoptimization) and unlinks all functions that
177 // refer to that code.
178 static void DeoptimizeMarkedCode(Isolate* isolate);
180 static void PatchStackForMarkedCode(Isolate* isolate);
182 // Visit all the known optimized functions in a given isolate.
183 static void VisitAllOptimizedFunctions(
184 Isolate* isolate, OptimizedFunctionVisitor* visitor);
186 // The size in bytes of the code required at a lazy deopt patch site.
187 static int patch_size();
191 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
193 void MaterializeHeapNumbersForDebuggerInspectableFrame(
194 Address parameters_top,
195 uint32_t parameters_size,
196 Address expressions_top,
197 uint32_t expressions_size,
198 DeoptimizedFrameInfo* info);
200 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
// Enumerator of the GetEntryMode enum used by GetDeoptimizationEntry below
// (the other enumerators, e.g. ENSURE_ENTRY_CODE, are not visible here).
204 CALCULATE_ENTRY_ADDRESS,
209 static Address GetDeoptimizationEntry(
213 GetEntryMode mode = ENSURE_ENTRY_CODE);
214 static int GetDeoptimizationId(Isolate* isolate,
217 static int GetOutputInfo(DeoptimizationOutputData* data,
219 SharedFunctionInfo* shared);
221 // Code generation support.
222 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
223 static int output_count_offset() {
224 return OFFSET_OF(Deoptimizer, output_count_);
226 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
228 static int has_alignment_padding_offset() {
229 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
232 static int GetDeoptimizedCodeCount(Isolate* isolate);
234 static const int kNotDeoptimizationEntry = -1;
236 // Generators for the deoptimization entry code.
237 class EntryGenerator BASE_EMBEDDED {
239 EntryGenerator(MacroAssembler* masm, BailoutType type)
240 : masm_(masm), type_(type) { }
241 virtual ~EntryGenerator() { }
246 MacroAssembler* masm() const { return masm_; }
247 BailoutType type() const { return type_; }
248 Isolate* isolate() const { return masm_->isolate(); }
250 virtual void GeneratePrologue() { }
253 MacroAssembler* masm_;
254 Deoptimizer::BailoutType type_;
// Generates `count` table entries of one bailout type (see
// GenerateDeoptimizationEntries below).
257 class TableEntryGenerator : public EntryGenerator {
259 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
260 : EntryGenerator(masm, type), count_(count) { }
263 virtual void GeneratePrologue();
266 int count() const { return count_; }
271 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
273 static size_t GetMaxDeoptTableSize();
275 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
279 Isolate* isolate() const { return isolate_; }
282 static const int kMinNumberOfEntries = 64;
283 static const int kMaxNumberOfEntries = 16384;
285 Deoptimizer(Isolate* isolate,
286 JSFunction* function,
291 Code* optimized_code);
292 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
293 void PrintFunctionName();
294 void DeleteFrameDescriptions();
// Frame translation: one DoCompute* method per output frame kind.
296 void DoComputeOutputFrames();
297 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
298 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
300 void DoComputeConstructStubFrame(TranslationIterator* iterator,
302 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
304 bool is_setter_stub_frame);
305 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
308 // Translate object, store the result into an auxiliary array
309 // (deferred_objects_tagged_values_).
310 void DoTranslateObject(TranslationIterator* iterator,
314 // Translate value, store the result into the given frame slot.
315 void DoTranslateCommand(TranslationIterator* iterator,
317 unsigned output_offset);
319 // Translate object, do not store the result anywhere (but do update
320 // the deferred materialization array).
321 void DoTranslateObjectAndSkip(TranslationIterator* iterator);
323 unsigned ComputeInputFrameSize() const;
324 unsigned ComputeFixedSize(JSFunction* function) const;
326 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
327 unsigned ComputeOutgoingArgumentSize() const;
329 Object* ComputeLiteral(int index) const;
// Append one deferred-materialization record to the lists below.
331 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
332 void AddObjectDuplication(intptr_t slot, int object_index);
333 void AddObjectTaggedValue(intptr_t value);
334 void AddObjectDoubleValue(double value);
335 void AddDoubleValue(intptr_t slot_address, double value);
337 bool ArgumentsObjectIsAdapted(int object_index) {
338 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
// jsframe indices in the descriptors count from the outermost frame,
// while the jsframe_* lists below are filled innermost-first; invert.
339 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
340 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
343 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
344 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
345 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
346 return jsframe_functions_[reverse_jsframe_index];
349 // Helper function for heap object materialization.
350 Handle<Object> MaterializeNextHeapObject();
351 Handle<Object> MaterializeNextValue();
353 static void GenerateDeoptimizationEntries(
354 MacroAssembler* masm, int count, BailoutType type);
356 // Marks all the code in the given context for deoptimization.
357 static void MarkAllCodeForContext(Context* native_context);
359 // Visit all the known optimized functions in a given context.
360 static void VisitAllOptimizedFunctionsForContext(
361 Context* context, OptimizedFunctionVisitor* visitor);
363 // Deoptimizes all code marked in the given context.
364 static void DeoptimizeMarkedCodeForContext(Context* native_context);
366 // Patch the given code so that it will deoptimize itself.
367 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
369 // Searches the list of known deoptimizing code for a Code object
370 // containing the given address (which is supposedly faster than
371 // searching all code objects).
372 Code* FindDeoptimizingCode(Address addr);
374 // Fill the input from a JavaScript frame. This is used when
375 // the debugger needs to inspect an optimized frame. For normal
376 // deoptimizations the input frame is filled in generated code.
377 void FillInputFrame(Address tos, JavaScriptFrame* frame);
379 // Fill the given output frame's registers to contain the failure handler
380 // address and the number of parameters for a stub failure trampoline.
381 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
382 CodeStubInterfaceDescriptor* desc);
384 // Fill the given output frame's double registers with the original values
385 // from the input frame's double registers.
386 void CopyDoubleRegisters(FrameDescription* output_frame);
388 // Determines whether the input frame contains alignment padding by looking
389 // at the dynamic alignment state slot inside the frame.
390 bool HasAlignmentPadding(JSFunction* function);
// --- State captured for the deoptimization in progress. ---
393 JSFunction* function_;
394 Code* compiled_code_;
395 unsigned bailout_id_;
396 BailoutType bailout_type_;
399 int has_alignment_padding_;
401 // Input frame description.
402 FrameDescription* input_;
403 // Number of output frames.
405 // Number of output js frames.
407 // Array of output frame descriptions.
408 FrameDescription** output_;
410 // Deferred values to be materialized.
411 List<Object*> deferred_objects_tagged_values_;
412 List<HeapNumberMaterializationDescriptor<int> >
413 deferred_objects_double_values_;
414 List<ObjectMaterializationDescriptor> deferred_objects_;
415 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
417 // Key for lookup of previously materialized objects
419 Handle<FixedArray> previously_materialized_objects_;
420 int prev_materialized_count_;
422 // Output frame information. Only used during heap object materialization.
423 List<Handle<JSFunction> > jsframe_functions_;
424 List<bool> jsframe_has_adapted_arguments_;
426 // Materialized objects. Only used during heap object materialization.
427 List<Handle<Object> >* materialized_values_;
428 List<Handle<Object> >* materialized_objects_;
429 int materialization_value_index_;
430 int materialization_object_index_;
433 DisallowHeapAllocation* disallow_heap_allocation_;
436 CodeTracer::Scope* trace_scope_;
438 static const int table_entry_size_;
440 friend class FrameDescription;
441 friend class DeoptimizedFrameInfo;
// Raw description of a single stack frame built (or read) by the
// deoptimizer. The object is allocated with a variable-sized tail
// (frame_content_) that holds the frame's slots, via the placement
// operator new below. It stores raw Object*/JSFunction* pointers and is
// therefore not GC-safe (see the DeoptimizedFrameInfo comment below).
445 class FrameDescription {
447 FrameDescription(uint32_t frame_size,
448 JSFunction* function);
450 void* operator new(size_t size, uint32_t frame_size) {
451 // Subtracts kPointerSize, as the member frame_content_ already supplies
452 // the first element of the area to store the frame.
453 return malloc(size + frame_size - kPointerSize);
456 void operator delete(void* pointer, uint32_t frame_size) {
460 void operator delete(void* description) {
464 uint32_t GetFrameSize() const {
// frame_size_ is stored as uintptr_t only for tail alignment; it must
// always fit in 32 bits.
465 DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
466 return static_cast<uint32_t>(frame_size_);
469 JSFunction* GetFunction() const { return function_; }
471 unsigned GetOffsetFromSlotIndex(int slot_index);
473 intptr_t GetFrameSlot(unsigned offset) {
474 return *GetFrameSlotPointer(offset);
// Reads a double from a slot; uses read_double_value because the slot
// may not be 8-byte aligned on 32-bit targets.
477 double GetDoubleFrameSlot(unsigned offset) {
478 intptr_t* ptr = GetFrameSlotPointer(offset);
479 return read_double_value(reinterpret_cast<Address>(ptr));
482 void SetFrameSlot(unsigned offset, intptr_t value) {
483 *GetFrameSlotPointer(offset) = value;
486 void SetCallerPc(unsigned offset, intptr_t value);
488 void SetCallerFp(unsigned offset, intptr_t value);
490 void SetCallerConstantPool(unsigned offset, intptr_t value);
492 intptr_t GetRegister(unsigned n) const {
494 // This convoluted DCHECK is needed to work around a gcc problem that
495 // improperly detects an array bounds overflow in optimized debug builds
496 // when using a plain DCHECK.
497 if (n >= ARRAY_SIZE(registers_)) {
502 return registers_[n];
505 double GetDoubleRegister(unsigned n) const {
506 DCHECK(n < ARRAY_SIZE(double_registers_));
507 return double_registers_[n];
510 void SetRegister(unsigned n, intptr_t value) {
511 DCHECK(n < ARRAY_SIZE(registers_));
512 registers_[n] = value;
515 void SetDoubleRegister(unsigned n, double value) {
516 DCHECK(n < ARRAY_SIZE(double_registers_));
517 double_registers_[n] = value;
520 intptr_t GetTop() const { return top_; }
521 void SetTop(intptr_t top) { top_ = top; }
523 intptr_t GetPc() const { return pc_; }
524 void SetPc(intptr_t pc) { pc_ = pc; }
526 intptr_t GetFp() const { return fp_; }
527 void SetFp(intptr_t fp) { fp_ = fp; }
529 intptr_t GetContext() const { return context_; }
530 void SetContext(intptr_t context) { context_ = context; }
532 intptr_t GetConstantPool() const { return constant_pool_; }
533 void SetConstantPool(intptr_t constant_pool) {
534 constant_pool_ = constant_pool;
537 Smi* GetState() const { return state_; }
538 void SetState(Smi* state) { state_ = state; }
540 void SetContinuation(intptr_t pc) { continuation_ = pc; }
542 StackFrame::Type GetFrameType() const { return type_; }
543 void SetFrameType(StackFrame::Type type) { type_ = type; }
545 // Get the incoming arguments count.
546 int ComputeParametersCount();
548 // Get a parameter value for an unoptimized frame.
549 Object* GetParameter(int index);
551 // Get the expression stack height for a unoptimized frame.
552 unsigned GetExpressionCount();
554 // Get the expression stack value for an unoptimized frame.
555 Object* GetExpression(int index);
// Member offsets used by the generated deoptimization entry code.
557 static int registers_offset() {
558 return OFFSET_OF(FrameDescription, registers_);
561 static int double_registers_offset() {
562 return OFFSET_OF(FrameDescription, double_registers_);
565 static int frame_size_offset() {
566 return OFFSET_OF(FrameDescription, frame_size_);
569 static int pc_offset() {
570 return OFFSET_OF(FrameDescription, pc_);
573 static int state_offset() {
574 return OFFSET_OF(FrameDescription, state_);
577 static int continuation_offset() {
578 return OFFSET_OF(FrameDescription, continuation_);
581 static int frame_content_offset() {
582 return OFFSET_OF(FrameDescription, frame_content_);
586 static const uint32_t kZapUint32 = 0xbeeddead;
588 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
589 // keep the variable-size array frame_content_ of type intptr_t at
590 // the end of the structure aligned.
591 uintptr_t frame_size_;  // Number of bytes.
592 JSFunction* function_;
593 intptr_t registers_[Register::kNumRegisters];
594 double double_registers_[DoubleRegister::kMaxNumRegisters];
599 intptr_t constant_pool_;
600 StackFrame::Type type_;
603 // Continuation is the PC where the execution continues after
605 intptr_t continuation_;
607 // This must be at the end of the object as the object is allocated larger
608 // than its definition indicates, to extend this array.
609 intptr_t frame_content_[1];
611 intptr_t* GetFrameSlotPointer(unsigned offset) {
612 DCHECK(offset < frame_size_);
613 return reinterpret_cast<intptr_t*>(
614 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
617 int ComputeFixedSize();
// Per-isolate deoptimizer state: the memory chunks holding the generated
// deoptimization entry code (one chunk and entry count per bailout type),
// the currently active Deoptimizer, and the debugger's inspectable frame
// info, if any.
621 class DeoptimizerData {
623 explicit DeoptimizerData(MemoryAllocator* allocator);
626 void Iterate(ObjectVisitor* v);
629 MemoryAllocator* allocator_;
630 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
631 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
633 DeoptimizedFrameInfo* deoptimized_frame_info_;
635 Deoptimizer* current_;
637 friend class Deoptimizer;
639 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Growable zone-allocated byte buffer into which Translation (below)
// encodes its records; CreateByteArray copies the contents into a
// heap-allocated ByteArray for storage in the deoptimization data.
643 class TranslationBuffer BASE_EMBEDDED {
645 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
647 int CurrentIndex() const { return contents_.length(); }
648 void Add(int32_t value, Zone* zone);
650 Handle<ByteArray> CreateByteArray(Factory* factory);
653 ZoneList<uint8_t> contents_;
// Reads values back out of a translation ByteArray, starting at the given
// index. The Next() declaration is not visible in this excerpt; Skip(n)
// below simply discards the next n values.
657 class TranslationIterator BASE_EMBEDDED {
659 TranslationIterator(ByteArray* buffer, int index)
660 : buffer_(buffer), index_(index) {
661 DCHECK(index >= 0 && index < buffer->length());
666 bool HasNext() const { return index_ < buffer_->length(); }
669 for (int i = 0; i < n; i++) Next();
// X-macro listing the opcodes that may appear in a translation record;
// expanded below to build the Translation::Opcode enum (and, elsewhere,
// the opcode name strings). Some entries are not visible in this excerpt.
678 #define TRANSLATION_OPCODE_LIST(V) \
681 V(CONSTRUCT_STUB_FRAME) \
682 V(GETTER_STUB_FRAME) \
683 V(SETTER_STUB_FRAME) \
684 V(ARGUMENTS_ADAPTOR_FRAME) \
685 V(COMPILED_STUB_FRAME) \
686 V(DUPLICATED_OBJECT) \
687 V(ARGUMENTS_OBJECT) \
694 V(INT32_STACK_SLOT) \
695 V(UINT32_STACK_SLOT) \
696 V(DOUBLE_STACK_SLOT) \
// Writer for translation records. The constructor emits the BEGIN record
// with the frame counts; each Begin* method opens a frame description and
// each Store* method appends one value location to the current frame.
700 class Translation BASE_EMBEDDED {
702 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
704 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
707 #undef DECLARE_TRANSLATION_OPCODE_ENUM
709 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
712 index_(buffer->CurrentIndex()),
714 buffer_->Add(BEGIN, zone);
715 buffer_->Add(frame_count, zone);
716 buffer_->Add(jsframe_count, zone);
// Start index of this translation within the shared buffer.
719 int index() const { return index_; }
722 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
723 void BeginCompiledStubFrame();
724 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
725 void BeginConstructStubFrame(int literal_id, unsigned height);
726 void BeginGetterStubFrame(int literal_id);
727 void BeginSetterStubFrame(int literal_id);
728 void BeginArgumentsObject(int args_length);
729 void BeginCapturedObject(int length);
730 void DuplicateObject(int object_index);
731 void StoreRegister(Register reg);
732 void StoreInt32Register(Register reg);
733 void StoreUint32Register(Register reg);
734 void StoreDoubleRegister(DoubleRegister reg);
735 void StoreStackSlot(int index);
736 void StoreInt32StackSlot(int index);
737 void StoreUint32StackSlot(int index);
738 void StoreDoubleStackSlot(int index);
739 void StoreLiteral(int literal_id);
740 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
742 Zone* zone() const { return zone_; }
744 static int NumberOfOperandsFor(Opcode opcode);
746 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
747 static const char* StringFor(Opcode opcode);
750 // A literal id which refers to the JSFunction itself.
751 static const int kSelfLiteralId = -239;
754 TranslationBuffer* buffer_;
// A reference to one value of an optimized frame: either a stack address
// plus a representation tag, a literal, or a marker for a deferred /
// duplicated / arguments object (which carry a length or id instead of
// an address).
760 class SlotRef BASE_EMBEDDED {
762 enum SlotRepresentation {
769 DEFERRED_OBJECT,    // Object captured by the escape analysis.
770 // The number of nested objects can be obtained
771 // with the DeferredObjectLength() method
772 // (the SlotRefs of the nested objects follow
773 // this SlotRef in the depth-first order.)
774 DUPLICATE_OBJECT,   // Duplicated object of a deferred object.
775 ARGUMENTS_OBJECT    // Arguments object - only used to keep indexing
776 // in sync, it should not be materialized.
780 : addr_(NULL), representation_(UNKNOWN) { }
782 SlotRef(Address addr, SlotRepresentation representation)
783 : addr_(addr), representation_(representation) { }
785 SlotRef(Isolate* isolate, Object* literal)
786 : literal_(literal, isolate), representation_(LITERAL) { }
// Factory helpers for the marker representations.
788 static SlotRef NewArgumentsObject(int length) {
790 slot.representation_ = ARGUMENTS_OBJECT;
791 slot.deferred_object_length_ = length;
795 static SlotRef NewDeferredObject(int length) {
797 slot.representation_ = DEFERRED_OBJECT;
798 slot.deferred_object_length_ = length;
802 SlotRepresentation Representation() { return representation_; }
804 static SlotRef NewDuplicateObject(int id) {
806 slot.representation_ = DUPLICATE_OBJECT;
807 slot.duplicate_object_id_ = id;
// Number of nested SlotRefs that follow this one (non-zero only for
// deferred and arguments objects).
811 int GetChildrenCount() {
812 if (representation_ == DEFERRED_OBJECT ||
813 representation_ == ARGUMENTS_OBJECT) {
814 return deferred_object_length_;
820 int DuplicateObjectId() { return duplicate_object_id_; }
822 Handle<Object> GetValue(Isolate* isolate);
826 Handle<Object> literal_;
827 SlotRepresentation representation_;
828 int deferred_object_length_;
829 int duplicate_object_id_;
// Materializes the values of one inlined frame from its SlotRefs, via the
// Prepare / GetNext / Finish sequence, reusing objects recorded for this
// stack frame in the MaterializedObjectStore where available.
832 class SlotRefValueBuilder BASE_EMBEDDED {
835 JavaScriptFrame* frame,
836 int inlined_frame_index,
837 int formal_parameter_count);
839 void Prepare(Isolate* isolate);
840 Handle<Object> GetNext(Isolate* isolate, int level);
841 void Finish(Isolate* isolate);
843 int args_length() { return args_length_; }
846 List<Handle<Object> > materialized_objects_;
847 Handle<FixedArray> previously_materialized_objects_;
848 int prev_materialized_count_;
849 Address stack_frame_id_;
850 List<SlotRef> slot_refs_;
853 int first_slot_index_;
855 static SlotRef ComputeSlotForNextArgument(
856 Translation::Opcode opcode,
857 TranslationIterator* iterator,
858 DeoptimizationInputData* data,
859 JavaScriptFrame* frame);
861 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
// Maps a slot index to a stack address: non-negative indices are locals
// (below fp), negative indices are incoming parameters (above fp).
863 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
864 if (slot_index >= 0) {
865 const int offset = JavaScriptFrameConstants::kLocal0Offset;
866 return frame->fp() + offset - (slot_index * kPointerSize);
868 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
869 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
873 Handle<Object> GetDeferredObject(Isolate* isolate);
// Per-isolate store mapping a stack frame (keyed by frame pointer) to the
// FixedArray of objects already materialized for it, so repeated
// materialization of the same frame reuses identical objects.
876 class MaterializedObjectStore {
878 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
881 Handle<FixedArray> Get(Address fp);
882 void Set(Address fp, Handle<FixedArray> materialized_objects);
883 void Remove(Address fp);
886 Isolate* isolate() { return isolate_; }
887 Handle<FixedArray> GetStackEntries();
888 Handle<FixedArray> EnsureStackEntries(int size);
890 int StackIdToIndex(Address fp);
893 List<Address> frame_fps_;
897 // Class used to represent an unoptimized frame when the debugger
898 // needs to inspect a frame that is part of an optimized frame. The
899 // internally used FrameDescription objects are not GC safe so for use
900 // by the debugger frame information is copied to an object of this type.
901 // Represents parameters in unadapted form so their number might mismatch
902 // formal parameter count.
903 class DeoptimizedFrameInfo : public Malloced {
905 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
907 bool has_arguments_adaptor,
908 bool has_construct_stub);
909 virtual ~DeoptimizedFrameInfo();
// GC support: visits the raw Object* pointers held by this frame info.
912 void Iterate(ObjectVisitor* v);
914 // Return the number of incoming arguments.
915 int parameters_count() { return parameters_count_; }
917 // Return the height of the expression stack.
918 int expression_count() { return expression_count_; }
920 // Get the frame function.
921 JSFunction* GetFunction() {
925 // Check if this frame is preceded by construct stub frame. The bottom-most
926 // inlined frame might still be called by an uninlined construct stub.
927 bool HasConstructStub() {
928 return has_construct_stub_;
931 // Get an incoming argument.
932 Object* GetParameter(int index) {
933 DCHECK(0 <= index && index < parameters_count());
934 return parameters_[index];
937 // Get an expression from the expression stack.
938 Object* GetExpression(int index) {
939 DCHECK(0 <= index && index < expression_count());
940 return expression_stack_[index];
943 int GetSourcePosition() {
944 return source_position_;
948 // Set an incoming argument.
949 void SetParameter(int index, Object* obj) {
950 DCHECK(0 <= index && index < parameters_count());
951 parameters_[index] = obj;
954 // Set an expression on the expression stack.
955 void SetExpression(int index, Object* obj) {
956 DCHECK(0 <= index && index < expression_count());
957 expression_stack_[index] = obj;
960 JSFunction* function_;
961 bool has_construct_stub_;
962 int parameters_count_;
963 int expression_count_;
// Malloced arrays of raw pointers — kept alive for the GC via Iterate().
964 Object** parameters_;
965 Object** expression_stack_;
966 int source_position_;
968 friend class Deoptimizer;
971 } } // namespace v8::internal
973 #endif // V8_DEOPTIMIZER_H_