1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
10 #include "src/allocation.h"
11 #include "src/macro-assembler.h"
12 #include "src/zone-inl.h"
// Reads a double from |p| via memcpy rather than dereferencing a cast
// pointer; memcpy is well-defined for unaligned / type-punned reads where
// a direct double* load could fault on alignment-strict architectures.
19 static inline double read_double_value(Address p) {
21 memcpy(&d, p, sizeof(d));
26 class FrameDescription;
27 class TranslationIterator;
28 class DeoptimizedFrameInfo;
// Records a heap number that must be materialized during deoptimization:
// a destination (templated — used with Address and int elsewhere in this
// file) plus the double value to box into a HeapNumber.
31 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
33 HeapNumberMaterializationDescriptor(T destination, double value)
34 : destination_(destination), value_(value) { }
36 T destination() const { return destination_; }
37 double value() const { return value_; }
// Records an object that must be materialized during deoptimization: the
// frame slot that will receive it, the index of the JS frame it belongs
// to, its length, a duplicate-object id, and whether it is an arguments
// object.
45 class ObjectMaterializationDescriptor BASE_EMBEDDED {
47 ObjectMaterializationDescriptor(
48 Address slot_address, int frame, int length, int duplicate, bool is_args)
49 : slot_address_(slot_address),
50 jsframe_index_(frame),
51 object_length_(length),
52 duplicate_object_(duplicate),
53 is_arguments_(is_args) { }
55 Address slot_address() const { return slot_address_; }
56 int jsframe_index() const { return jsframe_index_; }
57 int object_length() const { return object_length_; }
58 int duplicate_object() const { return duplicate_object_; }
59 bool is_arguments() const { return is_arguments_; }
61 // Only used for allocated receivers in DoComputeConstructStubFrame.
62 void patch_slot_address(intptr_t slot) {
63 slot_address_ = reinterpret_cast<Address>(slot);
67 Address slot_address_;
70 int duplicate_object_;
// Visitor interface for iterating over the optimized functions of a
// native context (see Deoptimizer::VisitAllOptimizedFunctions).
75 class OptimizedFunctionVisitor BASE_EMBEDDED {
77 virtual ~OptimizedFunctionVisitor() {}
79 // Function which is called before iteration of any optimized functions
80 // from given native context.
81 virtual void EnterContext(Context* context) = 0;
// Called once for each optimized function found in the context.
83 virtual void VisitFunction(JSFunction* function) = 0;
85 // Function which is called after iteration of all optimized functions
86 // from given native context.
87 virtual void LeaveContext(Context* context) = 0;
// Deoptimizer drives the translation of one optimized frame into the
// corresponding unoptimized output frames: it computes the output
// FrameDescription objects from the translation data, materializes
// deferred heap numbers and escape-analyzed objects, and manages the
// table of deoptimization entry stubs.
91 class Deoptimizer : public Malloced {
97 // This last bailout type is not really a bailout, but used by the
98 // debugger to deoptimize stack frames to allow inspection.
102 static const int kBailoutTypesWithCodeEntry = SOFT + 1;
// Deopt reason: a raw source position plus mnemonic/detail strings.
105 Reason(int r, const char* m, const char* d)
106 : raw_position(r), mnemonic(m), detail(d) {}
108 bool operator==(const Reason& other) const {
109 return raw_position == other.raw_position &&
110 CStringEquals(mnemonic, other.mnemonic) &&
111 CStringEquals(detail, other.detail);
114 bool operator!=(const Reason& other) const { return !(*this == other); }
117 const char* mnemonic;
// Entry in the lazily generated jump table of deopt entry points.
121 struct JumpTableEntry : public ZoneObject {
122 inline JumpTableEntry(Address entry, const Reason& the_reason,
123 Deoptimizer::BailoutType type, bool frame)
128 needs_frame(frame) {}
130 bool IsEquivalentTo(const JumpTableEntry& other) const {
// The reason only matters for tracing, so it is ignored when
// --trace-deopt is off and entries may then be shared more often.
131 return address == other.address && bailout_type == other.bailout_type &&
132 needs_frame == other.needs_frame &&
133 (!FLAG_trace_deopt || reason == other.reason);
139 Deoptimizer::BailoutType bailout_type;
143 static bool TraceEnabledFor(BailoutType deopt_type,
144 StackFrame::Type frame_type);
145 static const char* MessageFor(BailoutType type);
147 int output_count() const { return output_count_; }
149 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
150 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
151 BailoutType bailout_type() const { return bailout_type_; }
153 // Number of created JS frames. Not all created frames are necessarily JS.
154 int jsframe_count() const { return jsframe_count_; }
156 static Deoptimizer* New(JSFunction* function,
162 static Deoptimizer* Grab(Isolate* isolate);
164 // The returned object with information on the optimized frame needs to be
165 // freed before another one can be generated.
166 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
169 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
172 // Makes sure that there is enough room in the relocation
173 // information of a code object to perform lazy deoptimization
174 // patching. If there is not enough room a new relocation
175 // information object is allocated and comments are added until it
177 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
179 // Deoptimize the function now. Its current optimized code will never be run
180 // again and any activations of the optimized code will get deoptimized when
181 // execution returns.
182 static void DeoptimizeFunction(JSFunction* function);
184 // Deoptimize all code in the given isolate.
185 static void DeoptimizeAll(Isolate* isolate);
187 // Deoptimize code associated with the given global object.
188 static void DeoptimizeGlobalObject(JSObject* object);
190 // Deoptimizes all optimized code that has been previously marked
191 // (via code->set_marked_for_deoptimization) and unlinks all functions that
192 // refer to that code.
193 static void DeoptimizeMarkedCode(Isolate* isolate);
195 // Visit all the known optimized functions in a given isolate.
196 static void VisitAllOptimizedFunctions(
197 Isolate* isolate, OptimizedFunctionVisitor* visitor);
199 // The size in bytes of the code required at a lazy deopt patch site.
200 static int patch_size();
204 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
206 void MaterializeHeapNumbersForDebuggerInspectableFrame(
207 Address parameters_top,
208 uint32_t parameters_size,
209 Address expressions_top,
210 uint32_t expressions_size,
211 DeoptimizedFrameInfo* info);
213 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
217 CALCULATE_ENTRY_ADDRESS,
222 static Address GetDeoptimizationEntry(
226 GetEntryMode mode = ENSURE_ENTRY_CODE);
227 static int GetDeoptimizationId(Isolate* isolate,
230 static int GetOutputInfo(DeoptimizationOutputData* data,
232 SharedFunctionInfo* shared);
234 // Code generation support.
235 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
236 static int output_count_offset() {
237 return OFFSET_OF(Deoptimizer, output_count_);
239 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
241 static int has_alignment_padding_offset() {
242 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
245 static int GetDeoptimizedCodeCount(Isolate* isolate);
247 static const int kNotDeoptimizationEntry = -1;
249 // Generators for the deoptimization entry code.
250 class EntryGenerator BASE_EMBEDDED {
252 EntryGenerator(MacroAssembler* masm, BailoutType type)
253 : masm_(masm), type_(type) { }
254 virtual ~EntryGenerator() { }
259 MacroAssembler* masm() const { return masm_; }
260 BailoutType type() const { return type_; }
261 Isolate* isolate() const { return masm_->isolate(); }
263 virtual void GeneratePrologue() { }
266 MacroAssembler* masm_;
267 Deoptimizer::BailoutType type_;
// Generates a table of |count| deopt entries of a single bailout type.
270 class TableEntryGenerator : public EntryGenerator {
272 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
273 : EntryGenerator(masm, type), count_(count) { }
276 virtual void GeneratePrologue();
279 int count() const { return count_; }
284 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
286 static size_t GetMaxDeoptTableSize();
288 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
292 Isolate* isolate() const { return isolate_; }
295 static const int kMinNumberOfEntries = 64;
296 static const int kMaxNumberOfEntries = 16384;
298 Deoptimizer(Isolate* isolate,
299 JSFunction* function,
304 Code* optimized_code);
305 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
306 void PrintFunctionName();
307 void DeleteFrameDescriptions();
309 void DoComputeOutputFrames();
310 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
311 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
313 void DoComputeConstructStubFrame(TranslationIterator* iterator,
315 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
317 bool is_setter_stub_frame);
318 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
321 // Translate object, store the result into an auxiliary array
322 // (deferred_objects_tagged_values_).
323 void DoTranslateObject(TranslationIterator* iterator,
327 // Translate value, store the result into the given frame slot.
328 void DoTranslateCommand(TranslationIterator* iterator,
330 unsigned output_offset);
332 // Translate object, do not store the result anywhere (but do update
333 // the deferred materialization array).
334 void DoTranslateObjectAndSkip(TranslationIterator* iterator);
336 unsigned ComputeInputFrameSize() const;
337 unsigned ComputeFixedSize(JSFunction* function) const;
339 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
340 unsigned ComputeOutgoingArgumentSize() const;
342 Object* ComputeLiteral(int index) const;
344 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
345 void AddObjectDuplication(intptr_t slot, int object_index);
346 void AddObjectTaggedValue(intptr_t value);
347 void AddObjectDoubleValue(double value);
348 void AddDoubleValue(intptr_t slot_address, double value);
350 bool ArgumentsObjectIsAdapted(int object_index) {
// Frames are recorded bottom-up, so convert the jsframe index into a
// reverse index before looking up the per-frame bookkeeping lists.
351 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
352 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
353 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
356 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
357 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
358 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
359 return jsframe_functions_[reverse_jsframe_index];
362 // Helper function for heap object materialization.
363 Handle<Object> MaterializeNextHeapObject();
364 Handle<Object> MaterializeNextValue();
366 static void GenerateDeoptimizationEntries(
367 MacroAssembler* masm, int count, BailoutType type);
369 // Marks all the code in the given context for deoptimization.
370 static void MarkAllCodeForContext(Context* native_context);
372 // Visit all the known optimized functions in a given context.
373 static void VisitAllOptimizedFunctionsForContext(
374 Context* context, OptimizedFunctionVisitor* visitor);
376 // Deoptimizes all code marked in the given context.
377 static void DeoptimizeMarkedCodeForContext(Context* native_context);
379 // Patch the given code so that it will deoptimize itself.
380 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
382 // Searches the list of known deoptimizing code for a Code object
383 // containing the given address (which is supposedly faster than
384 // searching all code objects).
385 Code* FindDeoptimizingCode(Address addr);
387 // Fill the input from a JavaScript frame. This is used when
388 // the debugger needs to inspect an optimized frame. For normal
389 // deoptimizations the input frame is filled in generated code.
390 void FillInputFrame(Address tos, JavaScriptFrame* frame);
392 // Fill the given output frame's registers to contain the failure handler
393 // address and the number of parameters for a stub failure trampoline.
394 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
395 CodeStubDescriptor* desc);
397 // Fill the given output frame's double registers with the original values
398 // from the input frame's double registers.
399 void CopyDoubleRegisters(FrameDescription* output_frame);
401 // Determines whether the input frame contains alignment padding by looking
402 // at the dynamic alignment state slot inside the frame.
403 bool HasAlignmentPadding(JSFunction* function);
406 JSFunction* function_;
407 Code* compiled_code_;
408 unsigned bailout_id_;
409 BailoutType bailout_type_;
412 int has_alignment_padding_;
414 // Input frame description.
415 FrameDescription* input_;
416 // Number of output frames.
418 // Number of output js frames.
420 // Array of output frame descriptions.
421 FrameDescription** output_;
423 // Deferred values to be materialized.
424 List<Object*> deferred_objects_tagged_values_;
425 List<HeapNumberMaterializationDescriptor<int> >
426 deferred_objects_double_values_;
427 List<ObjectMaterializationDescriptor> deferred_objects_;
428 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
430 // Key for lookup of previously materialized objects
432 Handle<FixedArray> previously_materialized_objects_;
433 int prev_materialized_count_;
435 // Output frame information. Only used during heap object materialization.
436 List<Handle<JSFunction> > jsframe_functions_;
437 List<bool> jsframe_has_adapted_arguments_;
439 // Materialized objects. Only used during heap object materialization.
440 List<Handle<Object> >* materialized_values_;
441 List<Handle<Object> >* materialized_objects_;
442 int materialization_value_index_;
443 int materialization_object_index_;
446 DisallowHeapAllocation* disallow_heap_allocation_;
449 CodeTracer::Scope* trace_scope_;
451 static const int table_entry_size_;
453 friend class FrameDescription;
454 friend class DeoptimizedFrameInfo;
// Describes a single stack frame (the input frame or one of the computed
// output frames) during deoptimization. Instances are over-allocated via
// the placement operator new below so that the trailing frame_content_[]
// array covers the whole frame payload.
458 class FrameDescription {
460 FrameDescription(uint32_t frame_size,
461 JSFunction* function);
463 void* operator new(size_t size, uint32_t frame_size) {
464 // Subtracts kPointerSize, as the member frame_content_ already supplies
465 // the first element of the area to store the frame.
466 return malloc(size + frame_size - kPointerSize);
469 void operator delete(void* pointer, uint32_t frame_size) {
473 void operator delete(void* description) {
477 uint32_t GetFrameSize() const {
// frame_size_ is stored as uintptr_t for alignment; assert it fits.
478 DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
479 return static_cast<uint32_t>(frame_size_);
482 JSFunction* GetFunction() const { return function_; }
484 unsigned GetOffsetFromSlotIndex(int slot_index);
486 intptr_t GetFrameSlot(unsigned offset) {
487 return *GetFrameSlotPointer(offset);
490 double GetDoubleFrameSlot(unsigned offset) {
// Use the memcpy-based reader: the slot may not be double-aligned.
491 intptr_t* ptr = GetFrameSlotPointer(offset);
492 return read_double_value(reinterpret_cast<Address>(ptr));
495 void SetFrameSlot(unsigned offset, intptr_t value) {
496 *GetFrameSlotPointer(offset) = value;
499 void SetCallerPc(unsigned offset, intptr_t value);
501 void SetCallerFp(unsigned offset, intptr_t value);
503 void SetCallerConstantPool(unsigned offset, intptr_t value);
505 intptr_t GetRegister(unsigned n) const {
507 // This convoluted DCHECK is needed to work around a gcc problem that
508 // improperly detects an array bounds overflow in optimized debug builds
509 // when using a plain DCHECK.
510 if (n >= arraysize(registers_)) {
515 return registers_[n];
518 double GetDoubleRegister(unsigned n) const {
519 DCHECK(n < arraysize(double_registers_));
520 return double_registers_[n];
523 void SetRegister(unsigned n, intptr_t value) {
524 DCHECK(n < arraysize(registers_));
525 registers_[n] = value;
528 void SetDoubleRegister(unsigned n, double value) {
529 DCHECK(n < arraysize(double_registers_));
530 double_registers_[n] = value;
533 intptr_t GetTop() const { return top_; }
534 void SetTop(intptr_t top) { top_ = top; }
536 intptr_t GetPc() const { return pc_; }
537 void SetPc(intptr_t pc) { pc_ = pc; }
539 intptr_t GetFp() const { return fp_; }
540 void SetFp(intptr_t fp) { fp_ = fp; }
542 intptr_t GetContext() const { return context_; }
543 void SetContext(intptr_t context) { context_ = context; }
545 intptr_t GetConstantPool() const { return constant_pool_; }
546 void SetConstantPool(intptr_t constant_pool) {
547 constant_pool_ = constant_pool;
550 Smi* GetState() const { return state_; }
551 void SetState(Smi* state) { state_ = state; }
553 void SetContinuation(intptr_t pc) { continuation_ = pc; }
555 StackFrame::Type GetFrameType() const { return type_; }
556 void SetFrameType(StackFrame::Type type) { type_ = type; }
558 // Get the incoming arguments count.
559 int ComputeParametersCount();
561 // Get a parameter value for an unoptimized frame.
562 Object* GetParameter(int index);
564 // Get the expression stack height for an unoptimized frame.
565 unsigned GetExpressionCount();
567 // Get the expression stack value for an unoptimized frame.
568 Object* GetExpression(int index);
570 static int registers_offset() {
571 return OFFSET_OF(FrameDescription, registers_);
574 static int double_registers_offset() {
575 return OFFSET_OF(FrameDescription, double_registers_);
578 static int frame_size_offset() {
579 return OFFSET_OF(FrameDescription, frame_size_);
582 static int pc_offset() {
583 return OFFSET_OF(FrameDescription, pc_);
586 static int state_offset() {
587 return OFFSET_OF(FrameDescription, state_);
590 static int continuation_offset() {
591 return OFFSET_OF(FrameDescription, continuation_);
594 static int frame_content_offset() {
595 return OFFSET_OF(FrameDescription, frame_content_);
599 static const uint32_t kZapUint32 = 0xbeeddead;
601 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
602 // keep the variable-size array frame_content_ of type intptr_t at
603 // the end of the structure aligned.
604 uintptr_t frame_size_; // Number of bytes.
605 JSFunction* function_;
606 intptr_t registers_[Register::kNumRegisters];
607 double double_registers_[DoubleRegister::kMaxNumRegisters];
612 intptr_t constant_pool_;
613 StackFrame::Type type_;
616 // Continuation is the PC where the execution continues after
618 intptr_t continuation_;
620 // This must be at the end of the object as the object is allocated larger
621 // than its definition indicates, to extend this array.
622 intptr_t frame_content_[1];
624 intptr_t* GetFrameSlotPointer(unsigned offset) {
625 DCHECK(offset < frame_size_);
626 return reinterpret_cast<intptr_t*>(
627 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
630 int ComputeFixedSize();
// Per-isolate deoptimizer state: the memory chunks holding the generated
// deopt entry code (one per bailout type, sized by
// Deoptimizer::kBailoutTypesWithCodeEntry) and the currently active
// Deoptimizer / debugger frame info, if any.
634 class DeoptimizerData {
636 explicit DeoptimizerData(MemoryAllocator* allocator);
639 void Iterate(ObjectVisitor* v);
642 MemoryAllocator* allocator_;
643 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
644 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
646 DeoptimizedFrameInfo* deoptimized_frame_info_;
648 Deoptimizer* current_;
650 friend class Deoptimizer;
652 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Growable zone-allocated byte buffer into which Translation records its
// opcodes and operands; converted to a heap ByteArray when complete.
656 class TranslationBuffer BASE_EMBEDDED {
658 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
660 int CurrentIndex() const { return contents_.length(); }
661 void Add(int32_t value, Zone* zone);
663 Handle<ByteArray> CreateByteArray(Factory* factory);
666 ZoneList<uint8_t> contents_;
// Reads values back out of a translation ByteArray, starting at |index|.
670 class TranslationIterator BASE_EMBEDDED {
672 TranslationIterator(ByteArray* buffer, int index)
673 : buffer_(buffer), index_(index) {
674 DCHECK(index >= 0 && index < buffer->length());
679 bool HasNext() const { return index_ < buffer_->length(); }
// Skips |n| values by consuming them.
682 for (int i = 0; i < n; i++) Next();
// X-macro list of translation opcodes; expanded with V(name) to build the
// Translation::Opcode enum and its string table. (No comments may appear
// between the backslash-continued lines below.)
691 #define TRANSLATION_OPCODE_LIST(V) \
694 V(CONSTRUCT_STUB_FRAME) \
695 V(GETTER_STUB_FRAME) \
696 V(SETTER_STUB_FRAME) \
697 V(ARGUMENTS_ADAPTOR_FRAME) \
698 V(COMPILED_STUB_FRAME) \
699 V(DUPLICATED_OBJECT) \
700 V(ARGUMENTS_OBJECT) \
707 V(INT32_STACK_SLOT) \
708 V(UINT32_STACK_SLOT) \
709 V(DOUBLE_STACK_SLOT) \
// Writer that appends a description of an optimized frame's state (frames,
// registers, stack slots, literals, captured objects) to a
// TranslationBuffer; the Begin*/Store*/Duplicate* calls each emit one
// opcode plus its operands.
713 class Translation BASE_EMBEDDED {
715 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
717 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
720 #undef DECLARE_TRANSLATION_OPCODE_ENUM
722 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
// Record where this translation starts, then emit the BEGIN header.
725 index_(buffer->CurrentIndex()),
727 buffer_->Add(BEGIN, zone);
728 buffer_->Add(frame_count, zone);
729 buffer_->Add(jsframe_count, zone);
732 int index() const { return index_; }
735 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
736 void BeginCompiledStubFrame();
737 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
738 void BeginConstructStubFrame(int literal_id, unsigned height);
739 void BeginGetterStubFrame(int literal_id);
740 void BeginSetterStubFrame(int literal_id);
741 void BeginArgumentsObject(int args_length);
742 void BeginCapturedObject(int length);
743 void DuplicateObject(int object_index);
744 void StoreRegister(Register reg);
745 void StoreInt32Register(Register reg);
746 void StoreUint32Register(Register reg);
747 void StoreDoubleRegister(DoubleRegister reg);
748 void StoreStackSlot(int index);
749 void StoreInt32StackSlot(int index);
750 void StoreUint32StackSlot(int index);
751 void StoreDoubleStackSlot(int index);
752 void StoreLiteral(int literal_id);
753 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
755 Zone* zone() const { return zone_; }
757 static int NumberOfOperandsFor(Opcode opcode);
759 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
760 static const char* StringFor(Opcode opcode);
763 // A literal id which refers to the JSFunction itself.
764 static const int kSelfLiteralId = -239;
767 TranslationBuffer* buffer_;
// Describes where a single value of an optimized frame lives (frame slot
// address, literal, or deferred/duplicated object) and how to interpret
// it, so the debugger can materialize it via GetValue().
773 class SlotRef BASE_EMBEDDED {
775 enum SlotRepresentation {
782 DEFERRED_OBJECT, // Object captured by the escape analysis.
783 // The number of nested objects can be obtained
784 // with the DeferredObjectLength() method
785 // (the SlotRefs of the nested objects follow
786 // this SlotRef in the depth-first order.)
787 DUPLICATE_OBJECT, // Duplicated object of a deferred object.
788 ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
789 // in sync, it should not be materialized.
793 : addr_(NULL), representation_(UNKNOWN) { }
795 SlotRef(Address addr, SlotRepresentation representation)
796 : addr_(addr), representation_(representation) { }
798 SlotRef(Isolate* isolate, Object* literal)
799 : literal_(literal, isolate), representation_(LITERAL) { }
// Factory for an arguments-object placeholder with |length| children.
801 static SlotRef NewArgumentsObject(int length) {
803 slot.representation_ = ARGUMENTS_OBJECT;
804 slot.deferred_object_length_ = length;
// Factory for an escape-analyzed object with |length| nested slots.
808 static SlotRef NewDeferredObject(int length) {
810 slot.representation_ = DEFERRED_OBJECT;
811 slot.deferred_object_length_ = length;
815 SlotRepresentation Representation() { return representation_; }
// Factory for a reference to an already-described deferred object.
817 static SlotRef NewDuplicateObject(int id) {
819 slot.representation_ = DUPLICATE_OBJECT;
820 slot.duplicate_object_id_ = id;
// Number of nested SlotRefs that follow this one (depth-first order);
// only deferred/arguments objects have children.
824 int GetChildrenCount() {
825 if (representation_ == DEFERRED_OBJECT ||
826 representation_ == ARGUMENTS_OBJECT) {
827 return deferred_object_length_;
833 int DuplicateObjectId() { return duplicate_object_id_; }
835 Handle<Object> GetValue(Isolate* isolate);
839 Handle<Object> literal_;
840 SlotRepresentation representation_;
841 int deferred_object_length_;
842 int duplicate_object_id_;
// Walks the SlotRefs of one inlined frame and materializes their values
// for the debugger; Prepare/GetNext/Finish form the iteration protocol.
845 class SlotRefValueBuilder BASE_EMBEDDED {
848 JavaScriptFrame* frame,
849 int inlined_frame_index,
850 int formal_parameter_count);
852 void Prepare(Isolate* isolate);
853 Handle<Object> GetNext(Isolate* isolate, int level);
854 void Finish(Isolate* isolate);
856 int args_length() { return args_length_; }
859 List<Handle<Object> > materialized_objects_;
860 Handle<FixedArray> previously_materialized_objects_;
861 int prev_materialized_count_;
862 Address stack_frame_id_;
863 List<SlotRef> slot_refs_;
866 int first_slot_index_;
868 static SlotRef ComputeSlotForNextArgument(
869 Translation::Opcode opcode,
870 TranslationIterator* iterator,
871 DeoptimizationInputData* data,
872 JavaScriptFrame* frame);
874 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
// Maps a slot index to a frame address: non-negative indices address
// locals below fp, negative indices address incoming parameters.
876 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
877 if (slot_index >= 0) {
878 const int offset = JavaScriptFrameConstants::kLocal0Offset;
879 return frame->fp() + offset - (slot_index * kPointerSize);
881 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
882 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
886 Handle<Object> GetDeferredObject(Isolate* isolate);
// Per-isolate map from a stack frame's fp to the FixedArray of objects
// previously materialized for that frame, so repeated debugger
// inspections observe the same object identities.
889 class MaterializedObjectStore {
891 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
894 Handle<FixedArray> Get(Address fp);
895 void Set(Address fp, Handle<FixedArray> materialized_objects);
896 void Remove(Address fp);
899 Isolate* isolate() { return isolate_; }
900 Handle<FixedArray> GetStackEntries();
901 Handle<FixedArray> EnsureStackEntries(int size);
903 int StackIdToIndex(Address fp);
906 List<Address> frame_fps_;
910 // Class used to represent an unoptimized frame when the debugger
911 // needs to inspect a frame that is part of an optimized frame. The
912 // internally used FrameDescription objects are not GC safe so for use
913 // by the debugger frame information is copied to an object of this type.
914 // Represents parameters in unadapted form so their number might mismatch
915 // formal parameter count.
916 class DeoptimizedFrameInfo : public Malloced {
918 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
920 bool has_arguments_adaptor,
921 bool has_construct_stub);
922 virtual ~DeoptimizedFrameInfo();
// GC support: visits the parameters and expression stack slots.
925 void Iterate(ObjectVisitor* v);
927 // Return the number of incoming arguments.
928 int parameters_count() { return parameters_count_; }
930 // Return the height of the expression stack.
931 int expression_count() { return expression_count_; }
933 // Get the frame function.
934 JSFunction* GetFunction() {
938 // Get the frame context.
939 Object* GetContext() { return context_; }
941 // Check if this frame is preceded by construct stub frame. The bottom-most
942 // inlined frame might still be called by an uninlined construct stub.
943 bool HasConstructStub() {
944 return has_construct_stub_;
947 // Get an incoming argument.
948 Object* GetParameter(int index) {
949 DCHECK(0 <= index && index < parameters_count());
950 return parameters_[index];
953 // Get an expression from the expression stack.
954 Object* GetExpression(int index) {
955 DCHECK(0 <= index && index < expression_count());
956 return expression_stack_[index];
// Return the source position of the deoptimized frame.
959 int GetSourcePosition() {
960 return source_position_;
964 // Set an incoming argument.
965 void SetParameter(int index, Object* obj) {
966 DCHECK(0 <= index && index < parameters_count());
967 parameters_[index] = obj;
970 // Set an expression on the expression stack.
971 void SetExpression(int index, Object* obj) {
972 DCHECK(0 <= index && index < expression_count());
973 expression_stack_[index] = obj;
976 JSFunction* function_;
978 bool has_construct_stub_;
979 int parameters_count_;
980 int expression_count_;
981 Object** parameters_;
982 Object** expression_stack_;
983 int source_position_;
985 friend class Deoptimizer;
988 } } // namespace v8::internal
990 #endif // V8_DEOPTIMIZER_H_