1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
10 #include "src/allocation.h"
11 #include "src/macro-assembler.h"
// Reads a double from |p| via memcpy, which is safe for unaligned
// addresses (a direct unaligned load would be undefined behavior on
// some architectures).
// NOTE(review): the local `double d;` declaration and the `return d;`
// appear to be missing from this extracted chunk — confirm against the
// original file.
18 static inline double read_double_value(Address p) {
20 memcpy(&d, p, sizeof(d));
// Forward declarations for types defined later in this header.
25 class FrameDescription;
26 class TranslationIterator;
27 class DeoptimizedFrameInfo;
// Describes a heap number that must be materialized during
// deoptimization: the double |value| to allocate as a HeapNumber and
// the templated |destination| (used elsewhere in this file with
// T = Address for frame slots and T = int for object-array indices)
// where the materialized object is to be stored.
30 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
32 HeapNumberMaterializationDescriptor(T destination, double value)
33 : destination_(destination), value_(value) { }
35 T destination() const { return destination_; }
36 double value() const { return value_; }
// Describes an object (captured by escape analysis) that must be
// materialized during deoptimization: where to store it
// (slot_address), which JS frame it belongs to (jsframe_index), how
// many fields it has (object_length), whether it duplicates a
// previously-described object (duplicate_object), and whether it is
// an arguments object (is_arguments).
44 class ObjectMaterializationDescriptor BASE_EMBEDDED {
46 ObjectMaterializationDescriptor(
47 Address slot_address, int frame, int length, int duplicate, bool is_args)
48 : slot_address_(slot_address),
49 jsframe_index_(frame),
50 object_length_(length),
51 duplicate_object_(duplicate),
52 is_arguments_(is_args) { }
54 Address slot_address() const { return slot_address_; }
55 int jsframe_index() const { return jsframe_index_; }
56 int object_length() const { return object_length_; }
57 int duplicate_object() const { return duplicate_object_; }
58 bool is_arguments() const { return is_arguments_; }
60 // Only used for allocated receivers in DoComputeConstructStubFrame.
61 void patch_slot_address(intptr_t slot) {
62 slot_address_ = reinterpret_cast<Address>(slot);
66 Address slot_address_;
69 int duplicate_object_;
// Abstract visitor interface used to iterate over all optimized
// JSFunctions, grouped by native context: EnterContext is called
// before and LeaveContext after visiting each context's functions.
74 class OptimizedFunctionVisitor BASE_EMBEDDED {
76 virtual ~OptimizedFunctionVisitor() {}
78 // Function which is called before iteration of any optimized functions
79 // from given native context.
80 virtual void EnterContext(Context* context) = 0;
82 virtual void VisitFunction(JSFunction* function) = 0;
84 // Function which is called after iteration of all optimized functions
85 // from given native context.
86 virtual void LeaveContext(Context* context) = 0;
// X-macro list of all deoptimization reasons. Each V(enum_name, text)
// pair is expanded once to build the Deoptimizer::DeoptReason enum
// constants and once to produce the human-readable messages returned
// by Deoptimizer::GetDeoptReason. Do not put comments inside the list:
// the backslash continuations must stay unbroken.
90 #define DEOPT_MESSAGES_LIST(V) \
91 V(kNoReason, "no reason") \
92 V(kConstantGlobalVariableAssignment, "Constant global variable assignment") \
93 V(kConversionOverflow, "conversion overflow") \
94 V(kDivisionByZero, "division by zero") \
95 V(kElementsKindUnhandledInKeyedLoadGenericStub, \
96 "ElementsKind unhandled in KeyedLoadGenericStub") \
97 V(kExpectedHeapNumber, "Expected heap number") \
98 V(kExpectedSmi, "Expected smi") \
99 V(kForcedDeoptToRuntime, "Forced deopt to runtime") \
101 V(kHoleyArrayDespitePackedElements_kindFeedback, \
102 "Holey array despite packed elements_kind feedback") \
103 V(kInstanceMigrationFailed, "instance migration failed") \
104 V(kInsufficientTypeFeedbackForCallWithArguments, \
105 "Insufficient type feedback for call with arguments") \
106 V(kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation, \
107 "Insufficient type feedback for combined type of binary operation") \
108 V(kInsufficientTypeFeedbackForGenericNamedAccess, \
109 "Insufficient type feedback for generic named access") \
110 V(kInsufficientTypeFeedbackForKeyedLoad, \
111 "Insufficient type feedback for keyed load") \
112 V(kInsufficientTypeFeedbackForKeyedStore, \
113 "Insufficient type feedback for keyed store") \
114 V(kInsufficientTypeFeedbackForLHSOfBinaryOperation, \
115 "Insufficient type feedback for LHS of binary operation") \
116 V(kInsufficientTypeFeedbackForRHSOfBinaryOperation, \
117 "Insufficient type feedback for RHS of binary operation") \
118 V(kKeyIsNegative, "key is negative") \
119 V(kLostPrecision, "lost precision") \
120 V(kLostPrecisionOrNaN, "lost precision or NaN") \
121 V(kMementoFound, "memento found") \
122 V(kMinusZero, "minus zero") \
124 V(kNegativeKeyEncountered, "Negative key encountered") \
125 V(kNegativeValue, "negative value") \
126 V(kNoCache, "no cache") \
127 V(kNonStrictElementsInKeyedLoadGenericStub, \
128 "non-strict elements in KeyedLoadGenericStub") \
129 V(kNotADateObject, "not a date object") \
130 V(kNotAHeapNumber, "not a heap number") \
131 V(kNotAHeapNumberUndefinedBoolean, "not a heap number/undefined/true/false") \
132 V(kNotAHeapNumberUndefined, "not a heap number/undefined") \
133 V(kNotAJavaScriptObject, "not a JavaScript object") \
134 V(kNotASmi, "not a Smi") \
135 V(kNotHeapNumber, "not heap number") \
137 V(kOutOfBounds, "out of bounds") \
138 V(kOutsideOfRange, "Outside of range") \
139 V(kOverflow, "overflow") \
140 V(kReceiverWasAGlobalObject, "receiver was a global object") \
142 V(kTooManyArguments, "too many arguments") \
143 V(kTooManyUndetectableTypes, "Too many undetectable types") \
144 V(kTracingElementsTransitions, "Tracing elements transitions") \
145 V(kTypeMismatchBetweenFeedbackAndConstant, \
146 "Type mismatch between feedback and constant") \
147 V(kUndefined, "undefined") \
148 V(kUnexpectedCellContentsInConstantGlobalStore, \
149 "Unexpected cell contents in constant global store") \
150 V(kUnexpectedCellContentsInGlobalStore, \
151 "Unexpected cell contents in global store") \
152 V(kUnexpectedObject, "unexpected object") \
153 V(kUnexpectedRHSOfBinaryOperation, "Unexpected RHS of binary operation") \
154 V(kUninitializedBoilerplateInFastClone, \
155 "Uninitialized boilerplate in fast clone") \
156 V(kUninitializedBoilerplateLiterals, "Uninitialized boilerplate literals") \
157 V(kUnknownMapInPolymorphicAccess, "Unknown map in polymorphic access") \
158 V(kUnknownMapInPolymorphicCall, "Unknown map in polymorphic call") \
159 V(kUnknownMapInPolymorphicElementAccess, \
160 "Unknown map in polymorphic element access") \
161 V(kUnknownMap, "Unknown map") \
162 V(kValueMismatch, "value mismatch") \
163 V(kWrongInstanceType, "wrong instance type") \
164 V(kWrongMap, "wrong map")
// The Deoptimizer drives the transition from optimized code back to
// unoptimized code: it reads the translation describing an optimized
// frame, builds the corresponding unoptimized output frames
// (FrameDescription objects), and materializes any heap numbers and
// escape-analyzed objects that were kept in registers/stack slots.
// Heap-allocated via Malloced; one instance per deoptimization event.
167 class Deoptimizer : public Malloced {
173 // This last bailout type is not really a bailout, but used by the
174 // debugger to deoptimize stack frames to allow inspection.
176 kBailoutTypesWithCodeEntry = SOFT + 1
179 #define DEOPT_MESSAGES_CONSTANTS(C, T) C,
181 DEOPT_MESSAGES_LIST(DEOPT_MESSAGES_CONSTANTS) kLastDeoptReason
183 #undef DEOPT_MESSAGES_CONSTANTS
184 static const char* GetDeoptReason(DeoptReason deopt_reason);
187 DeoptInfo(int r, const char* m, DeoptReason d)
188 : raw_position(r), mnemonic(m), deopt_reason(d) {}
191 const char* mnemonic;
192 DeoptReason deopt_reason;
195 static DeoptInfo GetDeoptInfo(Code* code, int bailout_id);
// Entry recorded in the per-code deopt jump table; zone-allocated.
197 struct JumpTableEntry : public ZoneObject {
198 inline JumpTableEntry(Address entry, const DeoptInfo& deopt_info,
199 Deoptimizer::BailoutType type, bool frame)
202 deopt_info(deopt_info),
204 needs_frame(frame) {}
// Note: equivalence deliberately ignores deopt_info — two entries
// that jump to the same address with the same type/frame-需求 are
// interchangeable. NOTE(review): confirm this is intentional.
206 bool IsEquivalentTo(const JumpTableEntry& other) const {
207 return address == other.address && bailout_type == other.bailout_type &&
208 needs_frame == other.needs_frame;
213 DeoptInfo deopt_info;
214 Deoptimizer::BailoutType bailout_type;
218 static bool TraceEnabledFor(BailoutType deopt_type,
219 StackFrame::Type frame_type);
220 static const char* MessageFor(BailoutType type);
222 int output_count() const { return output_count_; }
224 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
225 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
226 BailoutType bailout_type() const { return bailout_type_; }
228 // Number of created JS frames. Not all created frames are necessarily JS.
229 int jsframe_count() const { return jsframe_count_; }
231 static Deoptimizer* New(JSFunction* function,
237 static Deoptimizer* Grab(Isolate* isolate);
239 // The returned object with information on the optimized frame needs to be
240 // freed before another one can be generated.
241 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
244 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
247 // Makes sure that there is enough room in the relocation
248 // information of a code object to perform lazy deoptimization
249 // patching. If there is not enough room a new relocation
250 // information object is allocated and comments are added until it
252 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
254 // Deoptimize the function now. Its current optimized code will never be run
255 // again and any activations of the optimized code will get deoptimized when
256 // execution returns.
257 static void DeoptimizeFunction(JSFunction* function);
259 // Deoptimize all code in the given isolate.
260 static void DeoptimizeAll(Isolate* isolate);
262 // Deoptimize code associated with the given global object.
263 static void DeoptimizeGlobalObject(JSObject* object);
265 // Deoptimizes all optimized code that has been previously marked
266 // (via code->set_marked_for_deoptimization) and unlinks all functions that
267 // refer to that code.
268 static void DeoptimizeMarkedCode(Isolate* isolate);
270 // Visit all the known optimized functions in a given isolate.
271 static void VisitAllOptimizedFunctions(
272 Isolate* isolate, OptimizedFunctionVisitor* visitor);
274 // The size in bytes of the code required at a lazy deopt patch site.
275 static int patch_size();
279 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
281 void MaterializeHeapNumbersForDebuggerInspectableFrame(
282 Address parameters_top,
283 uint32_t parameters_size,
284 Address expressions_top,
285 uint32_t expressions_size,
286 DeoptimizedFrameInfo* info);
288 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
292 CALCULATE_ENTRY_ADDRESS,
297 static Address GetDeoptimizationEntry(
301 GetEntryMode mode = ENSURE_ENTRY_CODE);
302 static int GetDeoptimizationId(Isolate* isolate,
305 static int GetOutputInfo(DeoptimizationOutputData* data,
307 SharedFunctionInfo* shared);
309 // Code generation support.
310 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
311 static int output_count_offset() {
312 return OFFSET_OF(Deoptimizer, output_count_);
314 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
316 static int has_alignment_padding_offset() {
317 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
320 static int GetDeoptimizedCodeCount(Isolate* isolate);
322 static const int kNotDeoptimizationEntry = -1;
324 // Generators for the deoptimization entry code.
325 class EntryGenerator BASE_EMBEDDED {
327 EntryGenerator(MacroAssembler* masm, BailoutType type)
328 : masm_(masm), type_(type) { }
329 virtual ~EntryGenerator() { }
334 MacroAssembler* masm() const { return masm_; }
335 BailoutType type() const { return type_; }
336 Isolate* isolate() const { return masm_->isolate(); }
338 virtual void GeneratePrologue() { }
341 MacroAssembler* masm_;
342 Deoptimizer::BailoutType type_;
// Entry generator that emits |count| table entries of the given type.
345 class TableEntryGenerator : public EntryGenerator {
347 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
348 : EntryGenerator(masm, type), count_(count) { }
351 virtual void GeneratePrologue();
354 int count() const { return count_; }
359 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
361 static size_t GetMaxDeoptTableSize();
363 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
367 Isolate* isolate() const { return isolate_; }
370 static const int kMinNumberOfEntries = 64;
371 static const int kMaxNumberOfEntries = 16384;
373 Deoptimizer(Isolate* isolate,
374 JSFunction* function,
379 Code* optimized_code);
380 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
381 void PrintFunctionName();
382 void DeleteFrameDescriptions();
384 void DoComputeOutputFrames();
385 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
386 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
388 void DoComputeConstructStubFrame(TranslationIterator* iterator,
390 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
392 bool is_setter_stub_frame);
393 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
396 // Translate object, store the result into an auxiliary array
397 // (deferred_objects_tagged_values_).
398 void DoTranslateObject(TranslationIterator* iterator,
402 // Translate value, store the result into the given frame slot.
403 void DoTranslateCommand(TranslationIterator* iterator,
405 unsigned output_offset);
407 // Translate object, do not store the result anywhere (but do update
408 // the deferred materialization array).
409 void DoTranslateObjectAndSkip(TranslationIterator* iterator);
411 unsigned ComputeInputFrameSize() const;
412 unsigned ComputeFixedSize(JSFunction* function) const;
414 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
415 unsigned ComputeOutgoingArgumentSize() const;
417 Object* ComputeLiteral(int index) const;
419 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
420 void AddObjectDuplication(intptr_t slot, int object_index);
421 void AddObjectTaggedValue(intptr_t value);
422 void AddObjectDoubleValue(double value);
423 void AddDoubleValue(intptr_t slot_address, double value);
// Returns whether the arguments object at |object_index| belongs to a
// frame that went through the arguments adaptor (frames are indexed
// bottom-up, hence the reversed index computation).
425 bool ArgumentsObjectIsAdapted(int object_index) {
426 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
427 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
428 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
// Returns the function owning the arguments object at |object_index|.
431 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
432 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
433 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
434 return jsframe_functions_[reverse_jsframe_index];
437 // Helper function for heap object materialization.
438 Handle<Object> MaterializeNextHeapObject();
439 Handle<Object> MaterializeNextValue();
441 static void GenerateDeoptimizationEntries(
442 MacroAssembler* masm, int count, BailoutType type);
444 // Marks all the code in the given context for deoptimization.
445 static void MarkAllCodeForContext(Context* native_context);
447 // Visit all the known optimized functions in a given context.
448 static void VisitAllOptimizedFunctionsForContext(
449 Context* context, OptimizedFunctionVisitor* visitor);
451 // Deoptimizes all code marked in the given context.
452 static void DeoptimizeMarkedCodeForContext(Context* native_context);
454 // Patch the given code so that it will deoptimize itself.
455 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
457 // Searches the list of known deoptimizing code for a Code object
458 // containing the given address (which is supposedly faster than
459 // searching all code objects).
460 Code* FindDeoptimizingCode(Address addr);
462 // Fill the input from a JavaScript frame. This is used when
463 // the debugger needs to inspect an optimized frame. For normal
464 // deoptimizations the input frame is filled in generated code.
465 void FillInputFrame(Address tos, JavaScriptFrame* frame);
467 // Fill the given output frame's registers to contain the failure handler
468 // address and the number of parameters for a stub failure trampoline.
469 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
470 CodeStubDescriptor* desc);
472 // Fill the given output frame's double registers with the original values
473 // from the input frame's double registers.
474 void CopyDoubleRegisters(FrameDescription* output_frame);
476 // Determines whether the input frame contains alignment padding by looking
477 // at the dynamic alignment state slot inside the frame.
478 bool HasAlignmentPadding(JSFunction* function);
481 JSFunction* function_;
482 Code* compiled_code_;
483 unsigned bailout_id_;
484 BailoutType bailout_type_;
487 int has_alignment_padding_;
489 // Input frame description.
490 FrameDescription* input_;
491 // Number of output frames.
493 // Number of output js frames.
495 // Array of output frame descriptions.
496 FrameDescription** output_;
498 // Deferred values to be materialized.
499 List<Object*> deferred_objects_tagged_values_;
500 List<HeapNumberMaterializationDescriptor<int> >
501 deferred_objects_double_values_;
502 List<ObjectMaterializationDescriptor> deferred_objects_;
503 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
505 // Key for lookup of previously materialized objects
507 Handle<FixedArray> previously_materialized_objects_;
508 int prev_materialized_count_;
510 // Output frame information. Only used during heap object materialization.
511 List<Handle<JSFunction> > jsframe_functions_;
512 List<bool> jsframe_has_adapted_arguments_;
514 // Materialized objects. Only used during heap object materialization.
515 List<Handle<Object> >* materialized_values_;
516 List<Handle<Object> >* materialized_objects_;
517 int materialization_value_index_;
518 int materialization_object_index_;
521 DisallowHeapAllocation* disallow_heap_allocation_;
524 CodeTracer::Scope* trace_scope_;
526 static const int table_entry_size_;
528 friend class FrameDescription;
529 friend class DeoptimizedFrameInfo;
// Describes one (input or output) stack frame during deoptimization.
// The object is over-allocated: frame_content_[1] at the end is the
// first word of a frame_size-byte area holding the frame contents,
// hence the custom operator new below. Not GC-safe — for debugger use
// the contents are copied into a DeoptimizedFrameInfo instead.
533 class FrameDescription {
535 FrameDescription(uint32_t frame_size,
536 JSFunction* function);
538 void* operator new(size_t size, uint32_t frame_size) {
539 // Subtracts kPointerSize, as the member frame_content_ already supplies
540 // the first element of the area to store the frame.
541 return malloc(size + frame_size - kPointerSize);
544 void operator delete(void* pointer, uint32_t frame_size) {
548 void operator delete(void* description) {
552 uint32_t GetFrameSize() const {
553 DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
554 return static_cast<uint32_t>(frame_size_);
557 JSFunction* GetFunction() const { return function_; }
559 unsigned GetOffsetFromSlotIndex(int slot_index);
561 intptr_t GetFrameSlot(unsigned offset) {
562 return *GetFrameSlotPointer(offset);
// Reads a double out of the frame area; goes through
// read_double_value because the slot may be unaligned for doubles.
565 double GetDoubleFrameSlot(unsigned offset) {
566 intptr_t* ptr = GetFrameSlotPointer(offset);
567 return read_double_value(reinterpret_cast<Address>(ptr));
570 void SetFrameSlot(unsigned offset, intptr_t value) {
571 *GetFrameSlotPointer(offset) = value;
574 void SetCallerPc(unsigned offset, intptr_t value);
576 void SetCallerFp(unsigned offset, intptr_t value);
578 void SetCallerConstantPool(unsigned offset, intptr_t value);
580 intptr_t GetRegister(unsigned n) const {
582 // This convoluted DCHECK is needed to work around a gcc problem that
583 // improperly detects an array bounds overflow in optimized debug builds
584 // when using a plain DCHECK.
585 if (n >= arraysize(registers_)) {
590 return registers_[n];
593 double GetDoubleRegister(unsigned n) const {
594 DCHECK(n < arraysize(double_registers_));
595 return double_registers_[n];
598 void SetRegister(unsigned n, intptr_t value) {
599 DCHECK(n < arraysize(registers_));
600 registers_[n] = value;
603 void SetDoubleRegister(unsigned n, double value) {
604 DCHECK(n < arraysize(double_registers_));
605 double_registers_[n] = value;
608 intptr_t GetTop() const { return top_; }
609 void SetTop(intptr_t top) { top_ = top; }
611 intptr_t GetPc() const { return pc_; }
612 void SetPc(intptr_t pc) { pc_ = pc; }
614 intptr_t GetFp() const { return fp_; }
615 void SetFp(intptr_t fp) { fp_ = fp; }
617 intptr_t GetContext() const { return context_; }
618 void SetContext(intptr_t context) { context_ = context; }
620 intptr_t GetConstantPool() const { return constant_pool_; }
621 void SetConstantPool(intptr_t constant_pool) {
622 constant_pool_ = constant_pool;
625 Smi* GetState() const { return state_; }
626 void SetState(Smi* state) { state_ = state; }
628 void SetContinuation(intptr_t pc) { continuation_ = pc; }
630 StackFrame::Type GetFrameType() const { return type_; }
631 void SetFrameType(StackFrame::Type type) { type_ = type; }
633 // Get the incoming arguments count.
634 int ComputeParametersCount();
636 // Get a parameter value for an unoptimized frame.
637 Object* GetParameter(int index);
639 // Get the expression stack height for a unoptimized frame.
640 unsigned GetExpressionCount();
642 // Get the expression stack value for an unoptimized frame.
643 Object* GetExpression(int index);
// Member offsets used by generated (assembly) code to address this
// object directly.
645 static int registers_offset() {
646 return OFFSET_OF(FrameDescription, registers_);
649 static int double_registers_offset() {
650 return OFFSET_OF(FrameDescription, double_registers_);
653 static int frame_size_offset() {
654 return OFFSET_OF(FrameDescription, frame_size_);
657 static int pc_offset() {
658 return OFFSET_OF(FrameDescription, pc_);
661 static int state_offset() {
662 return OFFSET_OF(FrameDescription, state_);
665 static int continuation_offset() {
666 return OFFSET_OF(FrameDescription, continuation_);
669 static int frame_content_offset() {
670 return OFFSET_OF(FrameDescription, frame_content_);
674 static const uint32_t kZapUint32 = 0xbeeddead;
676 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
677 // keep the variable-size array frame_content_ of type intptr_t at
678 // the end of the structure aligned.
679 uintptr_t frame_size_; // Number of bytes.
680 JSFunction* function_;
681 intptr_t registers_[Register::kNumRegisters];
682 double double_registers_[DoubleRegister::kMaxNumRegisters];
687 intptr_t constant_pool_;
688 StackFrame::Type type_;
691 // Continuation is the PC where the execution continues after
693 intptr_t continuation_;
695 // This must be at the end of the object as the object is allocated larger
696 // than its definition indicates to extend this array.
697 intptr_t frame_content_[1];
699 intptr_t* GetFrameSlotPointer(unsigned offset) {
700 DCHECK(offset < frame_size_);
701 return reinterpret_cast<intptr_t*>(
702 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
705 int ComputeFixedSize();
// Per-isolate deoptimizer state: the generated deopt entry code (one
// chunk per bailout type), the currently active Deoptimizer, and the
// frame info handed out to the debugger.
709 class DeoptimizerData {
711 explicit DeoptimizerData(MemoryAllocator* allocator);
714 void Iterate(ObjectVisitor* v);
717 MemoryAllocator* allocator_;
718 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
719 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
721 DeoptimizedFrameInfo* deoptimized_frame_info_;
723 Deoptimizer* current_;
725 friend class Deoptimizer;
727 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Zone-allocated byte buffer into which Translation opcodes and
// operands are appended; converted to a heap ByteArray when the
// optimized code object is created.
731 class TranslationBuffer BASE_EMBEDDED {
733 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
735 int CurrentIndex() const { return contents_.length(); }
736 void Add(int32_t value, Zone* zone);
738 Handle<ByteArray> CreateByteArray(Factory* factory);
741 ZoneList<uint8_t> contents_;
// Sequential reader over a serialized translation stored in a
// ByteArray, starting at |index|.
745 class TranslationIterator BASE_EMBEDDED {
747 TranslationIterator(ByteArray* buffer, int index)
748 : buffer_(buffer), index_(index) {
749 DCHECK(index >= 0 && index < buffer->length());
754 bool HasNext() const { return index_ < buffer_->length(); }
// Skips the next |n| values in the stream.
757 for (int i = 0; i < n; i++) Next();
// X-macro list of translation opcodes; expanded to build the
// Translation::Opcode enum and its string names. Do not put comments
// inside the list: the backslash continuations must stay unbroken.
766 #define TRANSLATION_OPCODE_LIST(V) \
769 V(CONSTRUCT_STUB_FRAME) \
770 V(GETTER_STUB_FRAME) \
771 V(SETTER_STUB_FRAME) \
772 V(ARGUMENTS_ADAPTOR_FRAME) \
773 V(COMPILED_STUB_FRAME) \
774 V(DUPLICATED_OBJECT) \
775 V(ARGUMENTS_OBJECT) \
782 V(INT32_STACK_SLOT) \
783 V(UINT32_STACK_SLOT) \
784 V(DOUBLE_STACK_SLOT) \
// Writer API for building one frame translation: the constructor
// emits the BEGIN header (frame counts), then the Begin*/Store*
// methods append one opcode (plus operands) each into the underlying
// TranslationBuffer.
788 class Translation BASE_EMBEDDED {
790 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
792 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
795 #undef DECLARE_TRANSLATION_OPCODE_ENUM
797 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
800 index_(buffer->CurrentIndex()),
802 buffer_->Add(BEGIN, zone);
803 buffer_->Add(frame_count, zone);
804 buffer_->Add(jsframe_count, zone);
// Index of this translation's BEGIN opcode within the buffer.
807 int index() const { return index_; }
810 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
811 void BeginCompiledStubFrame();
812 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
813 void BeginConstructStubFrame(int literal_id, unsigned height);
814 void BeginGetterStubFrame(int literal_id);
815 void BeginSetterStubFrame(int literal_id);
816 void BeginArgumentsObject(int args_length);
817 void BeginCapturedObject(int length);
818 void DuplicateObject(int object_index);
819 void StoreRegister(Register reg);
820 void StoreInt32Register(Register reg);
821 void StoreUint32Register(Register reg);
822 void StoreDoubleRegister(DoubleRegister reg);
823 void StoreStackSlot(int index);
824 void StoreInt32StackSlot(int index);
825 void StoreUint32StackSlot(int index);
826 void StoreDoubleStackSlot(int index);
827 void StoreLiteral(int literal_id);
828 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
830 Zone* zone() const { return zone_; }
832 static int NumberOfOperandsFor(Opcode opcode);
834 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
835 static const char* StringFor(Opcode opcode);
838 // A literal id which refers to the JSFunction itself.
839 static const int kSelfLiteralId = -239;
842 TranslationBuffer* buffer_;
// Reference to one value of an optimized frame as described by its
// translation: either a raw frame slot (addr_ + representation) or a
// literal, plus the pseudo-representations for escape-analyzed
// (deferred/duplicated) objects and arguments objects.
848 class SlotRef BASE_EMBEDDED {
850 enum SlotRepresentation {
857 DEFERRED_OBJECT, // Object captured by the escape analysis.
858 // The number of nested objects can be obtained
859 // with the DeferredObjectLength() method
860 // (the SlotRefs of the nested objects follow
861 // this SlotRef in the depth-first order.)
862 DUPLICATE_OBJECT, // Duplicated object of a deferred object.
863 ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
864 // in sync, it should not be materialized.
868 : addr_(NULL), representation_(UNKNOWN) { }
870 SlotRef(Address addr, SlotRepresentation representation)
871 : addr_(addr), representation_(representation) { }
873 SlotRef(Isolate* isolate, Object* literal)
874 : literal_(literal, isolate), representation_(LITERAL) { }
// Named factories for the object-materialization pseudo-slots.
876 static SlotRef NewArgumentsObject(int length) {
878 slot.representation_ = ARGUMENTS_OBJECT;
879 slot.deferred_object_length_ = length;
883 static SlotRef NewDeferredObject(int length) {
885 slot.representation_ = DEFERRED_OBJECT;
886 slot.deferred_object_length_ = length;
890 SlotRepresentation Representation() { return representation_; }
892 static SlotRef NewDuplicateObject(int id) {
894 slot.representation_ = DUPLICATE_OBJECT;
895 slot.duplicate_object_id_ = id;
// Number of nested SlotRefs following this one (depth-first order);
// only meaningful for deferred/arguments objects.
899 int GetChildrenCount() {
900 if (representation_ == DEFERRED_OBJECT ||
901 representation_ == ARGUMENTS_OBJECT) {
902 return deferred_object_length_;
908 int DuplicateObjectId() { return duplicate_object_id_; }
910 Handle<Object> GetValue(Isolate* isolate);
914 Handle<Object> literal_;
915 SlotRepresentation representation_;
916 int deferred_object_length_;
917 int duplicate_object_id_;
// Walks the SlotRefs of one inlined frame and materializes their
// values (Prepare -> GetNext per slot -> Finish), reusing previously
// materialized objects keyed by the frame's stack id.
920 class SlotRefValueBuilder BASE_EMBEDDED {
923 JavaScriptFrame* frame,
924 int inlined_frame_index,
925 int formal_parameter_count);
927 void Prepare(Isolate* isolate);
928 Handle<Object> GetNext(Isolate* isolate, int level);
929 void Finish(Isolate* isolate);
931 int args_length() { return args_length_; }
934 List<Handle<Object> > materialized_objects_;
935 Handle<FixedArray> previously_materialized_objects_;
936 int prev_materialized_count_;
937 Address stack_frame_id_;
938 List<SlotRef> slot_refs_;
941 int first_slot_index_;
942 bool should_deoptimize_;
944 static SlotRef ComputeSlotForNextArgument(
945 Translation::Opcode opcode,
946 TranslationIterator* iterator,
947 DeoptimizationInputData* data,
948 JavaScriptFrame* frame);
950 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
// Maps a translation slot index to an address within |frame|:
// non-negative indices address locals (below fp), negative indices
// address incoming parameters (above fp).
952 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
953 if (slot_index >= 0) {
954 const int offset = JavaScriptFrameConstants::kLocal0Offset;
955 return frame->fp() + offset - (slot_index * kPointerSize);
957 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
958 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
962 Handle<Object> GetDeferredObject(Isolate* isolate);
// Per-isolate store of materialized-object arrays, keyed by the frame
// pointer (fp) of the stack frame they belong to.
965 class MaterializedObjectStore {
967 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
970 Handle<FixedArray> Get(Address fp);
971 void Set(Address fp, Handle<FixedArray> materialized_objects);
972 void Remove(Address fp);
975 Isolate* isolate() { return isolate_; }
976 Handle<FixedArray> GetStackEntries();
977 Handle<FixedArray> EnsureStackEntries(int size);
// Returns the index of |fp| in frame_fps_ (the lookup key list).
979 int StackIdToIndex(Address fp);
982 List<Address> frame_fps_;
986 // Class used to represent an unoptimized frame when the debugger
987 // needs to inspect a frame that is part of an optimized frame. The
988 // internally used FrameDescription objects are not GC safe so for use
989 // by the debugger frame information is copied to an object of this type.
990 // Represents parameters in unadapted form so their number might mismatch
991 // formal parameter count.
992 class DeoptimizedFrameInfo : public Malloced {
994 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
996 bool has_arguments_adaptor,
997 bool has_construct_stub);
998 virtual ~DeoptimizedFrameInfo();
// GC support: visits the object pointers copied into this frame info.
1001 void Iterate(ObjectVisitor* v);
1003 // Return the number of incoming arguments.
1004 int parameters_count() { return parameters_count_; }
1006 // Return the height of the expression stack.
1007 int expression_count() { return expression_count_; }
1009 // Get the frame function.
1010 JSFunction* GetFunction() {
1014 // Get the frame context.
1015 Object* GetContext() { return context_; }
1017 // Check if this frame is preceded by construct stub frame. The bottom-most
1018 // inlined frame might still be called by an uninlined construct stub.
1019 bool HasConstructStub() {
1020 return has_construct_stub_;
1023 // Get an incoming argument.
1024 Object* GetParameter(int index) {
1025 DCHECK(0 <= index && index < parameters_count());
1026 return parameters_[index];
1029 // Get an expression from the expression stack.
1030 Object* GetExpression(int index) {
1031 DCHECK(0 <= index && index < expression_count());
1032 return expression_stack_[index];
1035 int GetSourcePosition() {
1036 return source_position_;
1040 // Set an incoming argument.
1041 void SetParameter(int index, Object* obj) {
1042 DCHECK(0 <= index && index < parameters_count());
1043 parameters_[index] = obj;
1046 // Set an expression on the expression stack.
1047 void SetExpression(int index, Object* obj) {
1048 DCHECK(0 <= index && index < expression_count());
1049 expression_stack_[index] = obj;
1052 JSFunction* function_;
1054 bool has_construct_stub_;
1055 int parameters_count_;
1056 int expression_count_;
1057 Object** parameters_;
1058 Object** expression_stack_;
1059 int source_position_;
1061 friend class Deoptimizer;
1064 } } // namespace v8::internal
1066 #endif // V8_DEOPTIMIZER_H_