1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_DEOPTIMIZER_H_
6 #define V8_DEOPTIMIZER_H_
10 #include "src/allocation.h"
11 #include "src/macro-assembler.h"
// Reads a double from address |p| without assuming alignment: copying the
// bytes with memcpy avoids unaligned-load faults on strict-alignment targets.
// NOTE(review): the local 'd' declaration and the return statement fall
// outside this view of the file.
18 static inline double read_double_value(Address p) {
20 memcpy(&d, p, sizeof(d));
// Forward declarations for types defined later in this header.
25 class FrameDescription;
26 class TranslationIterator;
27 class DeoptimizedFrameInfo;
// Records a double |value| that must be boxed as a heap number after
// deoptimization, together with the destination (of template type T — the
// template header is outside this view; usages below instantiate it with
// int and Address) where the materialized number belongs.
30 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
32 HeapNumberMaterializationDescriptor(T destination, double value)
33 : destination_(destination), value_(value) { }
// Accessors for the recorded destination and the double to materialize.
35 T destination() const { return destination_; }
36 double value() const { return value_; }
// Describes an object (captured by escape analysis) that must be
// re-materialized on the heap after deoptimization: where its tagged slot
// lives, which JS frame it belongs to, how many fields it has, whether it
// duplicates an earlier object, and whether it is an arguments object.
44 class ObjectMaterializationDescriptor BASE_EMBEDDED {
46 ObjectMaterializationDescriptor(
47 Address slot_address, int frame, int length, int duplicate, bool is_args)
48 : slot_address_(slot_address),
49 jsframe_index_(frame),
50 object_length_(length),
51 duplicate_object_(duplicate),
52 is_arguments_(is_args) { }
// Plain accessors for the recorded materialization parameters.
54 Address slot_address() const { return slot_address_; }
55 int jsframe_index() const { return jsframe_index_; }
56 int object_length() const { return object_length_; }
57 int duplicate_object() const { return duplicate_object_; }
58 bool is_arguments() const { return is_arguments_; }
60 // Only used for allocated receivers in DoComputeConstructStubFrame.
61 void patch_slot_address(intptr_t slot) {
62 slot_address_ = reinterpret_cast<Address>(slot);
// Data members (several members and the access specifiers fall outside
// this view of the file).
66 Address slot_address_;
69 int duplicate_object_;
// Abstract visitor interface used when iterating over all optimized
// JSFunctions (see Deoptimizer::VisitAllOptimizedFunctions below).
// EnterContext/LeaveContext bracket the per-native-context iteration.
74 class OptimizedFunctionVisitor BASE_EMBEDDED {
76 virtual ~OptimizedFunctionVisitor() {}
78 // Function which is called before iteration of any optimized functions
79 // from given native context.
80 virtual void EnterContext(Context* context) = 0;
// Called once for each optimized function found in the context.
82 virtual void VisitFunction(JSFunction* function) = 0;
84 // Function which is called after iteration of all optimized functions
85 // from given native context.
86 virtual void LeaveContext(Context* context) = 0;
// Master list of deoptimization reasons: V(enum constant, human-readable
// message). Expanded below (via DEOPT_MESSAGES_CONSTANTS) into the
// Deoptimizer::DeoptReason enum and its message strings. No comments may be
// inserted inside the backslash-continued macro body, so it is left as-is.
90 #define DEOPT_MESSAGES_LIST(V)                                               \
91   V(kNoReason, "no reason")                                                  \
92   V(kConstantGlobalVariableAssignment, "Constant global variable assignment") \
93   V(kConversionOverflow, "conversion overflow")                              \
94   V(kDivisionByZero, "division by zero")                                     \
95   V(kElementsKindUnhandledInKeyedLoadGenericStub,                            \
96     "ElementsKind unhandled in KeyedLoadGenericStub")                        \
97   V(kExpectedHeapNumber, "Expected heap number")                             \
98   V(kExpectedSmi, "Expected smi")                                            \
99   V(kForcedDeoptToRuntime, "Forced deopt to runtime")                        \
101   V(kHoleyArrayDespitePackedElements_kindFeedback,                          \
102     "Holey array despite packed elements_kind feedback")                    \
103   V(kInstanceMigrationFailed, "instance migration failed")                  \
104   V(kInsufficientTypeFeedbackForCallWithArguments,                          \
105     "Insufficient type feedback for call with arguments")                   \
106   V(kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,              \
107     "Insufficient type feedback for combined type of binary operation")     \
108   V(kInsufficientTypeFeedbackForGenericNamedAccess,                         \
109     "Insufficient type feedback for generic named access")                  \
110   V(kInsufficientTypeFeedbackForKeyedLoad,                                  \
111     "Insufficient type feedback for keyed load")                            \
112   V(kInsufficientTypeFeedbackForKeyedStore,                                 \
113     "Insufficient type feedback for keyed store")                           \
114   V(kInsufficientTypeFeedbackForLHSOfBinaryOperation,                       \
115     "Insufficient type feedback for LHS of binary operation")               \
116   V(kInsufficientTypeFeedbackForRHSOfBinaryOperation,                       \
117     "Insufficient type feedback for RHS of binary operation")               \
118   V(kKeyIsNegative, "key is negative")                                      \
119   V(kLostPrecision, "lost precision")                                       \
120   V(kLostPrecisionOrNaN, "lost precision or NaN")                           \
121   V(kMementoFound, "memento found")                                         \
122   V(kMinusZero, "minus zero")                                               \
124   V(kNegativeKeyEncountered, "Negative key encountered")                    \
125   V(kNegativeValue, "negative value")                                       \
126   V(kNoCache, "no cache")                                                   \
127   V(kNonStrictElementsInKeyedLoadGenericStub,                               \
128     "non-strict elements in KeyedLoadGenericStub")                          \
129   V(kNotADateObject, "not a date object")                                   \
130   V(kNotAHeapNumber, "not a heap number")                                   \
131   V(kNotAHeapNumberUndefinedBoolean, "not a heap number/undefined/true/false") \
132   V(kNotAHeapNumberUndefined, "not a heap number/undefined")                \
133   V(kNotAJavaScriptObject, "not a JavaScript object")                       \
134   V(kNotASmi, "not a Smi")                                                  \
136   V(kOutOfBounds, "out of bounds")                                          \
137   V(kOutsideOfRange, "Outside of range")                                    \
138   V(kOverflow, "overflow")                                                  \
139   V(kReceiverWasAGlobalObject, "receiver was a global object")              \
141   V(kTooManyArguments, "too many arguments")                                \
142   V(kTooManyUndetectableTypes, "Too many undetectable types")               \
143   V(kTracingElementsTransitions, "Tracing elements transitions")            \
144   V(kTypeMismatchBetweenFeedbackAndConstant,                                \
145     "Type mismatch between feedback and constant")                          \
146   V(kUndefined, "undefined")                                                \
147   V(kUnexpectedCellContentsInConstantGlobalStore,                           \
148     "Unexpected cell contents in constant global store")                    \
149   V(kUnexpectedCellContentsInGlobalStore,                                   \
150     "Unexpected cell contents in global store")                             \
151   V(kUnexpectedObject, "unexpected object")                                 \
152   V(kUnexpectedRHSOfBinaryOperation, "Unexpected RHS of binary operation")  \
153   V(kUninitializedBoilerplateInFastClone,                                   \
154     "Uninitialized boilerplate in fast clone")                              \
155   V(kUninitializedBoilerplateLiterals, "Uninitialized boilerplate literals") \
156   V(kUnknownMapInPolymorphicAccess, "Unknown map in polymorphic access")    \
157   V(kUnknownMapInPolymorphicCall, "Unknown map in polymorphic call")        \
158   V(kUnknownMapInPolymorphicElementAccess,                                  \
159     "Unknown map in polymorphic element access")                            \
160   V(kUnknownMap, "Unknown map")                                             \
161   V(kValueMismatch, "value mismatch")                                       \
162   V(kWrongInstanceType, "wrong instance type")                              \
163   V(kWrongMap, "wrong map")
// Central deoptimization machinery: unwinds an optimized frame into one or
// more unoptimized ("output") frames, re-materializes escaped objects and
// heap numbers, and manages the table of deoptimization entry stubs.
// NOTE(review): many lines of this class (enum bodies, access specifiers,
// several declarations) fall outside this view of the file.
166 class Deoptimizer : public Malloced {
// Fragment of the BailoutType enum; kBailoutTypesWithCodeEntry counts the
// bailout kinds that have generated entry code (it sizes the arrays in
// DeoptimizerData below).
172 // This last bailout type is not really a bailout, but used by the
173 // debugger to deoptimize stack frames to allow inspection.
175 kBailoutTypesWithCodeEntry = SOFT + 1
// Expands DEOPT_MESSAGES_LIST into the DeoptReason enum constants; the
// trailing kLastDeoptReason marks the enum's upper bound.
178 #define DEOPT_MESSAGES_CONSTANTS(C, T) C,
180 DEOPT_MESSAGES_LIST(DEOPT_MESSAGES_CONSTANTS) kLastDeoptReason
182 #undef DEOPT_MESSAGES_CONSTANTS
// Maps a DeoptReason constant back to its human-readable message string.
183 static const char* GetDeoptReason(DeoptReason deopt_reason);
// DeoptInfo fragment: bundles source position, mnemonic and reason for one
// deopt point (struct header outside this view; inlining_id defaults to 0).
186 DeoptInfo(SourcePosition position, const char* m, DeoptReason d)
187 : position(position), mnemonic(m), deopt_reason(d), inlining_id(0) {}
189 SourcePosition position;
190 const char* mnemonic;
191 DeoptReason deopt_reason;
// Recovers the DeoptInfo for the deopt point at |from| within |code|.
195 static DeoptInfo GetDeoptInfo(Code* code, byte* from);
// One entry of the per-code deopt jump table: target entry address plus the
// deopt metadata needed at that site.
197 struct JumpTableEntry : public ZoneObject {
198 inline JumpTableEntry(Address entry, const DeoptInfo& deopt_info,
199 Deoptimizer::BailoutType type, bool frame)
202 deopt_info(deopt_info),
204 needs_frame(frame) {}
// Two entries are equivalent iff they jump to the same address with the
// same bailout type and frame requirement (deopt_info is not compared).
206 bool IsEquivalentTo(const JumpTableEntry& other) const {
207 return address == other.address && bailout_type == other.bailout_type &&
208 needs_frame == other.needs_frame;
213 DeoptInfo deopt_info;
214 Deoptimizer::BailoutType bailout_type;
// Tracing / diagnostics helpers.
218 static bool TraceEnabledFor(BailoutType deopt_type,
219 StackFrame::Type frame_type);
220 static const char* MessageFor(BailoutType type);
// Accessors for the deopt in progress.
222 int output_count() const { return output_count_; }
224 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
225 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
226 BailoutType bailout_type() const { return bailout_type_; }
228 // Number of created JS frames. Not all created frames are necessarily JS.
229 int jsframe_count() const { return jsframe_count_; }
// Creates a Deoptimizer (parameter list partially outside this view) and
// retrieves the one stashed on the isolate.
231 static Deoptimizer* New(JSFunction* function,
237 static Deoptimizer* Grab(Isolate* isolate);
239 // The returned object with information on the optimized frame needs to be
240 // freed before another one can be generated.
241 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
244 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
247 // Makes sure that there is enough room in the relocation
248 // information of a code object to perform lazy deoptimization
249 // patching. If there is not enough room a new relocation
250 // information object is allocated and comments are added until it
252 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
254 // Deoptimize the function now. Its current optimized code will never be run
255 // again and any activations of the optimized code will get deoptimized when
256 // execution returns.
257 static void DeoptimizeFunction(JSFunction* function);
259 // Deoptimize all code in the given isolate.
260 static void DeoptimizeAll(Isolate* isolate);
262 // Deoptimize code associated with the given global object.
263 static void DeoptimizeGlobalObject(JSObject* object);
265 // Deoptimizes all optimized code that has been previously marked
266 // (via code->set_marked_for_deoptimization) and unlinks all functions that
267 // refer to that code.
268 static void DeoptimizeMarkedCode(Isolate* isolate);
270 // Visit all the known optimized functions in a given isolate.
271 static void VisitAllOptimizedFunctions(
272 Isolate* isolate, OptimizedFunctionVisitor* visitor);
274 // The size in bytes of the code required at a lazy deopt patch site.
275 static int patch_size();
// Materializes deferred heap objects/numbers after the output frames have
// been computed.
279 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
281 void MaterializeHeapNumbersForDebuggerInspectableFrame(
282 Address parameters_top,
283 uint32_t parameters_size,
284 Address expressions_top,
285 uint32_t expressions_size,
286 DeoptimizedFrameInfo* info);
288 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
// GetEntryMode fragment and deopt-entry lookup helpers.
292 CALCULATE_ENTRY_ADDRESS,
297 static Address GetDeoptimizationEntry(
301 GetEntryMode mode = ENSURE_ENTRY_CODE);
302 static int GetDeoptimizationId(Isolate* isolate,
305 static int GetOutputInfo(DeoptimizationOutputData* data,
307 SharedFunctionInfo* shared);
309 // Code generation support.
310 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
311 static int output_count_offset() {
312 return OFFSET_OF(Deoptimizer, output_count_);
314 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
316 static int has_alignment_padding_offset() {
317 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
320 static int GetDeoptimizedCodeCount(Isolate* isolate);
// Sentinel returned by GetDeoptimizationId for non-entry addresses.
322 static const int kNotDeoptimizationEntry = -1;
324 // Generators for the deoptimization entry code.
325 class TableEntryGenerator BASE_EMBEDDED {
327 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
328 : masm_(masm), type_(type), count_(count) {}
333 MacroAssembler* masm() const { return masm_; }
334 BailoutType type() const { return type_; }
335 Isolate* isolate() const { return masm_->isolate(); }
// Emits the shared prologue of the deopt entry table.
337 void GeneratePrologue();
340 int count() const { return count_; }
342 MacroAssembler* masm_;
343 Deoptimizer::BailoutType type_;
// Frame-index translation and entry-table sizing helpers.
347 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
349 static size_t GetMaxDeoptTableSize();
351 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
355 Isolate* isolate() const { return isolate_; }
// Bounds for the number of pre-generated deopt table entries.
358 static const int kMinNumberOfEntries = 64;
359 static const int kMaxNumberOfEntries = 16384;
// Private constructor (parameter list partially outside this view) and
// internal helpers for computing the unoptimized output frames.
361 Deoptimizer(Isolate* isolate,
362 JSFunction* function,
367 Code* optimized_code);
368 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
369 void PrintFunctionName();
370 void DeleteFrameDescriptions();
372 void DoComputeOutputFrames();
373 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
374 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
376 void DoComputeConstructStubFrame(TranslationIterator* iterator,
378 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
380 bool is_setter_stub_frame);
381 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
384 // Translate object, store the result into an auxiliary array
385 // (deferred_objects_tagged_values_).
386 void DoTranslateObject(TranslationIterator* iterator,
390 // Translate value, store the result into the given frame slot.
391 void DoTranslateCommand(TranslationIterator* iterator,
393 unsigned output_offset);
395 // Translate object, do not store the result anywhere (but do update
396 // the deferred materialization array).
397 void DoTranslateObjectAndSkip(TranslationIterator* iterator);
// Frame-size computation helpers.
399 unsigned ComputeInputFrameSize() const;
400 unsigned ComputeFixedSize(JSFunction* function) const;
402 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
403 unsigned ComputeOutgoingArgumentSize() const;
405 Object* ComputeLiteral(int index) const;
// Recording of deferred materialization work items.
407 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
408 void AddObjectDuplication(intptr_t slot, int object_index);
409 void AddObjectTaggedValue(intptr_t value);
410 void AddObjectDoubleValue(double value);
411 void AddDoubleValue(intptr_t slot_address, double value);
// True iff the JS frame owning the given deferred arguments object had an
// arguments adaptor; frames are recorded innermost-first, hence the
// reverse index arithmetic.
413 bool ArgumentsObjectIsAdapted(int object_index) {
414 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
415 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
416 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
// The function of the JS frame owning the given deferred arguments object
// (same reverse-index scheme as above).
419 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
420 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
421 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
422 return jsframe_functions_[reverse_jsframe_index];
425 // Helper function for heap object materialization.
426 Handle<Object> MaterializeNextHeapObject();
427 Handle<Object> MaterializeNextValue();
429 static void GenerateDeoptimizationEntries(
430 MacroAssembler* masm, int count, BailoutType type);
432 // Marks all the code in the given context for deoptimization.
433 static void MarkAllCodeForContext(Context* native_context);
435 // Visit all the known optimized functions in a given context.
436 static void VisitAllOptimizedFunctionsForContext(
437 Context* context, OptimizedFunctionVisitor* visitor);
439 // Deoptimizes all code marked in the given context.
440 static void DeoptimizeMarkedCodeForContext(Context* native_context);
442 // Patch the given code so that it will deoptimize itself.
443 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
445 // Searches the list of known deoptimizing code for a Code object
446 // containing the given address (which is supposedly faster than
447 // searching all code objects).
448 Code* FindDeoptimizingCode(Address addr);
450 // Fill the input from a JavaScript frame. This is used when
451 // the debugger needs to inspect an optimized frame. For normal
452 // deoptimizations the input frame is filled in generated code.
453 void FillInputFrame(Address tos, JavaScriptFrame* frame);
455 // Fill the given output frame's registers to contain the failure handler
456 // address and the number of parameters for a stub failure trampoline.
457 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
458 CodeStubDescriptor* desc);
460 // Fill the given output frame's double registers with the original values
461 // from the input frame's double registers.
462 void CopyDoubleRegisters(FrameDescription* output_frame);
464 // Determines whether the input frame contains alignment padding by looking
465 // at the dynamic alignment state slot inside the frame.
466 bool HasAlignmentPadding(JSFunction* function);
// --- Instance state (several members and access specifiers are outside
// this view) ---
469 JSFunction* function_;
470 Code* compiled_code_;
471 unsigned bailout_id_;
472 BailoutType bailout_type_;
475 int has_alignment_padding_;
477 // Input frame description.
478 FrameDescription* input_;
479 // Number of output frames.
481 // Number of output js frames.
483 // Array of output frame descriptions.
484 FrameDescription** output_;
486 // Deferred values to be materialized.
487 List<Object*> deferred_objects_tagged_values_;
488 List<HeapNumberMaterializationDescriptor<int> >
489 deferred_objects_double_values_;
490 List<ObjectMaterializationDescriptor> deferred_objects_;
491 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
493 // Key for lookup of previously materialized objects
495 Handle<FixedArray> previously_materialized_objects_;
496 int prev_materialized_count_;
498 // Output frame information. Only used during heap object materialization.
499 List<Handle<JSFunction> > jsframe_functions_;
500 List<bool> jsframe_has_adapted_arguments_;
502 // Materialized objects. Only used during heap object materialization.
503 List<Handle<Object> >* materialized_values_;
504 List<Handle<Object> >* materialized_objects_;
505 int materialization_value_index_;
506 int materialization_object_index_;
509 DisallowHeapAllocation* disallow_heap_allocation_;
512 CodeTracer::Scope* trace_scope_;
// Size of one generated deopt table entry (defined per architecture).
514 static const int table_entry_size_;
516 friend class FrameDescription;
517 friend class DeoptimizedFrameInfo;
// In-memory description of a single stack frame (input or output) during
// deoptimization: registers, pc/fp/context/constant-pool values, and a
// variable-sized slot area (frame_content_) appended past the end of the
// object by the placement operator new below. Not GC-safe.
521 class FrameDescription {
523 FrameDescription(uint32_t frame_size,
524 JSFunction* function);
// Over-allocates so that frame_content_ can hold frame_size bytes of slots.
526 void* operator new(size_t size, uint32_t frame_size) {
527 // Subtracts kPointerSize, as the member frame_content_ already supplies
528 // the first element of the area to store the frame.
529 return malloc(size + frame_size - kPointerSize);
// Matching deletes (bodies outside this view) release the malloc'd block.
532 void operator delete(void* pointer, uint32_t frame_size) {
536 void operator delete(void* description) {
// frame_size_ is stored as uintptr_t for alignment; assert it still fits
// in 32 bits before narrowing.
540 uint32_t GetFrameSize() const {
541 DCHECK(static_cast<uint32_t>(frame_size_) == frame_size_);
542 return static_cast<uint32_t>(frame_size_);
545 JSFunction* GetFunction() const { return function_; }
547 unsigned GetOffsetFromSlotIndex(int slot_index);
// Raw slot access by byte offset into frame_content_.
549 intptr_t GetFrameSlot(unsigned offset) {
550 return *GetFrameSlotPointer(offset);
// Reads a double from a slot; goes through read_double_value to cope with
// addresses that are not double-aligned.
553 double GetDoubleFrameSlot(unsigned offset) {
554 intptr_t* ptr = GetFrameSlotPointer(offset);
555 return read_double_value(reinterpret_cast<Address>(ptr));
558 void SetFrameSlot(unsigned offset, intptr_t value) {
559 *GetFrameSlotPointer(offset) = value;
// Per-architecture setters for the caller's saved pc/fp/constant pool.
562 void SetCallerPc(unsigned offset, intptr_t value);
564 void SetCallerFp(unsigned offset, intptr_t value);
566 void SetCallerConstantPool(unsigned offset, intptr_t value);
568 intptr_t GetRegister(unsigned n) const {
570 // This convoluted DCHECK is needed to work around a gcc problem that
571 // improperly detects an array bounds overflow in optimized debug builds
572 // when using a plain DCHECK.
573 if (n >= arraysize(registers_)) {
578 return registers_[n];
581 double GetDoubleRegister(unsigned n) const {
582 DCHECK(n < arraysize(double_registers_));
583 return double_registers_[n];
586 void SetRegister(unsigned n, intptr_t value) {
587 DCHECK(n < arraysize(registers_));
588 registers_[n] = value;
591 void SetDoubleRegister(unsigned n, double value) {
592 DCHECK(n < arraysize(double_registers_));
593 double_registers_[n] = value;
// Accessors for the frame's key machine words.
596 intptr_t GetTop() const { return top_; }
597 void SetTop(intptr_t top) { top_ = top; }
599 intptr_t GetPc() const { return pc_; }
600 void SetPc(intptr_t pc) { pc_ = pc; }
602 intptr_t GetFp() const { return fp_; }
603 void SetFp(intptr_t fp) { fp_ = fp; }
605 intptr_t GetContext() const { return context_; }
606 void SetContext(intptr_t context) { context_ = context; }
608 intptr_t GetConstantPool() const { return constant_pool_; }
609 void SetConstantPool(intptr_t constant_pool) {
610 constant_pool_ = constant_pool;
613 Smi* GetState() const { return state_; }
614 void SetState(Smi* state) { state_ = state; }
616 void SetContinuation(intptr_t pc) { continuation_ = pc; }
618 StackFrame::Type GetFrameType() const { return type_; }
619 void SetFrameType(StackFrame::Type type) { type_ = type; }
621 // Get the incoming arguments count.
622 int ComputeParametersCount();
624 // Get a parameter value for an unoptimized frame.
625 Object* GetParameter(int index);
627 // Get the expression stack height for a unoptimized frame.
628 unsigned GetExpressionCount();
630 // Get the expression stack value for an unoptimized frame.
631 Object* GetExpression(int index);
// Member offsets, used by generated code to address this struct directly.
633 static int registers_offset() {
634 return OFFSET_OF(FrameDescription, registers_);
637 static int double_registers_offset() {
638 return OFFSET_OF(FrameDescription, double_registers_);
641 static int frame_size_offset() {
642 return OFFSET_OF(FrameDescription, frame_size_);
645 static int pc_offset() {
646 return OFFSET_OF(FrameDescription, pc_);
649 static int state_offset() {
650 return OFFSET_OF(FrameDescription, state_);
653 static int continuation_offset() {
654 return OFFSET_OF(FrameDescription, continuation_);
657 static int frame_content_offset() {
658 return OFFSET_OF(FrameDescription, frame_content_);
// Recognizable filler value for uninitialized slots/registers.
662 static const uint32_t kZapUint32 = 0xbeeddead;
664 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
665 // keep the variable-size array frame_content_ of type intptr_t at
666 // the end of the structure aligned.
667 uintptr_t frame_size_; // Number of bytes.
668 JSFunction* function_;
669 intptr_t registers_[Register::kNumRegisters];
670 double double_registers_[DoubleRegister::kMaxNumRegisters];
675 intptr_t constant_pool_;
676 StackFrame::Type type_;
679 // Continuation is the PC where the execution continues after
681 intptr_t continuation_;
683 // This must be at the end of the object as the object is allocated larger
684 // than its definition indicates to extend this array.
685 intptr_t frame_content_[1];
// Bounds-checked pointer into the trailing frame_content_ area.
687 intptr_t* GetFrameSlotPointer(unsigned offset) {
688 DCHECK(offset < frame_size_);
689 return reinterpret_cast<intptr_t*>(
690 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
693 int ComputeFixedSize();
// Per-isolate bookkeeping for the deoptimizer: the memory chunks holding the
// generated deopt entry tables (one slot per bailout type with entry code),
// the frame info currently exposed to the debugger, and the Deoptimizer
// instance being handed between New() and Grab().
697 class DeoptimizerData {
699 explicit DeoptimizerData(MemoryAllocator* allocator);
// GC support: visits the object pointers held by this structure.
702 void Iterate(ObjectVisitor* v);
705 MemoryAllocator* allocator_;
706 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
707 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
709 DeoptimizedFrameInfo* deoptimized_frame_info_;
711 Deoptimizer* current_;
713 friend class Deoptimizer;
715 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Growable byte buffer into which Translation records are serialized, later
// frozen into a ByteArray attached to the optimized code.
719 class TranslationBuffer BASE_EMBEDDED {
721 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
// Current write position; used as the start index of a new Translation.
723 int CurrentIndex() const { return contents_.length(); }
724 void Add(int32_t value, Zone* zone);
// Copies the accumulated bytes into a heap-allocated ByteArray.
726 Handle<ByteArray> CreateByteArray(Factory* factory);
729 ZoneList<uint8_t> contents_;
// Forward-only reader over a serialized translation stored in a ByteArray,
// starting at the given index.
733 class TranslationIterator BASE_EMBEDDED {
735 TranslationIterator(ByteArray* buffer, int index)
736 : buffer_(buffer), index_(index) {
737 DCHECK(index >= 0 && index < buffer->length());
// True while more values remain in the buffer.
742 bool HasNext() const { return index_ < buffer_->length(); }
// Skips the next |n| values (Next() is declared outside this view).
745 for (int i = 0; i < n; i++) Next();
// Master list of translation opcodes, expanded into Translation::Opcode
// below. No comments may be inserted inside the backslash-continued macro
// body, so it is left as-is.
754 #define TRANSLATION_OPCODE_LIST(V) \
757   V(CONSTRUCT_STUB_FRAME)          \
758   V(GETTER_STUB_FRAME)             \
759   V(SETTER_STUB_FRAME)             \
760   V(ARGUMENTS_ADAPTOR_FRAME)       \
761   V(COMPILED_STUB_FRAME)           \
762   V(DUPLICATED_OBJECT)             \
763   V(ARGUMENTS_OBJECT)              \
770   V(INT32_STACK_SLOT)              \
771   V(UINT32_STACK_SLOT)             \
772   V(DOUBLE_STACK_SLOT)             \
// Writer API for building one translation (the recipe for reconstructing
// unoptimized frames from an optimized frame) into a TranslationBuffer.
776 class Translation BASE_EMBEDDED {
// Expands TRANSLATION_OPCODE_LIST into the Opcode enum.
778 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
780 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
783 #undef DECLARE_TRANSLATION_OPCODE_ENUM
// Records its start index in the buffer, then writes the BEGIN header with
// the total frame count and the JS frame count.
785 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
788 index_(buffer->CurrentIndex()),
790 buffer_->Add(BEGIN, zone);
791 buffer_->Add(frame_count, zone);
792 buffer_->Add(jsframe_count, zone);
// Index of this translation's BEGIN record within the buffer.
795 int index() const { return index_; }
// Commands for building the translation: one Begin* per reconstructed
// frame kind, then Store*/Duplicate* for each slot value.
798 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
799 void BeginCompiledStubFrame();
800 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
801 void BeginConstructStubFrame(int literal_id, unsigned height);
802 void BeginGetterStubFrame(int literal_id);
803 void BeginSetterStubFrame(int literal_id);
804 void BeginArgumentsObject(int args_length);
805 void BeginCapturedObject(int length);
806 void DuplicateObject(int object_index);
807 void StoreRegister(Register reg);
808 void StoreInt32Register(Register reg);
809 void StoreUint32Register(Register reg);
810 void StoreDoubleRegister(DoubleRegister reg);
811 void StoreStackSlot(int index);
812 void StoreInt32StackSlot(int index);
813 void StoreUint32StackSlot(int index);
814 void StoreDoubleStackSlot(int index);
815 void StoreLiteral(int literal_id);
816 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
818 Zone* zone() const { return zone_; }
// Number of operands following the given opcode in the byte stream.
820 static int NumberOfOperandsFor(Opcode opcode);
822 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
823 static const char* StringFor(Opcode opcode);
826 // A literal id which refers to the JSFunction itself.
827 static const int kSelfLiteralId = -239;
830 TranslationBuffer* buffer_;
// A reference to one value of an optimized frame as seen by the debugger /
// materializer: either a raw stack address, a literal, or a marker for a
// deferred/duplicated/arguments object in the depth-first object stream.
836 class SlotRef BASE_EMBEDDED {
838 enum SlotRepresentation {
845 DEFERRED_OBJECT, // Object captured by the escape analysis.
846 // The number of nested objects can be obtained
847 // with the DeferredObjectLength() method
848 // (the SlotRefs of the nested objects follow
849 // this SlotRef in the depth-first order.)
850 DUPLICATE_OBJECT, // Duplicated object of a deferred object.
851 ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
852 // in sync, it should not be materialized.
// Constructors: default (unknown), address-based, and literal-based.
856 : addr_(NULL), representation_(UNKNOWN) { }
858 SlotRef(Address addr, SlotRepresentation representation)
859 : addr_(addr), representation_(representation) { }
861 SlotRef(Isolate* isolate, Object* literal)
862 : literal_(literal, isolate), representation_(LITERAL) { }
// Factory for an arguments-object marker carrying its length.
864 static SlotRef NewArgumentsObject(int length) {
866 slot.representation_ = ARGUMENTS_OBJECT;
867 slot.deferred_object_length_ = length;
// Factory for a deferred (escape-analysis-captured) object marker.
871 static SlotRef NewDeferredObject(int length) {
873 slot.representation_ = DEFERRED_OBJECT;
874 slot.deferred_object_length_ = length;
878 SlotRepresentation Representation() { return representation_; }
// Factory for a marker referring back to an earlier deferred object.
880 static SlotRef NewDuplicateObject(int id) {
882 slot.representation_ = DUPLICATE_OBJECT;
883 slot.duplicate_object_id_ = id;
// Number of nested SlotRefs that follow this one in depth-first order
// (non-zero only for deferred/arguments objects).
887 int GetChildrenCount() {
888 if (representation_ == DEFERRED_OBJECT ||
889 representation_ == ARGUMENTS_OBJECT) {
890 return deferred_object_length_;
896 int DuplicateObjectId() { return duplicate_object_id_; }
// Resolves the reference to an actual heap object.
898 Handle<Object> GetValue(Isolate* isolate);
902 Handle<Object> literal_;
903 SlotRepresentation representation_;
904 int deferred_object_length_;
905 int duplicate_object_id_;
// Walks the SlotRefs of one inlined frame and materializes their values,
// reusing previously materialized objects and flagging the frame for
// deoptimization when materialization makes that necessary.
908 class SlotRefValueBuilder BASE_EMBEDDED {
911 JavaScriptFrame* frame,
912 int inlined_frame_index,
913 int formal_parameter_count);
// Prepare/GetNext/Finish drive the materialization in three phases.
915 void Prepare(Isolate* isolate);
916 Handle<Object> GetNext(Isolate* isolate, int level);
917 void Finish(Isolate* isolate);
919 int args_length() { return args_length_; }
922 List<Handle<Object> > materialized_objects_;
923 Handle<FixedArray> previously_materialized_objects_;
924 int prev_materialized_count_;
925 Address stack_frame_id_;
926 List<SlotRef> slot_refs_;
929 int first_slot_index_;
930 bool should_deoptimize_;
// Decodes one translation command into the SlotRef for the next argument.
932 static SlotRef ComputeSlotForNextArgument(
933 Translation::Opcode opcode,
934 TranslationIterator* iterator,
935 DeoptimizationInputData* data,
936 JavaScriptFrame* frame);
938 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
// Maps a slot index to its stack address: non-negative indices address
// locals below fp, negative indices address incoming parameters above fp.
940 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
941 if (slot_index >= 0) {
942 const int offset = JavaScriptFrameConstants::kLocal0Offset;
943 return frame->fp() + offset - (slot_index * kPointerSize);
945 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
946 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
950 Handle<Object> GetDeferredObject(Isolate* isolate);
// Per-isolate store mapping a stack frame (keyed by its fp) to the
// FixedArray of objects already materialized for it, so repeated
// inspections/deopts of the same frame reuse identical objects.
953 class MaterializedObjectStore {
955 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
// Lookup / insert-or-update / remove by frame pointer.
958 Handle<FixedArray> Get(Address fp);
959 void Set(Address fp, Handle<FixedArray> materialized_objects);
960 void Remove(Address fp);
963 Isolate* isolate() { return isolate_; }
964 Handle<FixedArray> GetStackEntries();
965 Handle<FixedArray> EnsureStackEntries(int size);
// Index of |fp| within frame_fps_, or a not-found value.
967 int StackIdToIndex(Address fp);
970 List<Address> frame_fps_;
974 // Class used to represent an unoptimized frame when the debugger
975 // needs to inspect a frame that is part of an optimized frame. The
976 // internally used FrameDescription objects are not GC safe so for use
977 // by the debugger frame information is copied to an object of this type.
978 // Represents parameters in unadapted form so their number might mismatch
979 // formal parameter count.
980 class DeoptimizedFrameInfo : public Malloced {
982 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
984 bool has_arguments_adaptor,
985 bool has_construct_stub);
986 virtual ~DeoptimizedFrameInfo();
// GC support: visits the Object pointers copied into this structure.
989 void Iterate(ObjectVisitor* v);
991 // Return the number of incoming arguments.
992 int parameters_count() { return parameters_count_; }
994 // Return the height of the expression stack.
995 int expression_count() { return expression_count_; }
997 // Get the frame function.
998 JSFunction* GetFunction() {
1002 // Get the frame context.
1003 Object* GetContext() { return context_; }
1005 // Check if this frame is preceded by construct stub frame. The bottom-most
1006 // inlined frame might still be called by an uninlined construct stub.
1007 bool HasConstructStub() {
1008 return has_construct_stub_;
1011 // Get an incoming argument.
1012 Object* GetParameter(int index) {
1013 DCHECK(0 <= index && index < parameters_count());
1014 return parameters_[index];
1017 // Get an expression from the expression stack.
1018 Object* GetExpression(int index) {
1019 DCHECK(0 <= index && index < expression_count());
1020 return expression_stack_[index];
1023 int GetSourcePosition() {
1024 return source_position_;
1028 // Set an incoming argument.
1029 void SetParameter(int index, Object* obj) {
1030 DCHECK(0 <= index && index < parameters_count());
1031 parameters_[index] = obj;
1034 // Set an expression on the expression stack.
1035 void SetExpression(int index, Object* obj) {
1036 DCHECK(0 <= index && index < expression_count());
1037 expression_stack_[index] = obj;
// Copied frame state; parameters_/expression_stack_ are heap-allocated
// arrays owned by this object.
1040 JSFunction* function_;
1042 bool has_construct_stub_;
1043 int parameters_count_;
1044 int expression_count_;
1045 Object** parameters_;
1046 Object** expression_stack_;
1047 int source_position_;
1049 friend class Deoptimizer;
1052 } } // namespace v8::internal
1054 #endif // V8_DEOPTIMIZER_H_