1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_DEOPTIMIZER_H_
29 #define V8_DEOPTIMIZER_H_
33 #include "allocation.h"
34 #include "macro-assembler.h"
// Reads a double from address |p|, which may not be 8-byte aligned.
// On hosts that support unaligned loads this is a single read; otherwise
// the value is assembled from two aligned 32-bit reads.
// NOTE(review): this extract is missing interior lines (the local union
// declaration and the final return of the assembled double) — consult the
// complete original header before editing.
42 static inline double read_double_value(Address p) {
43 #ifdef V8_HOST_CAN_READ_UNALIGNED
44 return Memory::double_at(p);
45 #else // V8_HOST_CAN_READ_UNALIGNED
46 // Prevent gcc from using load-double (mips ldc1) on (possibly)
47 // non-64-bit aligned address.
52 c.u[0] = *reinterpret_cast<uint32_t*>(p);
53 c.u[1] = *reinterpret_cast<uint32_t*>(p + 4);
55 #endif // V8_HOST_CAN_READ_UNALIGNED
// Reads a 128-bit SIMD value directly from address |p|.
// NOTE(review): presumably |p| must satisfy the alignment requirements of
// simd128_value_t — no unaligned fallback is visible here; verify against
// the full header. The closing brace is missing from this extract.
58 static inline simd128_value_t read_simd128_value(Address p) {
59 return *reinterpret_cast<simd128_value_t*>(p);
62 class FrameDescription;
63 class TranslationIterator;
64 class DeoptimizedFrameInfo;
// Pairs a destination (of template type T, e.g. a slot Address or an int
// index — both instantiations appear later in this header) with a double
// value to materialize as a heap number after deoptimization.
// NOTE(review): the template header, access specifiers, member fields and
// closing brace are missing from this extract.
67 class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
69 HeapNumberMaterializationDescriptor(T destination, double value)
70 : destination_(destination), value_(value) { }
72 T destination() const { return destination_; }
73 double value() const { return value_; }
// Same idea as HeapNumberMaterializationDescriptor, but for 128-bit SIMD
// values (float32x4 / int32x4 lists appear later in this header).
// NOTE(review): the template header, access specifiers, the destination_
// field and the closing brace are missing from this extract.
82 class SIMD128MaterializationDescriptor BASE_EMBEDDED {
84 SIMD128MaterializationDescriptor(T destination, simd128_value_t value)
85 : destination_(destination), value_(value) { }
87 T destination() const { return destination_; }
88 simd128_value_t value() const { return value_; }
92 simd128_value_t value_;
// Describes an object (possibly an arguments object, or a duplicate of a
// previously recorded object) that must be materialized on the heap after
// deoptimization: where to store it (slot_address_), which JS frame it
// belongs to, and how many nested values it contains.
// NOTE(review): some member declarations and the closing brace are missing
// from this extract (original line numbers skip).
96 class ObjectMaterializationDescriptor BASE_EMBEDDED {
98 ObjectMaterializationDescriptor(
99 Address slot_address, int frame, int length, int duplicate, bool is_args)
100 : slot_address_(slot_address),
101 jsframe_index_(frame),
102 object_length_(length),
103 duplicate_object_(duplicate),
104 is_arguments_(is_args) { }
106 Address slot_address() const { return slot_address_; }
107 int jsframe_index() const { return jsframe_index_; }
108 int object_length() const { return object_length_; }
109 int duplicate_object() const { return duplicate_object_; }
110 bool is_arguments() const { return is_arguments_; }
112 // Only used for allocated receivers in DoComputeConstructStubFrame.
113 void patch_slot_address(intptr_t slot) {
114 slot_address_ = reinterpret_cast<Address>(slot);
118 Address slot_address_;
121 int duplicate_object_;
// Visitor interface used by Deoptimizer::VisitAllOptimizedFunctions*:
// EnterContext/LeaveContext bracket the iteration over each native
// context, and VisitFunction is called once per optimized JSFunction.
126 class OptimizedFunctionVisitor BASE_EMBEDDED {
128 virtual ~OptimizedFunctionVisitor() {}
130 // Function which is called before iteration of any optimized functions
131 // from given native context.
132 virtual void EnterContext(Context* context) = 0;
134 virtual void VisitFunction(JSFunction* function) = 0;
136 // Function which is called after iteration of all optimized functions
137 // from given native context.
138 virtual void LeaveContext(Context* context) = 0;
// Central deoptimization driver: translates an optimized frame (input_)
// into one or more unoptimized output frames (output_), records values
// (heap numbers, SIMD values, captured objects) that must be materialized
// on the heap afterwards, and manages the table of deoptimization entry
// stubs. Entry points are the static New/Grab pair plus the various
// static Deoptimize* functions.
// NOTE(review): this extract is missing many interior lines (the BailoutType
// enum, access specifiers, several parameter lists and closing braces) —
// verify every claim against the complete original header before editing.
142 class Deoptimizer : public Malloced {
148 // This last bailout type is not really a bailout, but used by the
149 // debugger to deoptimize stack frames to allow inspection.
153 static const int kBailoutTypesWithCodeEntry = SOFT + 1;
155 struct JumpTableEntry : public ZoneObject {
156 inline JumpTableEntry(Address entry,
157 Deoptimizer::BailoutType type,
162 needs_frame(frame) { }
165 Deoptimizer::BailoutType bailout_type;
169 static bool TraceEnabledFor(BailoutType deopt_type,
170 StackFrame::Type frame_type);
171 static const char* MessageFor(BailoutType type);
173 int output_count() const { return output_count_; }
175 Handle<JSFunction> function() const { return Handle<JSFunction>(function_); }
176 Handle<Code> compiled_code() const { return Handle<Code>(compiled_code_); }
177 BailoutType bailout_type() const { return bailout_type_; }
179 // Number of created JS frames. Not all created frames are necessarily JS.
180 int jsframe_count() const { return jsframe_count_; }
182 static Deoptimizer* New(JSFunction* function,
188 static Deoptimizer* Grab(Isolate* isolate);
190 #ifdef ENABLE_DEBUGGER_SUPPORT
191 // The returned object with information on the optimized frame needs to be
192 // freed before another one can be generated.
193 static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
196 static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
200 // Makes sure that there is enough room in the relocation
201 // information of a code object to perform lazy deoptimization
202 // patching. If there is not enough room a new relocation
203 // information object is allocated and comments are added until it
205 static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
207 // Deoptimize the function now. Its current optimized code will never be run
208 // again and any activations of the optimized code will get deoptimized when
209 // execution returns.
210 static void DeoptimizeFunction(JSFunction* function);
212 // Deoptimize all code in the given isolate.
213 static void DeoptimizeAll(Isolate* isolate);
215 // Deoptimize code associated with the given global object.
216 static void DeoptimizeGlobalObject(JSObject* object);
218 // Deoptimizes all optimized code that has been previously marked
219 // (via code->set_marked_for_deoptimization) and unlinks all functions that
220 // refer to that code.
221 static void DeoptimizeMarkedCode(Isolate* isolate);
223 // Visit all the known optimized functions in a given isolate.
224 static void VisitAllOptimizedFunctions(
225 Isolate* isolate, OptimizedFunctionVisitor* visitor);
227 // The size in bytes of the code required at a lazy deopt patch site.
228 static int patch_size();
232 void MaterializeHeapObjects(JavaScriptFrameIterator* it);
233 #ifdef ENABLE_DEBUGGER_SUPPORT
234 void MaterializeHeapNumbersForDebuggerInspectableFrame(
235 Address parameters_top,
236 uint32_t parameters_size,
237 Address expressions_top,
238 uint32_t expressions_size,
239 DeoptimizedFrameInfo* info);
242 static void ComputeOutputFrames(Deoptimizer* deoptimizer);
246 CALCULATE_ENTRY_ADDRESS,
251 static Address GetDeoptimizationEntry(
255 GetEntryMode mode = ENSURE_ENTRY_CODE);
256 static int GetDeoptimizationId(Isolate* isolate,
259 static int GetOutputInfo(DeoptimizationOutputData* data,
261 SharedFunctionInfo* shared);
263 // Code generation support.
264 static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
265 static int output_count_offset() {
266 return OFFSET_OF(Deoptimizer, output_count_);
268 static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
270 static int has_alignment_padding_offset() {
271 return OFFSET_OF(Deoptimizer, has_alignment_padding_);
274 static int GetDeoptimizedCodeCount(Isolate* isolate);
276 static const int kNotDeoptimizationEntry = -1;
278 // Generators for the deoptimization entry code.
279 class EntryGenerator BASE_EMBEDDED {
281 EntryGenerator(MacroAssembler* masm, BailoutType type)
282 : masm_(masm), type_(type) { }
283 virtual ~EntryGenerator() { }
288 MacroAssembler* masm() const { return masm_; }
289 BailoutType type() const { return type_; }
290 Isolate* isolate() const { return masm_->isolate(); }
292 virtual void GeneratePrologue() { }
295 MacroAssembler* masm_;
296 Deoptimizer::BailoutType type_;
299 class TableEntryGenerator : public EntryGenerator {
301 TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
302 : EntryGenerator(masm, type), count_(count) { }
305 virtual void GeneratePrologue();
308 int count() const { return count_; }
313 int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
315 static size_t GetMaxDeoptTableSize();
317 static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
321 Isolate* isolate() const { return isolate_; }
324 static const int kMinNumberOfEntries = 64;
325 static const int kMaxNumberOfEntries = 16384;
327 Deoptimizer(Isolate* isolate,
328 JSFunction* function,
333 Code* optimized_code);
334 Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
335 void PrintFunctionName();
336 void DeleteFrameDescriptions();
338 void DoComputeOutputFrames();
339 void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
340 void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
342 void DoComputeConstructStubFrame(TranslationIterator* iterator,
344 void DoComputeAccessorStubFrame(TranslationIterator* iterator,
346 bool is_setter_stub_frame);
347 void DoComputeCompiledStubFrame(TranslationIterator* iterator,
350 void DoTranslateObject(TranslationIterator* iterator,
354 void DoTranslateCommand(TranslationIterator* iterator,
356 unsigned output_offset);
358 unsigned ComputeInputFrameSize() const;
359 unsigned ComputeFixedSize(JSFunction* function) const;
361 unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
362 unsigned ComputeOutgoingArgumentSize() const;
364 Object* ComputeLiteral(int index) const;
366 void AddObjectStart(intptr_t slot_address, int argc, bool is_arguments);
367 void AddObjectDuplication(intptr_t slot, int object_index);
368 void AddObjectTaggedValue(intptr_t value);
369 void AddObjectDoubleValue(double value);
370 void AddObjectSIMD128Value(simd128_value_t value, int translation_opcode);
371 void AddDoubleValue(intptr_t slot_address, double value);
372 void AddSIMD128Value(intptr_t slot_address, simd128_value_t value,
373 int translation_opcode);
375 bool ArgumentsObjectIsAdapted(int object_index) {
376 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
377 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
378 return jsframe_has_adapted_arguments_[reverse_jsframe_index];
381 Handle<JSFunction> ArgumentsObjectFunction(int object_index) {
382 ObjectMaterializationDescriptor desc = deferred_objects_.at(object_index);
383 int reverse_jsframe_index = jsframe_count_ - desc.jsframe_index() - 1;
384 return jsframe_functions_[reverse_jsframe_index];
387 // Helper function for heap object materialization.
388 Handle<Object> MaterializeNextHeapObject();
389 Handle<Object> MaterializeNextValue();
391 static void GenerateDeoptimizationEntries(
392 MacroAssembler* masm, int count, BailoutType type);
394 // Marks all the code in the given context for deoptimization.
395 static void MarkAllCodeForContext(Context* native_context);
397 // Visit all the known optimized functions in a given context.
398 static void VisitAllOptimizedFunctionsForContext(
399 Context* context, OptimizedFunctionVisitor* visitor);
401 // Deoptimizes all code marked in the given context.
402 static void DeoptimizeMarkedCodeForContext(Context* native_context);
404 // Patch the given code so that it will deoptimize itself.
405 static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
407 // Searches the list of known deoptimizing code for a Code object
408 // containing the given address (which is supposedly faster than
409 // searching all code objects).
410 Code* FindDeoptimizingCode(Address addr);
412 // Fill the input from a JavaScript frame. This is used when
413 // the debugger needs to inspect an optimized frame. For normal
414 // deoptimizations the input frame is filled in generated code.
415 void FillInputFrame(Address tos, JavaScriptFrame* frame);
417 // Fill the given output frame's registers to contain the failure handler
418 // address and the number of parameters for a stub failure trampoline.
419 void SetPlatformCompiledStubRegisters(FrameDescription* output_frame,
420 CodeStubInterfaceDescriptor* desc);
422 // Fill the given output frame's simd128 registers with the original values
423 // from the input frame's simd128 registers.
424 void CopySIMD128Registers(FrameDescription* output_frame);
426 // Determines whether the input frame contains alignment padding by looking
427 // at the dynamic alignment state slot inside the frame.
428 bool HasAlignmentPadding(JSFunction* function);
430 // Select the version of NotifyStubFailure builtin that either saves or
431 // doesn't save the double registers depending on CPU features.
432 Code* NotifyStubFailureBuiltin();
435 JSFunction* function_;
436 Code* compiled_code_;
437 unsigned bailout_id_;
438 BailoutType bailout_type_;
441 int has_alignment_padding_;
443 // Input frame description.
444 FrameDescription* input_;
445 // Number of output frames.
447 // Number of output js frames.
449 // Array of output frame descriptions.
450 FrameDescription** output_;
452 // Deferred values to be materialized.
453 List<Object*> deferred_objects_tagged_values_;
454 List<HeapNumberMaterializationDescriptor<int> >
455 deferred_objects_double_values_;
456 List<SIMD128MaterializationDescriptor<int> >
457 deferred_objects_float32x4_values_;
458 List<SIMD128MaterializationDescriptor<int> >
459 deferred_objects_int32x4_values_;
460 List<ObjectMaterializationDescriptor> deferred_objects_;
461 List<HeapNumberMaterializationDescriptor<Address> > deferred_heap_numbers_;
462 List<SIMD128MaterializationDescriptor<Address> > deferred_float32x4s_;
463 List<SIMD128MaterializationDescriptor<Address> > deferred_int32x4s_;
465 // Key for lookup of previously materialized objects
467 Handle<FixedArray> previously_materialized_objects_;
468 int prev_materialized_count_;
470 // Output frame information. Only used during heap object materialization.
471 List<Handle<JSFunction> > jsframe_functions_;
472 List<bool> jsframe_has_adapted_arguments_;
474 // Materialized objects. Only used during heap object materialization.
475 List<Handle<Object> >* materialized_values_;
476 List<Handle<Object> >* materialized_objects_;
477 int materialization_value_index_;
478 int materialization_object_index_;
481 DisallowHeapAllocation* disallow_heap_allocation_;
484 CodeTracer::Scope* trace_scope_;
486 static const int table_entry_size_;
488 friend class FrameDescription;
489 friend class DeoptimizedFrameInfo;
// Raw, GC-unsafe description of a single stack frame (registers, pc, fp,
// context, frame slots) used as the Deoptimizer's input and output frame
// representation. The object is over-allocated via the placement
// operator new so that frame_content_[1] extends to frame_size_ bytes of
// slot storage at the end of the object.
// NOTE(review): this extract is missing interior lines (several closing
// braces, some members such as top_/pc_/fp_/context_/state_) — verify
// against the complete original header before editing.
493 class FrameDescription {
495 FrameDescription(uint32_t frame_size,
496 JSFunction* function);
498 void* operator new(size_t size, uint32_t frame_size) {
499 // Subtracts kPointerSize, as the member frame_content_ already supplies
500 // the first element of the area to store the frame.
501 return malloc(size + frame_size - kPointerSize);
504 void operator delete(void* pointer, uint32_t frame_size) {
508 void operator delete(void* description) {
512 uint32_t GetFrameSize() const {
513 ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
514 return static_cast<uint32_t>(frame_size_);
517 JSFunction* GetFunction() const { return function_; }
519 unsigned GetOffsetFromSlotIndex(int slot_index);
521 intptr_t GetFrameSlot(unsigned offset) {
522 return *GetFrameSlotPointer(offset);
525 double GetDoubleFrameSlot(unsigned offset) {
526 intptr_t* ptr = GetFrameSlotPointer(offset);
527 return read_double_value(reinterpret_cast<Address>(ptr));
530 simd128_value_t GetSIMD128FrameSlot(unsigned offset) {
531 intptr_t* ptr = GetFrameSlotPointer(offset);
532 return read_simd128_value(reinterpret_cast<Address>(ptr));
535 void SetFrameSlot(unsigned offset, intptr_t value) {
536 *GetFrameSlotPointer(offset) = value;
539 void SetCallerPc(unsigned offset, intptr_t value);
541 void SetCallerFp(unsigned offset, intptr_t value);
543 void SetCallerConstantPool(unsigned offset, intptr_t value);
545 intptr_t GetRegister(unsigned n) const {
547 // This convoluted ASSERT is needed to work around a gcc problem that
548 // improperly detects an array bounds overflow in optimized debug builds
549 // when using a plain ASSERT.
550 if (n >= ARRAY_SIZE(registers_)) {
555 return registers_[n];
558 double GetDoubleRegister(unsigned n) const;
560 simd128_value_t GetSIMD128Register(unsigned n) const {
561 ASSERT(n < ARRAY_SIZE(simd128_registers_));
562 return simd128_registers_[n];
565 void SetRegister(unsigned n, intptr_t value) {
566 ASSERT(n < ARRAY_SIZE(registers_));
567 registers_[n] = value;
570 void SetDoubleRegister(unsigned n, double value);
572 void SetSIMD128Register(unsigned n, simd128_value_t value) {
573 ASSERT(n < ARRAY_SIZE(simd128_registers_));
574 simd128_registers_[n] = value;
577 intptr_t GetTop() const { return top_; }
578 void SetTop(intptr_t top) { top_ = top; }
580 intptr_t GetPc() const { return pc_; }
581 void SetPc(intptr_t pc) { pc_ = pc; }
583 intptr_t GetFp() const { return fp_; }
584 void SetFp(intptr_t fp) { fp_ = fp; }
586 intptr_t GetContext() const { return context_; }
587 void SetContext(intptr_t context) { context_ = context; }
589 intptr_t GetConstantPool() const { return constant_pool_; }
590 void SetConstantPool(intptr_t constant_pool) {
591 constant_pool_ = constant_pool;
594 Smi* GetState() const { return state_; }
595 void SetState(Smi* state) { state_ = state; }
597 void SetContinuation(intptr_t pc) { continuation_ = pc; }
599 StackFrame::Type GetFrameType() const { return type_; }
600 void SetFrameType(StackFrame::Type type) { type_ = type; }
602 // Get the incoming arguments count.
603 int ComputeParametersCount();
605 // Get a parameter value for an unoptimized frame.
606 Object* GetParameter(int index);
608 // Get the expression stack height for an unoptimized frame.
609 unsigned GetExpressionCount();
611 // Get the expression stack value for an unoptimized frame.
612 Object* GetExpression(int index);
614 static int registers_offset() {
615 return OFFSET_OF(FrameDescription, registers_);
618 static int simd128_registers_offset() {
619 return OFFSET_OF(FrameDescription, simd128_registers_);
622 static int frame_size_offset() {
623 return OFFSET_OF(FrameDescription, frame_size_);
626 static int pc_offset() {
627 return OFFSET_OF(FrameDescription, pc_);
630 static int state_offset() {
631 return OFFSET_OF(FrameDescription, state_);
634 static int continuation_offset() {
635 return OFFSET_OF(FrameDescription, continuation_);
638 static int frame_content_offset() {
639 return OFFSET_OF(FrameDescription, frame_content_);
643 static const uint32_t kZapUint32 = 0xbeeddead;
645 // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
646 // keep the variable-size array frame_content_ of type intptr_t at
647 // the end of the structure aligned.
648 uintptr_t frame_size_; // Number of bytes.
649 JSFunction* function_;
650 intptr_t registers_[Register::kNumRegisters];
651 simd128_value_t simd128_registers_[SIMD128Register::kMaxNumRegisters];
656 intptr_t constant_pool_;
657 StackFrame::Type type_;
660 // Continuation is the PC where the execution continues after
662 intptr_t continuation_;
664 // This must be at the end of the object as the object is allocated larger
665 // than its definition indicates to extend this array.
666 intptr_t frame_content_[1];
668 intptr_t* GetFrameSlotPointer(unsigned offset) {
669 ASSERT(offset < frame_size_);
670 return reinterpret_cast<intptr_t*>(
671 reinterpret_cast<Address>(this) + frame_content_offset() + offset);
674 int ComputeFixedSize();
// Per-isolate storage for the deoptimizer: the memory chunks holding the
// generated deopt entry code (one per bailout type, see
// Deoptimizer::kBailoutTypesWithCodeEntry) and the currently active
// Deoptimizer instance.
// NOTE(review): interior lines (destructor, access specifiers, #endif
// lines, closing brace) are missing from this extract.
678 class DeoptimizerData {
680 explicit DeoptimizerData(MemoryAllocator* allocator);
683 #ifdef ENABLE_DEBUGGER_SUPPORT
684 void Iterate(ObjectVisitor* v);
688 MemoryAllocator* allocator_;
689 int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
690 MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
692 #ifdef ENABLE_DEBUGGER_SUPPORT
693 DeoptimizedFrameInfo* deoptimized_frame_info_;
696 Deoptimizer* current_;
698 friend class Deoptimizer;
700 DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Growable byte buffer into which Translation records its opcode stream;
// CreateByteArray snapshots the contents into a heap-allocated ByteArray.
// NOTE(review): access specifiers and closing brace are missing from this
// extract.
704 class TranslationBuffer BASE_EMBEDDED {
706 explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
708 int CurrentIndex() const { return contents_.length(); }
709 void Add(int32_t value, Zone* zone);
711 Handle<ByteArray> CreateByteArray(Factory* factory);
714 ZoneList<uint8_t> contents_;
// Forward-only reader over a serialized translation stored in a
// ByteArray, starting at the given index.
// NOTE(review): the Next() declaration, member fields and closing brace
// are missing from this extract; the visible loop line presumably belongs
// to a Skip(n) helper — confirm against the full header.
718 class TranslationIterator BASE_EMBEDDED {
720 TranslationIterator(ByteArray* buffer, int index)
721 : buffer_(buffer), index_(index) {
722 ASSERT(index >= 0 && index < buffer->length());
727 bool HasNext() const { return index_ < buffer_->length(); }
730 for (int i = 0; i < n; i++) Next();
// X-macro listing every translation opcode; expanded below by Translation
// to declare its Opcode enum (and, under OBJECT_PRINT/ENABLE_DISASSEMBLER,
// presumably the opcode-name strings — confirm against the full header).
// NOTE(review): several entries (e.g. BEGIN/JS_FRAME and the plain
// REGISTER/STACK_SLOT/LITERAL opcodes) are missing from this extract.
// Do not insert lines between the backslash continuations.
739 #define TRANSLATION_OPCODE_LIST(V) \
742 V(CONSTRUCT_STUB_FRAME) \
743 V(GETTER_STUB_FRAME) \
744 V(SETTER_STUB_FRAME) \
745 V(ARGUMENTS_ADAPTOR_FRAME) \
746 V(COMPILED_STUB_FRAME) \
747 V(DUPLICATED_OBJECT) \
748 V(ARGUMENTS_OBJECT) \
754 V(FLOAT32x4_REGISTER) \
755 V(INT32x4_REGISTER) \
757 V(INT32_STACK_SLOT) \
758 V(UINT32_STACK_SLOT) \
759 V(DOUBLE_STACK_SLOT) \
760 V(FLOAT32x4_STACK_SLOT) \
761 V(INT32x4_STACK_SLOT) \
// Writer side of the translation format: the constructor emits a BEGIN
// record with the frame counts, then the Begin*/Store* methods append one
// opcode (plus operands) each to the underlying TranslationBuffer. The
// Opcode enum is generated from TRANSLATION_OPCODE_LIST above.
// NOTE(review): enum braces, access specifiers, some members (zone_) and
// the closing brace are missing from this extract.
765 class Translation BASE_EMBEDDED {
767 #define DECLARE_TRANSLATION_OPCODE_ENUM(item) item,
769 TRANSLATION_OPCODE_LIST(DECLARE_TRANSLATION_OPCODE_ENUM)
772 #undef DECLARE_TRANSLATION_OPCODE_ENUM
774 Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
777 index_(buffer->CurrentIndex()),
779 buffer_->Add(BEGIN, zone);
780 buffer_->Add(frame_count, zone);
781 buffer_->Add(jsframe_count, zone);
784 int index() const { return index_; }
787 void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
788 void BeginCompiledStubFrame();
789 void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
790 void BeginConstructStubFrame(int literal_id, unsigned height);
791 void BeginGetterStubFrame(int literal_id);
792 void BeginSetterStubFrame(int literal_id);
793 void BeginArgumentsObject(int args_length);
794 void BeginCapturedObject(int length);
795 void DuplicateObject(int object_index);
796 void StoreRegister(Register reg);
797 void StoreInt32Register(Register reg);
798 void StoreUint32Register(Register reg);
799 void StoreDoubleRegister(DoubleRegister reg);
800 void StoreSIMD128Register(SIMD128Register reg, Opcode opcode);
801 void StoreStackSlot(int index);
802 void StoreInt32StackSlot(int index);
803 void StoreUint32StackSlot(int index);
804 void StoreDoubleStackSlot(int index);
805 void StoreSIMD128StackSlot(int index, Opcode opcode);
806 void StoreLiteral(int literal_id);
807 void StoreArgumentsObject(bool args_known, int args_index, int args_length);
809 Zone* zone() const { return zone_; }
811 static int NumberOfOperandsFor(Opcode opcode);
813 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
814 static const char* StringFor(Opcode opcode);
817 // A literal id which refers to the JSFunction itself.
818 static const int kSelfLiteralId = -239;
821 TranslationBuffer* buffer_;
// Reference to a single value slot of a deoptimized frame — either a raw
// stack address plus a representation tag, or a literal Handle — with
// factory helpers for the synthetic deferred/duplicate/arguments-object
// entries produced by escape analysis.
// NOTE(review): several enum members (UNKNOWN, LITERAL, the tagged/int32/
// double representations), the `slot` local declarations and return
// statements in the factories, addr_, and closing braces are missing from
// this extract.
827 class SlotRef BASE_EMBEDDED {
829 enum SlotRepresentation {
838 DEFERRED_OBJECT, // Object captured by the escape analysis.
839 // The number of nested objects can be obtained
840 // with the DeferredObjectLength() method
841 // (the SlotRefs of the nested objects follow
842 // this SlotRef in the depth-first order.)
843 DUPLICATE_OBJECT, // Duplicated object of a deferred object.
844 ARGUMENTS_OBJECT // Arguments object - only used to keep indexing
845 // in sync, it should not be materialized.
849 : addr_(NULL), representation_(UNKNOWN) { }
851 SlotRef(Address addr, SlotRepresentation representation)
852 : addr_(addr), representation_(representation) { }
854 SlotRef(Isolate* isolate, Object* literal)
855 : literal_(literal, isolate), representation_(LITERAL) { }
857 static SlotRef NewArgumentsObject(int length) {
859 slot.representation_ = ARGUMENTS_OBJECT;
860 slot.deferred_object_length_ = length;
864 static SlotRef NewDeferredObject(int length) {
866 slot.representation_ = DEFERRED_OBJECT;
867 slot.deferred_object_length_ = length;
871 SlotRepresentation Representation() { return representation_; }
873 static SlotRef NewDuplicateObject(int id) {
875 slot.representation_ = DUPLICATE_OBJECT;
876 slot.duplicate_object_id_ = id;
880 int GetChildrenCount() {
881 if (representation_ == DEFERRED_OBJECT ||
882 representation_ == ARGUMENTS_OBJECT) {
883 return deferred_object_length_;
889 int DuplicateObjectId() { return duplicate_object_id_; }
891 Handle<Object> GetValue(Isolate* isolate);
895 Handle<Object> literal_;
896 SlotRepresentation representation_;
897 int deferred_object_length_;
898 int duplicate_object_id_;
// Walks the SlotRefs of an inlined frame and materializes their values in
// order (Prepare / GetNext / Finish), consulting previously materialized
// objects keyed by the stack frame id.
// NOTE(review): the constructor's first parameter line, several members
// (args_length_, current_slot_index_?), and closing braces are missing
// from this extract — confirm against the full header.
901 class SlotRefValueBuilder BASE_EMBEDDED {
904 JavaScriptFrame* frame,
905 int inlined_frame_index,
906 int formal_parameter_count);
908 void Prepare(Isolate* isolate);
909 Handle<Object> GetNext(Isolate* isolate, int level);
910 void Finish(Isolate* isolate);
912 int args_length() { return args_length_; }
915 List<Handle<Object> > materialized_objects_;
916 Handle<FixedArray> previously_materialized_objects_;
917 int prev_materialized_count_;
918 Address stack_frame_id_;
919 List<SlotRef> slot_refs_;
922 int first_slot_index_;
924 static SlotRef ComputeSlotForNextArgument(
925 Translation::Opcode opcode,
926 TranslationIterator* iterator,
927 DeoptimizationInputData* data,
928 JavaScriptFrame* frame);
930 Handle<Object> GetPreviouslyMaterialized(Isolate* isolate, int length);
932 static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
933 if (slot_index >= 0) {
934 // Local slots count down from kLocal0Offset below fp.
935 const int offset = JavaScriptFrameConstants::kLocal0Offset;
936 return frame->fp() + offset - (slot_index * kPointerSize);
937 // Negative indices address incoming parameters above fp.
938 const int offset = JavaScriptFrameConstants::kLastParameterOffset;
939 return frame->fp() + offset - ((slot_index + 1) * kPointerSize);
942 Handle<Object> GetDeferredObject(Isolate* isolate);
// Per-isolate map from a frame pointer to the FixedArray of objects that
// were materialized for that frame, so repeated deopts of the same frame
// reuse identical objects. Backed by a stack-entries FixedArray indexed
// via StackIdToIndex.
// NOTE(review): access specifiers, isolate_ member and closing brace are
// missing from this extract.
945 class MaterializedObjectStore {
947 explicit MaterializedObjectStore(Isolate* isolate) : isolate_(isolate) {
950 Handle<FixedArray> Get(Address fp);
951 void Set(Address fp, Handle<FixedArray> materialized_objects);
952 void Remove(Address fp);
955 Isolate* isolate() { return isolate_; }
956 Handle<FixedArray> GetStackEntries();
957 Handle<FixedArray> EnsureStackEntries(int size);
959 int StackIdToIndex(Address fp);
962 List<Address> frame_fps_;
966 #ifdef ENABLE_DEBUGGER_SUPPORT
967 // Class used to represent an unoptimized frame when the debugger
968 // needs to inspect a frame that is part of an optimized frame. The
969 // internally used FrameDescription objects are not GC safe so for use
970 // by the debugger frame information is copied to an object of this type.
971 // Represents parameters in unadapted form so their number might mismatch
972 // formal parameter count.
// NOTE(review): this extract is missing interior lines (constructor
// parameters, access specifiers, several closing braces and the trailing
// #endif) — verify against the complete original header before editing.
973 class DeoptimizedFrameInfo : public Malloced {
975 DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
977 bool has_arguments_adaptor,
978 bool has_construct_stub);
979 virtual ~DeoptimizedFrameInfo();
982 void Iterate(ObjectVisitor* v);
984 // Return the number of incoming arguments.
985 int parameters_count() { return parameters_count_; }
987 // Return the height of the expression stack.
988 int expression_count() { return expression_count_; }
990 // Get the frame function.
991 JSFunction* GetFunction() {
995 // Check if this frame is preceded by construct stub frame. The bottom-most
996 // inlined frame might still be called by an uninlined construct stub.
997 bool HasConstructStub() {
998 return has_construct_stub_;
1001 // Get an incoming argument.
1002 Object* GetParameter(int index) {
1003 ASSERT(0 <= index && index < parameters_count());
1004 return parameters_[index];
1007 // Get an expression from the expression stack.
1008 Object* GetExpression(int index) {
1009 ASSERT(0 <= index && index < expression_count());
1010 return expression_stack_[index];
1013 int GetSourcePosition() {
1014 return source_position_;
1018 // Set an incoming argument.
1019 void SetParameter(int index, Object* obj) {
1020 ASSERT(0 <= index && index < parameters_count());
1021 parameters_[index] = obj;
1024 // Set an expression on the expression stack.
1025 void SetExpression(int index, Object* obj) {
1026 ASSERT(0 <= index && index < expression_count());
1027 expression_stack_[index] = obj;
1030 JSFunction* function_;
1031 bool has_construct_stub_;
1032 int parameters_count_;
1033 int expression_count_;
1034 Object** parameters_;
1035 Object** expression_stack_;
1036 int source_position_;
1038 friend class Deoptimizer;
1042 } } // namespace v8::internal
1044 #endif // V8_DEOPTIMIZER_H_