1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_DEOPTIMIZER_H_
29 #define V8_DEOPTIMIZER_H_
33 #include "allocation.h"
34 #include "macro-assembler.h"
41 class FrameDescription;
42 class TranslationIterator;
43 class DeoptimizingCodeListNode;
44 class DeoptimizedFrameInfo;
// Records a (stack-slot address, double value) pair. After frame
// reconstruction the double is materialized as a heap number and the
// resulting pointer is written into the slot (see
// Deoptimizer::MaterializeHeapNumbers).
class HeapNumberMaterializationDescriptor BASE_EMBEDDED {
  HeapNumberMaterializationDescriptor(Address slot_address, double val)
      : slot_address_(slot_address), val_(val) { }

  // Address of the frame slot that receives the materialized heap number.
  Address slot_address() const { return slot_address_; }
  // The double value to box.
  double value() const { return val_; }

  Address slot_address_;
// Visitor interface for walking all optimized JSFunctions, grouped by
// global context: EnterContext is called once per context, then
// VisitFunction for each optimized function in it, then LeaveContext.
class OptimizedFunctionVisitor BASE_EMBEDDED {
  virtual ~OptimizedFunctionVisitor() {}

  // Function which is called before iteration of any optimized functions
  // from given global context.
  virtual void EnterContext(Context* context) = 0;

  // Called for each optimized function in the current context.
  virtual void VisitFunction(JSFunction* function) = 0;

  // Function which is called after iteration of all optimized functions
  // from given global context.
  virtual void LeaveContext(Context* context) = 0;
// Per-isolate deoptimizer state: the generated deoptimization entry
// code chunks, the Deoptimizer instance currently in flight, and the
// list of deoptimized code objects still referenced from the stack.
class DeoptimizerData {
#ifdef ENABLE_DEBUGGER_SUPPORT
  // GC support for the debugger-inspectable frame info below.
  void Iterate(ObjectVisitor* v);

  // Code chunks holding the eager and lazy deoptimization entry points.
  MemoryChunk* eager_deoptimization_entry_code_;
  MemoryChunk* lazy_deoptimization_entry_code_;
  // The Deoptimizer created by Deoptimizer::New and not yet grabbed
  // via Deoptimizer::Grab (NOTE(review): inferred from the New/Grab
  // pairing -- confirm against deoptimizer.cc).
  Deoptimizer* current_;

#ifdef ENABLE_DEBUGGER_SUPPORT
  DeoptimizedFrameInfo* deoptimized_frame_info_;

  // List of deoptimized code which still have references from active stack
  // frames. These code objects are needed by the deoptimizer when deoptimizing
  // a frame for which the code object for the function has been
  // changed from the code present when deoptimizing was done.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
// Driver for deoptimizing ("bailing out" of) optimized code. The static
// interface deoptimizes functions, patches/reverts stack-check sites,
// and maps bailout ids to deoptimization entry points; an instance
// describes one deoptimization in progress (the input frame, the
// computed output frames, and doubles deferred for heap-number
// materialization).
class Deoptimizer : public Malloced {
  // This last bailout type is not really a bailout, but used by the
  // debugger to deoptimize stack frames to allow inspection.

  // Number of unoptimized output frames computed for this deoptimization.
  int output_count() const { return output_count_; }

  static Deoptimizer* New(JSFunction* function,
  // Retrieves the Deoptimizer previously prepared via New() for this
  // isolate.
  static Deoptimizer* Grab(Isolate* isolate);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The returned object with information on the optimized frame needs to be
  // freed before another one can be generated.
  static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
  static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,

  // Makes sure that there is enough room in the relocation
  // information of a code object to perform lazy deoptimization
  // patching. If there is not enough room a new relocation
  // information object is allocated and comments are added until it
  static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);

  // Deoptimize the function now. Its current optimized code will never be run
  // again and any activations of the optimized code will get deoptimized when
  // execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  // Deoptimize all optimized functions associated with the given
  // global object.
  static void DeoptimizeGlobalObject(JSObject* object);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // The size in bytes of the code required at a lazy deopt patch site.
  static int patch_size();

  // Patch all stack guard checks in the unoptimized code to
  // unconditionally call replacement_code.
  static void PatchStackCheckCode(Code* unoptimized_code,
                                  Code* replacement_code);

  // Patch stack guard check at instruction before pc_after in
  // the unoptimized code to unconditionally call replacement_code.
  static void PatchStackCheckCodeAt(Code* unoptimized_code,
                                    Code* replacement_code);

  // Change all patched stack guard checks in the unoptimized code
  // back to a normal stack guard check.
  static void RevertStackCheckCode(Code* unoptimized_code,
                                   Code* replacement_code);

  // Change the patched stack guard check at the instruction before
  // pc_after back to a normal stack guard check.
  static void RevertStackCheckCodeAt(Code* unoptimized_code,
                                     Code* replacement_code);

  // Boxes the doubles recorded via AddDoubleValue as heap numbers and
  // stores them into their frame slots.
  void MaterializeHeapNumbers();
#ifdef ENABLE_DEBUGGER_SUPPORT
  void MaterializeHeapNumbersForDebuggerInspectableFrame(
      Address top, uint32_t size, DeoptimizedFrameInfo* info);

  static void ComputeOutputFrames(Deoptimizer* deoptimizer);

  // Mapping between deoptimization entry addresses and ids;
  // GetDeoptimizationId returns kNotDeoptimizationEntry when addr is
  // not an entry of the given type.
  static Address GetDeoptimizationEntry(int id, BailoutType type);
  static int GetDeoptimizationId(Address addr, BailoutType type);
  static int GetOutputInfo(DeoptimizationOutputData* data,
                           SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
  static int frame_alignment_marker_offset() {
    return OFFSET_OF(Deoptimizer, frame_alignment_marker_); }
  static int has_alignment_padding_offset() {
    return OFFSET_OF(Deoptimizer, has_alignment_padding_);

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  // Sentinel for "address is not a deoptimization entry".
  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }
    // Hook for subclasses; the base generator emits no prologue.
    virtual void GeneratePrologue() { }

    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;

  // Entry generator that emits a table of |count| deoptimization
  // entries in its prologue.
  class TableEntryGenerator : public EntryGenerator {
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

    virtual void GeneratePrologue();

    int count() const { return count_; }

  // Number of entries in the generated deoptimization entry table.
  static const int kNumberOfEntries = 4096;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              Code* optimized_code);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeFrame(TranslationIterator* iterator, int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          unsigned output_offset);
  // Translate a command for OSR. Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  bool DoOsrTranslateCommand(TranslationIterator* iterator,

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;
  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  // Defers materialization of a double as a heap number at the given
  // slot address (see MaterializeHeapNumbers).
  void AddDoubleValue(intptr_t slot_address, double value);

  static MemoryChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  // Fill the input from a JavaScript frame. This is used when
  // the debugger needs to inspect an optimized frame. For normal
  // deoptimizations the input frame is filled in generated code.
  void FillInputFrame(Address tos, JavaScriptFrame* frame);

  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  // Array of output frame descriptions.
  FrameDescription** output_;

  // Frames can be dynamically padded on ia32 to align untagged doubles.
  Object* frame_alignment_marker_;
  intptr_t has_alignment_padding_;

  // Doubles waiting to be boxed as heap numbers.
  List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;

  static const int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
  friend class DeoptimizedFrameInfo;
// Describes a single stack frame: register state, bookkeeping values
// (top/pc/fp/state/continuation), and a variable-sized area of frame
// slots. Instances are allocated with a trailing buffer so that
// frame_content_ extends to frame_size_ bytes (see operator new).
class FrameDescription {
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    // Subtracts kPointerSize, as the member frame_content_ already supplies
    // the first element of the area to store the frame.
    return malloc(size + frame_size - kPointerSize);

  void operator delete(void* pointer, uint32_t frame_size) {

  void operator delete(void* description) {

  uint32_t GetFrameSize() const {
    // frame_size_ is stored as uintptr_t for alignment only; it must
    // always fit in 32 bits.
    ASSERT(static_cast<uint32_t>(frame_size_) == frame_size_);
    return static_cast<uint32_t>(frame_size_);

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);

  intptr_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);

  double GetDoubleFrameSlot(unsigned offset) {
    intptr_t* ptr = GetFrameSlotPointer(offset);
#if V8_TARGET_ARCH_MIPS
    // Prevent gcc from using load-double (mips ldc1) on (possibly)
    // non-64-bit aligned double. Uses two lwc1 instructions.
    c.u[0] = *reinterpret_cast<uint32_t*>(ptr);
    c.u[1] = *(reinterpret_cast<uint32_t*>(ptr) + 1);
    return *reinterpret_cast<double*>(ptr);

  void SetFrameSlot(unsigned offset, intptr_t value) {
    *GetFrameSlotPointer(offset) = value;

  intptr_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];

  void SetRegister(unsigned n, intptr_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;

  intptr_t GetTop() const { return top_; }
  void SetTop(intptr_t top) { top_ = top; }

  intptr_t GetPc() const { return pc_; }
  void SetPc(intptr_t pc) { pc_ = pc; }

  intptr_t GetFp() const { return fp_; }
  void SetFp(intptr_t fp) { fp_ = fp; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(intptr_t pc) { continuation_ = pc; }

  Code::Kind GetKind() const { return kind_; }
  void SetKind(Code::Kind kind) { kind_ = kind; }

  // Get the incoming arguments count.
  int ComputeParametersCount();

  // Get a parameter value for an unoptimized frame.
  Object* GetParameter(Deoptimizer* deoptimizer, int index);

  // Get the expression stack height for an unoptimized frame.
  unsigned GetExpressionCount(Deoptimizer* deoptimizer);

  // Get the expression stack value for an unoptimized frame.
  Object* GetExpression(Deoptimizer* deoptimizer, int index);

  // Member offsets used by generated code.
  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);

  static int frame_content_offset() {
    return OFFSET_OF(FrameDescription, frame_content_);

  // Zap pattern for uninitialized frame content (NOTE(review): usage
  // is in the .cc file -- confirm).
  static const uint32_t kZapUint32 = 0xbeeddead;

  // Frame_size_ must hold a uint32_t value. It is only a uintptr_t to
  // keep the variable-size array frame_content_ of type intptr_t at
  // the end of the structure aligned.
  uintptr_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  intptr_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];

  // Continuation is the PC where the execution continues after
  intptr_t continuation_;

  // This must be at the end of the object as the object is allocated larger
  // than its definition indicates to extend this array.
  intptr_t frame_content_[1];

  // Returns a pointer into the trailing frame-content area; offset is
  // in bytes from the start of frame_content_.
  intptr_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<intptr_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
// Growable byte buffer holding encoded translation data (written via
// the Translation builder) that can be converted into a ByteArray.
class TranslationBuffer BASE_EMBEDDED {
  TranslationBuffer() : contents_(256) { }

  // Current write position; a Translation records this as the start
  // index of its data.
  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray();

  ZoneList<uint8_t> contents_;
// Forward iterator over the values encoded in a translation ByteArray,
// starting at the given index.
class TranslationIterator BASE_EMBEDDED {
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());

  bool HasNext() const { return index_ < buffer_->length(); }

    // Skip the next n values by decoding and discarding them.
    for (int i = 0; i < n; i++) Next();
// Builder that encodes how to reconstruct unoptimized frames from an
// optimized frame: writes opcodes and their operands into a
// TranslationBuffer.
class Translation BASE_EMBEDDED {
  // A prefix indicating that the next command is a duplicate of the one

  Translation(TranslationBuffer* buffer, int frame_count)
        index_(buffer->CurrentIndex()) {
    buffer_->Add(frame_count);

  // Start index of this translation's data in the buffer.
  int index() const { return index_; }

  // Commands appended to the buffer.
  void BeginFrame(int node_id, int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  static int NumberOfOperandsFor(Opcode opcode);

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
  // Debug-only printable name for an opcode.
  static const char* StringFor(Opcode opcode);

  TranslationBuffer* buffer_;
// Linked list holding deoptimizing code objects. The deoptimizing code objects
// are kept as weak handles until they are no longer activated on the stack.
class DeoptimizingCodeListNode : public Malloced {
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

  // Global (weak) handle to the deoptimizing code object.

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
// Reference to a slot in a stack frame plus the representation of the
// value stored there (tagged, int32, double, literal, ...); used to
// reconstruct argument values of inlined frames.
class SlotRef BASE_EMBEDDED {
  enum SlotRepresentation {

      : addr_(NULL), representation_(UNKNOWN) { }

  SlotRef(Address addr, SlotRepresentation representation)
      : addr_(addr), representation_(representation) { }

  explicit SlotRef(Object* literal)
      : literal_(literal), representation_(LITERAL) { }

  // Materializes the slot's value as a handle: tagged slots are read
  // directly; int32 slots become Smis or heap numbers; double slots
  // become heap numbers.
  Handle<Object> GetValue() {
    switch (representation_) {
        return Handle<Object>(Memory::Object_at(addr_));

        int value = Memory::int32_at(addr_);
        if (Smi::IsValid(value)) {
          return Handle<Object>(Smi::FromInt(value));
        // Out of Smi range: allocate a heap number instead.
        return Isolate::Current()->factory()->NewNumberFromInt(value);

        double value = Memory::double_at(addr_);
        return Isolate::Current()->factory()->NewNumber(value);

        return Handle<Object>::null();

  static void ComputeSlotMappingForArguments(JavaScriptFrame* frame,
                                             int inlined_frame_index,
                                             Vector<SlotRef>* args_slots);

  Handle<Object> literal_;
  SlotRepresentation representation_;

  // Address of a frame slot: non-negative indices address locals
  // (relative to kLocal0Offset), negative indices address incoming
  // parameters (relative to kLastParameterOffset).
  static Address SlotAddress(JavaScriptFrame* frame, int slot_index) {
    if (slot_index >= 0) {
      const int offset = JavaScriptFrameConstants::kLocal0Offset;
      return frame->fp() + offset - (slot_index * kPointerSize);
      const int offset = JavaScriptFrameConstants::kLastParameterOffset;
      return frame->fp() + offset - ((slot_index + 1) * kPointerSize);

  static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame);
#ifdef ENABLE_DEBUGGER_SUPPORT
// Class used to represent an unoptimized frame when the debugger
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
class DeoptimizedFrameInfo : public Malloced {
  DeoptimizedFrameInfo(Deoptimizer* deoptimizer, int frame_index);
  virtual ~DeoptimizedFrameInfo();

  // GC support: visit the object pointers held by this frame info.
  void Iterate(ObjectVisitor* v);

  // Return the number of incoming arguments.
  int parameters_count() { return parameters_count_; }

  // Return the height of the expression stack.
  int expression_count() { return expression_count_; }

  // Get the frame function.
  JSFunction* GetFunction() {

  // Get an incoming argument.
  Object* GetParameter(int index) {
    ASSERT(0 <= index && index < parameters_count());
    return parameters_[index];

  // Get an expression from the expression stack.
  Object* GetExpression(int index) {
    ASSERT(0 <= index && index < expression_count());
    return expression_stack_[index];

  // Set the frame function.
  void SetFunction(JSFunction* function) {
    function_ = function;

  // Set an incoming argument.
  void SetParameter(int index, Object* obj) {
    ASSERT(0 <= index && index < parameters_count());
    parameters_[index] = obj;

  // Set an expression on the expression stack.
  void SetExpression(int index, Object* obj) {
    ASSERT(0 <= index && index < expression_count());
    expression_stack_[index] = obj;

  JSFunction* function_;
  int parameters_count_;
  int expression_count_;
  Object** parameters_;
  Object** expression_stack_;

  friend class Deoptimizer;
748 } } // namespace v8::internal
750 #endif // V8_DEOPTIMIZER_H_