1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "deoptimizer.h"
32 #include "frames-inl.h"
33 #include "full-codegen.h"
34 #include "mark-compact.h"
35 #include "safepoint-table.h"
36 #include "scopeinfo.h"
37 #include "string-stream.h"
39 #include "allocation-inl.h"
44 // Iterator that supports traversing the stack handlers of a
45 // particular frame. Needs to know the top of the handler chain.
46 class StackHandlerIterator BASE_EMBEDDED {
48 StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
49 : limit_(frame->fp()), handler_(handler) {
50 // Make sure the handler has already been unwound to this frame.
51 ASSERT(frame->sp() <= handler->address());
54 StackHandler* handler() const { return handler_; }
57 return handler_ == NULL || handler_->address() > limit_;
61 handler_ = handler_->next();
66 StackHandler* handler_;
70 // -------------------------------------------------------------------------
73 #define INITIALIZE_SINGLETON(type, field) field##_(this),
74 StackFrameIterator::StackFrameIterator()
75 : isolate_(Isolate::Current()),
76 STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
77 frame_(NULL), handler_(NULL),
78 thread_(isolate_->thread_local_top()),
79 fp_(NULL), sp_(NULL), advance_(&StackFrameIterator::AdvanceWithHandler) {
82 StackFrameIterator::StackFrameIterator(Isolate* isolate)
84 STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
85 frame_(NULL), handler_(NULL),
86 thread_(isolate_->thread_local_top()),
87 fp_(NULL), sp_(NULL), advance_(&StackFrameIterator::AdvanceWithHandler) {
90 StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
92 STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
93 frame_(NULL), handler_(NULL), thread_(t),
94 fp_(NULL), sp_(NULL), advance_(&StackFrameIterator::AdvanceWithHandler) {
97 StackFrameIterator::StackFrameIterator(Isolate* isolate,
98 bool use_top, Address fp, Address sp)
100 STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
101 frame_(NULL), handler_(NULL),
102 thread_(use_top ? isolate_->thread_local_top() : NULL),
103 fp_(use_top ? NULL : fp), sp_(sp),
104 advance_(use_top ? &StackFrameIterator::AdvanceWithHandler :
105 &StackFrameIterator::AdvanceWithoutHandler) {
106 if (use_top || fp != NULL) {
111 #undef INITIALIZE_SINGLETON
114 void StackFrameIterator::AdvanceWithHandler() {
116 // Compute the state of the calling frame before restoring
117 // callee-saved registers and unwinding handlers. This allows the
118 // frame code that computes the caller state to access the top
119 // handler and the value of any callee-saved register if needed.
120 StackFrame::State state;
121 StackFrame::Type type = frame_->GetCallerState(&state);
123 // Unwind handlers corresponding to the current frame.
124 StackHandlerIterator it(frame_, handler_);
125 while (!it.done()) it.Advance();
126 handler_ = it.handler();
128 // Advance to the calling frame.
129 frame_ = SingletonFor(type, &state);
131 // When we're done iterating over the stack frames, the handler
132 // chain must have been completely unwound.
133 ASSERT(!done() || handler_ == NULL);
137 void StackFrameIterator::AdvanceWithoutHandler() {
138 // A simpler version of Advance which doesn't care about handler.
140 StackFrame::State state;
141 StackFrame::Type type = frame_->GetCallerState(&state);
142 frame_ = SingletonFor(type, &state);
146 void StackFrameIterator::Reset() {
147 StackFrame::State state;
148 StackFrame::Type type;
149 if (thread_ != NULL) {
150 type = ExitFrame::GetStateForFramePointer(
151 Isolate::c_entry_fp(thread_), &state);
152 handler_ = StackHandler::FromAddress(
153 Isolate::handler(thread_));
159 reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp_));
160 type = StackFrame::ComputeType(isolate(), &state);
162 if (SingletonFor(type) == NULL) return;
163 frame_ = SingletonFor(type, &state);
167 StackFrame* StackFrameIterator::SingletonFor(StackFrame::Type type,
168 StackFrame::State* state) {
169 if (type == StackFrame::NONE) return NULL;
170 StackFrame* result = SingletonFor(type);
171 ASSERT(result != NULL);
172 result->state_ = *state;
177 StackFrame* StackFrameIterator::SingletonFor(StackFrame::Type type) {
178 #define FRAME_TYPE_CASE(type, field) \
179 case StackFrame::type: result = &field##_; break;
181 StackFrame* result = NULL;
183 case StackFrame::NONE: return NULL;
184 STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
189 #undef FRAME_TYPE_CASE
193 // -------------------------------------------------------------------------
196 StackTraceFrameIterator::StackTraceFrameIterator() {
197 if (!done() && !IsValidFrame()) Advance();
201 StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
202 : JavaScriptFrameIterator(isolate) {
203 if (!done() && !IsValidFrame()) Advance();
207 void StackTraceFrameIterator::Advance() {
209 JavaScriptFrameIterator::Advance();
211 if (IsValidFrame()) return;
215 bool StackTraceFrameIterator::IsValidFrame() {
216 if (!frame()->function()->IsJSFunction()) return false;
217 Object* script = JSFunction::cast(frame()->function())->shared()->script();
218 // Don't show functions from native scripts to user.
219 return (script->IsScript() &&
220 Script::TYPE_NATIVE != Script::cast(script)->type()->value());
224 // -------------------------------------------------------------------------
227 bool SafeStackFrameIterator::ExitFrameValidator::IsValidFP(Address fp) {
228 if (!validator_.IsValid(fp)) return false;
229 Address sp = ExitFrame::ComputeStackPointer(fp);
230 if (!validator_.IsValid(sp)) return false;
231 StackFrame::State state;
232 ExitFrame::FillState(fp, sp, &state);
233 if (!validator_.IsValid(reinterpret_cast<Address>(state.pc_address))) {
236 return *state.pc_address != NULL;
240 SafeStackFrameIterator::ActiveCountMaintainer::ActiveCountMaintainer(
242 : isolate_(isolate) {
243 isolate_->set_safe_stack_iterator_counter(
244 isolate_->safe_stack_iterator_counter() + 1);
248 SafeStackFrameIterator::ActiveCountMaintainer::~ActiveCountMaintainer() {
249 isolate_->set_safe_stack_iterator_counter(
250 isolate_->safe_stack_iterator_counter() - 1);
254 SafeStackFrameIterator::SafeStackFrameIterator(
256 Address fp, Address sp, Address low_bound, Address high_bound) :
257 maintainer_(isolate),
258 stack_validator_(low_bound, high_bound),
259 is_valid_top_(IsValidTop(isolate, low_bound, high_bound)),
260 is_valid_fp_(IsWithinBounds(low_bound, high_bound, fp)),
261 is_working_iterator_(is_valid_top_ || is_valid_fp_),
262 iteration_done_(!is_working_iterator_),
263 iterator_(isolate, is_valid_top_, is_valid_fp_ ? fp : NULL, sp) {
266 bool SafeStackFrameIterator::is_active(Isolate* isolate) {
267 return isolate->safe_stack_iterator_counter() > 0;
271 bool SafeStackFrameIterator::IsValidTop(Isolate* isolate,
272 Address low_bound, Address high_bound) {
273 ThreadLocalTop* top = isolate->thread_local_top();
274 Address fp = Isolate::c_entry_fp(top);
275 ExitFrameValidator validator(low_bound, high_bound);
276 if (!validator.IsValidFP(fp)) return false;
277 return Isolate::handler(top) != NULL;
281 void SafeStackFrameIterator::Advance() {
282 ASSERT(is_working_iterator_);
284 StackFrame* last_frame = iterator_.frame();
285 Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
286 // Before advancing to the next stack frame, perform pointer validity tests
287 iteration_done_ = !IsValidFrame(last_frame) ||
288 !CanIterateHandles(last_frame, iterator_.handler()) ||
289 !IsValidCaller(last_frame);
290 if (iteration_done_) return;
293 if (iterator_.done()) return;
294 // Check that we have actually moved to the previous frame in the stack
295 StackFrame* prev_frame = iterator_.frame();
296 iteration_done_ = prev_frame->sp() < last_sp || prev_frame->fp() < last_fp;
300 bool SafeStackFrameIterator::CanIterateHandles(StackFrame* frame,
301 StackHandler* handler) {
302 // If StackIterator iterates over StackHandles, verify that
303 // StackHandlerIterator can be instantiated (see StackHandlerIterator
305 return !is_valid_top_ || (frame->sp() <= handler->address());
309 bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
310 return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
314 bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
315 StackFrame::State state;
316 if (frame->is_entry() || frame->is_entry_construct()) {
317 // See EntryFrame::GetCallerState. It computes the caller FP address
318 // and calls ExitFrame::GetStateForFramePointer on it. We need to be
319 // sure that caller FP address is valid.
320 Address caller_fp = Memory::Address_at(
321 frame->fp() + EntryFrameConstants::kCallerFPOffset);
322 ExitFrameValidator validator(stack_validator_);
323 if (!validator.IsValidFP(caller_fp)) return false;
324 } else if (frame->is_arguments_adaptor()) {
325 // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
326 // the number of arguments is stored on stack as Smi. We need to check
327 // that it really an Smi.
328 Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
330 if (!number_of_args->IsSmi()) {
334 frame->ComputeCallerState(&state);
335 return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
336 iterator_.SingletonFor(frame->GetCallerState(&state)) != NULL;
340 void SafeStackFrameIterator::Reset() {
341 if (is_working_iterator_) {
343 iteration_done_ = false;
348 // -------------------------------------------------------------------------
351 SafeStackTraceFrameIterator::SafeStackTraceFrameIterator(
353 Address fp, Address sp, Address low_bound, Address high_bound) :
354 SafeJavaScriptFrameIterator(isolate, fp, sp, low_bound, high_bound) {
355 if (!done() && !frame()->is_java_script()) Advance();
359 void SafeStackTraceFrameIterator::Advance() {
361 SafeJavaScriptFrameIterator::Advance();
363 if (frame()->is_java_script()) return;
368 Code* StackFrame::GetSafepointData(Isolate* isolate,
369 Address inner_pointer,
370 SafepointEntry* safepoint_entry,
371 unsigned* stack_slots) {
372 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
373 isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
374 if (!entry->safepoint_entry.is_valid()) {
375 entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
376 ASSERT(entry->safepoint_entry.is_valid());
378 ASSERT(entry->safepoint_entry.Equals(
379 entry->code->GetSafepointEntry(inner_pointer)));
382 // Fill in the results and return the code.
383 Code* code = entry->code;
384 *safepoint_entry = entry->safepoint_entry;
385 *stack_slots = code->stack_slots();
390 bool StackFrame::HasHandler() const {
391 StackHandlerIterator it(this, top_handler());
397 static bool GcSafeCodeContains(HeapObject* object, Address addr);
401 void StackFrame::IteratePc(ObjectVisitor* v,
404 Address pc = *pc_address;
405 ASSERT(GcSafeCodeContains(holder, pc));
406 unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
407 Object* code = holder;
408 v->VisitPointer(&code);
409 if (code != holder) {
410 holder = reinterpret_cast<Code*>(code);
411 pc = holder->instruction_start() + pc_offset;
417 StackFrame::Type StackFrame::ComputeType(Isolate* isolate, State* state) {
418 ASSERT(state->fp != NULL);
419 if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
420 return ARGUMENTS_ADAPTOR;
422 // The marker and function offsets overlap. If the marker isn't a
423 // smi then the frame is a JavaScript frame -- and the marker is
424 // really the function.
425 const int offset = StandardFrameConstants::kMarkerOffset;
426 Object* marker = Memory::Object_at(state->fp + offset);
427 if (!marker->IsSmi()) {
428 // If we're using a "safe" stack iterator, we treat optimized
429 // frames as normal JavaScript frames to avoid having to look
430 // into the heap to determine the state. This is safe as long
431 // as nobody tries to GC...
432 if (SafeStackFrameIterator::is_active(isolate)) return JAVA_SCRIPT;
433 Code::Kind kind = GetContainingCode(isolate, *(state->pc_address))->kind();
434 ASSERT(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
435 return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
437 return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
442 StackFrame::Type StackFrame::GetCallerState(State* state) const {
443 ComputeCallerState(state);
444 return ComputeType(isolate(), state);
448 Code* EntryFrame::unchecked_code() const {
449 return HEAP->raw_unchecked_js_entry_code();
453 void EntryFrame::ComputeCallerState(State* state) const {
454 GetCallerState(state);
458 void EntryFrame::SetCallerFp(Address caller_fp) {
459 const int offset = EntryFrameConstants::kCallerFPOffset;
460 Memory::Address_at(this->fp() + offset) = caller_fp;
464 StackFrame::Type EntryFrame::GetCallerState(State* state) const {
465 const int offset = EntryFrameConstants::kCallerFPOffset;
466 Address fp = Memory::Address_at(this->fp() + offset);
467 return ExitFrame::GetStateForFramePointer(fp, state);
471 Code* EntryConstructFrame::unchecked_code() const {
472 return HEAP->raw_unchecked_js_construct_entry_code();
476 Object*& ExitFrame::code_slot() const {
477 const int offset = ExitFrameConstants::kCodeOffset;
478 return Memory::Object_at(fp() + offset);
482 Code* ExitFrame::unchecked_code() const {
483 return reinterpret_cast<Code*>(code_slot());
487 void ExitFrame::ComputeCallerState(State* state) const {
488 // Setup the caller state.
489 state->sp = caller_sp();
490 state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
492 = reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset);
496 void ExitFrame::SetCallerFp(Address caller_fp) {
497 Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
501 void ExitFrame::Iterate(ObjectVisitor* v) const {
502 // The arguments are traversed as part of the expression stack of
503 // the calling frame.
504 IteratePc(v, pc_address(), LookupCode());
505 v->VisitPointer(&code_slot());
509 Address ExitFrame::GetCallerStackPointer() const {
510 return fp() + ExitFrameConstants::kCallerSPDisplacement;
514 StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
515 if (fp == 0) return NONE;
516 Address sp = ComputeStackPointer(fp);
517 FillState(fp, sp, state);
518 ASSERT(*state->pc_address != NULL);
523 void ExitFrame::FillState(Address fp, Address sp, State* state) {
526 state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize);
530 Address StandardFrame::GetExpressionAddress(int n) const {
531 const int offset = StandardFrameConstants::kExpressionsOffset;
532 return fp() + offset - n * kPointerSize;
536 Object* StandardFrame::GetExpression(Address fp, int index) {
537 return Memory::Object_at(GetExpressionAddress(fp, index));
541 Address StandardFrame::GetExpressionAddress(Address fp, int n) {
542 const int offset = StandardFrameConstants::kExpressionsOffset;
543 return fp + offset - n * kPointerSize;
547 int StandardFrame::ComputeExpressionsCount() const {
549 StandardFrameConstants::kExpressionsOffset + kPointerSize;
550 Address base = fp() + offset;
551 Address limit = sp();
552 ASSERT(base >= limit); // stack grows downwards
553 // Include register-allocated locals in number of expressions.
554 return static_cast<int>((base - limit) / kPointerSize);
558 void StandardFrame::ComputeCallerState(State* state) const {
559 state->sp = caller_sp();
560 state->fp = caller_fp();
561 state->pc_address = reinterpret_cast<Address*>(ComputePCAddress(fp()));
565 void StandardFrame::SetCallerFp(Address caller_fp) {
566 Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
571 bool StandardFrame::IsExpressionInsideHandler(int n) const {
572 Address address = GetExpressionAddress(n);
573 for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
574 if (it.handler()->includes(address)) return true;
580 void OptimizedFrame::Iterate(ObjectVisitor* v) const {
582 // Make sure that optimized frames do not contain any stack handlers.
583 StackHandlerIterator it(this, top_handler());
587 // Make sure that we're not doing "safe" stack frame iteration. We cannot
588 // possibly find pointers in optimized frames in that state.
589 ASSERT(!SafeStackFrameIterator::is_active(isolate()));
591 // Compute the safepoint information.
592 unsigned stack_slots = 0;
593 SafepointEntry safepoint_entry;
594 Code* code = StackFrame::GetSafepointData(
595 isolate(), pc(), &safepoint_entry, &stack_slots);
596 unsigned slot_space = stack_slots * kPointerSize;
598 // Visit the outgoing parameters.
599 Object** parameters_base = &Memory::Object_at(sp());
600 Object** parameters_limit = &Memory::Object_at(
601 fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);
603 // Visit the parameters that may be on top of the saved registers.
604 if (safepoint_entry.argument_count() > 0) {
605 v->VisitPointers(parameters_base,
606 parameters_base + safepoint_entry.argument_count());
607 parameters_base += safepoint_entry.argument_count();
610 // Skip saved double registers.
611 if (safepoint_entry.has_doubles()) {
612 parameters_base += DoubleRegister::kNumAllocatableRegisters *
613 kDoubleSize / kPointerSize;
616 // Visit the registers that contain pointers if any.
617 if (safepoint_entry.HasRegisters()) {
618 for (int i = kNumSafepointRegisters - 1; i >=0; i--) {
619 if (safepoint_entry.HasRegisterAt(i)) {
620 int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
621 v->VisitPointer(parameters_base + reg_stack_index);
624 // Skip the words containing the register values.
625 parameters_base += kNumSafepointRegisters;
628 // We're done dealing with the register bits.
629 uint8_t* safepoint_bits = safepoint_entry.bits();
630 safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
632 // Visit the rest of the parameters.
633 v->VisitPointers(parameters_base, parameters_limit);
635 // Visit pointer spill slots and locals.
636 for (unsigned index = 0; index < stack_slots; index++) {
637 int byte_index = index >> kBitsPerByteLog2;
638 int bit_index = index & (kBitsPerByte - 1);
639 if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
640 v->VisitPointer(parameters_limit + index);
644 // Visit the context and the function.
645 Object** fixed_base = &Memory::Object_at(
646 fp() + JavaScriptFrameConstants::kFunctionOffset);
647 Object** fixed_limit = &Memory::Object_at(fp());
648 v->VisitPointers(fixed_base, fixed_limit);
650 // Visit the return address in the callee and incoming arguments.
651 IteratePc(v, pc_address(), code);
655 bool JavaScriptFrame::IsConstructor() const {
656 Address fp = caller_fp();
657 if (has_adapted_arguments()) {
658 // Skip the arguments adaptor frame and look at the real caller.
659 fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
661 return IsConstructFrame(fp);
665 int JavaScriptFrame::GetArgumentsLength() const {
666 // If there is an arguments adaptor frame get the arguments length from it.
667 if (has_adapted_arguments()) {
668 return Smi::cast(GetExpression(caller_fp(), 0))->value();
670 return GetNumberOfIncomingArguments();
675 Code* JavaScriptFrame::unchecked_code() const {
676 JSFunction* function = JSFunction::cast(this->function());
677 return function->unchecked_code();
681 int JavaScriptFrame::GetNumberOfIncomingArguments() const {
682 ASSERT(!SafeStackFrameIterator::is_active(isolate()) &&
683 isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
685 JSFunction* function = JSFunction::cast(this->function());
686 return function->shared()->formal_parameter_count();
690 Address JavaScriptFrame::GetCallerStackPointer() const {
691 return fp() + StandardFrameConstants::kCallerSPOffset;
695 void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
696 ASSERT(functions->length() == 0);
697 functions->Add(JSFunction::cast(function()));
701 void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
702 ASSERT(functions->length() == 0);
703 Code* code_pointer = LookupCode();
704 int offset = static_cast<int>(pc() - code_pointer->address());
705 FrameSummary summary(receiver(),
706 JSFunction::cast(function()),
710 functions->Add(summary);
714 void JavaScriptFrame::PrintTop(FILE* file,
716 bool print_line_number) {
719 AssertNoAllocation no_allocation;
720 JavaScriptFrameIterator it;
722 if (it.frame()->is_java_script()) {
723 JavaScriptFrame* frame = it.frame();
724 if (frame->IsConstructor()) PrintF(file, "new ");
726 Object* fun = frame->function();
727 if (fun->IsJSFunction()) {
728 SharedFunctionInfo* shared = JSFunction::cast(fun)->shared();
729 shared->DebugName()->ShortPrint(file);
730 if (print_line_number) {
731 Address pc = frame->pc();
732 Code* code = Code::cast(
733 v8::internal::Isolate::Current()->heap()->FindCodeObject(pc));
734 int source_pos = code->SourcePosition(pc);
735 Object* maybe_script = shared->script();
736 if (maybe_script->IsScript()) {
737 Handle<Script> script(Script::cast(maybe_script));
738 int line = GetScriptLineNumberSafe(script, source_pos) + 1;
739 Object* script_name_raw = script->name();
740 if (script_name_raw->IsString()) {
741 String* script_name = String::cast(script->name());
742 SmartArrayPointer<char> c_script_name =
743 script_name->ToCString(DISALLOW_NULLS,
744 ROBUST_STRING_TRAVERSAL);
745 PrintF(file, " at %s:%d", *c_script_name, line);
747 PrintF(file, "at <unknown>:%d", line);
750 PrintF(file, " at <unknown>:<unknown>");
754 fun->ShortPrint(file);
758 // function arguments
759 // (we are intentionally only printing the actually
760 // supplied parameters, not all parameters required)
761 PrintF(file, "(this=");
762 frame->receiver()->ShortPrint(file);
763 const int length = frame->ComputeParametersCount();
764 for (int i = 0; i < length; i++) {
766 frame->GetParameter(i)->ShortPrint(file);
777 void FrameSummary::Print() {
778 PrintF("receiver: ");
779 receiver_->ShortPrint();
780 PrintF("\nfunction: ");
781 function_->shared()->DebugName()->ShortPrint();
784 if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
785 if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
786 PrintF("\npc: %d\n", offset_);
790 void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
791 ASSERT(frames->length() == 0);
792 ASSERT(is_optimized());
794 int deopt_index = Safepoint::kNoDeoptimizationIndex;
795 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
797 // BUG(3243555): Since we don't have a lazy-deopt registered at
798 // throw-statements, we can't use the translation at the call-site of
799 // throw. An entry with no deoptimization index indicates a call-site
800 // without a lazy-deopt. As a consequence we are not allowed to inline
801 // functions containing throw.
802 if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
803 JavaScriptFrame::Summarize(frames);
807 TranslationIterator it(data->TranslationByteArray(),
808 data->TranslationIndex(deopt_index)->value());
809 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
810 ASSERT(opcode == Translation::BEGIN);
811 int frame_count = it.Next();
813 // We create the summary in reverse order because the frames
814 // in the deoptimization translation are ordered bottom-to-top.
817 opcode = static_cast<Translation::Opcode>(it.Next());
818 if (opcode == Translation::FRAME) {
819 // We don't inline constructor calls, so only the first, outermost
820 // frame can be a constructor frame in case of inlining.
821 bool is_constructor = (i == frame_count) && IsConstructor();
824 int ast_id = it.Next();
825 int function_id = it.Next();
826 it.Next(); // Skip height.
827 JSFunction* function =
828 JSFunction::cast(data->LiteralArray()->get(function_id));
830 // The translation commands are ordered and the receiver is always
831 // at the first position. Since we are always at a call when we need
832 // to construct a stack trace, the receiver is always in a stack slot.
833 opcode = static_cast<Translation::Opcode>(it.Next());
834 ASSERT(opcode == Translation::STACK_SLOT ||
835 opcode == Translation::LITERAL);
836 int index = it.Next();
838 // Get the correct receiver in the optimized frame.
839 Object* receiver = NULL;
840 if (opcode == Translation::LITERAL) {
841 receiver = data->LiteralArray()->get(index);
843 // Positive index means the value is spilled to the locals
844 // area. Negative means it is stored in the incoming parameter
847 receiver = GetExpression(index);
849 // Index -1 overlaps with last parameter, -n with the first parameter,
850 // (-n - 1) with the receiver with n being the number of parameters
851 // of the outermost, optimized frame.
852 int parameter_count = ComputeParametersCount();
853 int parameter_index = index + parameter_count;
854 receiver = (parameter_index == -1)
856 : this->GetParameter(parameter_index);
860 Code* code = function->shared()->code();
861 DeoptimizationOutputData* output_data =
862 DeoptimizationOutputData::cast(code->deoptimization_data());
863 unsigned entry = Deoptimizer::GetOutputInfo(output_data,
867 FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
868 ASSERT(pc_offset > 0);
870 FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
871 frames->Add(summary);
873 // Skip over operands to advance to the next opcode.
874 it.Skip(Translation::NumberOfOperandsFor(opcode));
880 DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
882 ASSERT(is_optimized());
884 JSFunction* opt_function = JSFunction::cast(function());
885 Code* code = opt_function->code();
887 // The code object may have been replaced by lazy deoptimization. Fall
888 // back to a slow search in this case to find the original optimized
890 if (!code->contains(pc())) {
891 code = isolate()->inner_pointer_to_code_cache()->
892 GcSafeFindCodeForInnerPointer(pc());
894 ASSERT(code != NULL);
895 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
897 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
898 *deopt_index = safepoint_entry.deoptimization_index();
899 ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex);
901 return DeoptimizationInputData::cast(code->deoptimization_data());
905 int OptimizedFrame::GetInlineCount() {
906 ASSERT(is_optimized());
908 int deopt_index = Safepoint::kNoDeoptimizationIndex;
909 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
911 TranslationIterator it(data->TranslationByteArray(),
912 data->TranslationIndex(deopt_index)->value());
913 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
914 ASSERT(opcode == Translation::BEGIN);
916 int frame_count = it.Next();
921 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
922 ASSERT(functions->length() == 0);
923 ASSERT(is_optimized());
925 int deopt_index = Safepoint::kNoDeoptimizationIndex;
926 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
928 TranslationIterator it(data->TranslationByteArray(),
929 data->TranslationIndex(deopt_index)->value());
930 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
931 ASSERT(opcode == Translation::BEGIN);
932 int frame_count = it.Next();
934 // We insert the frames in reverse order because the frames
935 // in the deoptimization translation are ordered bottom-to-top.
936 while (frame_count > 0) {
937 opcode = static_cast<Translation::Opcode>(it.Next());
938 if (opcode == Translation::FRAME) {
940 it.Next(); // Skip ast id.
941 int function_id = it.Next();
942 it.Next(); // Skip height.
943 JSFunction* function =
944 JSFunction::cast(data->LiteralArray()->get(function_id));
945 functions->Add(function);
947 // Skip over operands to advance to the next opcode.
948 it.Skip(Translation::NumberOfOperandsFor(opcode));
954 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
955 return Smi::cast(GetExpression(0))->value();
959 Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
960 return fp() + StandardFrameConstants::kCallerSPOffset;
964 Address InternalFrame::GetCallerStackPointer() const {
965 // Internal frames have no arguments. The stack pointer of the
966 // caller is at a fixed offset from the frame pointer.
967 return fp() + StandardFrameConstants::kCallerSPOffset;
971 Code* ArgumentsAdaptorFrame::unchecked_code() const {
972 return isolate()->builtins()->builtin(
973 Builtins::kArgumentsAdaptorTrampoline);
977 Code* InternalFrame::unchecked_code() const {
978 const int offset = InternalFrameConstants::kCodeOffset;
979 Object* code = Memory::Object_at(fp() + offset);
980 ASSERT(code != NULL);
981 return reinterpret_cast<Code*>(code);
985 void StackFrame::PrintIndex(StringStream* accumulator,
988 accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
// Pretty-prints this JavaScript frame into |accumulator|: security token,
// index, function and receiver, script name with a source line, the
// parameter list, and -- unless mode == OVERVIEW -- stack-allocated locals,
// heap-allocated (context) locals, the expression stack and, when
// FLAG_max_stack_trace_source_length permits, the function source code.
// NOTE(review): a number of lines (else-keywords, closing braces, early
// returns) are elided in this extraction; comments below annotate only
// what is visible.
992 void JavaScriptFrame::Print(StringStream* accumulator,
996 Object* receiver = this->receiver();
997 Object* function = this->function();
999 accumulator->PrintSecurityTokenIfChanged(function);
1000 PrintIndex(accumulator, mode, index);
// Constructor frames are prefixed with "new "; PrintFunction also hands
// back the (unoptimized) code object via |code| for use below.
1002 if (IsConstructor()) accumulator->Add("new ");
1003 accumulator->PrintFunction(function, receiver, &code);
1005 Handle<SerializedScopeInfo> scope_info(SerializedScopeInfo::Empty());
// Only real JSFunctions carry shared-function-info / scope info / script.
1007 if (function->IsJSFunction()) {
1008 Handle<SharedFunctionInfo> shared(JSFunction::cast(function)->shared());
1009 scope_info = Handle<SerializedScopeInfo>(shared->scope_info());
1010 Object* script_obj = shared->script();
1011 if (script_obj->IsScript()) {
1012 Handle<Script> script(Script::cast(script_obj));
1013 accumulator->Add(" [");
1014 accumulator->PrintName(script->name());
// When the pc lies inside the function's unoptimized code, print the
// exact current source line (":%d"); the fallback branch below (its
// "else" line is elided here) prints the function's start line with a
// '~' marker instead.
1016 Address pc = this->pc();
1017 if (code != NULL && code->kind() == Code::FUNCTION &&
1018 pc >= code->instruction_start() && pc < code->instruction_end()) {
1019 int source_pos = code->SourcePosition(pc);
1020 int line = GetScriptLineNumberSafe(script, source_pos) + 1;
1021 accumulator->Add(":%d", line);
1023 int function_start_pos = shared->start_position();
1024 int line = GetScriptLineNumberSafe(script, function_start_pos) + 1;
1025 accumulator->Add(":~%d", line);
1028 accumulator->Add("] ");
1032 accumulator->Add("(this=%o", receiver);
1034 // Get scope information for nicer output, if possible. If code is
1035 // NULL, or doesn't contain scope info, info will return 0 for the
1036 // number of parameters, stack slots, or context slots.
1037 ScopeInfo<PreallocatedStorage> info(*scope_info);
1039 // Print the parameters.
1040 int parameters_count = ComputeParametersCount();
1041 for (int i = 0; i < parameters_count; i++) {
1042 accumulator->Add(",");
1043 // If we have a name for the parameter we print it. Nameless
1044 // parameters are either because we have more actual parameters
1045 // than formal parameters or because we have no scope information.
1046 if (i < info.number_of_parameters()) {
1047 accumulator->PrintName(*info.parameter_name(i));
1048 accumulator->Add("=");
1050 accumulator->Add("%o", GetParameter(i));
1053 accumulator->Add(")");
// In OVERVIEW mode the one-line summary is all we print (the early
// return after the newline is elided in this extraction).
1054 if (mode == OVERVIEW) {
1055 accumulator->Add("\n");
// Optimized frames get a placeholder body only -- their locals and
// expression stack cannot be reported the same way.
1058 if (is_optimized()) {
1059 accumulator->Add(" {\n// optimized frame\n}\n");
1062 accumulator->Add(" {\n");
1064 // Compute the number of locals and expression stack elements.
1065 int stack_locals_count = info.number_of_stack_slots();
1066 int heap_locals_count = info.number_of_context_slots();
1067 int expressions_count = ComputeExpressionsCount();
1069 // Print stack-allocated local variables.
1070 if (stack_locals_count > 0) {
1071 accumulator->Add(" // stack-allocated locals\n");
1073 for (int i = 0; i < stack_locals_count; i++) {
1074 accumulator->Add(" var ");
1075 accumulator->PrintName(*info.stack_slot_name(i));
1076 accumulator->Add(" = ");
// A stack local beyond the computed expression count indicates a frame
// whose layout does not match its scope info.
1077 if (i < expressions_count) {
1078 accumulator->Add("%o", GetExpression(i));
1080 accumulator->Add("// no expression found - inconsistent frame?");
1082 accumulator->Add("\n");
1085 // Try to get hold of the context of this frame.
1086 Context* context = NULL;
1087 if (this->context() != NULL && this->context()->IsContext()) {
1088 context = Context::cast(this->context());
1091 // Print heap-allocated local variables.
// The first MIN_CONTEXT_SLOTS slots are reserved header slots, so only
// counts above that threshold represent user-visible locals.
1092 if (heap_locals_count > Context::MIN_CONTEXT_SLOTS) {
1093 accumulator->Add(" // heap-allocated locals\n");
1095 for (int i = Context::MIN_CONTEXT_SLOTS; i < heap_locals_count; i++) {
1096 accumulator->Add(" var ");
1097 accumulator->PrintName(*info.context_slot_name(i));
1098 accumulator->Add(" = ");
1099 if (context != NULL) {
1100 if (i < context->length()) {
1101 accumulator->Add("%o", context->get(i));
1104 "// warning: missing context slot - inconsistent frame?");
1107 accumulator->Add("// warning: no context found - inconsistent frame?");
1109 accumulator->Add("\n");
1112 // Print the expression stack.
// Expressions below |stack_locals_count| were already printed as stack
// locals above; entries inside an active stack handler are skipped.
1113 int expressions_start = stack_locals_count;
1114 if (expressions_start < expressions_count) {
1115 accumulator->Add(" // expression stack (top to bottom)\n");
1117 for (int i = expressions_count - 1; i >= expressions_start; i--) {
1118 if (IsExpressionInsideHandler(i)) continue;
1119 accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
1122 // Print details about the function.
1123 if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
1124 SharedFunctionInfo* shared = JSFunction::cast(function)->shared();
1125 accumulator->Add("--------- s o u r c e c o d e ---------\n");
1126 shared->SourceCodePrint(accumulator, FLAG_max_stack_trace_source_length);
1127 accumulator->Add("\n-----------------------------------------\n");
1130 accumulator->Add("}\n\n");
// Prints an arguments-adaptor frame: the "actual->expected" argument-count
// summary and, in detailed mode, each actual argument -- marking those past
// the callee's formal parameter count as not passed to the callee.
// NOTE(review): some lines (parameter list, early return, closing braces)
// are elided in this extraction.
1134 void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
1137 int actual = ComputeParametersCount();
// |expected| stays -1 (unknown) unless the callee is a real JSFunction,
// in which case it is read from the shared function info.
1139 Object* function = this->function();
1140 if (function->IsJSFunction()) {
1141 expected = JSFunction::cast(function)->shared()->formal_parameter_count();
1144 PrintIndex(accumulator, mode, index);
1145 accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
// In OVERVIEW mode only the one-line summary is printed.
1146 if (mode == OVERVIEW) {
1147 accumulator->Add("\n");
1150 accumulator->Add(" {\n");
1152 // Print actual arguments.
1153 if (actual > 0) accumulator->Add(" // actual arguments\n");
1154 for (int i = 0; i < actual; i++) {
1155 accumulator->Add(" [%02d] : %o", i, GetParameter(i));
// Arguments beyond the formal parameter count exist in this frame but
// are not visible to the callee.
1156 if (expected != -1 && i >= expected) {
1157 accumulator->Add(" // not passed to callee");
1159 accumulator->Add("\n");
1162 accumulator->Add("}\n\n");
// GC visitor for entry frames: visits the pointers of the frame's single
// stack handler (asserted to be an entry handler) and the return pc slot.
// NOTE(review): the loop/assertion lines between the visible statements
// are elided in this extraction.
1166 void EntryFrame::Iterate(ObjectVisitor* v) const {
1167 StackHandlerIterator it(this, top_handler());
1169 StackHandler* handler = it.handler();
1170 ASSERT(handler->is_entry());
1171 handler->Iterate(v, LookupCode());
1173 // Make sure that the entry frame does not contain more than one
// Visit the pc slot so the GC can relocate the code object it points into.
1178 IteratePc(v, pc_address(), LookupCode());
// GC visitor for the expression-stack portion of a standard frame: visits
// all object pointers between sp and the context slot at fp + kContextOffset
// (inclusive), carving out the ranges occupied by stack handlers, which
// visit their own pointers.
1182 void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
1183 const int offset = StandardFrameConstants::kContextOffset;
1184 Object** base = &Memory::Object_at(sp());
// +1 makes the limit one-past the context slot, so the slot itself is
// included in the final VisitPointers call.
1185 Object** limit = &Memory::Object_at(fp() + offset) + 1;
1186 for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
1187 StackHandler* handler = it.handler();
1188 // Traverse pointers down to - but not including - the next
1189 // handler in the handler chain. Update the base to skip the
1190 // handler and allow the handler to traverse its own pointers.
1191 const Address address = handler->address();
1192 v->VisitPointers(base, reinterpret_cast<Object**>(address));
1193 base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
1194 // Traverse the pointers in the handler itself.
1195 handler->Iterate(v, LookupCode());
// Visit whatever remains between the last handler and the context slot.
1197 v->VisitPointers(base, limit);
// GC visitor for JavaScript frames: the expression stack plus the pc slot.
1201 void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
1202 IterateExpressions(v);
1203 IteratePc(v, pc_address(), LookupCode());
// GC visitor for internal frames; identical to JavaScriptFrame::Iterate
// because, as noted below, internal frames carry no arguments.
1207 void InternalFrame::Iterate(ObjectVisitor* v) const {
1208 // Internal frames only have object pointers on the expression stack
1209 // as they never have any arguments.
1210 IterateExpressions(v);
1211 IteratePc(v, pc_address(), LookupCode());
1215 // -------------------------------------------------------------------------
// Returns the n'th (0-based) JavaScript frame found while walking the stack
// with the locator's iterator, skipping all non-JavaScript frames.
// NOTE(review): the function's trailing lines (including its closing brace)
// are elided in this extraction.
1218 JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
1220 for (int i = 0; i <= n; i++) {
// Skip over stub/adaptor/etc. frames until the next JavaScript frame.
1221 while (!iterator_.frame()->is_java_script()) iterator_.Advance();
1222 if (i == n) return JavaScriptFrame::cast(iterator_.frame());
1223 iterator_.Advance();
1230 // -------------------------------------------------------------------------
// Returns the map of |object| in a way that is safe while a GC may be in
// progress: if the map word has been overwritten with a forwarding address,
// the map is read from the forwarded (relocated) copy instead.
1233 static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
1234 MapWord map_word = object->map_word();
1235 return map_word.IsForwardingAddress() ?
1236 map_word.ToForwardingAddress()->map() : map_word.ToMap();
// Returns |object|'s size using the GC-safe map lookup above, so it works
// even when the object's map word holds a forwarding address.
1240 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
1241 return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
// Returns true if |addr| lies within the bounds of the code object |code|,
// computed via the GC-safe map so it is valid during a collection.
1246 static bool GcSafeCodeContains(HeapObject* code, Address addr) {
1247 Map* map = GcSafeMapOfCodeSpaceObject(code);
1248 ASSERT(map == code->GetHeap()->code_map());
1249 Address start = code->address();
1250 Address end = code->address() + code->SizeFromMap(map);
// Half-open range check: [start, end).
1251 return start <= addr && addr < end;
// Casts |object| to Code* without touching its (possibly forwarded) map,
// asserting in debug builds that |inner_pointer| really points inside it.
// NOTE(review): the return statement and closing brace are elided in this
// extraction.
1256 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
1257 Address inner_pointer) {
1258 Code* code = reinterpret_cast<Code*>(object);
1259 ASSERT(code != NULL && GcSafeCodeContains(code, inner_pointer));
// Locates the Code object containing |inner_pointer| without relying on
// heap state that a concurrent/incremental GC may have mutated. Checks the
// large-object space first, then linearly walks code-space objects on the
// owning page starting from the skip-list hint.
// NOTE(review): several lines (loop header, adjustments around the
// allocation top, closing braces) are elided in this extraction.
1264 Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
1265 Address inner_pointer) {
1266 Heap* heap = isolate_->heap();
1267 // Check if the inner pointer points into a large object chunk.
1268 LargePage* large_page = heap->lo_space()->FindPageContainingPc(inner_pointer);
1269 if (large_page != NULL) {
1270 return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
1273 // Iterate through the page until we reach the end or find an object starting
1274 // after the inner pointer.
1275 Page* page = Page::FromAddress(inner_pointer);
// The skip list gives a starting address at or before |inner_pointer|,
// avoiding a scan from the beginning of the page.
1277 Address addr = page->skip_list()->StartFor(inner_pointer);
1279 Address top = heap->code_space()->top();
1280 Address limit = heap->code_space()->limit();
// Special handling when the scan reaches the current allocation area
// (the body of this branch is elided in this extraction).
1283 if (addr == top && addr != limit) {
1288 HeapObject* obj = HeapObject::FromAddress(addr);
1289 int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
1290 Address next_addr = addr + obj_size;
// The first object extending past |inner_pointer| is the one containing it.
1291 if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
// Returns the cache entry for |inner_pointer|, doing the (expensive)
// GcSafeFindCodeForInnerPointer lookup only on a cache miss. The cache is
// a direct-mapped, power-of-two-sized table indexed by a hash of the
// pointer value.
1297 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
1298 InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
1299 isolate_->counters()->pc_to_code()->Increment();
// Power-of-two size lets the hash be reduced with a mask instead of '%'.
1300 ASSERT(IsPowerOf2(kInnerPointerToCodeCacheSize));
1301 uint32_t hash = ComputeIntegerHash(
1302 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)));
1303 uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
1304 InnerPointerToCodeCacheEntry* entry = cache(index);
1305 if (entry->inner_pointer == inner_pointer) {
// Cache hit; in debug builds re-verify the cached code object.
1306 isolate_->counters()->pc_to_code_cached()->Increment();
1307 ASSERT(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
1309 // Because this code may be interrupted by a profiling signal that
1310 // also queries the cache, we cannot update inner_pointer before the code
1311 // has been set. Otherwise, we risk trying to use a cache entry before
1312 // the code has been computed.
1313 entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
1314 entry->safepoint_entry.Reset();
// Written last -- see the signal-safety comment above.
1315 entry->inner_pointer = inner_pointer;
1321 // -------------------------------------------------------------------------
// Counts the set bits in |reglist| (the number of registers in the list)
// by repeatedly clearing the lowest set bit (Kernighan's trick).
// NOTE(review): the counter declaration/increment and return statement are
// elided in this extraction.
1323 int NumRegs(RegList reglist) {
1325 while (reglist != 0) {
1327 reglist &= reglist - 1; // clear one bit
// Precomputed table mapping a dense index [0, kNumJSCallerSaved) to the
// register code of each JS caller-saved register, built once at static
// initialization by scanning the kJSCallerSaved bitmask.
// NOTE(review): the loop body that fills reg_code[] is elided in this
// extraction.
1333 struct JSCallerSavedCodeData {
1334 JSCallerSavedCodeData() {
1336 for (int r = 0; r < kNumRegs; r++)
1337 if ((kJSCallerSaved & (1 << r)) != 0)
// Sanity check: every caller-saved register was accounted for.
1340 ASSERT(i == kNumJSCallerSaved);
1342 int reg_code[kNumJSCallerSaved];
// Single shared instance of the precomputed caller-saved register table.
1346 static const JSCallerSavedCodeData kCallerSavedCodeData;
// Returns the register code of the n'th JS caller-saved register via the
// table above; |n| must be in [0, kNumJSCallerSaved).
1349 int JSCallerSavedCode(int n) {
1350 ASSERT(0 <= n && n < kNumJSCallerSaved);
1351 return kCallerSavedCodeData.reg_code[n];
// For every frame type in STACK_FRAME_TYPE_LIST, generates a ZoneObject
// wrapper class (<field>_Wrapper) holding a by-value copy of the frame, so
// frames can be copied into a zone (see AllocateFrameCopy below).
// NOTE(review): part of the wrapper class body is elided in this extraction.
1355 #define DEFINE_WRAPPER(type, field) \
1356 class field##_Wrapper : public ZoneObject { \
1357 public: /* NOLINT */ \
1358 field##_Wrapper(const field& original) : frame_(original) { \
1362 STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
1363 #undef DEFINE_WRAPPER
// Copies |frame| into a zone-allocated wrapper of the matching concrete
// type (dispatching on frame->type() via the macro-generated cases) and
// returns a pointer to the embedded copy.
1365 static StackFrame* AllocateFrameCopy(StackFrame* frame) {
1366 #define FRAME_TYPE_CASE(type, field) \
1367 case StackFrame::type: { \
1368 field##_Wrapper* wrapper = \
1369 new field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
1370 return &wrapper->frame_; \
1373 switch (frame->type()) {
1374 STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
// Every concrete frame type is covered by the list above.
1375 default: UNREACHABLE();
1377 #undef FRAME_TYPE_CASE
// Walks the entire stack and returns a vector of zone-allocated copies of
// every frame, so the snapshot stays valid after the stack changes.
// NOTE(review): the list.Add(...) line inside the loop is elided in this
// extraction.
1381 Vector<StackFrame*> CreateStackMap() {
1382 ZoneList<StackFrame*> list(10);
1383 for (StackFrameIterator it; !it.done(); it.Advance()) {
1384 StackFrame* frame = AllocateFrameCopy(it.frame());
1387 return list.ToVector();
1391 } } // namespace v8::internal