1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/frames.h"
12 #include "src/base/bits.h"
13 #include "src/deoptimizer.h"
14 #include "src/frames-inl.h"
15 #include "src/full-codegen.h"
16 #include "src/heap/mark-compact.h"
17 #include "src/safepoint-table.h"
18 #include "src/scopeinfo.h"
19 #include "src/string-stream.h"
20 #include "src/vm-state-inl.h"
// Process-wide hook used to translate a stored return-address slot before
// it is read; installed at most once via SetReturnAddressLocationResolver.
ReturnAddressLocationResolver
    StackFrame::return_address_location_resolver_ = NULL;
// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= handler->address());
  // Current handler, or NULL once the chain has been exhausted.
  StackHandler* handler() const { return handler_; }
    // Iteration stops when the chain ends or the next handler lies above
    // this frame's fp, i.e. it belongs to an older frame.
    return handler_ == NULL || handler_->address() > limit_;
    handler_ = handler_->next();
  StackHandler* handler_;
56 // -------------------------------------------------------------------------
// Each iterator embeds one singleton frame object per frame type (see
// STACK_FRAME_TYPE_LIST); INITIALIZE_SINGLETON wires each one to |this|.
#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
    frame_(NULL), handler_(NULL),
    can_access_heap_objects_(can_access_heap_objects) {
#undef INITIALIZE_SINGLETON
// Iterates the current thread's stack; heap access is allowed (true).
StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIteratorBase(isolate, true) {
  Reset(isolate->thread_local_top());
// Iterates the stack recorded for an arbitrary (possibly archived) thread
// |t|; heap access is allowed.
// NOTE(review): the body is not visible in this chunk — presumably it
// calls Reset(t); confirm against the full file.
StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
// Moves to the calling frame, unwinding any stack handlers that belong to
// the frame being left behind.
void StackFrameIterator::Advance() {
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == NULL);
// Restarts iteration at the topmost exit frame recorded in |top|'s
// thread-local state.
void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  // No singleton for this type (e.g. NONE): leave frame_ unset.
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
// Returns the per-type singleton frame initialized with |state|, or NULL
// for StackFrame::NONE.
// NOTE(review): the trailing `return result;` is not visible in this chunk.
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  if (type == StackFrame::NONE) return NULL;
  StackFrame* result = SingletonFor(type);
  DCHECK(result != NULL);
  result->state_ = *state;
// Maps a frame type to the corresponding embedded singleton frame object
// (switch generated from STACK_FRAME_TYPE_LIST).
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: result = &field##_; break;
  StackFrame* result = NULL;
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
#undef FRAME_TYPE_CASE
141 // -------------------------------------------------------------------------
// Positions the iterator at the JavaScript frame with the given |id|;
// returns immediately if the first frame already matches.
JavaScriptFrameIterator::JavaScriptFrameIterator(
    Isolate* isolate, StackFrame::Id id)
    : iterator_(isolate) {
  if (frame()->id() == id) return;
// Steps the underlying iterator until the next JavaScript frame (or the
// end of the stack) is reached.
// NOTE(review): the opening `do {` of this loop is not visible here.
void JavaScriptFrameIterator::Advance() {
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
// If this frame's arguments were adapted, advance onto the arguments
// adaptor frame directly below it; otherwise stay on the current frame.
void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
  if (!frame()->has_adapted_arguments()) return;
  DCHECK(iterator_.frame()->is_arguments_adaptor());
168 // -------------------------------------------------------------------------
// Like JavaScriptFrameIterator, but skips frames that should not appear
// in user-visible stack traces (see IsValidFrame).
StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : JavaScriptFrameIterator(isolate) {
  if (!done() && !IsValidFrame()) Advance();
// Advances past any frames that are not valid for stack traces.
void StackTraceFrameIterator::Advance() {
    JavaScriptFrameIterator::Advance();
    if (IsValidFrame()) return;
// A frame is shown in stack traces only if it belongs to a real
// JSFunction whose script is a user (non-native) script.
bool StackTraceFrameIterator::IsValidFrame() {
  if (!frame()->function()->IsJSFunction()) return false;
  Object* script = frame()->function()->shared()->script();
  // Don't show functions from native scripts to user.
  return (script->IsScript() &&
          Script::TYPE_NATIVE != Script::cast(script)->type()->value());
195 // -------------------------------------------------------------------------
// Stack walker used from the profiler's sampling context. It must not
// touch the heap, so every address is range-checked before being used.
// NOTE(review): the first constructor parameter (Isolate* isolate) is not
// visible in this chunk but is referenced throughout the body.
SafeStackFrameIterator::SafeStackFrameIterator(
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    // Trust the thread-local state: start at the recorded C entry frame.
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // we check only that kMarkerOffset is within the stack bounds and do
    // compile time check that kContextOffset slot is pushed on the stack before
    STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
      // The frame anyways will be skipped.
      type = StackFrame::JAVA_SCRIPT;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;
  if (frame_ != NULL && !frame_->is_exit() &&
      external_callback_scope_ != NULL &&
      external_callback_scope_->scope_address() < frame_->fp()) {
    // Skip top ExternalCallbackScope if we already advanced to a JS frame
    // under it. Sampler will anyways take this top external callback.
    external_callback_scope_ = external_callback_scope_->previous();
254 bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
255 Address c_entry_fp = Isolate::c_entry_fp(top);
256 if (!IsValidExitFrame(c_entry_fp)) return false;
257 // There should be at least one JS_ENTRY stack handler.
258 Address handler = Isolate::handler(top);
259 if (handler == NULL) return false;
260 // Check that there are no js frames on top of the native frames.
261 return c_entry_fp < handler;
// Single validated step to the caller frame. On any validity failure the
// walk is aborted — NOTE(review): the bodies of the two failure branches
// are not visible in this chunk; presumably they clear frame_.
void SafeStackFrameIterator::AdvanceOneFrame() {
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;
  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
288 bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
289 return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
// Validates that |frame|'s caller can be computed without reading invalid
// memory, with extra checks for the frame kinds whose caller computation
// dereferences frame slots.
bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on stack as Smi. We need to check
    // that it really an Smi.
    // NOTE(review): the expression reading the argument count and the
    // failure branch are truncated in this chunk.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
    if (!number_of_args->IsSmi()) {
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
      SingletonFor(frame->GetCallerState(&state)) != NULL;
318 bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
319 if (!IsValidStackAddress(fp)) return false;
320 Address sp = ExitFrame::ComputeStackPointer(fp);
321 if (!IsValidStackAddress(sp)) return false;
322 StackFrame::State state;
323 ExitFrame::FillState(fp, sp, &state);
324 if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
327 return *state.pc_address != NULL;
// Advances to the next JavaScript frame, fixing up the reported pc for
// EXIT frames that are currently executing inside an external (API)
// callback so the profiler attributes the sample to the callback.
void SafeStackFrameIterator::Advance() {
    if (frame_->is_java_script()) return;
    if (frame_->is_exit() && external_callback_scope_) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (external_callback_scope_->scope_address() < frame_->fp()) {
        Address* callback_address =
            external_callback_scope_->callback_address();
        if (*callback_address != NULL) {
          frame_->state_.pc_address = callback_address;
        external_callback_scope_ = external_callback_scope_->previous();
        DCHECK(external_callback_scope_ == NULL ||
               external_callback_scope_->scope_address() > frame_->fp());
358 // -------------------------------------------------------------------------
361 Code* StackFrame::GetSafepointData(Isolate* isolate,
362 Address inner_pointer,
363 SafepointEntry* safepoint_entry,
364 unsigned* stack_slots) {
365 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
366 isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
367 if (!entry->safepoint_entry.is_valid()) {
368 entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
369 DCHECK(entry->safepoint_entry.is_valid());
371 DCHECK(entry->safepoint_entry.Equals(
372 entry->code->GetSafepointEntry(inner_pointer)));
375 // Fill in the results and return the code.
376 Code* code = entry->code;
377 *safepoint_entry = entry->safepoint_entry;
378 *stack_slots = code->stack_slots();
384 static bool GcSafeCodeContains(HeapObject* object, Address addr);
// Visits the Code object that owns the return address at *pc_address and,
// if the visitor moved the code (GC), rewrites the pc so it keeps pointing
// at the same instruction offset inside the relocated code.
// NOTE(review): the remaining parameters (Address* pc_address, Code*
// holder) and the final `*pc_address = pc;` are not visible in this chunk.
void StackFrame::IteratePc(ObjectVisitor* v,
  Address pc = *pc_address;
  DCHECK(GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    // The code object moved: recompute the pc at the same offset.
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
// Installs the process-wide return-address resolver; may be set only once
// (the DCHECK enforces that no resolver is already installed).
void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
// Classifies the frame described by |state|. NOTE(review): the second
// parameter (State* state) is not visible on the signature line in this
// chunk, nor are several closing braces.
StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
  DCHECK(state->fp != NULL);
  if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
    return ARGUMENTS_ADAPTOR;
  // The marker and function offsets overlap. If the marker isn't a
  // smi then the frame is a JavaScript frame -- and the marker is
  // really the function.
  const int offset = StandardFrameConstants::kMarkerOffset;
  Object* marker = Memory::Object_at(state->fp + offset);
  if (!marker->IsSmi()) {
    // If we're using a "safe" stack iterator, we treat optimized
    // frames as normal JavaScript frames to avoid having to look
    // into the heap to determine the state. This is safe as long
    // as nobody tries to GC...
    if (!iterator->can_access_heap_objects_) return JAVA_SCRIPT;
    Code::Kind kind = GetContainingCode(iterator->isolate(),
                                        *(state->pc_address))->kind();
    DCHECK(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
    return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
  // A smi marker directly encodes the frame type.
  return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
438 bool StackFrame::can_access_heap_objects() const {
439 return iterator_->can_access_heap_objects_;
444 StackFrame::Type StackFrame::GetCallerState(State* state) const {
445 ComputeCallerState(state);
446 return ComputeType(iterator_, state);
// Returns the frame pointer with dynamic alignment padding removed:
// ia32/x87 optimized frames may have pushed one padding word.
// NOTE(review): the #else/#endif tail of this function is not visible in
// this chunk.
Address StackFrame::UnpaddedFP() const {
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  if (!is_optimized()) return fp();
  int32_t alignment_state = Memory::int32_at(
      fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);

  return (alignment_state == kAlignmentPaddingPushed) ?
      (fp() + kPointerSize) : fp();
464 Code* EntryFrame::unchecked_code() const {
465 return isolate()->heap()->js_entry_code();
469 void EntryFrame::ComputeCallerState(State* state) const {
470 GetCallerState(state);
474 void EntryFrame::SetCallerFp(Address caller_fp) {
475 const int offset = EntryFrameConstants::kCallerFPOffset;
476 Memory::Address_at(this->fp() + offset) = caller_fp;
480 StackFrame::Type EntryFrame::GetCallerState(State* state) const {
481 const int offset = EntryFrameConstants::kCallerFPOffset;
482 Address fp = Memory::Address_at(this->fp() + offset);
483 return ExitFrame::GetStateForFramePointer(fp, state);
487 Code* EntryConstructFrame::unchecked_code() const {
488 return isolate()->heap()->js_construct_entry_code();
492 Object*& ExitFrame::code_slot() const {
493 const int offset = ExitFrameConstants::kCodeOffset;
494 return Memory::Object_at(fp() + offset);
498 Code* ExitFrame::unchecked_code() const {
499 return reinterpret_cast<Code*>(code_slot());
503 void ExitFrame::ComputeCallerState(State* state) const {
504 // Set up the caller state.
505 state->sp = caller_sp();
506 state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
507 state->pc_address = ResolveReturnAddressLocation(
508 reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
509 if (FLAG_enable_ool_constant_pool) {
510 state->constant_pool_address = reinterpret_cast<Address*>(
511 fp() + ExitFrameConstants::kConstantPoolOffset);
516 void ExitFrame::SetCallerFp(Address caller_fp) {
517 Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
521 void ExitFrame::Iterate(ObjectVisitor* v) const {
522 // The arguments are traversed as part of the expression stack of
523 // the calling frame.
524 IteratePc(v, pc_address(), LookupCode());
525 v->VisitPointer(&code_slot());
526 if (FLAG_enable_ool_constant_pool) {
527 v->VisitPointer(&constant_pool_slot());
532 Address ExitFrame::GetCallerStackPointer() const {
533 return fp() + ExitFrameConstants::kCallerSPDisplacement;
537 StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
538 if (fp == 0) return NONE;
539 Address sp = ComputeStackPointer(fp);
540 FillState(fp, sp, state);
541 DCHECK(*state->pc_address != NULL);
546 Address ExitFrame::ComputeStackPointer(Address fp) {
547 return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
551 void ExitFrame::FillState(Address fp, Address sp, State* state) {
554 state->pc_address = ResolveReturnAddressLocation(
555 reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
556 state->constant_pool_address =
557 reinterpret_cast<Address*>(fp + ExitFrameConstants::kConstantPoolOffset);
561 Address StandardFrame::GetExpressionAddress(int n) const {
562 const int offset = StandardFrameConstants::kExpressionsOffset;
563 return fp() + offset - n * kPointerSize;
567 Object* StandardFrame::GetExpression(Address fp, int index) {
568 return Memory::Object_at(GetExpressionAddress(fp, index));
572 Address StandardFrame::GetExpressionAddress(Address fp, int n) {
573 const int offset = StandardFrameConstants::kExpressionsOffset;
574 return fp + offset - n * kPointerSize;
578 int StandardFrame::ComputeExpressionsCount() const {
580 StandardFrameConstants::kExpressionsOffset + kPointerSize;
581 Address base = fp() + offset;
582 Address limit = sp();
583 DCHECK(base >= limit); // stack grows downwards
584 // Include register-allocated locals in number of expressions.
585 return static_cast<int>((base - limit) / kPointerSize);
589 void StandardFrame::ComputeCallerState(State* state) const {
590 state->sp = caller_sp();
591 state->fp = caller_fp();
592 state->pc_address = ResolveReturnAddressLocation(
593 reinterpret_cast<Address*>(ComputePCAddress(fp())));
594 state->constant_pool_address =
595 reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
599 void StandardFrame::SetCallerFp(Address caller_fp) {
600 Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
// GC visitation of a compiled (full-codegen or optimized) frame: walks
// outgoing parameters, saved registers, spill slots and the fixed frame
// part, guided by the safepoint bitmap for the current pc.
// NOTE(review): several closing braces of the inner blocks are not
// visible in this chunk.
void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Compute the safepoint information.
  unsigned stack_slots = 0;
  SafepointEntry safepoint_entry;
  Code* code = StackFrame::GetSafepointData(
      isolate(), pc(), &safepoint_entry, &stack_slots);
  unsigned slot_space = stack_slots * kPointerSize;

  // Visit the outgoing parameters.
  Object** parameters_base = &Memory::Object_at(sp());
  Object** parameters_limit = &Memory::Object_at(
      fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitPointers(parameters_base,
                     parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    DCHECK(!isolate()->serializer_enabled());
    parameters_base += DoubleRegister::NumAllocatableRegisters() *
        kDoubleSize / kPointerSize;

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >=0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitPointer(parameters_base + reg_stack_index);
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters.
  v->VisitPointers(parameters_base, parameters_limit);

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitPointer(parameters_limit + index);

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), code);

  // Visit the context in stub frame and JavaScript frame.
  // Visit the function in JavaScript frame.
  Object** fixed_base = &Memory::Object_at(
      fp() + StandardFrameConstants::kMarkerOffset);
  Object** fixed_limit = &Memory::Object_at(fp());
  v->VisitPointers(fixed_base, fixed_limit);
677 void StubFrame::Iterate(ObjectVisitor* v) const {
678 IterateCompiledFrame(v);
682 Code* StubFrame::unchecked_code() const {
683 return static_cast<Code*>(isolate()->FindCodeObject(pc()));
687 Address StubFrame::GetCallerStackPointer() const {
688 return fp() + ExitFrameConstants::kCallerSPDisplacement;
692 int StubFrame::GetNumberOfIncomingArguments() const {
697 void OptimizedFrame::Iterate(ObjectVisitor* v) const {
698 IterateCompiledFrame(v);
702 void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
703 Memory::Object_at(GetParameterSlot(index)) = value;
707 bool JavaScriptFrame::IsConstructor() const {
708 Address fp = caller_fp();
709 if (has_adapted_arguments()) {
710 // Skip the arguments adaptor frame and look at the real caller.
711 fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
713 return IsConstructFrame(fp);
717 int JavaScriptFrame::GetArgumentsLength() const {
718 // If there is an arguments adaptor frame get the arguments length from it.
719 if (has_adapted_arguments()) {
720 return Smi::cast(GetExpression(caller_fp(), 0))->value();
722 return GetNumberOfIncomingArguments();
727 Code* JavaScriptFrame::unchecked_code() const {
728 return function()->code();
732 int JavaScriptFrame::GetNumberOfIncomingArguments() const {
733 DCHECK(can_access_heap_objects() &&
734 isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
736 return function()->shared()->internal_formal_parameter_count();
740 Address JavaScriptFrame::GetCallerStackPointer() const {
741 return fp() + StandardFrameConstants::kCallerSPOffset;
745 void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
746 DCHECK(functions->length() == 0);
747 functions->Add(function());
// Produces a single FrameSummary describing this (non-inlined) frame.
// NOTE(review): the remaining FrameSummary constructor arguments are not
// visible in this chunk.
void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
  DCHECK(functions->length() == 0);
  Code* code_pointer = LookupCode();
  int offset = static_cast<int>(pc() - code_pointer->address());
  FrameSummary summary(receiver(),
  functions->Add(summary);
764 int JavaScriptFrame::LookupExceptionHandlerInTable(int* stack_slots) {
765 Code* code = LookupCode();
766 DCHECK(!code->is_optimized_code());
767 HandlerTable* table = HandlerTable::cast(code->handler_table());
768 int pc_offset = static_cast<int>(pc() - code->entry());
769 return table->LookupRange(pc_offset, stack_slots);
// Prints "<*|~>name+offset" (star for optimized functions) and optionally
// " at script:line" for the given function/code/pc to |file|.
// NOTE(review): several closing braces of the nested blocks are not
// visible in this chunk.
void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function, Code* code,
                                             Address pc, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
  function->PrintName(file);
  int code_offset = static_cast<int>(pc - code->instruction_start());
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo* shared = function->shared();
    int source_pos = code->SourcePosition(pc);
    Object* maybe_script = shared->script();
    if (maybe_script->IsScript()) {
      Script* script = Script::cast(maybe_script);
      int line = script->GetLineNumber(source_pos) + 1;
      Object* script_name_raw = script->name();
      if (script_name_raw->IsString()) {
        String* script_name = String::cast(script->name());
        SmartArrayPointer<char> c_script_name =
            script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
        PrintF(file, " at <unknown>:%d", line);
      PrintF(file, " at <unknown>:<unknown>");
// Prints the topmost JavaScript frame (and optionally its receiver and
// actually-supplied arguments) to |file|; used for tracing/debugging.
// NOTE(review): the surrounding loop header and several closing braces
// are not visible in this chunk.
void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      PrintFunctionAndOffset(frame->function(), frame->unchecked_code(),
                             frame->pc(), file, print_line_number);
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver()->ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          frame->GetParameter(i)->ShortPrint(file);
834 void JavaScriptFrame::SaveOperandStack(FixedArray* store) const {
835 int operands_count = store->length();
836 DCHECK_LE(operands_count, ComputeOperandsCount());
837 for (int i = 0; i < operands_count; i++) {
838 store->set(i, GetOperand(i));
843 void JavaScriptFrame::RestoreOperandStack(FixedArray* store) {
844 int operands_count = store->length();
845 DCHECK_LE(operands_count, ComputeOperandsCount());
846 for (int i = 0; i < operands_count; i++) {
847 DCHECK_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
848 Memory::Object_at(GetOperandSlot(i)) = store->get(i);
// Debug dump of this summary (receiver, function, code kind and pc
// offset) to stdout.
// NOTE(review): the lines printing the code object itself are not visible
// in this chunk.
void FrameSummary::Print() {
  PrintF("receiver: ");
  receiver_->ShortPrint();
  PrintF("\nfunction: ");
  function_->shared()->DebugName()->ShortPrint();
  if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
  if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
  PrintF("\npc: %d\n", offset_);
866 JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array,
868 if (literal_id == Translation::kSelfLiteralId) {
872 return JSFunction::cast(literal_array->get(literal_id));
// Builds one FrameSummary per JS frame encoded in this optimized frame's
// deoptimization translation (i.e. including inlined frames), walking the
// translation commands to recover each function, receiver and pc offset.
// NOTE(review): the loop header, several branch lines and closing braces
// are not visible in this chunk.
void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
  DCHECK(frames->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  if (LookupCode()->is_turbofanned() && !FLAG_turbo_deoptimization) {
    return JavaScriptFrame::Summarize(frames);

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  // BUG(3243555): Since we don't have a lazy-deopt registered at
  // throw-statements, we can't use the translation at the call-site of
  // throw. An entry with no deoptimization index indicates a call-site
  // without a lazy-deopt. As a consequence we are not allowed to inline
  // functions containing throw.
  DCHECK(deopt_index != Safepoint::kNoDeoptimizationIndex);

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  int i = jsframe_count;
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      BailoutId ast_id = BailoutId(it.Next());
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.

      // The translation commands are ordered and the receiver is always
      // at the first position.
      // If we are at a call, the receiver is always in a stack slot.
      // Otherwise we are not guaranteed to get the receiver value.
      opcode = static_cast<Translation::Opcode>(it.Next());
      int index = it.Next();

      // Get the correct receiver in the optimized frame.
      Object* receiver = NULL;
      if (opcode == Translation::LITERAL) {
        receiver = data->LiteralArray()->get(index);
      } else if (opcode == Translation::STACK_SLOT) {
        // Positive index means the value is spilled to the locals
        // area. Negative means it is stored in the incoming parameter
        receiver = GetExpression(index);
        // Index -1 overlaps with last parameter, -n with the first parameter,
        // (-n - 1) with the receiver with n being the number of parameters
        // of the outermost, optimized frame.
        int parameter_count = ComputeParametersCount();
        int parameter_index = index + parameter_count;
        receiver = (parameter_index == -1)
            : this->GetParameter(parameter_index);
        // The receiver is not in a stack slot nor in a literal. We give up.
        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        receiver = isolate()->heap()->undefined_value();

      Code* code = function->shared()->code();
      DeoptimizationOutputData* output_data =
          DeoptimizationOutputData::cast(code->deoptimization_data());
      unsigned entry = Deoptimizer::GetOutputInfo(output_data,
          FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
      DCHECK(pc_offset > 0);

      FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
      DCHECK(!is_constructor);
      is_constructor = true;
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
  DCHECK(!is_constructor);
979 int OptimizedFrame::LookupExceptionHandlerInTable(int* stack_slots) {
980 Code* code = LookupCode();
981 DCHECK(code->is_optimized_code());
982 HandlerTable* table = HandlerTable::cast(code->handler_table());
983 int pc_offset = static_cast<int>(pc() - code->entry());
984 *stack_slots = code->stack_slots();
985 return table->LookupReturn(pc_offset);
// Retrieves the DeoptimizationInputData for this frame's optimized code
// and, via |deopt_index|, the deopt index of the safepoint at the current
// pc. NOTE(review): the parameter list (int* deopt_index) and some braces
// are not visible in this chunk.
DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
  DCHECK(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  if (!code->contains(pc())) {
    code = isolate()->inner_pointer_to_code_cache()->
        GcSafeFindCodeForInnerPointer(pc());
  DCHECK(code != NULL);
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);

  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  DCHECK(*deopt_index != Safepoint::kNoDeoptimizationIndex);

  return DeoptimizationInputData::cast(code->deoptimization_data());
// Number of JS frames (1 + inlined frames) encoded in this optimized
// frame's deoptimization translation; falls back to the plain JS frame
// behavior for turbofanned code without deopt support.
int OptimizedFrame::GetInlineCount() {
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  if (LookupCode()->is_turbofanned() && !FLAG_turbo_deoptimization) {
    return JavaScriptFrame::GetInlineCount();

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();
  return jsframe_count;
// Collects the functions of all JS frames (including inlined ones)
// encoded in this optimized frame's deoptimization translation.
// NOTE(review): several braces and the loop decrement are not visible in
// this chunk.
void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
  DCHECK(functions->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  if (LookupCode()->is_turbofanned() && !FLAG_turbo_deoptimization) {
    return JavaScriptFrame::GetFunctions(functions);

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      it.Next();  // Skip ast id.
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.
      functions->Add(function);
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
1076 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
1077 return Smi::cast(GetExpression(0))->value();
1081 Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
1082 return fp() + StandardFrameConstants::kCallerSPOffset;
1086 Address InternalFrame::GetCallerStackPointer() const {
1087 // Internal frames have no arguments. The stack pointer of the
1088 // caller is at a fixed offset from the frame pointer.
1089 return fp() + StandardFrameConstants::kCallerSPOffset;
1093 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1094 return isolate()->builtins()->builtin(
1095 Builtins::kArgumentsAdaptorTrampoline);
1099 Code* InternalFrame::unchecked_code() const {
1100 const int offset = InternalFrameConstants::kCodeOffset;
1101 Object* code = Memory::Object_at(fp() + offset);
1102 DCHECK(code != NULL);
1103 return reinterpret_cast<Code*>(code);
1107 void StackFrame::PrintIndex(StringStream* accumulator,
1110 accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
1114 void JavaScriptFrame::Print(StringStream* accumulator,
1117 DisallowHeapAllocation no_gc;
1118 Object* receiver = this->receiver();
1119 JSFunction* function = this->function();
1121 accumulator->PrintSecurityTokenIfChanged(function);
1122 PrintIndex(accumulator, mode, index);
1124 if (IsConstructor()) accumulator->Add("new ");
1125 accumulator->PrintFunction(function, receiver, &code);
1127 // Get scope information for nicer output, if possible. If code is NULL, or
1128 // doesn't contain scope info, scope_info will return 0 for the number of
1129 // parameters, stack local variables, context local variables, stack slots,
1130 // or context slots.
1131 SharedFunctionInfo* shared = function->shared();
1132 ScopeInfo* scope_info = shared->scope_info();
1133 Object* script_obj = shared->script();
1134 if (script_obj->IsScript()) {
1135 Script* script = Script::cast(script_obj);
1136 accumulator->Add(" [");
1137 accumulator->PrintName(script->name());
1139 Address pc = this->pc();
1140 if (code != NULL && code->kind() == Code::FUNCTION &&
1141 pc >= code->instruction_start() && pc < code->instruction_end()) {
1142 int source_pos = code->SourcePosition(pc);
1143 int line = script->GetLineNumber(source_pos) + 1;
1144 accumulator->Add(":%d", line);
1146 int function_start_pos = shared->start_position();
1147 int line = script->GetLineNumber(function_start_pos) + 1;
1148 accumulator->Add(":~%d", line);
1151 accumulator->Add("] ");
1154 accumulator->Add("(this=%o", receiver);
1156 // Print the parameters.
1157 int parameters_count = ComputeParametersCount();
1158 for (int i = 0; i < parameters_count; i++) {
1159 accumulator->Add(",");
1160 // If we have a name for the parameter we print it. Nameless
1161 // parameters are either because we have more actual parameters
1162 // than formal parameters or because we have no scope information.
1163 if (i < scope_info->ParameterCount()) {
1164 accumulator->PrintName(scope_info->ParameterName(i));
1165 accumulator->Add("=");
1167 accumulator->Add("%o", GetParameter(i));
1170 accumulator->Add(")");
1171 if (mode == OVERVIEW) {
1172 accumulator->Add("\n");
1175 if (is_optimized()) {
1176 accumulator->Add(" {\n// optimized frame\n}\n");
1179 accumulator->Add(" {\n");
1181 // Compute the number of locals and expression stack elements.
1182 int stack_locals_count = scope_info->StackLocalCount();
1183 int heap_locals_count = scope_info->ContextLocalCount();
1184 int expressions_count = ComputeExpressionsCount();
1186 // Print stack-allocated local variables.
1187 if (stack_locals_count > 0) {
1188 accumulator->Add(" // stack-allocated locals\n");
1190 for (int i = 0; i < stack_locals_count; i++) {
1191 accumulator->Add(" var ");
1192 accumulator->PrintName(scope_info->StackLocalName(i));
1193 accumulator->Add(" = ");
1194 if (i < expressions_count) {
1195 accumulator->Add("%o", GetExpression(i));
1197 accumulator->Add("// no expression found - inconsistent frame?");
1199 accumulator->Add("\n");
1202 // Try to get hold of the context of this frame.
1203 Context* context = NULL;
1204 if (this->context() != NULL && this->context()->IsContext()) {
1205 context = Context::cast(this->context());
1207 while (context->IsWithContext()) {
1208 context = context->previous();
1209 DCHECK(context != NULL);
1212 // Print heap-allocated local variables.
1213 if (heap_locals_count > 0) {
1214 accumulator->Add(" // heap-allocated locals\n");
1216 for (int i = 0; i < heap_locals_count; i++) {
1217 accumulator->Add(" var ");
1218 accumulator->PrintName(scope_info->ContextLocalName(i));
1219 accumulator->Add(" = ");
1220 if (context != NULL) {
1221 int index = Context::MIN_CONTEXT_SLOTS + i;
1222 if (index < context->length()) {
1223 accumulator->Add("%o", context->get(index));
1226 "// warning: missing context slot - inconsistent frame?");
1229 accumulator->Add("// warning: no context found - inconsistent frame?");
1231 accumulator->Add("\n");
1234 // Print the expression stack.
1235 int expressions_start = stack_locals_count;
1236 if (expressions_start < expressions_count) {
1237 accumulator->Add(" // expression stack (top to bottom)\n");
1239 for (int i = expressions_count - 1; i >= expressions_start; i--) {
1240 accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
1243 // Print details about the function.
1244 if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
1245 std::ostringstream os;
1246 SharedFunctionInfo* shared = function->shared();
1247 os << "--------- s o u r c e c o d e ---------\n"
1248 << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
1249 << "\n-----------------------------------------\n";
1250 accumulator->Add(os.str().c_str());
1253 accumulator->Add("}\n\n");
1257 void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
1260 int actual = ComputeParametersCount();
1262 JSFunction* function = this->function();
1263 expected = function->shared()->internal_formal_parameter_count();
1265 PrintIndex(accumulator, mode, index);
1266 accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
1267 if (mode == OVERVIEW) {
1268 accumulator->Add("\n");
1271 accumulator->Add(" {\n");
1273 // Print actual arguments.
1274 if (actual > 0) accumulator->Add(" // actual arguments\n");
1275 for (int i = 0; i < actual; i++) {
1276 accumulator->Add(" [%02d] : %o", i, GetParameter(i));
1277 if (expected != -1 && i >= expected) {
1278 accumulator->Add(" // not passed to callee");
1280 accumulator->Add("\n");
1283 accumulator->Add("}\n\n");
1287 void EntryFrame::Iterate(ObjectVisitor* v) const {
1288 IteratePc(v, pc_address(), LookupCode());
1292 void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
1293 const int offset = StandardFrameConstants::kLastObjectOffset;
1294 Object** base = &Memory::Object_at(sp());
1295 Object** limit = &Memory::Object_at(fp() + offset) + 1;
1296 v->VisitPointers(base, limit);
1300 void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
1301 IterateExpressions(v);
1302 IteratePc(v, pc_address(), LookupCode());
1306 void InternalFrame::Iterate(ObjectVisitor* v) const {
1307 // Internal frames only have object pointers on the expression stack
1308 // as they never have any arguments.
1309 IterateExpressions(v);
1310 IteratePc(v, pc_address(), LookupCode());
1314 void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
1315 Object** base = &Memory::Object_at(sp());
1316 Object** limit = &Memory::Object_at(fp() +
1317 kFirstRegisterParameterFrameOffset);
1318 v->VisitPointers(base, limit);
1319 base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
1320 const int offset = StandardFrameConstants::kLastObjectOffset;
1321 limit = &Memory::Object_at(fp() + offset) + 1;
1322 v->VisitPointers(base, limit);
1323 IteratePc(v, pc_address(), LookupCode());
1327 Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
1328 return fp() + StandardFrameConstants::kCallerSPOffset;
1332 Code* StubFailureTrampolineFrame::unchecked_code() const {
1334 StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
1335 FindCodeInCache(&trampoline);
1336 if (trampoline->contains(pc())) {
1340 StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
1341 FindCodeInCache(&trampoline);
1342 if (trampoline->contains(pc())) {
1351 // -------------------------------------------------------------------------
1354 JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
1356 for (int i = 0; i <= n; i++) {
1357 while (!iterator_.frame()->is_java_script()) iterator_.Advance();
1358 if (i == n) return JavaScriptFrame::cast(iterator_.frame());
1359 iterator_.Advance();
1366 // -------------------------------------------------------------------------
1369 static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
1370 MapWord map_word = object->map_word();
1371 return map_word.IsForwardingAddress() ?
1372 map_word.ToForwardingAddress()->map() : map_word.ToMap();
1376 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
1377 return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
1382 static bool GcSafeCodeContains(HeapObject* code, Address addr) {
1383 Map* map = GcSafeMapOfCodeSpaceObject(code);
1384 DCHECK(map == code->GetHeap()->code_map());
1385 Address start = code->address();
1386 Address end = code->address() + code->SizeFromMap(map);
1387 return start <= addr && addr < end;
1392 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
1393 Address inner_pointer) {
1394 Code* code = reinterpret_cast<Code*>(object);
1395 DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
1400 Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
1401 Address inner_pointer) {
1402 Heap* heap = isolate_->heap();
1403 // Check if the inner pointer points into a large object chunk.
1404 LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
1405 if (large_page != NULL) {
1406 return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
1409 // Iterate through the page until we reach the end or find an object starting
1410 // after the inner pointer.
1411 Page* page = Page::FromAddress(inner_pointer);
1413 Address addr = page->skip_list()->StartFor(inner_pointer);
1415 Address top = heap->code_space()->top();
1416 Address limit = heap->code_space()->limit();
1419 if (addr == top && addr != limit) {
1424 HeapObject* obj = HeapObject::FromAddress(addr);
1425 int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
1426 Address next_addr = addr + obj_size;
1427 if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
1433 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
1434 InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
1435 isolate_->counters()->pc_to_code()->Increment();
1436 DCHECK(base::bits::IsPowerOfTwo32(kInnerPointerToCodeCacheSize));
1437 uint32_t hash = ComputeIntegerHash(
1438 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
1439 v8::internal::kZeroHashSeed);
1440 uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
1441 InnerPointerToCodeCacheEntry* entry = cache(index);
1442 if (entry->inner_pointer == inner_pointer) {
1443 isolate_->counters()->pc_to_code_cached()->Increment();
1444 DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
1446 // Because this code may be interrupted by a profiling signal that
1447 // also queries the cache, we cannot update inner_pointer before the code
1448 // has been set. Otherwise, we risk trying to use a cache entry before
1449 // the code has been computed.
1450 entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
1451 entry->safepoint_entry.Reset();
1452 entry->inner_pointer = inner_pointer;
1458 // -------------------------------------------------------------------------
1461 int NumRegs(RegList reglist) { return base::bits::CountPopulation32(reglist); }
1464 struct JSCallerSavedCodeData {
1465 int reg_code[kNumJSCallerSaved];
1468 JSCallerSavedCodeData caller_saved_code_data;
1470 void SetUpJSCallerSavedCodeData() {
1472 for (int r = 0; r < kNumRegs; r++)
1473 if ((kJSCallerSaved & (1 << r)) != 0)
1474 caller_saved_code_data.reg_code[i++] = r;
1476 DCHECK(i == kNumJSCallerSaved);
1480 int JSCallerSavedCode(int n) {
1481 DCHECK(0 <= n && n < kNumJSCallerSaved);
1482 return caller_saved_code_data.reg_code[n];
1486 #define DEFINE_WRAPPER(type, field) \
1487 class field##_Wrapper : public ZoneObject { \
1488 public: /* NOLINT */ \
1489 field##_Wrapper(const field& original) : frame_(original) { \
1493 STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
1494 #undef DEFINE_WRAPPER
1496 static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
1497 #define FRAME_TYPE_CASE(type, field) \
1498 case StackFrame::type: { \
1499 field##_Wrapper* wrapper = \
1500 new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
1501 return &wrapper->frame_; \
1504 switch (frame->type()) {
1505 STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
1506 default: UNREACHABLE();
1508 #undef FRAME_TYPE_CASE
1513 Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
1514 ZoneList<StackFrame*> list(10, zone);
1515 for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
1516 StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
1517 list.Add(frame, zone);
1519 return list.ToVector();
1523 } } // namespace v8::internal