1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "deoptimizer.h"
32 #include "frames-inl.h"
33 #include "full-codegen.h"
34 #include "lazy-instance.h"
35 #include "mark-compact.h"
36 #include "safepoint-table.h"
37 #include "scopeinfo.h"
38 #include "string-stream.h"
39 #include "vm-state-inl.h"
45 ReturnAddressLocationResolver
46 StackFrame::return_address_location_resolver_ = NULL;
49 // Iterator that supports traversing the stack handlers of a
50 // particular frame. Needs to know the top of the handler chain.
51 class StackHandlerIterator BASE_EMBEDDED {
53 StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
54 : limit_(frame->fp()), handler_(handler) {
55 // Make sure the handler has already been unwound to this frame.
56 ASSERT(frame->sp() <= handler->address());
59 StackHandler* handler() const { return handler_; }
62 return handler_ == NULL || handler_->address() > limit_;
66 handler_ = handler_->next();
71 StackHandler* handler_;
75 // -------------------------------------------------------------------------
78 #define INITIALIZE_SINGLETON(type, field) field##_(this),
79 StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
80 bool can_access_heap_objects)
82 STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
83 frame_(NULL), handler_(NULL),
84 can_access_heap_objects_(can_access_heap_objects) {
86 #undef INITIALIZE_SINGLETON
89 StackFrameIterator::StackFrameIterator(Isolate* isolate)
90 : StackFrameIteratorBase(isolate, true) {
91 Reset(isolate->thread_local_top());
95 StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
96 : StackFrameIteratorBase(isolate, true) {
101 void StackFrameIterator::Advance() {
103 // Compute the state of the calling frame before restoring
104 // callee-saved registers and unwinding handlers. This allows the
105 // frame code that computes the caller state to access the top
106 // handler and the value of any callee-saved register if needed.
107 StackFrame::State state;
108 StackFrame::Type type = frame_->GetCallerState(&state);
110 // Unwind handlers corresponding to the current frame.
111 StackHandlerIterator it(frame_, handler_);
112 while (!it.done()) it.Advance();
113 handler_ = it.handler();
115 // Advance to the calling frame.
116 frame_ = SingletonFor(type, &state);
118 // When we're done iterating over the stack frames, the handler
119 // chain must have been completely unwound.
120 ASSERT(!done() || handler_ == NULL);
124 void StackFrameIterator::Reset(ThreadLocalTop* top) {
125 StackFrame::State state;
126 StackFrame::Type type = ExitFrame::GetStateForFramePointer(
127 Isolate::c_entry_fp(top), &state);
128 handler_ = StackHandler::FromAddress(Isolate::handler(top));
129 if (SingletonFor(type) == NULL) return;
130 frame_ = SingletonFor(type, &state);
134 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
135 StackFrame::State* state) {
136 if (type == StackFrame::NONE) return NULL;
137 StackFrame* result = SingletonFor(type);
138 ASSERT(result != NULL);
139 result->state_ = *state;
144 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
145 #define FRAME_TYPE_CASE(type, field) \
146 case StackFrame::type: result = &field##_; break;
148 StackFrame* result = NULL;
150 case StackFrame::NONE: return NULL;
151 STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
156 #undef FRAME_TYPE_CASE
160 // -------------------------------------------------------------------------
163 JavaScriptFrameIterator::JavaScriptFrameIterator(
164 Isolate* isolate, StackFrame::Id id)
165 : iterator_(isolate) {
168 if (frame()->id() == id) return;
173 void JavaScriptFrameIterator::Advance() {
176 } while (!iterator_.done() && !iterator_.frame()->is_java_script());
180 void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
181 if (!frame()->has_adapted_arguments()) return;
183 ASSERT(iterator_.frame()->is_arguments_adaptor());
187 // -------------------------------------------------------------------------
190 StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
191 : JavaScriptFrameIterator(isolate) {
192 if (!done() && !IsValidFrame()) Advance();
196 void StackTraceFrameIterator::Advance() {
198 JavaScriptFrameIterator::Advance();
200 if (IsValidFrame()) return;
205 bool StackTraceFrameIterator::IsValidFrame() {
206 if (!frame()->function()->IsJSFunction()) return false;
207 Object* script = frame()->function()->shared()->script();
208 // Don't show functions from native scripts to user.
209 return (script->IsScript() &&
210 Script::TYPE_NATIVE != Script::cast(script)->type()->value());
214 // -------------------------------------------------------------------------
217 SafeStackFrameIterator::SafeStackFrameIterator(
219 Address fp, Address sp, Address js_entry_sp)
220 : StackFrameIteratorBase(isolate, false),
222 high_bound_(js_entry_sp),
223 top_frame_type_(StackFrame::NONE),
224 external_callback_scope_(isolate->external_callback_scope()) {
225 StackFrame::State state;
226 StackFrame::Type type;
227 ThreadLocalTop* top = isolate->thread_local_top();
228 if (IsValidTop(top)) {
229 type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
230 top_frame_type_ = type;
231 } else if (IsValidStackAddress(fp)) {
235 state.pc_address = StackFrame::ResolveReturnAddressLocation(
236 reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
237 // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
238 // we check only that kMarkerOffset is within the stack bounds and do
239 // compile time check that kContextOffset slot is pushed on the stack before
241 STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
242 StandardFrameConstants::kContextOffset);
243 Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
244 if (IsValidStackAddress(frame_marker)) {
245 type = StackFrame::ComputeType(this, &state);
246 top_frame_type_ = type;
248 // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
249 // The frame anyways will be skipped.
250 type = StackFrame::JAVA_SCRIPT;
251 // Top frame is incomplete so we cannot reliably determine its type.
252 top_frame_type_ = StackFrame::NONE;
257 if (SingletonFor(type) == NULL) return;
258 frame_ = SingletonFor(type, &state);
259 if (frame_ == NULL) return;
263 if (frame_ != NULL && !frame_->is_exit() &&
264 external_callback_scope_ != NULL &&
265 external_callback_scope_->scope_address() < frame_->fp()) {
266 // Skip top ExternalCallbackScope if we already advanced to a JS frame
267 // under it. Sampler will anyways take this top external callback.
268 external_callback_scope_ = external_callback_scope_->previous();
273 bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
274 Address c_entry_fp = Isolate::c_entry_fp(top);
275 if (!IsValidExitFrame(c_entry_fp)) return false;
276 // There should be at least one JS_ENTRY stack handler.
277 Address handler = Isolate::handler(top);
278 if (handler == NULL) return false;
279 // Check that there are no js frames on top of the native frames.
280 return c_entry_fp < handler;
284 void SafeStackFrameIterator::AdvanceOneFrame() {
286 StackFrame* last_frame = frame_;
287 Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
288 // Before advancing to the next stack frame, perform pointer validity tests.
289 if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
294 // Advance to the previous frame.
295 StackFrame::State state;
296 StackFrame::Type type = frame_->GetCallerState(&state);
297 frame_ = SingletonFor(type, &state);
298 if (frame_ == NULL) return;
300 // Check that we have actually moved to the previous frame in the stack.
301 if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
307 bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
308 return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
312 bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
313 StackFrame::State state;
314 if (frame->is_entry() || frame->is_entry_construct()) {
315 // See EntryFrame::GetCallerState. It computes the caller FP address
316 // and calls ExitFrame::GetStateForFramePointer on it. We need to be
317 // sure that caller FP address is valid.
318 Address caller_fp = Memory::Address_at(
319 frame->fp() + EntryFrameConstants::kCallerFPOffset);
320 if (!IsValidExitFrame(caller_fp)) return false;
321 } else if (frame->is_arguments_adaptor()) {
322 // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
323 // the number of arguments is stored on stack as Smi. We need to check
324 // that it really an Smi.
325 Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
327 if (!number_of_args->IsSmi()) {
331 frame->ComputeCallerState(&state);
332 return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
333 SingletonFor(frame->GetCallerState(&state)) != NULL;
337 bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
338 if (!IsValidStackAddress(fp)) return false;
339 Address sp = ExitFrame::ComputeStackPointer(fp);
340 if (!IsValidStackAddress(sp)) return false;
341 StackFrame::State state;
342 ExitFrame::FillState(fp, sp, &state);
343 if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
346 return *state.pc_address != NULL;
350 void SafeStackFrameIterator::Advance() {
354 if (frame_->is_java_script()) return;
355 if (frame_->is_exit() && external_callback_scope_) {
356 // Some of the EXIT frames may have ExternalCallbackScope allocated on
357 // top of them. In that case the scope corresponds to the first EXIT
358 // frame beneath it. There may be other EXIT frames on top of the
359 // ExternalCallbackScope, just skip them as we cannot collect any useful
360 // information about them.
361 if (external_callback_scope_->scope_address() < frame_->fp()) {
362 Address* callback_address =
363 external_callback_scope_->callback_address();
364 if (*callback_address != NULL) {
365 frame_->state_.pc_address = callback_address;
367 external_callback_scope_ = external_callback_scope_->previous();
368 ASSERT(external_callback_scope_ == NULL ||
369 external_callback_scope_->scope_address() > frame_->fp());
377 // -------------------------------------------------------------------------
380 Code* StackFrame::GetSafepointData(Isolate* isolate,
381 Address inner_pointer,
382 SafepointEntry* safepoint_entry,
383 unsigned* stack_slots) {
384 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
385 isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
386 if (!entry->safepoint_entry.is_valid()) {
387 entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
388 ASSERT(entry->safepoint_entry.is_valid());
390 ASSERT(entry->safepoint_entry.Equals(
391 entry->code->GetSafepointEntry(inner_pointer)));
394 // Fill in the results and return the code.
395 Code* code = entry->code;
396 *safepoint_entry = entry->safepoint_entry;
397 *stack_slots = code->stack_slots();
402 bool StackFrame::HasHandler() const {
403 StackHandlerIterator it(this, top_handler());
409 static bool GcSafeCodeContains(HeapObject* object, Address addr);
413 void StackFrame::IteratePc(ObjectVisitor* v,
416 Address pc = *pc_address;
417 ASSERT(GcSafeCodeContains(holder, pc));
418 unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
419 Object* code = holder;
420 v->VisitPointer(&code);
421 if (code != holder) {
422 holder = reinterpret_cast<Code*>(code);
423 pc = holder->instruction_start() + pc_offset;
429 void StackFrame::SetReturnAddressLocationResolver(
430 ReturnAddressLocationResolver resolver) {
431 ASSERT(return_address_location_resolver_ == NULL);
432 return_address_location_resolver_ = resolver;
436 StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
438 ASSERT(state->fp != NULL);
439 if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
440 return ARGUMENTS_ADAPTOR;
442 // The marker and function offsets overlap. If the marker isn't a
443 // smi then the frame is a JavaScript frame -- and the marker is
444 // really the function.
445 const int offset = StandardFrameConstants::kMarkerOffset;
446 Object* marker = Memory::Object_at(state->fp + offset);
447 if (!marker->IsSmi()) {
448 // If we're using a "safe" stack iterator, we treat optimized
449 // frames as normal JavaScript frames to avoid having to look
450 // into the heap to determine the state. This is safe as long
451 // as nobody tries to GC...
452 if (!iterator->can_access_heap_objects_) return JAVA_SCRIPT;
453 Code::Kind kind = GetContainingCode(iterator->isolate(),
454 *(state->pc_address))->kind();
455 ASSERT(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
456 return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
458 return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
463 bool StackFrame::can_access_heap_objects() const {
464 return iterator_->can_access_heap_objects_;
469 StackFrame::Type StackFrame::GetCallerState(State* state) const {
470 ComputeCallerState(state);
471 return ComputeType(iterator_, state);
475 Address StackFrame::UnpaddedFP() const {
476 #if V8_TARGET_ARCH_IA32
477 if (!is_optimized()) return fp();
478 int32_t alignment_state = Memory::int32_at(
479 fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);
481 return (alignment_state == kAlignmentPaddingPushed) ?
482 (fp() + kPointerSize) : fp();
489 Code* EntryFrame::unchecked_code() const {
490 return isolate()->heap()->js_entry_code();
494 void EntryFrame::ComputeCallerState(State* state) const {
495 GetCallerState(state);
499 void EntryFrame::SetCallerFp(Address caller_fp) {
500 const int offset = EntryFrameConstants::kCallerFPOffset;
501 Memory::Address_at(this->fp() + offset) = caller_fp;
505 StackFrame::Type EntryFrame::GetCallerState(State* state) const {
506 const int offset = EntryFrameConstants::kCallerFPOffset;
507 Address fp = Memory::Address_at(this->fp() + offset);
508 return ExitFrame::GetStateForFramePointer(fp, state);
512 Code* EntryConstructFrame::unchecked_code() const {
513 return isolate()->heap()->js_construct_entry_code();
517 Object*& ExitFrame::code_slot() const {
518 const int offset = ExitFrameConstants::kCodeOffset;
519 return Memory::Object_at(fp() + offset);
523 Code* ExitFrame::unchecked_code() const {
524 return reinterpret_cast<Code*>(code_slot());
528 void ExitFrame::ComputeCallerState(State* state) const {
529 // Set up the caller state.
530 state->sp = caller_sp();
531 state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
532 state->pc_address = ResolveReturnAddressLocation(
533 reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
534 if (FLAG_enable_ool_constant_pool) {
535 state->constant_pool_address = reinterpret_cast<Address*>(
536 fp() + ExitFrameConstants::kConstantPoolOffset);
541 void ExitFrame::SetCallerFp(Address caller_fp) {
542 Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
546 void ExitFrame::Iterate(ObjectVisitor* v) const {
547 // The arguments are traversed as part of the expression stack of
548 // the calling frame.
549 IteratePc(v, pc_address(), LookupCode());
550 v->VisitPointer(&code_slot());
551 if (FLAG_enable_ool_constant_pool) {
552 v->VisitPointer(&constant_pool_slot());
557 Address ExitFrame::GetCallerStackPointer() const {
558 return fp() + ExitFrameConstants::kCallerSPDisplacement;
562 StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
563 if (fp == 0) return NONE;
564 Address sp = ComputeStackPointer(fp);
565 FillState(fp, sp, state);
566 ASSERT(*state->pc_address != NULL);
571 Address ExitFrame::ComputeStackPointer(Address fp) {
572 return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
576 void ExitFrame::FillState(Address fp, Address sp, State* state) {
579 state->pc_address = ResolveReturnAddressLocation(
580 reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
581 state->constant_pool_address =
582 reinterpret_cast<Address*>(fp + ExitFrameConstants::kConstantPoolOffset);
586 Address StandardFrame::GetExpressionAddress(int n) const {
587 const int offset = StandardFrameConstants::kExpressionsOffset;
588 return fp() + offset - n * kPointerSize;
592 Object* StandardFrame::GetExpression(Address fp, int index) {
593 return Memory::Object_at(GetExpressionAddress(fp, index));
597 Address StandardFrame::GetExpressionAddress(Address fp, int n) {
598 const int offset = StandardFrameConstants::kExpressionsOffset;
599 return fp + offset - n * kPointerSize;
603 int StandardFrame::ComputeExpressionsCount() const {
605 StandardFrameConstants::kExpressionsOffset + kPointerSize;
606 Address base = fp() + offset;
607 Address limit = sp();
608 ASSERT(base >= limit); // stack grows downwards
609 // Include register-allocated locals in number of expressions.
610 return static_cast<int>((base - limit) / kPointerSize);
614 void StandardFrame::ComputeCallerState(State* state) const {
615 state->sp = caller_sp();
616 state->fp = caller_fp();
617 state->pc_address = ResolveReturnAddressLocation(
618 reinterpret_cast<Address*>(ComputePCAddress(fp())));
619 state->constant_pool_address =
620 reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
624 void StandardFrame::SetCallerFp(Address caller_fp) {
625 Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
630 bool StandardFrame::IsExpressionInsideHandler(int n) const {
631 Address address = GetExpressionAddress(n);
632 for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
633 if (it.handler()->includes(address)) return true;
639 void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
640 // Make sure that we're not doing "safe" stack frame iteration. We cannot
641 // possibly find pointers in optimized frames in that state.
642 ASSERT(can_access_heap_objects());
644 // Compute the safepoint information.
645 unsigned stack_slots = 0;
646 SafepointEntry safepoint_entry;
647 Code* code = StackFrame::GetSafepointData(
648 isolate(), pc(), &safepoint_entry, &stack_slots);
649 unsigned slot_space = stack_slots * kPointerSize;
651 // Visit the outgoing parameters.
652 Object** parameters_base = &Memory::Object_at(sp());
653 Object** parameters_limit = &Memory::Object_at(
654 fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);
656 // Visit the parameters that may be on top of the saved registers.
657 if (safepoint_entry.argument_count() > 0) {
658 v->VisitPointers(parameters_base,
659 parameters_base + safepoint_entry.argument_count());
660 parameters_base += safepoint_entry.argument_count();
663 // Skip saved double registers.
664 if (safepoint_entry.has_doubles()) {
665 // Number of doubles not known at snapshot time.
666 ASSERT(!Serializer::enabled());
667 parameters_base += DoubleRegister::NumAllocatableRegisters() *
668 kDoubleSize / kPointerSize;
671 // Visit the registers that contain pointers if any.
672 if (safepoint_entry.HasRegisters()) {
673 for (int i = kNumSafepointRegisters - 1; i >=0; i--) {
674 if (safepoint_entry.HasRegisterAt(i)) {
675 int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
676 v->VisitPointer(parameters_base + reg_stack_index);
679 // Skip the words containing the register values.
680 parameters_base += kNumSafepointRegisters;
683 // We're done dealing with the register bits.
684 uint8_t* safepoint_bits = safepoint_entry.bits();
685 safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
687 // Visit the rest of the parameters.
688 v->VisitPointers(parameters_base, parameters_limit);
690 // Visit pointer spill slots and locals.
691 for (unsigned index = 0; index < stack_slots; index++) {
692 int byte_index = index >> kBitsPerByteLog2;
693 int bit_index = index & (kBitsPerByte - 1);
694 if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
695 v->VisitPointer(parameters_limit + index);
699 // Visit the return address in the callee and incoming arguments.
700 IteratePc(v, pc_address(), code);
702 // Visit the context in stub frame and JavaScript frame.
703 // Visit the function in JavaScript frame.
704 Object** fixed_base = &Memory::Object_at(
705 fp() + StandardFrameConstants::kMarkerOffset);
706 Object** fixed_limit = &Memory::Object_at(fp());
707 v->VisitPointers(fixed_base, fixed_limit);
711 void StubFrame::Iterate(ObjectVisitor* v) const {
712 IterateCompiledFrame(v);
716 Code* StubFrame::unchecked_code() const {
717 return static_cast<Code*>(isolate()->FindCodeObject(pc()));
721 Address StubFrame::GetCallerStackPointer() const {
722 return fp() + ExitFrameConstants::kCallerSPDisplacement;
726 int StubFrame::GetNumberOfIncomingArguments() const {
731 void OptimizedFrame::Iterate(ObjectVisitor* v) const {
733 // Make sure that optimized frames do not contain any stack handlers.
734 StackHandlerIterator it(this, top_handler());
738 IterateCompiledFrame(v);
742 void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
743 Memory::Object_at(GetParameterSlot(index)) = value;
747 bool JavaScriptFrame::IsConstructor() const {
748 Address fp = caller_fp();
749 if (has_adapted_arguments()) {
750 // Skip the arguments adaptor frame and look at the real caller.
751 fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
753 return IsConstructFrame(fp);
757 int JavaScriptFrame::GetArgumentsLength() const {
758 // If there is an arguments adaptor frame get the arguments length from it.
759 if (has_adapted_arguments()) {
760 return Smi::cast(GetExpression(caller_fp(), 0))->value();
762 return GetNumberOfIncomingArguments();
767 Code* JavaScriptFrame::unchecked_code() const {
768 return function()->code();
772 int JavaScriptFrame::GetNumberOfIncomingArguments() const {
773 ASSERT(can_access_heap_objects() &&
774 isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
776 return function()->shared()->formal_parameter_count();
780 Address JavaScriptFrame::GetCallerStackPointer() const {
781 return fp() + StandardFrameConstants::kCallerSPOffset;
785 void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
786 ASSERT(functions->length() == 0);
787 functions->Add(function());
791 void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
792 ASSERT(functions->length() == 0);
793 Code* code_pointer = LookupCode();
794 int offset = static_cast<int>(pc() - code_pointer->address());
795 FrameSummary summary(receiver(),
800 functions->Add(summary);
804 void JavaScriptFrame::PrintTop(Isolate* isolate,
807 bool print_line_number) {
809 HandleScope scope(isolate);
810 DisallowHeapAllocation no_allocation;
811 JavaScriptFrameIterator it(isolate);
813 if (it.frame()->is_java_script()) {
814 JavaScriptFrame* frame = it.frame();
815 if (frame->IsConstructor()) PrintF(file, "new ");
817 JSFunction* fun = frame->function();
819 Code* js_code = frame->unchecked_code();
820 Address pc = frame->pc();
822 static_cast<int>(pc - js_code->instruction_start());
823 PrintF("+%d", code_offset);
824 SharedFunctionInfo* shared = fun->shared();
825 if (print_line_number) {
826 Code* code = Code::cast(isolate->FindCodeObject(pc));
827 int source_pos = code->SourcePosition(pc);
828 Object* maybe_script = shared->script();
829 if (maybe_script->IsScript()) {
830 Handle<Script> script(Script::cast(maybe_script));
831 int line = GetScriptLineNumberSafe(script, source_pos) + 1;
832 Object* script_name_raw = script->name();
833 if (script_name_raw->IsString()) {
834 String* script_name = String::cast(script->name());
835 SmartArrayPointer<char> c_script_name =
836 script_name->ToCString(DISALLOW_NULLS,
837 ROBUST_STRING_TRAVERSAL);
838 PrintF(file, " at %s:%d", c_script_name.get(), line);
840 PrintF(file, " at <unknown>:%d", line);
843 PrintF(file, " at <unknown>:<unknown>");
848 // function arguments
849 // (we are intentionally only printing the actually
850 // supplied parameters, not all parameters required)
851 PrintF(file, "(this=");
852 frame->receiver()->ShortPrint(file);
853 const int length = frame->ComputeParametersCount();
854 for (int i = 0; i < length; i++) {
856 frame->GetParameter(i)->ShortPrint(file);
867 void JavaScriptFrame::SaveOperandStack(FixedArray* store,
868 int* stack_handler_index) const {
869 int operands_count = store->length();
870 ASSERT_LE(operands_count, ComputeOperandsCount());
872 // Visit the stack in LIFO order, saving operands and stack handlers into the
873 // array. The saved stack handlers store a link to the next stack handler,
874 // which will allow RestoreOperandStack to rewind the handlers.
875 StackHandlerIterator it(this, top_handler());
876 int i = operands_count - 1;
877 *stack_handler_index = -1;
878 for (; !it.done(); it.Advance()) {
879 StackHandler* handler = it.handler();
880 // Save operands pushed after the handler was pushed.
881 for (; GetOperandSlot(i) < handler->address(); i--) {
882 store->set(i, GetOperand(i));
884 ASSERT_GE(i + 1, StackHandlerConstants::kSlotCount);
885 ASSERT_EQ(handler->address(), GetOperandSlot(i));
886 int next_stack_handler_index = i + 1 - StackHandlerConstants::kSlotCount;
887 handler->Unwind(isolate(), store, next_stack_handler_index,
888 *stack_handler_index);
889 *stack_handler_index = next_stack_handler_index;
890 i -= StackHandlerConstants::kSlotCount;
893 // Save any remaining operands.
894 for (; i >= 0; i--) {
895 store->set(i, GetOperand(i));
900 void JavaScriptFrame::RestoreOperandStack(FixedArray* store,
901 int stack_handler_index) {
902 int operands_count = store->length();
903 ASSERT_LE(operands_count, ComputeOperandsCount());
905 while (i <= stack_handler_index) {
906 if (i < stack_handler_index) {
908 ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
909 Memory::Object_at(GetOperandSlot(i)) = store->get(i);
913 ASSERT_EQ(i, stack_handler_index);
914 // The FixedArray store grows up. The stack grows down. So the operand
915 // slot for i actually points to the bottom of the top word in the
916 // handler. The base of the StackHandler* is the address of the bottom
917 // word, which will be the last slot that is in the handler.
918 int handler_slot_index = i + StackHandlerConstants::kSlotCount - 1;
919 StackHandler *handler =
920 StackHandler::FromAddress(GetOperandSlot(handler_slot_index));
921 stack_handler_index = handler->Rewind(isolate(), store, i, fp());
922 i += StackHandlerConstants::kSlotCount;
926 for (; i < operands_count; i++) {
927 ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
928 Memory::Object_at(GetOperandSlot(i)) = store->get(i);
933 void FrameSummary::Print() {
934 PrintF("receiver: ");
935 receiver_->ShortPrint();
936 PrintF("\nfunction: ");
937 function_->shared()->DebugName()->ShortPrint();
940 if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
941 if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
942 PrintF("\npc: %d\n", offset_);
946 JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array,
948 if (literal_id == Translation::kSelfLiteralId) {
952 return JSFunction::cast(literal_array->get(literal_id));
956 void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
957 ASSERT(frames->length() == 0);
958 ASSERT(is_optimized());
960 int deopt_index = Safepoint::kNoDeoptimizationIndex;
961 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
962 FixedArray* literal_array = data->LiteralArray();
964 // BUG(3243555): Since we don't have a lazy-deopt registered at
965 // throw-statements, we can't use the translation at the call-site of
966 // throw. An entry with no deoptimization index indicates a call-site
967 // without a lazy-deopt. As a consequence we are not allowed to inline
968 // functions containing throw.
969 if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
970 JavaScriptFrame::Summarize(frames);
974 TranslationIterator it(data->TranslationByteArray(),
975 data->TranslationIndex(deopt_index)->value());
976 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
977 ASSERT(opcode == Translation::BEGIN);
978 it.Next(); // Drop frame count.
979 int jsframe_count = it.Next();
981 // We create the summary in reverse order because the frames
982 // in the deoptimization translation are ordered bottom-to-top.
983 bool is_constructor = IsConstructor();
984 int i = jsframe_count;
986 opcode = static_cast<Translation::Opcode>(it.Next());
987 if (opcode == Translation::JS_FRAME) {
989 BailoutId ast_id = BailoutId(it.Next());
990 JSFunction* function = LiteralAt(literal_array, it.Next());
991 it.Next(); // Skip height.
993 // The translation commands are ordered and the receiver is always
994 // at the first position. Since we are always at a call when we need
995 // to construct a stack trace, the receiver is always in a stack slot.
996 opcode = static_cast<Translation::Opcode>(it.Next());
997 ASSERT(opcode == Translation::STACK_SLOT ||
998 opcode == Translation::LITERAL ||
999 opcode == Translation::CAPTURED_OBJECT ||
1000 opcode == Translation::DUPLICATED_OBJECT);
1001 int index = it.Next();
1003 // Get the correct receiver in the optimized frame.
1004 Object* receiver = NULL;
1005 if (opcode == Translation::LITERAL) {
1006 receiver = data->LiteralArray()->get(index);
1007 } else if (opcode == Translation::STACK_SLOT) {
1008 // Positive index means the value is spilled to the locals
1009 // area. Negative means it is stored in the incoming parameter
1012 receiver = GetExpression(index);
1014 // Index -1 overlaps with last parameter, -n with the first parameter,
1015 // (-n - 1) with the receiver with n being the number of parameters
1016 // of the outermost, optimized frame.
1017 int parameter_count = ComputeParametersCount();
1018 int parameter_index = index + parameter_count;
1019 receiver = (parameter_index == -1)
1021 : this->GetParameter(parameter_index);
1024 // TODO(3029): Materializing a captured object (or duplicated
1025 // object) is hard, we return undefined for now. This breaks the
1026 // produced stack trace, as constructor frames aren't marked as
1028 receiver = isolate()->heap()->undefined_value();
1031 Code* code = function->shared()->code();
1032 DeoptimizationOutputData* output_data =
1033 DeoptimizationOutputData::cast(code->deoptimization_data());
1034 unsigned entry = Deoptimizer::GetOutputInfo(output_data,
1036 function->shared());
1037 unsigned pc_offset =
1038 FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
1039 ASSERT(pc_offset > 0);
1041 FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
1042 frames->Add(summary);
1043 is_constructor = false;
1044 } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
1045 // The next encountered JS_FRAME will be marked as a constructor call.
1046 it.Skip(Translation::NumberOfOperandsFor(opcode));
1047 ASSERT(!is_constructor);
1048 is_constructor = true;
1050 // Skip over operands to advance to the next opcode.
1051 it.Skip(Translation::NumberOfOperandsFor(opcode));
1054 ASSERT(!is_constructor);
// Finds the DeoptimizationInputData attached to the optimized code that
// contains this frame's pc(), and stores the safepoint's deoptimization
// index into *deopt_index (out parameter — see elided parameter list).
1058 DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
1060 ASSERT(is_optimized());
1062 JSFunction* opt_function = function();
1063 Code* code = opt_function->code();
1065 // The code object may have been replaced by lazy deoptimization. Fall
1066 // back to a slow search in this case to find the original optimized
1068 if (!code->contains(pc())) {
1069 code = isolate()->inner_pointer_to_code_cache()->
1070 GcSafeFindCodeForInnerPointer(pc());
1072 ASSERT(code != NULL);
1073 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
// Look up the safepoint for the current pc; it carries the index into the
// deoptimization data for this call site.
1075 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
1076 *deopt_index = safepoint_entry.deoptimization_index();
1077 ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex);
1079 return DeoptimizationInputData::cast(code->deoptimization_data());
// Returns the number of JavaScript frames represented by this optimized
// frame (1 plus the number of inlined frames), read from the BEGIN record
// of the deoptimization translation.
1083 int OptimizedFrame::GetInlineCount() {
1084 ASSERT(is_optimized());
1086 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1087 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
1089 TranslationIterator it(data->TranslationByteArray(),
1090 data->TranslationIndex(deopt_index)->value());
1091 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1092 ASSERT(opcode == Translation::BEGIN);
1094 it.Next(); // Drop frame count.
1095 int jsframe_count = it.Next();
1096 return jsframe_count;
// Collects the JSFunction for every JS frame (including inlined frames)
// represented by this optimized frame, by walking the deoptimization
// translation. 'functions' must be empty on entry.
1100 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
1101 ASSERT(functions->length() == 0);
1102 ASSERT(is_optimized());
1104 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1105 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
1106 FixedArray* literal_array = data->LiteralArray();
1108 TranslationIterator it(data->TranslationByteArray(),
1109 data->TranslationIndex(deopt_index)->value());
1110 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1111 ASSERT(opcode == Translation::BEGIN);
1112 it.Next(); // Drop frame count.
1113 int jsframe_count = it.Next();
1115 // We insert the frames in reverse order because the frames
1116 // in the deoptimization translation are ordered bottom-to-top.
1117 while (jsframe_count > 0) {
1118 opcode = static_cast<Translation::Opcode>(it.Next());
1119 if (opcode == Translation::JS_FRAME) {
1121 it.Next(); // Skip ast id.
// The function of a JS_FRAME record is stored as a literal-array index.
1122 JSFunction* function = LiteralAt(literal_array, it.Next());
1123 it.Next(); // Skip height.
1124 functions->Add(function);
1126 // Skip over operands to advance to the next opcode.
1127 it.Skip(Translation::NumberOfOperandsFor(opcode));
// The adaptor frame stores the actual argument count as a Smi in its
// first expression slot.
1133 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
1134 return Smi::cast(GetExpression(0))->value();
// The caller's stack pointer is at a fixed offset from this frame's fp.
1138 Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
1139 return fp() + StandardFrameConstants::kCallerSPOffset;
1143 Address InternalFrame::GetCallerStackPointer() const {
1144 // Internal frames have no arguments. The stack pointer of the
1145 // caller is at a fixed offset from the frame pointer.
1146 return fp() + StandardFrameConstants::kCallerSPOffset;
// Adaptor frames always run the shared ArgumentsAdaptorTrampoline builtin.
1150 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1151 return isolate()->builtins()->builtin(
1152 Builtins::kArgumentsAdaptorTrampoline);
// Reads the code object pointer stored in the frame itself. The value is
// returned unchecked (reinterpret_cast, no Code::cast) — hence the name.
1156 Code* InternalFrame::unchecked_code() const {
1157 const int offset = InternalFrameConstants::kCodeOffset;
1158 Object* code = Memory::Object_at(fp() + offset);
1159 ASSERT(code != NULL);
1160 return reinterpret_cast<Code*>(code);
// Prints the frame index, formatted differently for OVERVIEW mode
// ("%5d: ") versus detailed mode ("[%d]: ").
1164 void StackFrame::PrintIndex(StringStream* accumulator,
1167 accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
// Prints a human-readable description of this JavaScript frame to the
// accumulator: function name, script/line, receiver, parameters and — in
// detailed mode — stack locals, heap (context) locals, the expression
// stack, and optionally the function's source code.
1171 void JavaScriptFrame::Print(StringStream* accumulator,
1174 HandleScope scope(isolate());
1175 Object* receiver = this->receiver();
1176 JSFunction* function = this->function();
1178 accumulator->PrintSecurityTokenIfChanged(function);
1179 PrintIndex(accumulator, mode, index);
1181 if (IsConstructor()) accumulator->Add("new ");
1182 accumulator->PrintFunction(function, receiver, &code);
1184 // Get scope information for nicer output, if possible. If code is NULL, or
1185 // doesn't contain scope info, scope_info will return 0 for the number of
1186 // parameters, stack local variables, context local variables, stack slots,
1187 // or context slots.
1188 Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate()));
1190 Handle<SharedFunctionInfo> shared(function->shared());
1191 scope_info = Handle<ScopeInfo>(shared->scope_info());
1192 Object* script_obj = shared->script();
1193 if (script_obj->IsScript()) {
1194 Handle<Script> script(Script::cast(script_obj));
1195 accumulator->Add(" [");
1196 accumulator->PrintName(script->name());
// If the pc lies inside unoptimized code we can print the exact source
// line (":N"); otherwise fall back to the function's start line (":~N").
1198 Address pc = this->pc();
1199 if (code != NULL && code->kind() == Code::FUNCTION &&
1200 pc >= code->instruction_start() && pc < code->instruction_end()) {
1201 int source_pos = code->SourcePosition(pc);
1202 int line = GetScriptLineNumberSafe(script, source_pos) + 1;
1203 accumulator->Add(":%d", line);
1205 int function_start_pos = shared->start_position();
1206 int line = GetScriptLineNumberSafe(script, function_start_pos) + 1;
1207 accumulator->Add(":~%d", line);
1210 accumulator->Add("] ");
1213 accumulator->Add("(this=%o", receiver);
1215 // Print the parameters.
1216 int parameters_count = ComputeParametersCount();
1217 for (int i = 0; i < parameters_count; i++) {
1218 accumulator->Add(",");
1219 // If we have a name for the parameter we print it. Nameless
1220 // parameters are either because we have more actual parameters
1221 // than formal parameters or because we have no scope information.
1222 if (i < scope_info->ParameterCount()) {
1223 accumulator->PrintName(scope_info->ParameterName(i));
1224 accumulator->Add("=");
1226 accumulator->Add("%o", GetParameter(i));
1229 accumulator->Add(")");
1230 if (mode == OVERVIEW) {
1231 accumulator->Add("\n");
// Optimized frames print only a placeholder body — locals and the
// expression stack are not directly readable from the frame slots.
1234 if (is_optimized()) {
1235 accumulator->Add(" {\n// optimized frame\n}\n");
1238 accumulator->Add(" {\n");
1240 // Compute the number of locals and expression stack elements.
1241 int stack_locals_count = scope_info->StackLocalCount();
1242 int heap_locals_count = scope_info->ContextLocalCount();
1243 int expressions_count = ComputeExpressionsCount();
1245 // Print stack-allocated local variables.
1246 if (stack_locals_count > 0) {
1247 accumulator->Add(" // stack-allocated locals\n");
1249 for (int i = 0; i < stack_locals_count; i++) {
1250 accumulator->Add(" var ");
1251 accumulator->PrintName(scope_info->StackLocalName(i));
1252 accumulator->Add(" = ");
1253 if (i < expressions_count) {
1254 accumulator->Add("%o", GetExpression(i));
1256 accumulator->Add("// no expression found - inconsistent frame?");
1258 accumulator->Add("\n");
1261 // Try to get hold of the context of this frame.
1262 Context* context = NULL;
1263 if (this->context() != NULL && this->context()->IsContext()) {
1264 context = Context::cast(this->context());
1267 // Print heap-allocated local variables.
1268 if (heap_locals_count > 0) {
1269 accumulator->Add(" // heap-allocated locals\n");
1271 for (int i = 0; i < heap_locals_count; i++) {
1272 accumulator->Add(" var ");
1273 accumulator->PrintName(scope_info->ContextLocalName(i));
1274 accumulator->Add(" = ");
1275 if (context != NULL) {
1276 if (i < context->length()) {
1277 accumulator->Add("%o", context->get(Context::MIN_CONTEXT_SLOTS + i));
1280 "// warning: missing context slot - inconsistent frame?");
1283 accumulator->Add("// warning: no context found - inconsistent frame?");
1285 accumulator->Add("\n");
1288 // Print the expression stack.
1289 int expressions_start = stack_locals_count;
1290 if (expressions_start < expressions_count) {
1291 accumulator->Add(" // expression stack (top to bottom)\n");
1293 for (int i = expressions_count - 1; i >= expressions_start; i--) {
// Skip slots that belong to an active stack handler on this frame.
1294 if (IsExpressionInsideHandler(i)) continue;
1295 accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
1298 // Print details about the function.
1299 if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
1300 SharedFunctionInfo* shared = function->shared();
1301 accumulator->Add("--------- s o u r c e c o d e ---------\n");
1302 shared->SourceCodePrint(accumulator, FLAG_max_stack_trace_source_length);
1303 accumulator->Add("\n-----------------------------------------\n");
1306 accumulator->Add("}\n\n");
// Prints "arguments adaptor frame: actual->expected"; in detailed mode
// also lists the actual arguments, flagging those beyond the formal
// parameter count as not passed to the callee.
1310 void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
1313 int actual = ComputeParametersCount();
1315 JSFunction* function = this->function();
1316 expected = function->shared()->formal_parameter_count();
1318 PrintIndex(accumulator, mode, index);
1319 accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
1320 if (mode == OVERVIEW) {
1321 accumulator->Add("\n");
1324 accumulator->Add(" {\n");
1326 // Print actual arguments.
1327 if (actual > 0) accumulator->Add(" // actual arguments\n");
1328 for (int i = 0; i < actual; i++) {
1329 accumulator->Add(" [%02d] : %o", i, GetParameter(i));
// expected == -1 is a sentinel meaning the count is unknown.
1330 if (expected != -1 && i >= expected) {
1331 accumulator->Add(" // not passed to callee");
1333 accumulator->Add("\n");
1336 accumulator->Add("}\n\n");
// GC visitor for entry frames: visits the single JS-entry stack handler
// and the frame's return address slot.
1340 void EntryFrame::Iterate(ObjectVisitor* v) const {
1341 StackHandlerIterator it(this, top_handler());
1343 StackHandler* handler = it.handler();
1344 ASSERT(handler->is_js_entry());
1345 handler->Iterate(v, LookupCode());
1347 // Make sure that the entry frame does not contain more than one
1352 IteratePc(v, pc_address(), LookupCode());
// Visits all object pointers on this frame's expression stack, from sp()
// up to the last object slot, letting each stack handler on the frame
// visit its own interior pointers.
1356 void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
1357 const int offset = StandardFrameConstants::kLastObjectOffset;
1358 Object** base = &Memory::Object_at(sp());
1359 Object** limit = &Memory::Object_at(fp() + offset) + 1;
1360 for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
1361 StackHandler* handler = it.handler();
1362 // Traverse pointers down to - but not including - the next
1363 // handler in the handler chain. Update the base to skip the
1364 // handler and allow the handler to traverse its own pointers.
1365 const Address address = handler->address();
1366 v->VisitPointers(base, reinterpret_cast<Object**>(address));
1367 base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
1368 // Traverse the pointers in the handler itself.
1369 handler->Iterate(v, LookupCode());
// Visit the remaining slots above the last handler.
1371 v->VisitPointers(base, limit);
// GC visitor: expression stack plus the return address slot.
1375 void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
1376 IterateExpressions(v);
1377 IteratePc(v, pc_address(), LookupCode());
1381 void InternalFrame::Iterate(ObjectVisitor* v) const {
1382 // Internal frames only have object pointers on the expression stack
1383 // as they never have any arguments.
1384 IterateExpressions(v);
1385 IteratePc(v, pc_address(), LookupCode());
// GC visitor for stub-failure trampoline frames. Visits two separate
// ranges of object slots (the register-parameter area is skipped between
// them) plus the return address slot.
1389 void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
1390 Object** base = &Memory::Object_at(sp());
1391 Object** limit = &Memory::Object_at(fp() +
1392 kFirstRegisterParameterFrameOffset);
1393 v->VisitPointers(base, limit);
1394 base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
1395 const int offset = StandardFrameConstants::kLastObjectOffset;
1396 limit = &Memory::Object_at(fp() + offset) + 1;
1397 v->VisitPointers(base, limit);
1398 IteratePc(v, pc_address(), LookupCode());
// Caller sp is at a fixed offset from fp, as for standard frames.
1402 Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
1403 return fp() + StandardFrameConstants::kCallerSPOffset;
// Finds which trampoline stub (non-JS-function or JS-function mode) owns
// this frame's pc by probing the code cache for each variant in turn.
1407 Code* StubFailureTrampolineFrame::unchecked_code() const {
1409 StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).
1410 FindCodeInCache(&trampoline, isolate());
1411 if (trampoline->contains(pc())) {
1415 StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).
1416 FindCodeInCache(&trampoline, isolate());
1417 if (trampoline->contains(pc())) {
1426 // -------------------------------------------------------------------------
// Returns the n'th (0-based) JavaScript frame on the stack, advancing the
// locator's iterator past non-JS frames.
1429 JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
1431 for (int i = 0; i <= n; i++) {
1432 while (!iterator_.frame()->is_java_script()) iterator_.Advance();
1433 if (i == n) return JavaScriptFrame::cast(iterator_.frame());
1434 iterator_.Advance();
1441 // -------------------------------------------------------------------------
// Returns the object's map even during GC: if the map word has been
// replaced by a forwarding address, read the map through it instead.
1444 static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
1445 MapWord map_word = object->map_word();
1446 return map_word.IsForwardingAddress() ?
1447 map_word.ToForwardingAddress()->map() : map_word.ToMap();
// Object size computed via the GC-safe map lookup above.
1451 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
1452 return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
// Checks whether addr lies within the given code object's extent,
// using only GC-safe map accesses.
1457 static bool GcSafeCodeContains(HeapObject* code, Address addr) {
1458 Map* map = GcSafeMapOfCodeSpaceObject(code);
1459 ASSERT(map == code->GetHeap()->code_map());
1460 Address start = code->address();
1461 Address end = code->address() + code->SizeFromMap(map);
1462 return start <= addr && addr < end;
// Casts a heap object to Code without touching its map word in a way
// that would be unsafe during GC; asserts the inner pointer is inside it.
1467 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
1468 Address inner_pointer) {
1469 Code* code = reinterpret_cast<Code*>(object);
1470 ASSERT(code != NULL && GcSafeCodeContains(code, inner_pointer));
// Finds the Code object containing inner_pointer by checking large-object
// space first, then linearly scanning the code page (starting from the
// skip-list hint) until an object spanning the pointer is found.
1475 Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
1476 Address inner_pointer) {
1477 Heap* heap = isolate_->heap();
1478 // Check if the inner pointer points into a large object chunk.
1479 LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
1480 if (large_page != NULL) {
1481 return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
1484 // Iterate through the page until we reach the end or find an object starting
1485 // after the inner pointer.
1486 Page* page = Page::FromAddress(inner_pointer);
1488 Address addr = page->skip_list()->StartFor(inner_pointer);
1490 Address top = heap->code_space()->top();
1491 Address limit = heap->code_space()->limit();
// Special case: the scan start coincides with the allocation top of the
// code space (but not its limit) — handling on the elided lines.
1494 if (addr == top && addr != limit) {
1499 HeapObject* obj = HeapObject::FromAddress(addr);
1500 int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
1501 Address next_addr = addr + obj_size;
1502 if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
// Returns the cache entry for inner_pointer, computing and storing the
// owning Code object on a miss. Uses a power-of-two direct-mapped cache
// keyed by a hash of the pointer.
1508 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
1509 InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
1510 isolate_->counters()->pc_to_code()->Increment();
1511 ASSERT(IsPowerOf2(kInnerPointerToCodeCacheSize));
1512 uint32_t hash = ComputeIntegerHash(
1513 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
1514 v8::internal::kZeroHashSeed);
1515 uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
1516 InnerPointerToCodeCacheEntry* entry = cache(index);
1517 if (entry->inner_pointer == inner_pointer) {
1518 isolate_->counters()->pc_to_code_cached()->Increment();
1519 ASSERT(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
1521 // Because this code may be interrupted by a profiling signal that
1522 // also queries the cache, we cannot update inner_pointer before the code
1523 // has been set. Otherwise, we risk trying to use a cache entry before
1524 // the code has been computed.
1525 entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
1526 entry->safepoint_entry.Reset();
1527 entry->inner_pointer = inner_pointer;
1533 // -------------------------------------------------------------------------
// Captures this stack handler into 'array' at 'offset' (5 slots: prev
// link, code, state-as-index, context, state-as-kind) and pops it from
// the isolate's handler chain, so it can be re-wound later by Rewind().
1536 void StackHandler::Unwind(Isolate* isolate,
1539 int previous_handler_offset) const {
1540 STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
1541 ASSERT_LE(0, offset);
1542 ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
1543 // Unwinding a stack handler into an array chains it in the opposite
1544 // direction, re-using the "next" slot as a "previous" link, so that stack
1545 // handlers can be later re-wound in the correct order. Decode the "state"
1546 // slot into "index" and "kind" and store them separately, using the fp slot.
1547 array->set(offset, Smi::FromInt(previous_handler_offset)); // next
1548 array->set(offset + 1, *code_address()); // code
1549 array->set(offset + 2, Smi::FromInt(static_cast<int>(index()))); // state
1550 array->set(offset + 3, *context_address()); // context
1551 array->set(offset + 4, Smi::FromInt(static_cast<int>(kind()))); // fp
// Pop this handler off the isolate's handler chain.
1553 *isolate->handler_address() = next()->address();
// Restores a handler previously captured by Unwind(): rebuilds the
// in-memory handler record at this address, pushes it back onto the
// isolate's handler chain, and returns the previous handler's offset.
1557 int StackHandler::Rewind(Isolate* isolate,
1561 STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
1562 ASSERT_LE(0, offset);
1563 ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
1564 Smi* prev_handler_offset = Smi::cast(array->get(offset));
1565 Code* code = Code::cast(array->get(offset + 1));
1566 Smi* smi_index = Smi::cast(array->get(offset + 2));
1567 Object* context = array->get(offset + 3);
1568 Smi* smi_kind = Smi::cast(array->get(offset + 4));
// Re-encode kind and index into the packed "state" word.
1570 unsigned state = KindField::encode(static_cast<Kind>(smi_kind->value())) |
1571 IndexField::encode(static_cast<unsigned>(smi_index->value()));
1573 Memory::Address_at(address() + StackHandlerConstants::kNextOffset) =
1574 *isolate->handler_address();
1575 Memory::Object_at(address() + StackHandlerConstants::kCodeOffset) = code;
1576 Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
1577 Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
1579 SetFp(address() + StackHandlerConstants::kFPOffset, fp);
1581 *isolate->handler_address() = address();
1583 return prev_handler_offset->value();
1587 // -------------------------------------------------------------------------
// Number of registers in a RegList bitmask = its population count.
1589 int NumRegs(RegList reglist) {
1590 return CompilerIntrinsics::CountSetBits(reglist);
// Table mapping JS-caller-saved register index -> register code,
// populated once by SetUpJSCallerSavedCodeData().
1594 struct JSCallerSavedCodeData {
1595 int reg_code[kNumJSCallerSaved];
1598 JSCallerSavedCodeData caller_saved_code_data;
// Fills caller_saved_code_data with the codes of all registers set in
// the kJSCallerSaved bitmask, in ascending register order.
// NOTE(review): the declaration/initialization of 'i' is on an elided line.
1600 void SetUpJSCallerSavedCodeData() {
1602 for (int r = 0; r < kNumRegs; r++)
1603 if ((kJSCallerSaved & (1 << r)) != 0)
1604 caller_saved_code_data.reg_code[i++] = r;
1606 ASSERT(i == kNumJSCallerSaved);
// Returns the register code of the n'th JS-caller-saved register.
1610 int JSCallerSavedCode(int n) {
1611 ASSERT(0 <= n && n < kNumJSCallerSaved);
1612 return caller_saved_code_data.reg_code[n];
// For each stack frame type, defines a ZoneObject wrapper class holding a
// copy of the frame, so frames can be snapshotted into a Zone (see
// AllocateFrameCopy below).
1616 #define DEFINE_WRAPPER(type, field) \
1617 class field##_Wrapper : public ZoneObject { \
1618 public: /* NOLINT */ \
1619 field##_Wrapper(const field& original) : frame_(original) { \
1623 STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
1624 #undef DEFINE_WRAPPER
// Copies 'frame' into a zone-allocated wrapper of the matching concrete
// type and returns a pointer to the embedded copy.
1626 static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
1627 #define FRAME_TYPE_CASE(type, field) \
1628 case StackFrame::type: { \
1629 field##_Wrapper* wrapper = \
1630 new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
1631 return &wrapper->frame_; \
1634 switch (frame->type()) {
1635 STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
1636 default: UNREACHABLE();
1638 #undef FRAME_TYPE_CASE
// Snapshots the isolate's current stack into zone-allocated frame copies
// and returns them as a vector, in iteration (top-down) order.
1643 Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
1644 ZoneList<StackFrame*> list(10, zone);
1645 for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
1646 StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
1647 list.Add(frame, zone);
1649 return list.ToVector();
1653 } } // namespace v8::internal