1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "include/v8-debug.h"
10 #include "src/allocation.h"
11 #include "src/assert-scope.h"
12 #include "src/base/atomicops.h"
13 #include "src/builtins.h"
14 #include "src/contexts.h"
16 #include "src/execution.h"
17 #include "src/frames.h"
18 #include "src/global-handles.h"
19 #include "src/handles.h"
20 #include "src/hashmap.h"
21 #include "src/heap/heap.h"
22 #include "src/optimizing-compiler-thread.h"
23 #include "src/regexp-stack.h"
24 #include "src/runtime/runtime.h"
25 #include "src/runtime-profiler.h"
31 class RandomNumberGenerator;
36 class BasicBlockProfiler;
38 class CallInterfaceDescriptorData;
41 class CodeStubDescriptor;
43 class CompilationCache;
44 class CompilationStatistics;
45 class ContextSlotCache;
49 class DeoptimizerData;
52 class ExternalCallbackScope;
53 class ExternalReferenceTable;
55 class FunctionInfoListener;
56 class HandleScopeImplementer;
60 class InlineRuntimeFunctionsTable;
61 class InnerPointerToCodeCache;
62 class MaterializedObjectStore;
63 class CodeAgingHelper;
71 class ThreadVisitor; // Defined in v8threads.h
73 template <StateTag Tag> class VMState;
// 'void function pointer', used to roundtrip the
// ExternalReference::ExternalReferenceRedirector since we cannot include
// assembler.h, where it is defined, here.
typedef void* ExternalReferenceRedirectorPointer();
85 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
86 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
87 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
88 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
89 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
// Static indirection table for handles to constants. If a frame
// element or a Result represents a constant, the data contains an
// index into this table of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;
103 #define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate) \
105 Isolate* __isolate__ = (isolate); \
106 if (__isolate__->has_scheduled_exception()) { \
107 return __isolate__->PromoteScheduledException(); \
111 // Macros for MaybeHandle.
113 #define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
115 Isolate* __isolate__ = (isolate); \
116 if (__isolate__->has_scheduled_exception()) { \
117 __isolate__->PromoteScheduledException(); \
// Like RETURN_VALUE_IF_SCHEDULED_EXCEPTION, but the failure value is an
// empty MaybeHandle<T> (for callers that return MaybeHandle<T>).
#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \
  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<T>())
125 #define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \
127 if (!(call).ToHandle(&dst)) { \
128 DCHECK((isolate)->has_pending_exception()); \
// Like ASSIGN_RETURN_ON_EXCEPTION_VALUE, failing with
// isolate->heap()->exception() as the return value.
#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(                             \
      isolate, dst, call, isolate->heap()->exception())
// Like ASSIGN_RETURN_ON_EXCEPTION_VALUE, failing with an empty
// MaybeHandle<T> (for callers that return MaybeHandle<T>).
#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())
140 #define THROW_NEW_ERROR(isolate, call, T) \
142 Handle<Object> __error__; \
143 ASSIGN_RETURN_ON_EXCEPTION(isolate, __error__, isolate->factory()->call, \
145 return isolate->Throw<T>(__error__); \
148 #define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call) \
150 Handle<Object> __error__; \
151 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, __error__, \
152 isolate->factory()->call); \
153 return isolate->Throw(*__error__); \
156 #define RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \
158 if ((call).is_null()) { \
159 DCHECK((isolate)->has_pending_exception()); \
// Like RETURN_ON_EXCEPTION_VALUE, failing with
// isolate->heap()->exception() as the return value.
#define RETURN_FAILURE_ON_EXCEPTION(isolate, call)  \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())
// Like RETURN_ON_EXCEPTION_VALUE, failing with an empty MaybeHandle<T>.
#define RETURN_ON_EXCEPTION(isolate, call, T)  \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
// Applies C(CamelCaseName, hacker_case_name) to every per-isolate address
// entry; used e.g. to declare the k<Name>Address enum values (see the
// DECLARE_ENUM expansion below).
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(CFunction, c_function)                              \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)
181 // Platform-independent, reliable thread identifier.
184 // Creates an invalid ThreadId.
185 ThreadId() { base::NoBarrier_Store(&id_, kInvalidId); }
187 ThreadId& operator=(const ThreadId& other) {
188 base::NoBarrier_Store(&id_, base::NoBarrier_Load(&other.id_));
192 // Returns ThreadId for current thread.
193 static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
195 // Returns invalid ThreadId (guaranteed not to be equal to any thread).
196 static ThreadId Invalid() { return ThreadId(kInvalidId); }
198 // Compares ThreadIds for equality.
199 INLINE(bool Equals(const ThreadId& other) const) {
200 return base::NoBarrier_Load(&id_) == base::NoBarrier_Load(&other.id_);
203 // Checks whether this ThreadId refers to any thread.
204 INLINE(bool IsValid() const) {
205 return base::NoBarrier_Load(&id_) != kInvalidId;
208 // Converts ThreadId to an integer representation
209 // (required for public API: V8::V8::GetCurrentThreadId).
210 int ToInteger() const { return static_cast<int>(base::NoBarrier_Load(&id_)); }
212 // Converts ThreadId to an integer representation
213 // (required for public API: V8::V8::TerminateExecution).
214 static ThreadId FromInteger(int id) { return ThreadId(id); }
217 static const int kInvalidId = -1;
219 explicit ThreadId(int id) { base::NoBarrier_Store(&id_, id); }
221 static int AllocateThreadId();
223 static int GetCurrentThreadId();
227 static base::Atomic32 highest_thread_id_;
229 friend class Isolate;
// Declares a trivial getter/setter pair for a member named `name##_`.
#define FIELD_ACCESSOR(type, name)                        \
  inline type name() const { return name##_; }           \
  inline void set_##name(type value) { name##_ = value; }
238 class ThreadLocalTop BASE_EMBEDDED {
240 // Does early low-level initialization that does not depend on the
241 // isolate being present.
244 // Initialize the thread data.
247 // Get the top C++ try catch handler or NULL if none are registered.
249 // This method is not guaranteed to return an address that can be
250 // used for comparison with addresses into the JS stack. If such an
251 // address is needed, use try_catch_handler_address.
252 FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)
254 // Get the address of the top C++ try catch handler or NULL if
255 // none are registered.
257 // This method always returns an address that can be compared to
258 // pointers into the JavaScript stack. When running on actual
259 // hardware, try_catch_handler_address and TryCatchHandler return
260 // the same pointer. When running on a simulator with a separate JS
261 // stack, try_catch_handler_address returns a JS stack address that
262 // corresponds to the place on the JS stack where the C++ handler
263 // would have been if the stack were not separate.
264 Address try_catch_handler_address() {
265 return reinterpret_cast<Address>(
266 v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
272 // The context where the current execution method is created and for variable
276 Object* pending_exception_;
277 bool has_pending_message_;
278 bool rethrowing_message_;
279 Object* pending_message_obj_;
280 Object* pending_message_script_;
281 int pending_message_start_pos_;
282 int pending_message_end_pos_;
283 // Use a separate value for scheduled exceptions to preserve the
284 // invariants that hold about pending_exception. We may want to
286 Object* scheduled_exception_;
287 bool external_caught_exception_;
288 SaveContext* save_context_;
289 v8::TryCatch* catcher_;
292 Address c_entry_fp_; // the frame pointer of the top c entry frame
293 Address handler_; // try-blocks are chained through the stack
294 Address c_function_; // C function that was called at c entry.
296 // Throwing an exception may cause a Promise rejection. For this purpose
297 // we keep track of a stack of nested promises and the corresponding
298 // try-catch handlers.
299 PromiseOnStack* promise_on_stack_;
302 Simulator* simulator_;
305 Address js_entry_sp_; // the stack pointer of the bottom JS entry frame
306 // the external callback we're currently in
307 ExternalCallbackScope* external_callback_scope_;
308 StateTag current_vm_state_;
310 // Generated code scratch locations.
311 int32_t formal_count_;
313 // Call back function to report unsafe JS accesses.
314 v8::FailedAccessCheckCallback failed_access_check_callback_;
317 void InitializeInternal();
319 v8::TryCatch* try_catch_handler_;
323 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
324 V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
325 V8_TARGET_ARCH_PPC && !defined(__PPC__) || \
326 V8_TARGET_ARCH_MIPS && !defined(__mips__) || \
327 V8_TARGET_ARCH_MIPS64 && !defined(__mips__)
// Extra isolate fields that exist only in simulator builds, one entry per
// V(type, name, initial_value).
#define ISOLATE_INIT_SIMULATOR_LIST(V) \
  V(bool, simulator_initialized, false) \
  V(HashMap*, simulator_i_cache, NULL) \
  V(Redirection*, simulator_redirection, NULL)
// Non-simulator builds: no extra simulator fields.
#define ISOLATE_INIT_SIMULATOR_LIST(V)
// Per-isolate arrays used for code/comment statistics, one entry per
// V(type, name, length).
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)               \
  V(CommentStatistic, paged_space_comments_statistics, \
    CommentStatistic::kMaxComments + 1)                \
  V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
// Builds without the statistics arrays: list is empty.
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
// Scratch arrays embedded in the isolate, one entry per V(type, name, length).
// NOTE(review): the original label here read "SerializerDeserializer state",
// which appears to be copy-pasted from ISOLATE_INIT_LIST; these entries are
// regexp and string-search scratch tables (cf. kBMMaxShift, kUC16AlphabetSize).
#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
  /* Regexp and string-search scratch state. */                                \
  V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
  V(int, suffix_table, (kBMMaxShift + 1))                                      \
  V(uint32_t, private_random_seed, 2)                                          \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
// List of heap objects; used for the string_stream_debug_object_cache
// isolate field below.
typedef List<HeapObject*> DebugObjectCache;
// Scalar/pointer fields embedded in the isolate, one entry per
// V(type, name, initial_value); accessors are generated via GLOBAL_ACCESSOR.
#define ISOLATE_INIT_LIST(V)                                                   \
  /* SerializerDeserializer state. */                                          \
  V(int, serialize_partial_snapshot_cache_length, 0)                           \
  V(int, serialize_partial_snapshot_cache_capacity, 0)                         \
  V(Object**, serialize_partial_snapshot_cache, NULL)                          \
  /* Assembler state. */                                                       \
  V(FatalErrorCallback, exception_behavior, NULL)                              \
  V(LogEventCallback, event_logger, NULL)                                      \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(int, next_serial_number, 0)                                                \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
  /* Part of the state of liveedit. */                                         \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
  /* State for Relocatable. */                                                 \
  V(Relocatable*, relocatable_top, NULL)                                       \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
  V(Object*, string_stream_current_security_token, NULL)                       \
  /* Serializer state. */                                                      \
  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
  V(int, pending_microtask_count, 0)                                           \
  V(bool, autorun_microtasks, true)                                            \
  V(HStatistics*, hstatistics, NULL)                                           \
  V(CompilationStatistics*, turbo_statistics, NULL)                            \
  V(HTracer*, htracer, NULL)                                                   \
  V(CodeTracer*, code_tracer, NULL)                                            \
  V(bool, fp_stubs_generated, false)                                           \
  V(int, max_available_threads, 0)                                             \
  V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
  V(PromiseRejectCallback, promise_reject_callback, NULL)                      \
  ISOLATE_INIT_SIMULATOR_LIST(V)
// Declares a getter/setter pair that forwards to the `name##_` field of the
// embedded thread_local_top_ member.
#define THREAD_LOCAL_TOP_ACCESSOR(type, name)                     \
  inline type name() const { return thread_local_top_.name##_; } \
  inline void set_##name(type value) { thread_local_top_.name##_ = value; }
402 // These forward declarations are required to make the friend declarations in
403 // PerIsolateThreadData work on some older versions of gcc.
404 class ThreadDataTable;
405 class EntryStackItem;
409 // A thread has a PerIsolateThreadData instance for each isolate that it has
410 // entered. That instance is allocated when the isolate is initially entered
411 // and reused on subsequent entries.
412 class PerIsolateThreadData {
414 PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
416 thread_id_(thread_id),
419 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
420 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
421 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
422 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
423 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
428 ~PerIsolateThreadData();
429 Isolate* isolate() const { return isolate_; }
430 ThreadId thread_id() const { return thread_id_; }
432 FIELD_ACCESSOR(uintptr_t, stack_limit)
433 FIELD_ACCESSOR(ThreadState*, thread_state)
435 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
436 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
437 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
438 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
439 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
440 FIELD_ACCESSOR(Simulator*, simulator)
443 bool Matches(Isolate* isolate, ThreadId thread_id) const {
444 return isolate_ == isolate && thread_id_.Equals(thread_id);
450 uintptr_t stack_limit_;
451 ThreadState* thread_state_;
453 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
454 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
455 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
456 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
457 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
458 Simulator* simulator_;
461 PerIsolateThreadData* next_;
462 PerIsolateThreadData* prev_;
464 friend class Isolate;
465 friend class ThreadDataTable;
466 friend class EntryStackItem;
468 DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
473 #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
474 FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
479 static void InitializeOncePerProcess();
481 // Returns the PerIsolateThreadData for the current thread (or NULL if one is
482 // not currently set).
483 static PerIsolateThreadData* CurrentPerIsolateThreadData() {
484 return reinterpret_cast<PerIsolateThreadData*>(
485 base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
488 // Returns the isolate inside which the current thread is running.
489 INLINE(static Isolate* Current()) {
490 DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
491 Isolate* isolate = reinterpret_cast<Isolate*>(
492 base::Thread::GetExistingThreadLocal(isolate_key_));
493 DCHECK(isolate != NULL);
497 INLINE(static Isolate* UncheckedCurrent()) {
498 DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
499 return reinterpret_cast<Isolate*>(
500 base::Thread::GetThreadLocal(isolate_key_));
503 // Like UncheckedCurrent, but skips the check that |isolate_key_| was
504 // initialized. Callers have to ensure that themselves.
505 INLINE(static Isolate* UnsafeCurrent()) {
506 return reinterpret_cast<Isolate*>(
507 base::Thread::GetThreadLocal(isolate_key_));
510 // Usually called by Init(), but can be called early e.g. to allow
511 // testing components that require logging but not the whole
514 // Safe to call more than once.
515 void InitializeLoggingAndCounters();
517 bool Init(Deserializer* des);
519 // True if at least one thread Enter'ed this isolate.
520 bool IsInUse() { return entry_stack_ != NULL; }
522 // Destroys the non-default isolates.
523 // Sets default isolate into "has_been_disposed" state rather then destroying,
524 // for legacy API reasons.
527 static void GlobalTearDown();
529 // Find the PerThread for this particular (isolate, thread) combination
530 // If one does not yet exist, return null.
531 PerIsolateThreadData* FindPerThreadDataForThisThread();
533 // Find the PerThread for given (isolate, thread) combination
534 // If one does not yet exist, return null.
535 PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
537 // Returns the key used to store the pointer to the current isolate.
538 // Used internally for V8 threads that do not execute JavaScript but still
539 // are part of the domain of an isolate (like the context switcher).
540 static base::Thread::LocalStorageKey isolate_key() {
544 // Returns the key used to store process-wide thread IDs.
545 static base::Thread::LocalStorageKey thread_id_key() {
546 return thread_id_key_;
549 static base::Thread::LocalStorageKey per_isolate_thread_data_key();
551 // Mutex for serializing access to break control structures.
552 base::RecursiveMutex* break_access() { return &break_access_; }
554 Address get_address_from_id(AddressId id);
556 // Access to top context (where the current function object was created).
557 Context* context() { return thread_local_top_.context_; }
558 void set_context(Context* context) {
559 DCHECK(context == NULL || context->IsContext());
560 thread_local_top_.context_ = context;
562 Context** context_address() { return &thread_local_top_.context_; }
564 THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
566 // Access to current thread id.
567 THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
569 // Interface to pending exception.
570 Object* pending_exception() {
571 DCHECK(has_pending_exception());
572 DCHECK(!thread_local_top_.pending_exception_->IsException());
573 return thread_local_top_.pending_exception_;
576 void set_pending_exception(Object* exception_obj) {
577 DCHECK(!exception_obj->IsException());
578 thread_local_top_.pending_exception_ = exception_obj;
581 void clear_pending_exception() {
582 DCHECK(!thread_local_top_.pending_exception_->IsException());
583 thread_local_top_.pending_exception_ = heap_.the_hole_value();
586 Object** pending_exception_address() {
587 return &thread_local_top_.pending_exception_;
590 bool has_pending_exception() {
591 DCHECK(!thread_local_top_.pending_exception_->IsException());
592 return !thread_local_top_.pending_exception_->IsTheHole();
595 THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
597 void clear_pending_message() {
598 thread_local_top_.has_pending_message_ = false;
599 thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
600 thread_local_top_.pending_message_script_ = heap_.the_hole_value();
602 v8::TryCatch* try_catch_handler() {
603 return thread_local_top_.try_catch_handler();
605 Address try_catch_handler_address() {
606 return thread_local_top_.try_catch_handler_address();
608 bool* external_caught_exception_address() {
609 return &thread_local_top_.external_caught_exception_;
612 THREAD_LOCAL_TOP_ACCESSOR(v8::TryCatch*, catcher)
614 Object** scheduled_exception_address() {
615 return &thread_local_top_.scheduled_exception_;
618 Address pending_message_obj_address() {
619 return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
622 Address has_pending_message_address() {
623 return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
626 Address pending_message_script_address() {
627 return reinterpret_cast<Address>(
628 &thread_local_top_.pending_message_script_);
631 Object* scheduled_exception() {
632 DCHECK(has_scheduled_exception());
633 DCHECK(!thread_local_top_.scheduled_exception_->IsException());
634 return thread_local_top_.scheduled_exception_;
636 bool has_scheduled_exception() {
637 DCHECK(!thread_local_top_.scheduled_exception_->IsException());
638 return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
640 void clear_scheduled_exception() {
641 DCHECK(!thread_local_top_.scheduled_exception_->IsException());
642 thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
645 bool HasExternalTryCatch();
646 bool IsFinallyOnTop();
648 bool is_catchable_by_javascript(Object* exception) {
649 return exception != heap()->termination_exception();
653 void PushToPartialSnapshotCache(Object* obj);
655 // JS execution stack (see frames.h).
656 static Address c_entry_fp(ThreadLocalTop* thread) {
657 return thread->c_entry_fp_;
659 static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
660 Address c_function() { return thread_local_top_.c_function_; }
662 inline Address* c_entry_fp_address() {
663 return &thread_local_top_.c_entry_fp_;
665 inline Address* handler_address() { return &thread_local_top_.handler_; }
666 inline Address* c_function_address() {
667 return &thread_local_top_.c_function_;
671 Address js_entry_sp() {
672 return thread_local_top_.js_entry_sp_;
674 inline Address* js_entry_sp_address() {
675 return &thread_local_top_.js_entry_sp_;
678 // Generated code scratch locations.
679 void* formal_count_address() { return &thread_local_top_.formal_count_; }
681 // Returns the global object of the current context. It could be
682 // a builtin object, or a JS global object.
683 Handle<GlobalObject> global_object() {
684 return Handle<GlobalObject>(context()->global_object());
687 // Returns the global proxy object of the current context.
688 JSObject* global_proxy() {
689 return context()->global_proxy();
692 Handle<JSBuiltinsObject> js_builtins_object() {
693 return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
696 static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
697 void FreeThreadResources() { thread_local_top_.Free(); }
699 // This method is called by the api after operations that may throw
700 // exceptions. If an exception was thrown and not handled by an external
701 // handler the exception is scheduled to be rethrown when we return to running
702 // JavaScript code. If an exception is scheduled true is returned.
703 bool OptionalRescheduleException(bool is_bottom_call);
705 // Push and pop a promise and the current try-catch handler.
706 void PushPromise(Handle<JSObject> promise);
708 Handle<Object> GetPromiseOnStackOnThrow();
710 class ExceptionScope {
712 explicit ExceptionScope(Isolate* isolate) :
713 // Scope currently can only be used for regular exceptions,
714 // not termination exception.
716 pending_exception_(isolate_->pending_exception(), isolate_),
717 catcher_(isolate_->catcher())
721 isolate_->set_catcher(catcher_);
722 isolate_->set_pending_exception(*pending_exception_);
727 Handle<Object> pending_exception_;
728 v8::TryCatch* catcher_;
731 void SetCaptureStackTraceForUncaughtExceptions(
734 StackTrace::StackTraceOptions options);
736 void PrintCurrentStackTrace(FILE* out);
737 void PrintStack(StringStream* accumulator);
738 void PrintStack(FILE* out);
739 Handle<String> StackTraceString();
740 NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
743 unsigned int magic2));
744 Handle<JSArray> CaptureCurrentStackTrace(
746 StackTrace::StackTraceOptions options);
747 Handle<Object> CaptureSimpleStackTrace(Handle<JSObject> error_object,
748 Handle<Object> caller);
749 void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
750 void CaptureAndSetSimpleStackTrace(Handle<JSObject> error_object,
751 Handle<Object> caller);
752 Handle<JSArray> GetDetailedStackTrace(Handle<JSObject> error_object);
753 Handle<JSArray> GetDetailedFromSimpleStackTrace(
754 Handle<JSObject> error_object);
756 // Returns if the top context may access the given global object. If
757 // the result is false, the pending exception is guaranteed to be
760 bool MayNamedAccess(Handle<JSObject> receiver,
762 v8::AccessType type);
763 bool MayIndexedAccess(Handle<JSObject> receiver,
765 v8::AccessType type);
766 bool IsInternallyUsedPropertyName(Handle<Object> name);
767 bool IsInternallyUsedPropertyName(Object* name);
769 void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
770 void ReportFailedAccessCheck(Handle<JSObject> receiver, v8::AccessType type);
772 // Exception throwing support. The caller should use the result
773 // of Throw() as its return value.
774 Object* Throw(Object* exception, MessageLocation* location = NULL);
776 template <typename T>
777 MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
778 MessageLocation* location = NULL) {
779 Throw(*exception, location);
780 return MaybeHandle<T>();
783 // Re-throw an exception. This involves no error reporting since
784 // error reporting was handled when the exception was thrown
786 Object* ReThrow(Object* exception);
787 void ScheduleThrow(Object* exception);
788 // Re-set pending message, script and positions reported to the TryCatch
789 // back to the TLS for re-use when rethrowing.
790 void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
791 // Un-schedule an exception that was caught by a TryCatch handler.
792 void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
793 void ReportPendingMessages();
794 // Return pending location if any or unfilled structure.
795 MessageLocation GetMessageLocation();
796 Object* ThrowIllegalOperation();
798 // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
799 Object* PromoteScheduledException();
800 void DoThrow(Object* exception, MessageLocation* location);
801 // Checks if exception should be reported and finds out if it's
802 // caught externally.
803 bool ShouldReportException(bool* can_be_caught_externally,
804 bool catchable_by_javascript);
806 // Attempts to compute the current source location, storing the
807 // result in the target out parameter.
808 void ComputeLocation(MessageLocation* target);
809 bool ComputeLocationFromException(MessageLocation* target,
810 Handle<Object> exception);
811 bool ComputeLocationFromStackTrace(MessageLocation* target,
812 Handle<Object> exception);
814 Handle<JSMessageObject> CreateMessage(Handle<Object> exception,
815 MessageLocation* location);
817 // Out of resource exception helpers.
818 Object* StackOverflow();
819 Object* TerminateExecution();
820 void CancelTerminateExecution();
822 void RequestInterrupt(InterruptCallback callback, void* data);
823 void InvokeApiInterruptCallbacks();
826 void Iterate(ObjectVisitor* v);
827 void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
828 char* Iterate(ObjectVisitor* v, char* t);
829 void IterateThread(ThreadVisitor* v, char* t);
832 // Returns the current native context.
833 Handle<Context> native_context();
835 // Returns the native context of the calling JavaScript code. That
836 // is, the native context of the top-most JavaScript frame.
837 Handle<Context> GetCallingNativeContext();
839 void RegisterTryCatchHandler(v8::TryCatch* that);
840 void UnregisterTryCatchHandler(v8::TryCatch* that);
842 char* ArchiveThread(char* to);
843 char* RestoreThread(char* from);
845 static const char* const kStackOverflowMessage;
847 static const int kUC16AlphabetSize = 256; // See StringSearchBase.
848 static const int kBMMaxShift = 250; // See StringSearchBase.
851 #define GLOBAL_ACCESSOR(type, name, initialvalue) \
852 inline type name() const { \
853 DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
856 inline void set_##name(type value) { \
857 DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
860 ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
861 #undef GLOBAL_ACCESSOR
863 #define GLOBAL_ARRAY_ACCESSOR(type, name, length) \
864 inline type* name() { \
865 DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
866 return &(name##_)[0]; \
868 ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
869 #undef GLOBAL_ARRAY_ACCESSOR
871 #define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
872 Handle<type> name() { \
873 return Handle<type>(native_context()->name(), this); \
875 bool is_##name(type* value) { \
876 return native_context()->is_##name(value); \
878 NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
879 #undef NATIVE_CONTEXT_FIELD_ACCESSOR
881 Bootstrapper* bootstrapper() { return bootstrapper_; }
882 Counters* counters() {
883 // Call InitializeLoggingAndCounters() if logging is needed before
884 // the isolate is fully initialized.
885 DCHECK(counters_ != NULL);
888 CodeRange* code_range() { return code_range_; }
889 RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
890 CompilationCache* compilation_cache() { return compilation_cache_; }
892 // Call InitializeLoggingAndCounters() if logging is needed before
893 // the isolate is fully initialized.
894 DCHECK(logger_ != NULL);
897 StackGuard* stack_guard() { return &stack_guard_; }
898 Heap* heap() { return &heap_; }
899 StatsTable* stats_table();
900 StubCache* stub_cache() { return stub_cache_; }
901 CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
902 DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
903 ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
904 MaterializedObjectStore* materialized_object_store() {
905 return materialized_object_store_;
908 MemoryAllocator* memory_allocator() {
909 return memory_allocator_;
912 KeyedLookupCache* keyed_lookup_cache() {
913 return keyed_lookup_cache_;
916 ContextSlotCache* context_slot_cache() {
917 return context_slot_cache_;
920 DescriptorLookupCache* descriptor_lookup_cache() {
921 return descriptor_lookup_cache_;
924 HandleScopeData* handle_scope_data() { return &handle_scope_data_; }
926 HandleScopeImplementer* handle_scope_implementer() {
927 DCHECK(handle_scope_implementer_);
928 return handle_scope_implementer_;
930 Zone* runtime_zone() { return &runtime_zone_; }
932 UnicodeCache* unicode_cache() {
933 return unicode_cache_;
936 InnerPointerToCodeCache* inner_pointer_to_code_cache() {
937 return inner_pointer_to_code_cache_;
940 GlobalHandles* global_handles() { return global_handles_; }
942 EternalHandles* eternal_handles() { return eternal_handles_; }
944 ThreadManager* thread_manager() { return thread_manager_; }
946 StringTracker* string_tracker() { return string_tracker_; }
948 unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
949 return &jsregexp_uncanonicalize_;
952 unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
953 return &jsregexp_canonrange_;
956 RuntimeState* runtime_state() { return &runtime_state_; }
958 Builtins* builtins() { return &builtins_; }
960 void NotifyExtensionInstalled() {
961 has_installed_extensions_ = true;
964 bool has_installed_extensions() { return has_installed_extensions_; }
966 unibrow::Mapping<unibrow::Ecma262Canonicalize>*
967 regexp_macro_assembler_canonicalize() {
968 return ®exp_macro_assembler_canonicalize_;
971 RegExpStack* regexp_stack() { return regexp_stack_; }
973 unibrow::Mapping<unibrow::Ecma262Canonicalize>*
974 interp_canonicalize_mapping() {
975 return &interp_canonicalize_mapping_;
978 Debug* debug() { return debug_; }
980 CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
981 HeapProfiler* heap_profiler() const { return heap_profiler_; }
984 HistogramInfo* heap_histograms() { return heap_histograms_; }
986 JSObject::SpillInformation* js_spill_information() {
987 return &js_spill_information_;
991 Factory* factory() { return reinterpret_cast<Factory*>(this); }
993 static const int kJSRegexpStaticOffsetsVectorSize = 128;
995 THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
997 THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
999 void SetData(uint32_t slot, void* data) {
1000 DCHECK(slot < Internals::kNumIsolateDataSlots);
1001 embedder_data_[slot] = data;
1003 void* GetData(uint32_t slot) {
1004 DCHECK(slot < Internals::kNumIsolateDataSlots);
1005 return embedder_data_[slot];
1008 bool serializer_enabled() const { return serializer_enabled_; }
1010 bool IsDead() { return has_fatal_error_; }
1011 void SignalFatalError() { has_fatal_error_ = true; }
1013 bool use_crankshaft() const;
1015 bool initialized_from_snapshot() { return initialized_from_snapshot_; }
1017 double time_millis_since_init() {
1018 return base::OS::TimeCurrentMillis() - time_millis_at_init_;
1021 DateCache* date_cache() {
1025 void set_date_cache(DateCache* date_cache) {
1026 if (date_cache != date_cache_) {
1029 date_cache_ = date_cache;
1032 Map* get_initial_js_array_map(ElementsKind kind);
1034 bool IsFastArrayConstructorPrototypeChainIntact();
1036 CallInterfaceDescriptorData* call_descriptor_data(int index);
1038 void IterateDeferredHandles(ObjectVisitor* visitor);
1039 void LinkDeferredHandles(DeferredHandles* deferred_handles);
1040 void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
1043 bool IsDeferredHandle(Object** location);
1046 bool concurrent_recompilation_enabled() {
1047 // Thread is only available with flag enabled.
1048 DCHECK(optimizing_compiler_thread_ == NULL ||
1049 FLAG_concurrent_recompilation);
1050 return optimizing_compiler_thread_ != NULL;
1053 bool concurrent_osr_enabled() const {
1054 // Thread is only available with flag enabled.
1055 DCHECK(optimizing_compiler_thread_ == NULL ||
1056 FLAG_concurrent_recompilation);
1057 return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
1060 OptimizingCompilerThread* optimizing_compiler_thread() {
1061 return optimizing_compiler_thread_;
// This isolate's numeric id, narrowed to int for callers.
1064 int id() const { return static_cast<int>(id_); }
1066 HStatistics* GetHStatistics();
1067 CompilationStatistics* GetTurboStatistics();
1068 HTracer* GetHTracer();
1069 CodeTracer* GetCodeTracer();
1071 void DumpAndResetCompilationStats();
// Getter for the hook installed via set_function_entry_hook() below.
1073 FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
1074 void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
1075 function_entry_hook_ = function_entry_hook;
// Address of the deopt-point counter (used when deopt_every_n_times is
// enabled — see stress_deopt_count_ below).
1078 void* stress_deopt_count_address() { return &stress_deopt_count_; }
1080 inline base::RandomNumberGenerator* random_number_generator();
1082 // Given an address occupied by a live code object, return that object.
1083 Object* FindCodeObject(Address a);
1085 int NextOptimizationId() {
1086 int id = next_optimization_id_++;
1087 if (!Smi::IsValid(next_optimization_id_)) {
1088 next_optimization_id_ = 0;
1093 // Get (and lazily initialize) the registry for per-isolate symbols.
1094 Handle<JSObject> GetSymbolRegistry();
1096 void AddCallCompletedCallback(CallCompletedCallback callback);
1097 void RemoveCallCompletedCallback(CallCompletedCallback callback);
1098 void FireCallCompletedCallback();
1100 void SetPromiseRejectCallback(PromiseRejectCallback callback);
1101 void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
1102 v8::PromiseRejectEvent event);
1104 void EnqueueMicrotask(Handle<Object> microtask);
1105 void RunMicrotasks();
1107 void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
1108 void CountUsage(v8::Isolate::UseCounterFeature feature);
1110 BasicBlockProfiler* GetOrCreateBasicBlockProfiler();
// Raw accessor; presumably NULL until GetOrCreateBasicBlockProfiler() has
// run — TODO confirm initialization order.
1111 BasicBlockProfiler* basic_block_profiler() { return basic_block_profiler_; }
// Test-only factory: builds an isolate with the serializer disabled
// (forwards false to the explicit Isolate(bool enable_serializer) ctor).
1113 static Isolate* NewForTesting() { return new Isolate(false); }
1115 std::string GetTurboCfgFileName();
// Hands out monotonically increasing SharedFunctionInfo ids; no overflow
// check is performed here.
1118 int GetNextUniqueSharedFunctionInfoId() { return next_unique_sfi_id_++; }
1121 void set_store_buffer_hash_set_1_address(
1122 uintptr_t* store_buffer_hash_set_1_address) {
1123 store_buffer_hash_set_1_address_ = store_buffer_hash_set_1_address;
1126 uintptr_t* store_buffer_hash_set_1_address() {
1127 return store_buffer_hash_set_1_address_;
1130 void set_store_buffer_hash_set_2_address(
1131 uintptr_t* store_buffer_hash_set_2_address) {
1132 store_buffer_hash_set_2_address_ = store_buffer_hash_set_2_address;
1135 uintptr_t* store_buffer_hash_set_2_address() {
1136 return store_buffer_hash_set_2_address_;
1139 void AddDetachedContext(Handle<Context> context);
1140 void CheckDetachedContextsAfterGC();
1143 explicit Isolate(bool enable_serializer);
1145 friend struct GlobalState;
1146 friend struct InitializeGlobalState;
1148 // These fields are accessed through the API, offsets must be kept in sync
1149 // with v8::internal::Internals (in include/v8.h) constants. This is also
1150 // verified in Isolate::Init() using runtime checks.
1151 void* embedder_data_[Internals::kNumIsolateDataSlots];
1154 // The per-process lock should be acquired before the ThreadDataTable is
1156 class ThreadDataTable {
1161 PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
1162 void Insert(PerIsolateThreadData* data);
1163 void Remove(PerIsolateThreadData* data);
1164 void RemoveAllThreads(Isolate* isolate);
1167 PerIsolateThreadData* list_;
1170 // These items form a stack synchronously with threads Enter'ing and Exit'ing
1171 // the Isolate. The top of the stack points to a thread which is currently
1172 // running the Isolate. When the stack is empty, the Isolate is considered
1173 // not entered by any thread and can be Disposed.
1174 // If the same thread enters the Isolate more then once, the entry_count_
1175 // is incremented rather then a new item pushed to the stack.
1176 class EntryStackItem {
1178 EntryStackItem(PerIsolateThreadData* previous_thread_data,
1179 Isolate* previous_isolate,
1180 EntryStackItem* previous_item)
1182 previous_thread_data(previous_thread_data),
1183 previous_isolate(previous_isolate),
1184 previous_item(previous_item) { }
1187 PerIsolateThreadData* previous_thread_data;
1188 Isolate* previous_isolate;
1189 EntryStackItem* previous_item;
1192 DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
1195 static base::LazyMutex thread_data_table_mutex_;
1197 static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
1198 static base::Thread::LocalStorageKey isolate_key_;
1199 static base::Thread::LocalStorageKey thread_id_key_;
1200 static ThreadDataTable* thread_data_table_;
1202 // A global counter for all generated Isolates, might overflow.
1203 static base::Atomic32 isolate_counter_;
1206 static base::Atomic32 isolate_key_created_;
1211 static void SetIsolateThreadLocals(Isolate* isolate,
1212 PerIsolateThreadData* data);
1214 // Find the PerThread for this particular (isolate, thread) combination.
1215 // If one does not yet exist, allocate a new one.
1216 PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
1218 // Initializes the current thread to run this Isolate.
1219 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1220 // at the same time, this should be prevented using external locking.
1223 // Exits the current thread. The previosuly entered Isolate is restored
1225 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1226 // at the same time, this should be prevented using external locking.
1229 void InitializeThreadLocal();
1231 void MarkCompactPrologue(bool is_compacting,
1232 ThreadLocalTop* archived_thread_data);
1233 void MarkCompactEpilogue(bool is_compacting,
1234 ThreadLocalTop* archived_thread_data);
1238 // Propagate pending exception message to the v8::TryCatch.
1239 // If there is no external try-catch or message was successfully propagated,
1240 // then return true.
1241 bool PropagatePendingExceptionToExternalTryCatch();
1243 // Traverse prototype chain to find out whether the object is derived from
1244 // the Error object.
1245 bool IsErrorObject(Handle<Object> obj);
1248 EntryStackItem* entry_stack_;
1249 int stack_trace_nesting_level_;
1250 StringStream* incomplete_message_;
1251 Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT
1252 Bootstrapper* bootstrapper_;
1253 RuntimeProfiler* runtime_profiler_;
1254 CompilationCache* compilation_cache_;
1255 Counters* counters_;
1256 CodeRange* code_range_;
1257 base::RecursiveMutex break_access_;
1259 StackGuard stack_guard_;
1260 StatsTable* stats_table_;
1261 StubCache* stub_cache_;
1262 CodeAgingHelper* code_aging_helper_;
1263 DeoptimizerData* deoptimizer_data_;
1264 MaterializedObjectStore* materialized_object_store_;
1265 ThreadLocalTop thread_local_top_;
1266 bool capture_stack_trace_for_uncaught_exceptions_;
1267 int stack_trace_for_uncaught_exceptions_frame_limit_;
1268 StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1269 MemoryAllocator* memory_allocator_;
1270 KeyedLookupCache* keyed_lookup_cache_;
1271 ContextSlotCache* context_slot_cache_;
1272 DescriptorLookupCache* descriptor_lookup_cache_;
1273 HandleScopeData handle_scope_data_;
1274 HandleScopeImplementer* handle_scope_implementer_;
1275 UnicodeCache* unicode_cache_;
1277 InnerPointerToCodeCache* inner_pointer_to_code_cache_;
1278 GlobalHandles* global_handles_;
1279 EternalHandles* eternal_handles_;
1280 ThreadManager* thread_manager_;
1281 RuntimeState runtime_state_;
1283 bool has_installed_extensions_;
1284 StringTracker* string_tracker_;
1285 unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
1286 unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
1287 unibrow::Mapping<unibrow::Ecma262Canonicalize>
1288 regexp_macro_assembler_canonicalize_;
1289 RegExpStack* regexp_stack_;
1290 DateCache* date_cache_;
1291 unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
1292 CallInterfaceDescriptorData* call_descriptor_data_;
1293 base::RandomNumberGenerator* random_number_generator_;
1294 // TODO(hpayer): Remove the following store buffer addresses.
1295 uintptr_t* store_buffer_hash_set_1_address_;
1296 uintptr_t* store_buffer_hash_set_2_address_;
1298 // Whether the isolate has been created for snapshotting.
1299 bool serializer_enabled_;
1301 // True if fatal error has been signaled for this isolate.
1302 bool has_fatal_error_;
1304 // True if this isolate was initialized from a snapshot.
1305 bool initialized_from_snapshot_;
1307 // Time stamp at initialization.
1308 double time_millis_at_init_;
1311 // A static array of histogram info for each type.
1312 HistogramInfo heap_histograms_[LAST_TYPE + 1];
1313 JSObject::SpillInformation js_spill_information_;
1317 CpuProfiler* cpu_profiler_;
1318 HeapProfiler* heap_profiler_;
1319 FunctionEntryHook function_entry_hook_;
1321 typedef std::pair<InterruptCallback, void*> InterruptEntry;
1322 std::queue<InterruptEntry> api_interrupts_queue_;
1324 #define GLOBAL_BACKING_STORE(type, name, initialvalue) \
1326 ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
1327 #undef GLOBAL_BACKING_STORE
1329 #define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
1330 type name##_[length];
1331 ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
1332 #undef GLOBAL_ARRAY_BACKING_STORE
1335 // This class is huge and has a number of fields controlled by
1336 // preprocessor defines. Make sure the offsets of these fields agree
1337 // between compilation units.
1338 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \
1339 static const intptr_t name##_debug_offset_;
1340 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
1341 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
1342 #undef ISOLATE_FIELD_OFFSET
1345 DeferredHandles* deferred_handles_head_;
1346 OptimizingCompilerThread* optimizing_compiler_thread_;
1348 // Counts deopt points if deopt_every_n_times is enabled.
1349 unsigned int stress_deopt_count_;
1351 int next_optimization_id_;
1354 int next_unique_sfi_id_;
1357 // List of callbacks when a Call completes.
1358 List<CallCompletedCallback> call_completed_callbacks_;
1360 v8::Isolate::UseCounterCallback use_counter_callback_;
1361 BasicBlockProfiler* basic_block_profiler_;
1364 friend class ExecutionAccess;
1365 friend class HandleScopeImplementer;
1366 friend class OptimizingCompilerThread;
1367 friend class SweeperThread;
1368 friend class ThreadManager;
1369 friend class Simulator;
1370 friend class StackGuard;
1371 friend class ThreadId;
1372 friend class TestMemoryAllocatorScope;
1373 friend class TestCodeRangeScope;
1374 friend class v8::Isolate;
1375 friend class v8::Locker;
1376 friend class v8::Unlocker;
1378 DISALLOW_COPY_AND_ASSIGN(Isolate);
1382 #undef FIELD_ACCESSOR
1383 #undef THREAD_LOCAL_TOP_ACCESSOR
1386 class PromiseOnStack {
1388 PromiseOnStack(StackHandler* handler, Handle<JSObject> promise,
1389 PromiseOnStack* prev)
1390 : handler_(handler), promise_(promise), prev_(prev) {}
// Accessors for the captured stack handler, the promise handle, and the
// previous entry in the promise-on-stack chain.
1391 StackHandler* handler() { return handler_; }
1392 Handle<JSObject> promise() { return promise_; }
1393 PromiseOnStack* prev() { return prev_; }
1396 StackHandler* handler_;
1397 Handle<JSObject> promise_;
1398 PromiseOnStack* prev_;
1402 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the
1403 // class as a work around for a bug in the generated code found with these
1404 // versions of GCC. See V8 issue 122 for details.
1405 class SaveContext BASE_EMBEDDED {
1407 inline explicit SaveContext(Isolate* isolate);
1410 isolate_->set_context(context_.is_null() ? NULL : *context_);
1411 isolate_->set_save_context(prev_);
// The context captured at construction, and the previously active
// SaveContext (restored by the destructor above).
1414 Handle<Context> context() { return context_; }
1415 SaveContext* prev() { return prev_; }
1417 // Returns true if this save context is below a given JavaScript frame.
1418 bool IsBelowFrame(JavaScriptFrame* frame) {
1419 return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1424 Handle<Context> context_;
1426 Address c_entry_fp_;
1430 class AssertNoContextChange BASE_EMBEDDED {
1433 explicit AssertNoContextChange(Isolate* isolate)
1434 : isolate_(isolate),
1435 context_(isolate->context(), isolate) { }
1436 ~AssertNoContextChange() {
1437 DCHECK(isolate_->context() == *context_);
1442 Handle<Context> context_;
1445 explicit AssertNoContextChange(Isolate* isolate) { }
1450 class ExecutionAccess BASE_EMBEDDED {
1452 explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
1455 ~ExecutionAccess() { Unlock(isolate_); }
// Lock/Unlock the isolate's break-access mutex (break_access_ is a
// base::RecursiveMutex, so same-thread re-entry is permitted).
1457 static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
1458 static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
1460 static bool TryLock(Isolate* isolate) {
1461 return isolate->break_access()->TryLock();
1469 // Support for checking for stack-overflows.
1470 class StackLimitCheck BASE_EMBEDDED {
1472 explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
1474 // Use this to check for stack-overflows in C++ code.
1475 inline bool HasOverflowed() const {
1476 StackGuard* stack_guard = isolate_->stack_guard();
1477 return GetCurrentStackPosition() < stack_guard->real_climit();
1480 // Use this to check for stack-overflow when entering runtime from JS code.
1481 bool JsHasOverflowed() const;
1488 // Support for temporarily postponing interrupts. When the outermost
1489 // postpone scope is left the interrupts will be re-enabled and any
1490 // interrupts that occurred while in the scope will be taken into
1492 class PostponeInterruptsScope BASE_EMBEDDED {
1494 PostponeInterruptsScope(Isolate* isolate,
1495 int intercept_mask = StackGuard::ALL_INTERRUPTS)
1496 : stack_guard_(isolate->stack_guard()),
1497 intercept_mask_(intercept_mask),
1498 intercepted_flags_(0) {
1499 stack_guard_->PushPostponeInterruptsScope(this);
1502 ~PostponeInterruptsScope() {
1503 stack_guard_->PopPostponeInterruptsScope();
1506 // Find the bottom-most scope that intercepts this interrupt.
1507 // Return whether the interrupt has been intercepted.
1508 bool Intercept(StackGuard::InterruptFlag flag);
1511 StackGuard* stack_guard_;
1512 int intercept_mask_;
1513 int intercepted_flags_;
1514 PostponeInterruptsScope* prev_;
1516 friend class StackGuard;
1520 class CodeTracer FINAL : public Malloced {
1522 explicit CodeTracer(int isolate_id)
1525 if (!ShouldRedirect()) {
1530 if (FLAG_redirect_code_traces_to == NULL) {
1533 base::OS::GetCurrentProcessId(),
1536 StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
1539 WriteChars(filename_.start(), "", 0, false);
1544 explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
1545 ~Scope() { tracer_->CloseFile(); }
1547 FILE* file() const { return tracer_->file(); }
1550 CodeTracer* tracer_;
1554 if (!ShouldRedirect()) {
1558 if (file_ == NULL) {
1559 file_ = base::OS::FOpen(filename_.start(), "ab");
1566 if (!ShouldRedirect()) {
1570 if (--scope_depth_ == 0) {
// The redirected trace file; presumably NULL until OpenFile() has opened
// it — TODO confirm against the OpenFile/CloseFile logic above.
1576 FILE* file() const { return file_; }
1579 static bool ShouldRedirect() {
1580 return FLAG_redirect_code_traces;
1583 EmbeddedVector<char, 128> filename_;
1588 } } // namespace v8::internal
1590 #endif // V8_ISOLATE_H_