1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "include/v8-debug.h"
10 #include "src/allocation.h"
11 #include "src/assert-scope.h"
12 #include "src/base/atomicops.h"
13 #include "src/builtins.h"
14 #include "src/contexts.h"
16 #include "src/execution.h"
17 #include "src/frames.h"
18 #include "src/global-handles.h"
19 #include "src/handles.h"
20 #include "src/hashmap.h"
21 #include "src/heap/heap.h"
22 #include "src/optimizing-compiler-thread.h"
23 #include "src/regexp-stack.h"
24 #include "src/runtime/runtime.h"
25 #include "src/runtime-profiler.h"
31 class RandomNumberGenerator;
36 class BasicBlockProfiler;
38 class CallInterfaceDescriptorData;
41 class CodeStubDescriptor;
43 class CompilationCache;
44 class CompilationStatistics;
45 class ContextSlotCache;
49 class DeoptimizerData;
52 class ExternalCallbackScope;
53 class ExternalReferenceTable;
55 class FunctionInfoListener;
56 class HandleScopeImplementer;
60 class InlineRuntimeFunctionsTable;
61 class InnerPointerToCodeCache;
62 class MaterializedObjectStore;
63 class CodeAgingHelper;
71 class ThreadVisitor; // Defined in v8threads.h
73 template <StateTag Tag> class VMState;
// A "void function pointer" type, used to round-trip the
// ExternalReference::ExternalReferenceRedirector type, since we cannot
// include assembler.h (where it is defined) here.
typedef void* ExternalReferenceRedirectorPointer();
85 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
86 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
87 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
88 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
89 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
// Static indirection table for handles to constants. If a frame element
// or a Result represents a constant, its data contains an index into
// this table of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;
103 #define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate) \
105 Isolate* __isolate__ = (isolate); \
106 if (__isolate__->has_scheduled_exception()) { \
107 return __isolate__->PromoteScheduledException(); \
111 // Macros for MaybeHandle.
113 #define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
115 Isolate* __isolate__ = (isolate); \
116 if (__isolate__->has_scheduled_exception()) { \
117 __isolate__->PromoteScheduledException(); \
// If |isolate| has a scheduled exception, promote it to a pending
// exception and return an empty MaybeHandle<T> from the current function.
#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \
  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<T>())
125 #define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \
127 if (!(call).ToHandle(&dst)) { \
128 DCHECK((isolate)->has_pending_exception()); \
// Assigns |dst| from the MaybeHandle result of |call|; if the call failed,
// returns the heap's exception sentinel (for use in functions that report
// failure through an Object* return value).
#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(                             \
      isolate, dst, call, isolate->heap()->exception())
// Assigns |dst| from the MaybeHandle result of |call|; if the call failed,
// returns an empty MaybeHandle<T> from the current function.
#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T) \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())
140 #define THROW_NEW_ERROR(isolate, call, T) \
142 return isolate->Throw<T>(isolate->factory()->call); \
145 #define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call) \
147 return isolate->Throw(*isolate->factory()->call); \
150 #define RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \
152 if ((call).is_null()) { \
153 DCHECK((isolate)->has_pending_exception()); \
// If |call| produced an empty (failed) result, return the heap's exception
// sentinel (for use in functions that report failure via an Object* return).
#define RETURN_FAILURE_ON_EXCEPTION(isolate, call) \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())
// If |call| produced an empty (failed) result, return an empty
// MaybeHandle<T> from the current function.
#define RETURN_ON_EXCEPTION(isolate, call, T) \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
// Applies C(CamelName, hacker_name) to each per-isolate address that is
// exposed by name; used below to generate the k<Name>Address enum values
// (see DECLARE_ENUM) consumed by get_address_from_id().
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(CFunction, c_function)                              \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(PendingHandlerContext, pending_handler_context)     \
  C(PendingHandlerCode, pending_handler_code)           \
  C(PendingHandlerOffset, pending_handler_offset)       \
  C(PendingHandlerFP, pending_handler_fp)               \
  C(PendingHandlerSP, pending_handler_sp)               \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)
180 // Platform-independent, reliable thread identifier.
183 // Creates an invalid ThreadId.
184 ThreadId() { base::NoBarrier_Store(&id_, kInvalidId); }
186 ThreadId& operator=(const ThreadId& other) {
187 base::NoBarrier_Store(&id_, base::NoBarrier_Load(&other.id_));
191 // Returns ThreadId for current thread.
192 static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
194 // Returns invalid ThreadId (guaranteed not to be equal to any thread).
195 static ThreadId Invalid() { return ThreadId(kInvalidId); }
197 // Compares ThreadIds for equality.
198 INLINE(bool Equals(const ThreadId& other) const) {
199 return base::NoBarrier_Load(&id_) == base::NoBarrier_Load(&other.id_);
202 // Checks whether this ThreadId refers to any thread.
203 INLINE(bool IsValid() const) {
204 return base::NoBarrier_Load(&id_) != kInvalidId;
207 // Converts ThreadId to an integer representation
208 // (required for public API: V8::V8::GetCurrentThreadId).
209 int ToInteger() const { return static_cast<int>(base::NoBarrier_Load(&id_)); }
211 // Converts ThreadId to an integer representation
212 // (required for public API: V8::V8::TerminateExecution).
213 static ThreadId FromInteger(int id) { return ThreadId(id); }
216 static const int kInvalidId = -1;
218 explicit ThreadId(int id) { base::NoBarrier_Store(&id_, id); }
220 static int AllocateThreadId();
222 static int GetCurrentThreadId();
226 static base::Atomic32 highest_thread_id_;
228 friend class Isolate;
// Generates a trivial inline setter/getter pair for a member named |name|_.
#define FIELD_ACCESSOR(type, name)                 \
  inline void set_##name(type v) { name##_ = v; } \
  inline type name() const { return name##_; }
237 class ThreadLocalTop BASE_EMBEDDED {
239 // Does early low-level initialization that does not depend on the
240 // isolate being present.
243 // Initialize the thread data.
246 // Get the top C++ try catch handler or NULL if none are registered.
248 // This method is not guaranteed to return an address that can be
249 // used for comparison with addresses into the JS stack. If such an
250 // address is needed, use try_catch_handler_address.
251 FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)
253 // Get the address of the top C++ try catch handler or NULL if
254 // none are registered.
256 // This method always returns an address that can be compared to
257 // pointers into the JavaScript stack. When running on actual
258 // hardware, try_catch_handler_address and TryCatchHandler return
259 // the same pointer. When running on a simulator with a separate JS
260 // stack, try_catch_handler_address returns a JS stack address that
261 // corresponds to the place on the JS stack where the C++ handler
262 // would have been if the stack were not separate.
263 Address try_catch_handler_address() {
264 return reinterpret_cast<Address>(
265 v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
271 // The context where the current execution method is created and for variable
275 Object* pending_exception_;
277 // Communication channel between Isolate::FindHandler and the CEntryStub.
278 Context* pending_handler_context_;
279 Code* pending_handler_code_;
280 intptr_t pending_handler_offset_;
281 Address pending_handler_fp_;
282 Address pending_handler_sp_;
284 // Communication channel between Isolate::Throw and message consumers.
285 bool rethrowing_message_;
286 Object* pending_message_obj_;
288 // Use a separate value for scheduled exceptions to preserve the
289 // invariants that hold about pending_exception. We may want to
291 Object* scheduled_exception_;
292 bool external_caught_exception_;
293 SaveContext* save_context_;
296 Address c_entry_fp_; // the frame pointer of the top c entry frame
297 Address handler_; // try-blocks are chained through the stack
298 Address c_function_; // C function that was called at c entry.
300 // Throwing an exception may cause a Promise rejection. For this purpose
301 // we keep track of a stack of nested promises and the corresponding
302 // try-catch handlers.
303 PromiseOnStack* promise_on_stack_;
306 Simulator* simulator_;
309 Address js_entry_sp_; // the stack pointer of the bottom JS entry frame
310 // the external callback we're currently in
311 ExternalCallbackScope* external_callback_scope_;
312 StateTag current_vm_state_;
314 // Call back function to report unsafe JS accesses.
315 v8::FailedAccessCheckCallback failed_access_check_callback_;
318 void InitializeInternal();
320 v8::TryCatch* try_catch_handler_;
324 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
325 V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
326 V8_TARGET_ARCH_PPC && !defined(__PPC__) || \
327 V8_TARGET_ARCH_MIPS && !defined(__mips__) || \
328 V8_TARGET_ARCH_MIPS64 && !defined(__mips__)
// Per-isolate simulator state: V(type, name, initial_value). Active when
// the build targets an architecture other than the native host (see the
// architecture #if guarding this definition).
#define ISOLATE_INIT_SIMULATOR_LIST(V)         \
  V(bool, simulator_initialized, false)        \
  V(HashMap*, simulator_i_cache, NULL)         \
  V(Redirection*, simulator_redirection, NULL)
// Native execution: no simulator fields — the list expands to nothing.
#define ISOLATE_INIT_SIMULATOR_LIST(V)
// Per-isolate statistics arrays: V(element_type, name, length).
// NOTE(review): appears to be the DEBUG-build branch — the surrounding
// preprocessor guard is not visible in this chunk; confirm.
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)               \
  V(CommentStatistic, paged_space_comments_statistics, \
    CommentStatistic::kMaxComments + 1)                \
  V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
// Other branch of the guard above: the list expands to nothing.
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
// Fixed-size per-isolate arrays: V(element_type, name, length).
// Array accessors are generated by GLOBAL_ARRAY_ACCESSOR further down.
#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
  /* SerializerDeserializer state. */                                          \
  V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
  V(int, suffix_table, (kBMMaxShift + 1))                                      \
  V(uint32_t, private_random_seed, 2)                                          \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
// Cache of heap objects, used for the string_stream_debug_object_cache
// isolate field declared in ISOLATE_INIT_LIST below.
typedef List<HeapObject*> DebugObjectCache;
// Scalar per-isolate fields: V(type, name, initial_value). Setter/getter
// pairs are generated by GLOBAL_ACCESSOR further down.
#define ISOLATE_INIT_LIST(V)                                                   \
  /* Assembler state. */                                                       \
  V(FatalErrorCallback, exception_behavior, NULL)                              \
  V(LogEventCallback, event_logger, NULL)                                      \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(int, next_serial_number, 0)                                                \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
  /* Part of the state of liveedit. */                                         \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
  /* State for Relocatable. */                                                 \
  V(Relocatable*, relocatable_top, NULL)                                       \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
  V(Object*, string_stream_current_security_token, NULL)                       \
  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
  V(HashMap*, external_reference_map, NULL)                                    \
  V(HashMap*, root_index_map, NULL)                                            \
  V(int, pending_microtask_count, 0)                                           \
  V(bool, autorun_microtasks, true)                                            \
  V(HStatistics*, hstatistics, NULL)                                           \
  V(CompilationStatistics*, turbo_statistics, NULL)                            \
  V(HTracer*, htracer, NULL)                                                   \
  V(CodeTracer*, code_tracer, NULL)                                            \
  V(bool, fp_stubs_generated, false)                                           \
  V(int, max_available_threads, 0)                                             \
  V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
  V(PromiseRejectCallback, promise_reject_callback, NULL)                      \
  V(const v8::StartupData*, snapshot_blob, NULL)                               \
  ISOLATE_INIT_SIMULATOR_LIST(V)
// Generates a setter/getter pair on Isolate that forwards to the
// corresponding member of its thread_local_top_.
#define THREAD_LOCAL_TOP_ACCESSOR(type, name)                        \
  inline void set_##name(type v) { thread_local_top_.name##_ = v; } \
  inline type name() const { return thread_local_top_.name##_; }
// Generates a |name|_address() accessor on Isolate returning the address
// of the corresponding thread_local_top_ member.
#define THREAD_LOCAL_TOP_ADDRESS(type, name) \
  type* name##_address() { return &thread_local_top_.name##_; }
404 // These forward declarations are required to make the friend declarations in
405 // PerIsolateThreadData work on some older versions of gcc.
406 class ThreadDataTable;
407 class EntryStackItem;
411 // A thread has a PerIsolateThreadData instance for each isolate that it has
412 // entered. That instance is allocated when the isolate is initially entered
413 // and reused on subsequent entries.
414 class PerIsolateThreadData {
416 PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
418 thread_id_(thread_id),
421 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
422 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
423 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
424 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
425 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
430 ~PerIsolateThreadData();
431 Isolate* isolate() const { return isolate_; }
432 ThreadId thread_id() const { return thread_id_; }
434 FIELD_ACCESSOR(uintptr_t, stack_limit)
435 FIELD_ACCESSOR(ThreadState*, thread_state)
437 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
438 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
439 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
440 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
441 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
442 FIELD_ACCESSOR(Simulator*, simulator)
445 bool Matches(Isolate* isolate, ThreadId thread_id) const {
446 return isolate_ == isolate && thread_id_.Equals(thread_id);
452 uintptr_t stack_limit_;
453 ThreadState* thread_state_;
455 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
456 !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
457 !defined(__PPC__) && V8_TARGET_ARCH_PPC || \
458 !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
459 !defined(__mips__) && V8_TARGET_ARCH_MIPS64
460 Simulator* simulator_;
463 PerIsolateThreadData* next_;
464 PerIsolateThreadData* prev_;
466 friend class Isolate;
467 friend class ThreadDataTable;
468 friend class EntryStackItem;
470 DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
475 #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
476 FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
481 static void InitializeOncePerProcess();
483 // Returns the PerIsolateThreadData for the current thread (or NULL if one is
484 // not currently set).
485 static PerIsolateThreadData* CurrentPerIsolateThreadData() {
486 return reinterpret_cast<PerIsolateThreadData*>(
487 base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
490 // Returns the isolate inside which the current thread is running.
491 INLINE(static Isolate* Current()) {
492 DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
493 Isolate* isolate = reinterpret_cast<Isolate*>(
494 base::Thread::GetExistingThreadLocal(isolate_key_));
495 DCHECK(isolate != NULL);
499 INLINE(static Isolate* UncheckedCurrent()) {
500 DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
501 return reinterpret_cast<Isolate*>(
502 base::Thread::GetThreadLocal(isolate_key_));
505 // Like UncheckedCurrent, but skips the check that |isolate_key_| was
506 // initialized. Callers have to ensure that themselves.
507 INLINE(static Isolate* UnsafeCurrent()) {
508 return reinterpret_cast<Isolate*>(
509 base::Thread::GetThreadLocal(isolate_key_));
512 // Usually called by Init(), but can be called early e.g. to allow
513 // testing components that require logging but not the whole
516 // Safe to call more than once.
517 void InitializeLoggingAndCounters();
519 bool Init(Deserializer* des);
521 // True if at least one thread Enter'ed this isolate.
522 bool IsInUse() { return entry_stack_ != NULL; }
524 // Destroys the non-default isolates.
// Sets default isolate into "has_been_disposed" state rather than destroying,
526 // for legacy API reasons.
529 static void GlobalTearDown();
531 void ClearSerializerData();
533 // Find the PerThread for this particular (isolate, thread) combination
534 // If one does not yet exist, return null.
535 PerIsolateThreadData* FindPerThreadDataForThisThread();
537 // Find the PerThread for given (isolate, thread) combination
538 // If one does not yet exist, return null.
539 PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
541 // Returns the key used to store the pointer to the current isolate.
542 // Used internally for V8 threads that do not execute JavaScript but still
543 // are part of the domain of an isolate (like the context switcher).
544 static base::Thread::LocalStorageKey isolate_key() {
548 // Returns the key used to store process-wide thread IDs.
549 static base::Thread::LocalStorageKey thread_id_key() {
550 return thread_id_key_;
553 static base::Thread::LocalStorageKey per_isolate_thread_data_key();
555 // Mutex for serializing access to break control structures.
556 base::RecursiveMutex* break_access() { return &break_access_; }
558 Address get_address_from_id(AddressId id);
560 // Access to top context (where the current function object was created).
561 Context* context() { return thread_local_top_.context_; }
562 void set_context(Context* context) {
563 DCHECK(context == NULL || context->IsContext());
564 thread_local_top_.context_ = context;
566 Context** context_address() { return &thread_local_top_.context_; }
568 THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
570 // Access to current thread id.
571 THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
573 // Interface to pending exception.
574 Object* pending_exception() {
575 DCHECK(has_pending_exception());
576 DCHECK(!thread_local_top_.pending_exception_->IsException());
577 return thread_local_top_.pending_exception_;
580 void set_pending_exception(Object* exception_obj) {
581 DCHECK(!exception_obj->IsException());
582 thread_local_top_.pending_exception_ = exception_obj;
585 void clear_pending_exception() {
586 DCHECK(!thread_local_top_.pending_exception_->IsException());
587 thread_local_top_.pending_exception_ = heap_.the_hole_value();
590 THREAD_LOCAL_TOP_ADDRESS(Object*, pending_exception)
592 bool has_pending_exception() {
593 DCHECK(!thread_local_top_.pending_exception_->IsException());
594 return !thread_local_top_.pending_exception_->IsTheHole();
597 THREAD_LOCAL_TOP_ADDRESS(Context*, pending_handler_context)
598 THREAD_LOCAL_TOP_ADDRESS(Code*, pending_handler_code)
599 THREAD_LOCAL_TOP_ADDRESS(intptr_t, pending_handler_offset)
600 THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_fp)
601 THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_sp)
603 THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
605 void clear_pending_message() {
606 thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
608 v8::TryCatch* try_catch_handler() {
609 return thread_local_top_.try_catch_handler();
611 bool* external_caught_exception_address() {
612 return &thread_local_top_.external_caught_exception_;
615 THREAD_LOCAL_TOP_ADDRESS(Object*, scheduled_exception)
617 Address pending_message_obj_address() {
618 return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
621 Object* scheduled_exception() {
622 DCHECK(has_scheduled_exception());
623 DCHECK(!thread_local_top_.scheduled_exception_->IsException());
624 return thread_local_top_.scheduled_exception_;
626 bool has_scheduled_exception() {
627 DCHECK(!thread_local_top_.scheduled_exception_->IsException());
628 return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
630 void clear_scheduled_exception() {
631 DCHECK(!thread_local_top_.scheduled_exception_->IsException());
632 thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
635 bool IsJavaScriptHandlerOnTop(Object* exception);
636 bool IsExternalHandlerOnTop(Object* exception);
638 bool is_catchable_by_javascript(Object* exception) {
639 return exception != heap()->termination_exception();
642 // JS execution stack (see frames.h).
643 static Address c_entry_fp(ThreadLocalTop* thread) {
644 return thread->c_entry_fp_;
646 static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
647 Address c_function() { return thread_local_top_.c_function_; }
649 inline Address* c_entry_fp_address() {
650 return &thread_local_top_.c_entry_fp_;
652 inline Address* handler_address() { return &thread_local_top_.handler_; }
653 inline Address* c_function_address() {
654 return &thread_local_top_.c_function_;
658 Address js_entry_sp() {
659 return thread_local_top_.js_entry_sp_;
661 inline Address* js_entry_sp_address() {
662 return &thread_local_top_.js_entry_sp_;
665 // Returns the global object of the current context. It could be
666 // a builtin object, or a JS global object.
667 Handle<GlobalObject> global_object() {
668 return Handle<GlobalObject>(context()->global_object());
671 // Returns the global proxy object of the current context.
672 JSObject* global_proxy() {
673 return context()->global_proxy();
676 Handle<JSBuiltinsObject> js_builtins_object() {
677 return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
680 static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
681 void FreeThreadResources() { thread_local_top_.Free(); }
683 // This method is called by the api after operations that may throw
684 // exceptions. If an exception was thrown and not handled by an external
685 // handler the exception is scheduled to be rethrown when we return to running
686 // JavaScript code. If an exception is scheduled true is returned.
687 bool OptionalRescheduleException(bool is_bottom_call);
689 // Push and pop a promise and the current try-catch handler.
690 void PushPromise(Handle<JSObject> promise, Handle<JSFunction> function);
692 Handle<Object> GetPromiseOnStackOnThrow();
694 class ExceptionScope {
696 // Scope currently can only be used for regular exceptions,
697 // not termination exception.
698 explicit ExceptionScope(Isolate* isolate)
700 pending_exception_(isolate_->pending_exception(), isolate_) {}
703 isolate_->set_pending_exception(*pending_exception_);
708 Handle<Object> pending_exception_;
711 void SetCaptureStackTraceForUncaughtExceptions(
714 StackTrace::StackTraceOptions options);
716 void PrintCurrentStackTrace(FILE* out);
717 void PrintStack(StringStream* accumulator);
718 void PrintStack(FILE* out);
719 Handle<String> StackTraceString();
720 NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
723 unsigned int magic2));
724 Handle<JSArray> CaptureCurrentStackTrace(
726 StackTrace::StackTraceOptions options);
727 Handle<Object> CaptureSimpleStackTrace(Handle<JSObject> error_object,
728 Handle<Object> caller);
729 void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
730 void CaptureAndSetSimpleStackTrace(Handle<JSObject> error_object,
731 Handle<Object> caller);
732 Handle<JSArray> GetDetailedStackTrace(Handle<JSObject> error_object);
733 Handle<JSArray> GetDetailedFromSimpleStackTrace(
734 Handle<JSObject> error_object);
736 // Returns if the top context may access the given global object. If
737 // the result is false, the pending exception is guaranteed to be
740 bool MayAccess(Handle<JSObject> receiver);
741 bool IsInternallyUsedPropertyName(Handle<Object> name);
742 bool IsInternallyUsedPropertyName(Object* name);
744 void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
745 void ReportFailedAccessCheck(Handle<JSObject> receiver);
747 // Exception throwing support. The caller should use the result
748 // of Throw() as its return value.
749 Object* Throw(Object* exception, MessageLocation* location = NULL);
750 Object* ThrowIllegalOperation();
752 template <typename T>
753 MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
754 MessageLocation* location = NULL) {
755 Throw(*exception, location);
756 return MaybeHandle<T>();
759 // Re-throw an exception. This involves no error reporting since error
760 // reporting was handled when the exception was thrown originally.
761 Object* ReThrow(Object* exception);
763 // Find the correct handler for the current pending exception. This also
764 // clears and returns the current pending exception.
765 Object* FindHandler();
767 // Tries to predict whether an exception will be caught. Note that this can
768 // only produce an estimate, because it is undecidable whether a finally
769 // clause will consume or re-throw an exception. We conservatively assume any
770 // finally clause will behave as if the exception were consumed.
771 enum CatchType { NOT_CAUGHT, CAUGHT_BY_JAVASCRIPT, CAUGHT_BY_EXTERNAL };
772 CatchType PredictExceptionCatcher();
774 void ScheduleThrow(Object* exception);
775 // Re-set pending message, script and positions reported to the TryCatch
776 // back to the TLS for re-use when rethrowing.
777 void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
778 // Un-schedule an exception that was caught by a TryCatch handler.
779 void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
780 void ReportPendingMessages();
781 // Return pending location if any or unfilled structure.
782 MessageLocation GetMessageLocation();
784 // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
785 Object* PromoteScheduledException();
787 // Attempts to compute the current source location, storing the
788 // result in the target out parameter.
789 void ComputeLocation(MessageLocation* target);
790 bool ComputeLocationFromException(MessageLocation* target,
791 Handle<Object> exception);
792 bool ComputeLocationFromStackTrace(MessageLocation* target,
793 Handle<Object> exception);
795 Handle<JSMessageObject> CreateMessage(Handle<Object> exception,
796 MessageLocation* location);
798 // Out of resource exception helpers.
799 Object* StackOverflow();
800 Object* TerminateExecution();
801 void CancelTerminateExecution();
803 void RequestInterrupt(InterruptCallback callback, void* data);
804 void InvokeApiInterruptCallbacks();
807 void Iterate(ObjectVisitor* v);
808 void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
809 char* Iterate(ObjectVisitor* v, char* t);
810 void IterateThread(ThreadVisitor* v, char* t);
812 // Returns the current native context.
813 Handle<Context> native_context();
815 // Returns the native context of the calling JavaScript code. That
816 // is, the native context of the top-most JavaScript frame.
817 Handle<Context> GetCallingNativeContext();
819 void RegisterTryCatchHandler(v8::TryCatch* that);
820 void UnregisterTryCatchHandler(v8::TryCatch* that);
822 char* ArchiveThread(char* to);
823 char* RestoreThread(char* from);
825 static const char* const kStackOverflowMessage;
827 static const int kUC16AlphabetSize = 256; // See StringSearchBase.
828 static const int kBMMaxShift = 250; // See StringSearchBase.
831 #define GLOBAL_ACCESSOR(type, name, initialvalue) \
832 inline type name() const { \
833 DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
836 inline void set_##name(type value) { \
837 DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
840 ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
841 #undef GLOBAL_ACCESSOR
843 #define GLOBAL_ARRAY_ACCESSOR(type, name, length) \
844 inline type* name() { \
845 DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
846 return &(name##_)[0]; \
848 ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
849 #undef GLOBAL_ARRAY_ACCESSOR
851 #define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
852 Handle<type> name() { \
853 return Handle<type>(native_context()->name(), this); \
855 bool is_##name(type* value) { \
856 return native_context()->is_##name(value); \
858 NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
859 #undef NATIVE_CONTEXT_FIELD_ACCESSOR
861 Bootstrapper* bootstrapper() { return bootstrapper_; }
862 Counters* counters() {
863 // Call InitializeLoggingAndCounters() if logging is needed before
864 // the isolate is fully initialized.
865 DCHECK(counters_ != NULL);
868 CodeRange* code_range() { return code_range_; }
869 RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
870 CompilationCache* compilation_cache() { return compilation_cache_; }
872 // Call InitializeLoggingAndCounters() if logging is needed before
873 // the isolate is fully initialized.
874 DCHECK(logger_ != NULL);
877 StackGuard* stack_guard() { return &stack_guard_; }
878 Heap* heap() { return &heap_; }
879 StatsTable* stats_table();
880 StubCache* stub_cache() { return stub_cache_; }
881 CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
882 DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
883 ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
884 MaterializedObjectStore* materialized_object_store() {
885 return materialized_object_store_;
888 MemoryAllocator* memory_allocator() {
889 return memory_allocator_;
892 KeyedLookupCache* keyed_lookup_cache() {
893 return keyed_lookup_cache_;
896 ContextSlotCache* context_slot_cache() {
897 return context_slot_cache_;
900 DescriptorLookupCache* descriptor_lookup_cache() {
901 return descriptor_lookup_cache_;
904 HandleScopeData* handle_scope_data() { return &handle_scope_data_; }
906 HandleScopeImplementer* handle_scope_implementer() {
907 DCHECK(handle_scope_implementer_);
908 return handle_scope_implementer_;
910 Zone* runtime_zone() { return &runtime_zone_; }
912 UnicodeCache* unicode_cache() {
913 return unicode_cache_;
916 InnerPointerToCodeCache* inner_pointer_to_code_cache() {
917 return inner_pointer_to_code_cache_;
920 GlobalHandles* global_handles() { return global_handles_; }
922 EternalHandles* eternal_handles() { return eternal_handles_; }
924 ThreadManager* thread_manager() { return thread_manager_; }
926 StringTracker* string_tracker() { return string_tracker_; }
928 unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
929 return &jsregexp_uncanonicalize_;
932 unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
933 return &jsregexp_canonrange_;
936 RuntimeState* runtime_state() { return &runtime_state_; }
938 Builtins* builtins() { return &builtins_; }
940 void NotifyExtensionInstalled() {
941 has_installed_extensions_ = true;
944 bool has_installed_extensions() { return has_installed_extensions_; }
946 unibrow::Mapping<unibrow::Ecma262Canonicalize>*
947 regexp_macro_assembler_canonicalize() {
948 return ®exp_macro_assembler_canonicalize_;
951 RegExpStack* regexp_stack() { return regexp_stack_; }
953 unibrow::Mapping<unibrow::Ecma262Canonicalize>*
954 interp_canonicalize_mapping() {
955 return &interp_canonicalize_mapping_;
958 Debug* debug() { return debug_; }
960 CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
961 HeapProfiler* heap_profiler() const { return heap_profiler_; }
964 HistogramInfo* heap_histograms() { return heap_histograms_; }
966 JSObject::SpillInformation* js_spill_information() {
967 return &js_spill_information_;
971 Factory* factory() { return reinterpret_cast<Factory*>(this); }
973 static const int kJSRegexpStaticOffsetsVectorSize = 128;
975 THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
977 THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
979 void SetData(uint32_t slot, void* data) {
980 DCHECK(slot < Internals::kNumIsolateDataSlots);
981 embedder_data_[slot] = data;
983 void* GetData(uint32_t slot) {
984 DCHECK(slot < Internals::kNumIsolateDataSlots);
985 return embedder_data_[slot];
988 bool serializer_enabled() const { return serializer_enabled_; }
989 bool snapshot_available() const { return snapshot_blob_ != NULL; }
991 bool IsDead() { return has_fatal_error_; }
992 void SignalFatalError() { has_fatal_error_ = true; }
994 bool use_crankshaft() const;
// True if this isolate was deserialized from a snapshot.
996 bool initialized_from_snapshot() { return initialized_from_snapshot_; }
// Milliseconds elapsed since this isolate was initialized.
998 double time_millis_since_init() {
999 return base::OS::TimeCurrentMillis() - time_millis_at_init_;
1002 DateCache* date_cache() {
// Replaces the date cache when a different one is installed.
// NOTE(review): disposal of the previous cache falls on an omitted line —
// confirm against the full header.
1006 void set_date_cache(DateCache* date_cache) {
1007 if (date_cache != date_cache_) {
1010 date_cache_ = date_cache;
1013 Map* get_initial_js_array_map(ElementsKind kind);
1015 bool IsFastArrayConstructorPrototypeChainIntact();
1017 CallInterfaceDescriptorData* call_descriptor_data(int index);
// Bookkeeping for DeferredHandles (linked list headed at
// deferred_handles_head_; see the private fields below).
1019 void IterateDeferredHandles(ObjectVisitor* visitor);
1020 void LinkDeferredHandles(DeferredHandles* deferred_handles);
1021 void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
1024 bool IsDeferredHandle(Object** location);
// Concurrent recompilation is enabled iff the background compiler thread
// exists; the thread is only created when FLAG_concurrent_recompilation is on.
1027 bool concurrent_recompilation_enabled() {
1028 // Thread is only available with flag enabled.
1029 DCHECK(optimizing_compiler_thread_ == NULL ||
1030 FLAG_concurrent_recompilation);
1031 return optimizing_compiler_thread_ != NULL;
// Concurrent OSR additionally requires FLAG_concurrent_osr.
1034 bool concurrent_osr_enabled() const {
1035 // Thread is only available with flag enabled.
1036 DCHECK(optimizing_compiler_thread_ == NULL ||
1037 FLAG_concurrent_recompilation);
1038 return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
// May return NULL when concurrent recompilation is disabled.
1041 OptimizingCompilerThread* optimizing_compiler_thread() {
1042 return optimizing_compiler_thread_;
// Numeric id of this isolate (from the global isolate counter).
1045 int id() const { return static_cast<int>(id_); }
// Lazily-created compiler statistics/tracing objects.
1047 HStatistics* GetHStatistics();
1048 CompilationStatistics* GetTurboStatistics();
1049 HTracer* GetHTracer();
1050 CodeTracer* GetCodeTracer();
1052 void DumpAndResetCompilationStats();
// Hook invoked on function entry, settable by the embedder.
1054 FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
1055 void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
1056 function_entry_hook_ = function_entry_hook;
// Address handed to generated code for the deopt-stress counter.
1059 void* stress_deopt_count_address() { return &stress_deopt_count_; }
1061 inline base::RandomNumberGenerator* random_number_generator();
1063 // Given an address occupied by a live code object, return that object.
1064 Object* FindCodeObject(Address a);
// Monotonically increasing id for optimization jobs; wraps back to 0 when
// the counter would leave Smi range.
1066 int NextOptimizationId() {
1067 int id = next_optimization_id_++;
1068 if (!Smi::IsValid(next_optimization_id_)) {
1069 next_optimization_id_ = 0;
1074 // Get (and lazily initialize) the registry for per-isolate symbols.
1075 Handle<JSObject> GetSymbolRegistry();
// Embedder callbacks fired after each API call completes.
1077 void AddCallCompletedCallback(CallCompletedCallback callback);
1078 void RemoveCallCompletedCallback(CallCompletedCallback callback);
1079 void FireCallCompletedCallback();
// Promise-rejection reporting to the embedder.
1081 void SetPromiseRejectCallback(PromiseRejectCallback callback);
1082 void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
1083 v8::PromiseRejectEvent event);
// Microtask queue (promise jobs etc.).
1085 void EnqueueMicrotask(Handle<Object> microtask);
1086 void RunMicrotasks();
// Use-counter plumbing for the embedder (feature-usage telemetry).
1088 void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
1089 void CountUsage(v8::Isolate::UseCounterFeature feature);
1091 BasicBlockProfiler* GetOrCreateBasicBlockProfiler();
1092 BasicBlockProfiler* basic_block_profiler() { return basic_block_profiler_; }
// Test-only factory: creates an isolate with the serializer disabled.
1094 static Isolate* NewForTesting() { return new Isolate(false); }
1096 std::string GetTurboCfgFileName();
// Id generator for SharedFunctionInfo objects (no overflow guard here).
1099 int GetNextUniqueSharedFunctionInfoId() { return next_unique_sfi_id_++; }
// Raw store-buffer hash-set addresses; see the TODO(hpayer) on the fields —
// these are slated for removal.
1102 void set_store_buffer_hash_set_1_address(
1103 uintptr_t* store_buffer_hash_set_1_address) {
1104 store_buffer_hash_set_1_address_ = store_buffer_hash_set_1_address;
1107 uintptr_t* store_buffer_hash_set_1_address() {
1108 return store_buffer_hash_set_1_address_;
1111 void set_store_buffer_hash_set_2_address(
1112 uintptr_t* store_buffer_hash_set_2_address) {
1113 store_buffer_hash_set_2_address_ = store_buffer_hash_set_2_address;
1116 uintptr_t* store_buffer_hash_set_2_address() {
1117 return store_buffer_hash_set_2_address_;
// Tracking of contexts detached from their global object, checked after GC.
1120 void AddDetachedContext(Handle<Context> context);
1121 void CheckDetachedContextsAfterGC();
// Cache of objects referenced by partial snapshots.
1123 List<Object*>* partial_snapshot_cache() { return &partial_snapshot_cache_; }
// Private constructor: isolates are created through the public API (or
// NewForTesting above); `enable_serializer` selects snapshot-building mode.
1126 explicit Isolate(bool enable_serializer);
1129 friend struct GlobalState;
1130 friend struct InitializeGlobalState;
1132 // These fields are accessed through the API, offsets must be kept in sync
1133 // with v8::internal::Internals (in include/v8.h) constants. This is also
1134 // verified in Isolate::Init() using runtime checks.
1135 void* embedder_data_[Internals::kNumIsolateDataSlots];
1138 // The per-process lock should be acquired before the ThreadDataTable is
// Intrusive table mapping (isolate, thread) pairs to PerIsolateThreadData,
// implemented as a singly-linked list headed at list_.
1140 class ThreadDataTable {
1145 PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
1146 void Insert(PerIsolateThreadData* data);
1147 void Remove(PerIsolateThreadData* data);
// Drops every entry belonging to `isolate` (used on isolate teardown).
1148 void RemoveAllThreads(Isolate* isolate);
1151 PerIsolateThreadData* list_;
1154 // These items form a stack synchronously with threads Enter'ing and Exit'ing
1155 // the Isolate. The top of the stack points to a thread which is currently
1156 // running the Isolate. When the stack is empty, the Isolate is considered
1157 // not entered by any thread and can be Disposed.
1158 // If the same thread enters the Isolate more then once, the entry_count_
1159 // is incremented rather then a new item pushed to the stack.
1160 class EntryStackItem {
// Records the previous (thread-data, isolate, stack-item) triple so Exit
// can restore the prior state.
1162 EntryStackItem(PerIsolateThreadData* previous_thread_data,
1163 Isolate* previous_isolate,
1164 EntryStackItem* previous_item)
1166 previous_thread_data(previous_thread_data),
1167 previous_isolate(previous_isolate),
1168 previous_item(previous_item) { }
1171 PerIsolateThreadData* previous_thread_data;
1172 Isolate* previous_isolate;
1173 EntryStackItem* previous_item;
1176 DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
// Guards thread_data_table_ across all isolates in the process.
1179 static base::LazyMutex thread_data_table_mutex_;
// Process-wide TLS keys for looking up the current isolate/thread data.
1181 static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
1182 static base::Thread::LocalStorageKey isolate_key_;
1183 static base::Thread::LocalStorageKey thread_id_key_;
1184 static ThreadDataTable* thread_data_table_;
1186 // A global counter for all generated Isolates, might overflow.
1187 static base::Atomic32 isolate_counter_;
// Flag recording whether isolate_key_ has been created.
1190 static base::Atomic32 isolate_key_created_;
// Binds `isolate` and `data` into this thread's TLS slots.
1195 static void SetIsolateThreadLocals(Isolate* isolate,
1196 PerIsolateThreadData* data);
1198 // Find the PerThread for this particular (isolate, thread) combination.
1199 // If one does not yet exist, allocate a new one.
1200 PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
1202 // Initializes the current thread to run this Isolate.
1203 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1204 // at the same time, this should be prevented using external locking.
// NOTE(review): the Enter()/Exit() declarations themselves fall on lines
// omitted from this excerpt; only their doc comments are visible here.
1207 // Exits the current thread. The previosuly entered Isolate is restored
1209 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1210 // at the same time, this should be prevented using external locking.
1213 void InitializeThreadLocal();
// GC hooks run around mark-compact for archived thread state.
1215 void MarkCompactPrologue(bool is_compacting,
1216 ThreadLocalTop* archived_thread_data);
1217 void MarkCompactEpilogue(bool is_compacting,
1218 ThreadLocalTop* archived_thread_data);
1222 // Propagate pending exception message to the v8::TryCatch.
1223 // If there is no external try-catch or message was successfully propagated,
1224 // then return true.
1225 bool PropagatePendingExceptionToExternalTryCatch();
1227 // Traverse prototype chain to find out whether the object is derived from
1228 // the Error object.
1229 bool IsErrorObject(Handle<Object> obj);
// --- Isolate data members. Most are owned pointers allocated in Init() and
// --- released on teardown; see the .cc file for lifetimes.
1232 EntryStackItem* entry_stack_;
1233 int stack_trace_nesting_level_;
1234 StringStream* incomplete_message_;
1235 Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT
1236 Bootstrapper* bootstrapper_;
1237 RuntimeProfiler* runtime_profiler_;
1238 CompilationCache* compilation_cache_;
1239 Counters* counters_;
1240 CodeRange* code_range_;
// Protects debugger break access; see ExecutionAccess below.
1241 base::RecursiveMutex break_access_;
1243 StackGuard stack_guard_;
1244 StatsTable* stats_table_;
1245 StubCache* stub_cache_;
1246 CodeAgingHelper* code_aging_helper_;
1247 DeoptimizerData* deoptimizer_data_;
1248 MaterializedObjectStore* materialized_object_store_;
1249 ThreadLocalTop thread_local_top_;
// Uncaught-exception stack-trace capture configuration.
1250 bool capture_stack_trace_for_uncaught_exceptions_;
1251 int stack_trace_for_uncaught_exceptions_frame_limit_;
1252 StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1253 MemoryAllocator* memory_allocator_;
// Lookup caches for property/descriptor/context-slot resolution.
1254 KeyedLookupCache* keyed_lookup_cache_;
1255 ContextSlotCache* context_slot_cache_;
1256 DescriptorLookupCache* descriptor_lookup_cache_;
1257 HandleScopeData handle_scope_data_;
1258 HandleScopeImplementer* handle_scope_implementer_;
1259 UnicodeCache* unicode_cache_;
1261 InnerPointerToCodeCache* inner_pointer_to_code_cache_;
1262 GlobalHandles* global_handles_;
1263 EternalHandles* eternal_handles_;
1264 ThreadManager* thread_manager_;
1265 RuntimeState runtime_state_;
1267 bool has_installed_extensions_;
1268 StringTracker* string_tracker_;
// Unicode canonicalization mappings used by the regexp engines.
1269 unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
1270 unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
1271 unibrow::Mapping<unibrow::Ecma262Canonicalize>
1272 regexp_macro_assembler_canonicalize_;
1273 RegExpStack* regexp_stack_;
1274 DateCache* date_cache_;
1275 unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
1276 CallInterfaceDescriptorData* call_descriptor_data_;
1277 base::RandomNumberGenerator* random_number_generator_;
1278 // TODO(hpayer): Remove the following store buffer addresses.
1279 uintptr_t* store_buffer_hash_set_1_address_;
1280 uintptr_t* store_buffer_hash_set_2_address_;
1282 // Whether the isolate has been created for snapshotting.
1283 bool serializer_enabled_;
1285 // True if fatal error has been signaled for this isolate.
1286 bool has_fatal_error_;
1288 // True if this isolate was initialized from a snapshot.
1289 bool initialized_from_snapshot_;
1291 // Time stamp at initialization.
1292 double time_millis_at_init_;
1295 // A static array of histogram info for each type.
1296 HistogramInfo heap_histograms_[LAST_TYPE + 1];
1297 JSObject::SpillInformation js_spill_information_;
1301 CpuProfiler* cpu_profiler_;
1302 HeapProfiler* heap_profiler_;
1303 FunctionEntryHook function_entry_hook_;
// Queue of pending API interrupts: (callback, embedder data) pairs.
1305 typedef std::pair<InterruptCallback, void*> InterruptEntry;
1306 std::queue<InterruptEntry> api_interrupts_queue_;
// Macro-expanded backing stores for the ISOLATE_INIT_* accessor lists.
1308 #define GLOBAL_BACKING_STORE(type, name, initialvalue) \
1310 ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
1311 #undef GLOBAL_BACKING_STORE
1313 #define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
1314 type name##_[length];
1315 ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
1316 #undef GLOBAL_ARRAY_BACKING_STORE
1319 // This class is huge and has a number of fields controlled by
1320 // preprocessor defines. Make sure the offsets of these fields agree
1321 // between compilation units.
1322 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \
1323 static const intptr_t name##_debug_offset_;
1324 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
1325 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
1326 #undef ISOLATE_FIELD_OFFSET
// Head of the DeferredHandles linked list (see Link/UnlinkDeferredHandles).
1329 DeferredHandles* deferred_handles_head_;
// NULL unless concurrent recompilation is enabled.
1330 OptimizingCompilerThread* optimizing_compiler_thread_;
1332 // Counts deopt points if deopt_every_n_times is enabled.
1333 unsigned int stress_deopt_count_;
1335 int next_optimization_id_;
1338 int next_unique_sfi_id_;
1341 // List of callbacks when a Call completes.
1342 List<CallCompletedCallback> call_completed_callbacks_;
1344 v8::Isolate::UseCounterCallback use_counter_callback_;
1345 BasicBlockProfiler* basic_block_profiler_;
1347 List<Object*> partial_snapshot_cache_;
// Classes granted access to Isolate internals (locking, thread management,
// test scaffolding, and the public v8:: API types).
1349 friend class ExecutionAccess;
1350 friend class HandleScopeImplementer;
1351 friend class OptimizingCompilerThread;
1352 friend class SweeperThread;
1353 friend class ThreadManager;
1354 friend class Simulator;
1355 friend class StackGuard;
1356 friend class ThreadId;
1357 friend class TestMemoryAllocatorScope;
1358 friend class TestCodeRangeScope;
1359 friend class v8::Isolate;
1360 friend class v8::Locker;
1361 friend class v8::Unlocker;
1362 friend v8::StartupData v8::V8::CreateSnapshotDataBlob(const char*);
// Isolates are neither copyable nor assignable.
1364 DISALLOW_COPY_AND_ASSIGN(Isolate);
// Scope the accessor-generating macros to this header.
1368 #undef FIELD_ACCESSOR
1369 #undef THREAD_LOCAL_TOP_ACCESSOR
// Node in a singly-linked stack of promises being handled, used by the
// debugger to track promise chains (prev_ points to the next-outer promise).
1372 class PromiseOnStack {
1374 PromiseOnStack(Handle<JSFunction> function, Handle<JSObject> promise,
1375 PromiseOnStack* prev)
1376 : function_(function), promise_(promise), prev_(prev) {}
1377 Handle<JSFunction> function() { return function_; }
1378 Handle<JSObject> promise() { return promise_; }
1379 PromiseOnStack* prev() { return prev_; }
1382 Handle<JSFunction> function_;
1383 Handle<JSObject> promise_;
1384 PromiseOnStack* prev_;
1388 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the
1389 // class as a work around for a bug in the generated code found with these
1390 // versions of GCC. See V8 issue 122 for details.
// RAII scope that saves the isolate's current context on construction and
// restores it (plus the previous SaveContext) on destruction.
1391 class SaveContext BASE_EMBEDDED {
1393 inline explicit SaveContext(Isolate* isolate);
// Destructor body (signature on an omitted line): restore saved context.
1396 isolate_->set_context(context_.is_null() ? NULL : *context_);
1397 isolate_->set_save_context(prev_);
1400 Handle<Context> context() { return context_; }
1401 SaveContext* prev() { return prev_; }
1403 // Returns true if this save context is below a given JavaScript frame.
1404 bool IsBelowFrame(JavaScriptFrame* frame) {
1405 return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1410 Handle<Context> context_;
// C entry frame pointer captured at construction, compared against frame sp.
1412 Address c_entry_fp_;
// Debug-only assertion scope: snapshots the current context and DCHECKs on
// destruction that it has not changed.
1416 class AssertNoContextChange BASE_EMBEDDED {
1419 explicit AssertNoContextChange(Isolate* isolate)
1420 : isolate_(isolate),
1421 context_(isolate->context(), isolate) { }
1422 ~AssertNoContextChange() {
1423 DCHECK(isolate_->context() == *context_);
1428 Handle<Context> context_;
// Release-build no-op variant (the #ifdef DEBUG/#else structure falls on
// omitted lines in this excerpt).
1431 explicit AssertNoContextChange(Isolate* isolate) { }
// RAII lock over the isolate's break_access_ recursive mutex; unlocks on
// scope exit. Static Lock/Unlock/TryLock forward to the same mutex.
1436 class ExecutionAccess BASE_EMBEDDED {
1438 explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
1441 ~ExecutionAccess() { Unlock(isolate_); }
1443 static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
1444 static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
1446 static bool TryLock(Isolate* isolate) {
1447 return isolate->break_access()->TryLock();
1455 // Support for checking for stack-overflows.
1456 class StackLimitCheck BASE_EMBEDDED {
1458 explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
1460 // Use this to check for stack-overflows in C++ code.
// Compares the current stack position against the real (not the
// interrupt-request) climit of the stack guard.
1461 inline bool HasOverflowed() const {
1462 StackGuard* stack_guard = isolate_->stack_guard();
1463 return GetCurrentStackPosition() < stack_guard->real_climit();
1466 // Use this to check for stack-overflow when entering runtime from JS code.
1467 bool JsHasOverflowed() const;
1474 // Support for temporarily postponing interrupts. When the outermost
1475 // postpone scope is left the interrupts will be re-enabled and any
1476 // interrupts that occurred while in the scope will be taken into
1478 class PostponeInterruptsScope BASE_EMBEDDED {
// Registers this scope with the stack guard; by default intercepts all
// interrupt flags, or only those in `intercept_mask`.
1480 PostponeInterruptsScope(Isolate* isolate,
1481 int intercept_mask = StackGuard::ALL_INTERRUPTS)
1482 : stack_guard_(isolate->stack_guard()),
1483 intercept_mask_(intercept_mask),
1484 intercepted_flags_(0) {
1485 stack_guard_->PushPostponeInterruptsScope(this);
// Unregisters on scope exit (LIFO with Push above).
1488 ~PostponeInterruptsScope() {
1489 stack_guard_->PopPostponeInterruptsScope();
1492 // Find the bottom-most scope that intercepts this interrupt.
1493 // Return whether the interrupt has been intercepted.
1494 bool Intercept(StackGuard::InterruptFlag flag);
1497 StackGuard* stack_guard_;
1498 int intercept_mask_;
// Accumulates the flags intercepted while this scope was active.
1499 int intercepted_flags_;
1500 PostponeInterruptsScope* prev_;
1502 friend class StackGuard;
// Redirects code traces to a per-isolate file when FLAG_redirect_code_traces
// is set; otherwise tracing goes to the default stream. NOTE(review): many
// body lines (braces, scope_depth_ handling, filename formatting) fall on
// lines omitted from this excerpt.
1506 class CodeTracer FINAL : public Malloced {
// Builds the trace filename: either FLAG_redirect_code_traces_to, or a
// default name derived from the process id (and isolate id).
1508 explicit CodeTracer(int isolate_id)
1511 if (!ShouldRedirect()) {
1516 if (FLAG_redirect_code_traces_to == NULL) {
1519 base::OS::GetCurrentProcessId(),
1522 StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
// Truncate/create the file up front so later appends start clean.
1525 WriteChars(filename_.start(), "", 0, false);
// RAII scope that opens the trace file on entry and closes it on exit.
1530 explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
1531 ~Scope() { tracer_->CloseFile(); }
1533 FILE* file() const { return tracer_->file(); }
1536 CodeTracer* tracer_;
// OpenFile: lazily opens the file in append mode; no-op when not redirecting.
1540 if (!ShouldRedirect()) {
1544 if (file_ == NULL) {
1545 file_ = base::OS::FOpen(filename_.start(), "ab");
// CloseFile: closes only when the last nested Scope exits.
1552 if (!ShouldRedirect()) {
1556 if (--scope_depth_ == 0) {
1562 FILE* file() const { return file_; }
1565 static bool ShouldRedirect() {
1566 return FLAG_redirect_code_traces;
1569 EmbeddedVector<char, 128> filename_;
1574 } } // namespace v8::internal
1576 #endif // V8_ISOLATE_H_