1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "../include/v8-debug.h"
32 #include "allocation.h"
34 #include "atomicops.h"
37 #include "execution.h"
40 #include "global-handles.h"
44 #include "regexp-stack.h"
45 #include "runtime-profiler.h"
// Forward declarations: the types below are referenced in this header only by
// pointer/reference, so their full definitions are not required here.
55 class CompilationCache;
56 class ContextSlotCache;
57 class ContextSwitcher;
61 class DeoptimizerData;
64 class ExternalReferenceTable;
66 class FunctionInfoListener;
67 class HandleScopeImplementer;
69 class InlineRuntimeFunctionsTable;
70 class NoAllocationStringAllocator;
71 class InnerPointerToCodeCache;
72 class PreallocatedMemoryThread;
76 class StringInputBuffer;
81 class ThreadVisitor; // Defined in v8threads.h
84 // 'void function pointer', used to roundtrip the
85 // ExternalReference::ExternalReferenceRedirector since we cannot include
86 // assembler.h, where it is defined, here.
// Callers presumably cast back to the concrete redirector type before
// invoking it — confirm against assembler.h.
87 typedef void* ExternalReferenceRedirectorPointer();
90 #ifdef ENABLE_DEBUGGER_SUPPORT
// Condition reads: targeting ARM (or MIPS) while not compiling on an ARM (or
// MIPS) host — i.e. the build that runs generated code under a simulator.
96 #if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
97 !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
103 // Static indirection table for handles to constants. If a frame
104 // element represents a constant, the data contains an index into
105 // this table of handles to the actual constants.
106 // Static indirection table for handles to constants. If a Result
107 // represents a constant, the data contains an index into this table
108 // of handles to the actual constants.
109 typedef ZoneList<Handle<Object> > ZoneObjectList;
// Exception-propagation helpers used throughout the runtime: each macro
// checks an Isolate's pending/scheduled-exception state (or an empty-handle
// result from a call) and returns a failure sentinel from the enclosing
// function when an exception is in flight.
111 #define RETURN_IF_SCHEDULED_EXCEPTION(isolate) \
113 Isolate* __isolate__ = (isolate); \
114 if (__isolate__->has_scheduled_exception()) { \
115 return __isolate__->PromoteScheduledException(); \
119 #define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \
121 if ((call).is_null()) { \
122 ASSERT((isolate)->has_pending_exception()); \
127 #define CHECK_NOT_EMPTY_HANDLE(isolate, call) \
129 ASSERT(!(isolate)->has_pending_exception()); \
130 CHECK(!(call).is_null()); \
131 CHECK(!(isolate)->has_pending_exception()); \
134 #define RETURN_IF_EMPTY_HANDLE(isolate, call) \
135 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())
// Applies C(CamelName, hacker_name) to each per-isolate address entry; used
// below with DECLARE_ENUM to generate the k<CamelName>Address enum values.
137 #define FOR_EACH_ISOLATE_ADDRESS_NAME(C) \
138 C(Handler, handler) \
139 C(CEntryFP, c_entry_fp) \
140 C(Context, context) \
141 C(PendingException, pending_exception) \
142 C(ExternalCaughtException, external_caught_exception) \
143 C(JSEntrySP, js_entry_sp)
146 // Platform-independent, reliable thread identifier.
149 // Creates an invalid ThreadId.
150 ThreadId() : id_(kInvalidId) {}
152 // Returns ThreadId for current thread.
153 static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
155 // Returns invalid ThreadId (guaranteed not to be equal to any thread).
156 static ThreadId Invalid() { return ThreadId(kInvalidId); }
158 // Compares ThreadIds for equality.
159 INLINE(bool Equals(const ThreadId& other) const) {
160 return id_ == other.id_;
163 // Checks whether this ThreadId refers to any thread.
164 INLINE(bool IsValid() const) {
165 return id_ != kInvalidId;
168 // Converts ThreadId to an integer representation
169 // (required for public API: V8::V8::GetCurrentThreadId).
170 int ToInteger() const { return id_; }
172 // Converts ThreadId to an integer representation
173 // (required for public API: V8::V8::TerminateExecution).
174 static ThreadId FromInteger(int id) { return ThreadId(id); }
// Sentinel value stored by default-constructed / Invalid() ids.
177 static const int kInvalidId = -1;
179 explicit ThreadId(int id) : id_(id) {}
// Hands out a fresh process-wide id (backed by highest_thread_id_ below).
181 static int AllocateThreadId();
// Returns the id for the calling thread, presumably allocating one on first
// use — confirm in the .cc file.
183 static int GetCurrentThreadId();
// Highest id allocated so far; Atomic32 suggests lock-free allocation.
187 static Atomic32 highest_thread_id_;
189 friend class Isolate;
// Per-thread "top" state for one isolate: the current context, pending and
// scheduled exceptions, entry-frame pointers and simulator state.
193 class ThreadLocalTop BASE_EMBEDDED {
195 // Does early low-level initialization that does not depend on the
196 // isolate being present.
199 // Initialize the thread data.
202 // Get the top C++ try catch handler or NULL if none are registered.
204 // This method is not guaranteed to return an address that can be
205 // used for comparison with addresses into the JS stack. If such an
206 // address is needed, use try_catch_handler_address.
207 v8::TryCatch* TryCatchHandler();
209 // Get the address of the top C++ try catch handler or NULL if
210 // none are registered.
212 // This method always returns an address that can be compared to
213 // pointers into the JavaScript stack. When running on actual
214 // hardware, try_catch_handler_address and TryCatchHandler return
215 // the same pointer. When running on a simulator with a separate JS
216 // stack, try_catch_handler_address returns a JS stack address that
217 // corresponds to the place on the JS stack where the C++ handler
218 // would have been if the stack were not separate.
219 inline Address try_catch_handler_address() {
220 return try_catch_handler_address_;
223 // Set the address of the top C++ try catch handler.
224 inline void set_try_catch_handler_address(Address address) {
225 try_catch_handler_address_ = address;
229 ASSERT(!has_pending_message_);
230 ASSERT(!external_caught_exception_);
231 ASSERT(try_catch_handler_address_ == NULL);
235 // The context where the current execution method is created and for variable
// Pending exception plus the source message (object, script, positions)
// captured alongside it for reporting.
239 MaybeObject* pending_exception_;
240 bool has_pending_message_;
241 Object* pending_message_obj_;
242 Script* pending_message_script_;
243 int pending_message_start_pos_;
244 int pending_message_end_pos_;
245 // Use a separate value for scheduled exceptions to preserve the
246 // invariants that hold about pending_exception. We may want to
248 MaybeObject* scheduled_exception_;
249 bool external_caught_exception_;
250 SaveContext* save_context_;
251 v8::TryCatch* catcher_;
254 Address c_entry_fp_; // the frame pointer of the top c entry frame
255 Address handler_; // try-blocks are chained through the stack
258 #if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
259 Simulator* simulator_;
261 #endif // USE_SIMULATOR
263 Address js_entry_sp_; // the stack pointer of the bottom JS entry frame
264 Address external_callback_; // the external callback we're currently in
265 StateTag current_vm_state_;
267 // Generated code scratch locations.
268 int32_t formal_count_;
270 // Call back function to report unsafe JS accesses.
271 v8::FailedAccessCheckCallback failed_access_check_callback_;
273 // Head of the list of live LookupResults.
274 LookupResult* top_lookup_result_;
276 // Whether out of memory exceptions should be ignored.
277 bool ignore_out_of_memory_;
280 void InitializeInternal();
282 Address try_catch_handler_address_;
286 #ifdef ENABLE_DEBUGGER_SUPPORT
288 #define ISOLATE_DEBUGGER_INIT_LIST(V) \
289 V(v8::Debug::EventCallback, debug_event_callback, NULL) \
290 V(DebuggerAgent*, debugger_agent_instance, NULL)
// Empty variant for builds without debugger support.
293 #define ISOLATE_DEBUGGER_INIT_LIST(V)
299 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) \
300 V(CommentStatistic, paged_space_comments_statistics, \
301 CommentStatistic::kMaxComments + 1)
// Empty variant for non-debug builds.
304 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
// (type, name, length) triples for the Isolate's per-isolate array members;
// expanded below by GLOBAL_ARRAY_ACCESSOR.
308 #define ISOLATE_INIT_ARRAY_LIST(V) \
309 /* SerializerDeserializer state. */ \
310 V(Object*, serialize_partial_snapshot_cache, kPartialSnapshotCacheCapacity) \
311 V(int, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
312 V(int, bad_char_shift_table, kUC16AlphabetSize) \
313 V(int, good_suffix_shift_table, (kBMMaxShift + 1)) \
314 V(int, suffix_table, (kBMMaxShift + 1)) \
315 V(uint32_t, private_random_seed, 2) \
316 ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
// Cache of heap objects used by StringStream debugging output (see
// string_stream_debug_object_cache below).
318 typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
// (type, name, initial_value) triples for the Isolate's scalar per-isolate
// globals; expanded below by GLOBAL_ACCESSOR to generate getters/setters.
320 #define ISOLATE_INIT_LIST(V) \
321 /* SerializerDeserializer state. */ \
322 V(int, serialize_partial_snapshot_cache_length, 0) \
323 /* Assembler state. */ \
324 /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */ \
325 V(byte*, assembler_spare_buffer, NULL) \
326 V(FatalErrorCallback, exception_behavior, NULL) \
327 V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL) \
328 V(v8::Debug::MessageHandler, message_handler, NULL) \
329 /* To distinguish the function templates, so that we can find them in the */ \
330 /* function cache of the global context. */ \
331 V(int, next_serial_number, 0) \
332 V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL) \
333 V(bool, always_allow_natives_syntax, false) \
334 /* Part of the state of liveedit. */ \
335 V(FunctionInfoListener*, active_function_info_listener, NULL) \
336 /* State for Relocatable. */ \
337 V(Relocatable*, relocatable_top, NULL) \
338 /* State for CodeEntry in profile-generator. */ \
339 V(CodeGenerator*, current_code_generator, NULL) \
340 V(bool, jump_target_compiling_deferred_code, false) \
341 V(DebugObjectCache*, string_stream_debug_object_cache, NULL) \
342 V(Object*, string_stream_current_security_token, NULL) \
343 /* TODO(isolates): Release this on destruction? */ \
344 V(int*, irregexp_interpreter_backtrack_stack_cache, NULL) \
345 /* Serializer state. */ \
346 V(ExternalReferenceTable*, external_reference_table, NULL) \
347 /* AstNode state. */ \
348 V(int, ast_node_id, 0) \
349 V(unsigned, ast_node_count, 0) \
350 /* SafeStackFrameIterator activations count. */ \
351 V(int, safe_stack_iterator_counter, 0) \
352 V(uint64_t, enabled_cpu_features, 0) \
353 V(CpuProfiler*, cpu_profiler, NULL) \
354 V(HeapProfiler*, heap_profiler, NULL) \
355 ISOLATE_DEBUGGER_INIT_LIST(V)
358 // These forward declarations are required to make the friend declarations in
359 // PerIsolateThreadData work on some older versions of gcc.
360 class ThreadDataTable;
361 class EntryStackItem;
365 // A thread has a PerIsolateThreadData instance for each isolate that it has
366 // entered. That instance is allocated when the isolate is initially entered
367 // and reused on subsequent entries.
368 class PerIsolateThreadData {
370 PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
372 thread_id_(thread_id),
// Simulator-build condition: targeting ARM/MIPS while not compiling on an
// ARM/MIPS host.
375 #if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
376 !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
381 Isolate* isolate() const { return isolate_; }
382 ThreadId thread_id() const { return thread_id_; }
383 void set_stack_limit(uintptr_t value) { stack_limit_ = value; }
384 uintptr_t stack_limit() const { return stack_limit_; }
385 ThreadState* thread_state() const { return thread_state_; }
386 void set_thread_state(ThreadState* value) { thread_state_ = value; }
388 #if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
389 !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
390 Simulator* simulator() const { return simulator_; }
391 void set_simulator(Simulator* simulator) {
392 simulator_ = simulator;
// True if this instance belongs to the given (isolate, thread) pair.
396 bool Matches(Isolate* isolate, ThreadId thread_id) const {
397 return isolate_ == isolate && thread_id_.Equals(thread_id);
403 uintptr_t stack_limit_;
404 ThreadState* thread_state_;
406 #if !defined(__arm__) && defined(V8_TARGET_ARCH_ARM) || \
407 !defined(__mips__) && defined(V8_TARGET_ARCH_MIPS)
408 Simulator* simulator_;
// Intrusive doubly-linked list links — presumably maintained by
// ThreadDataTable (a friend below); confirm in isolate.cc.
411 PerIsolateThreadData* next_;
412 PerIsolateThreadData* prev_;
414 friend class Isolate;
415 friend class ThreadDataTable;
416 friend class EntryStackItem;
418 DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
// Generates the k<CamelName>Address enum from FOR_EACH_ISOLATE_ADDRESS_NAME.
423 #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
424 FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
429 // Returns the PerIsolateThreadData for the current thread (or NULL if one is
430 // not currently set).
431 static PerIsolateThreadData* CurrentPerIsolateThreadData() {
432 return reinterpret_cast<PerIsolateThreadData*>(
433 Thread::GetThreadLocal(per_isolate_thread_data_key_));
436 // Returns the isolate inside which the current thread is running.
437 INLINE(static Isolate* Current()) {
438 Isolate* isolate = reinterpret_cast<Isolate*>(
439 Thread::GetExistingThreadLocal(isolate_key_));
440 ASSERT(isolate != NULL);
// Like Current(), but may return NULL when no isolate has been entered on
// this thread (no ASSERT, plain GetThreadLocal lookup).
444 INLINE(static Isolate* UncheckedCurrent()) {
445 return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
448 // Usually called by Init(), but can be called early e.g. to allow
449 // testing components that require logging but not the whole
452 // Safe to call more than once.
453 void InitializeLoggingAndCounters();
// Initializes the isolate; `des` presumably supplies snapshot data to
// deserialize from — confirm in isolate.cc.
455 bool Init(Deserializer* des);
457 bool IsInitialized() { return state_ == INITIALIZED; }
459 // True if at least one thread Enter'ed this isolate.
460 bool IsInUse() { return entry_stack_ != NULL; }
462 // Destroys the non-default isolates.
463 // Sets default isolate into "has_been_disposed" state rather than destroying,
464 // for legacy API reasons.
467 bool IsDefaultIsolate() const { return this == default_isolate_; }
469 // Ensures that process-wide resources and the default isolate have been
470 // allocated. It is only necessary to call this method in rare cases, for
471 // example if you are using V8 from within the body of a static initializer.
472 // Safe to call multiple times.
473 static void EnsureDefaultIsolate();
475 // Find the PerThread for this particular (isolate, thread) combination
476 // If one does not yet exist, return null.
477 PerIsolateThreadData* FindPerThreadDataForThisThread();
479 #ifdef ENABLE_DEBUGGER_SUPPORT
480 // Get the debugger from the default isolate. Preinitializes the
481 // default isolate if needed.
482 static Debugger* GetDefaultIsolateDebugger();
485 // Get the stack guard from the default isolate. Preinitializes the
486 // default isolate if needed.
487 static StackGuard* GetDefaultIsolateStackGuard();
489 // Returns the key used to store the pointer to the current isolate.
490 // Used internally for V8 threads that do not execute JavaScript but still
491 // are part of the domain of an isolate (like the context switcher).
492 static Thread::LocalStorageKey isolate_key() {
496 // Returns the key used to store process-wide thread IDs.
497 static Thread::LocalStorageKey thread_id_key() {
498 return thread_id_key_;
501 static Thread::LocalStorageKey per_isolate_thread_data_key();
503 // If a client attempts to create a Locker without specifying an isolate,
504 // we assume that the client is using legacy behavior. Set up the current
505 // thread to be inside the implicit isolate (or fail a check if we have
506 // switched to non-legacy behavior).
507 static void EnterDefaultIsolate();
509 // Mutex for serializing access to break control structures.
510 Mutex* break_access() { return break_access_; }
512 // Mutex for serializing access to debugger.
513 Mutex* debugger_access() { return debugger_access_; }
// Maps a k...Address enum value (see DECLARE_ENUM above) to the address of
// the corresponding per-isolate field.
515 Address get_address_from_id(AddressId id);
517 // Access to top context (where the current function object was created).
518 Context* context() { return thread_local_top_.context_; }
519 void set_context(Context* context) {
520 ASSERT(context == NULL || context->IsContext());
521 thread_local_top_.context_ = context;
523 Context** context_address() { return &thread_local_top_.context_; }
525 SaveContext* save_context() {return thread_local_top_.save_context_; }
526 void set_save_context(SaveContext* save) {
527 thread_local_top_.save_context_ = save;
530 // Access to current thread id.
531 ThreadId thread_id() { return thread_local_top_.thread_id_; }
532 void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; }
534 // Interface to pending exception.
// Note the sentinel convention: "no pending exception" is represented by the
// hole value in pending_exception_ (see has_pending_exception below).
535 MaybeObject* pending_exception() {
536 ASSERT(has_pending_exception());
537 return thread_local_top_.pending_exception_;
539 bool external_caught_exception() {
540 return thread_local_top_.external_caught_exception_;
542 void set_external_caught_exception(bool value) {
543 thread_local_top_.external_caught_exception_ = value;
545 void set_pending_exception(MaybeObject* exception) {
546 thread_local_top_.pending_exception_ = exception;
548 void clear_pending_exception() {
549 thread_local_top_.pending_exception_ = heap_.the_hole_value();
551 MaybeObject** pending_exception_address() {
552 return &thread_local_top_.pending_exception_;
554 bool has_pending_exception() {
555 return !thread_local_top_.pending_exception_->IsTheHole();
557 void clear_pending_message() {
558 thread_local_top_.has_pending_message_ = false;
559 thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
560 thread_local_top_.pending_message_script_ = NULL;
562 v8::TryCatch* try_catch_handler() {
563 return thread_local_top_.TryCatchHandler();
565 Address try_catch_handler_address() {
566 return thread_local_top_.try_catch_handler_address();
568 bool* external_caught_exception_address() {
569 return &thread_local_top_.external_caught_exception_;
571 v8::TryCatch* catcher() {
572 return thread_local_top_.catcher_;
574 void set_catcher(v8::TryCatch* catcher) {
575 thread_local_top_.catcher_ = catcher;
// Scheduled exceptions use the same hole-value sentinel as pending ones.
578 MaybeObject** scheduled_exception_address() {
579 return &thread_local_top_.scheduled_exception_;
581 MaybeObject* scheduled_exception() {
582 ASSERT(has_scheduled_exception());
583 return thread_local_top_.scheduled_exception_;
585 bool has_scheduled_exception() {
586 return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
588 void clear_scheduled_exception() {
589 thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
592 bool IsExternallyCaught();
// OOM and termination exceptions are the two kinds JS code cannot catch.
594 bool is_catchable_by_javascript(MaybeObject* exception) {
595 return (exception != Failure::OutOfMemoryException()) &&
596 (exception != heap()->termination_exception());
599 // JS execution stack (see frames.h).
600 static Address c_entry_fp(ThreadLocalTop* thread) {
601 return thread->c_entry_fp_;
603 static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
605 inline Address* c_entry_fp_address() {
606 return &thread_local_top_.c_entry_fp_;
608 inline Address* handler_address() { return &thread_local_top_.handler_; }
610 // Bottom JS entry (see StackTracer::Trace in log.cc).
611 static Address js_entry_sp(ThreadLocalTop* thread) {
612 return thread->js_entry_sp_;
614 inline Address* js_entry_sp_address() {
615 return &thread_local_top_.js_entry_sp_;
618 // Generated code scratch locations.
619 void* formal_count_address() { return &thread_local_top_.formal_count_; }
621 // Returns the global object of the current context. It could be
622 // a builtin object, or a JS global object.
623 Handle<GlobalObject> global() {
624 return Handle<GlobalObject>(context()->global());
627 // Returns the global proxy object of the current context.
628 Object* global_proxy() {
629 return context()->global_proxy();
// Returns the builtins object of the current context.
632 Handle<JSBuiltinsObject> js_builtins_object() {
633 return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
// Thread archiving: bytes needed to archive one thread's top state.
636 static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
637 void FreeThreadResources() { thread_local_top_.Free(); }
639 // This method is called by the api after operations that may throw
640 // exceptions. If an exception was thrown and not handled by an external
641 // handler the exception is scheduled to be rethrown when we return to running
642 // JavaScript code. If an exception is scheduled true is returned.
643 bool OptionalRescheduleException(bool is_bottom_call);
// RAII-style scope that snapshots the pending exception and catcher on entry;
// the (elided) destructor presumably restores them via the set_catcher /
// set_pending_exception calls visible below — confirm in the full header.
645 class ExceptionScope {
647 explicit ExceptionScope(Isolate* isolate) :
648 // Scope currently can only be used for regular exceptions, not
649 // failures like OOM or termination exception.
651 pending_exception_(isolate_->pending_exception()->ToObjectUnchecked()),
652 catcher_(isolate_->catcher())
656 isolate_->set_catcher(catcher_);
657 isolate_->set_pending_exception(*pending_exception_);
662 Handle<Object> pending_exception_;
663 v8::TryCatch* catcher_;
666 void SetCaptureStackTraceForUncaughtExceptions(
669 StackTrace::StackTraceOptions options);
671 // Tells whether the current context has experienced an out of memory
673 bool is_out_of_memory();
674 bool ignore_out_of_memory() {
675 return thread_local_top_.ignore_out_of_memory_;
677 void set_ignore_out_of_memory(bool value) {
678 thread_local_top_.ignore_out_of_memory_ = value;
// Stack-trace printing and capture helpers.
681 void PrintCurrentStackTrace(FILE* out);
682 void PrintStackTrace(FILE* out, char* thread_data);
683 void PrintStack(StringStream* accumulator);
685 Handle<String> StackTraceString();
686 Handle<JSArray> CaptureCurrentStackTrace(
688 StackTrace::StackTraceOptions options);
690 void CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object);
692 // Returns if the top context may access the given global object. If
693 // the result is false, the pending exception is guaranteed to be
695 bool MayNamedAccess(JSObject* receiver,
697 v8::AccessType type);
698 bool MayIndexedAccess(JSObject* receiver,
700 v8::AccessType type);
702 void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
703 void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);
705 // Exception throwing support. The caller should use the result
706 // of Throw() as its return value.
707 Failure* Throw(Object* exception, MessageLocation* location = NULL);
708 // Re-throw an exception. This involves no error reporting since
709 // error reporting was handled when the exception was thrown
711 Failure* ReThrow(MaybeObject* exception, MessageLocation* location = NULL);
712 void ScheduleThrow(Object* exception);
713 void ReportPendingMessages();
714 Failure* ThrowIllegalOperation();
716 // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
717 Failure* PromoteScheduledException();
718 void DoThrow(Object* exception, MessageLocation* location);
719 // Checks if exception should be reported and finds out if it's
720 // caught externally.
721 bool ShouldReportException(bool* can_be_caught_externally,
722 bool catchable_by_javascript);
724 // Attempts to compute the current source location, storing the
725 // result in the target out parameter.
726 void ComputeLocation(MessageLocation* target);
728 // Override command line flag.
729 void TraceException(bool flag);
731 // Out of resource exception helpers.
732 Failure* StackOverflow();
733 Failure* TerminateExecution();
// Root/thread iteration support (used e.g. by GC and thread archiving —
// confirm callers in heap/v8threads).
736 void Iterate(ObjectVisitor* v);
737 void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
738 char* Iterate(ObjectVisitor* v, char* t);
739 void IterateThread(ThreadVisitor* v);
740 void IterateThread(ThreadVisitor* v, char* t);
743 // Returns the current global context.
744 Handle<Context> global_context();
746 // Returns the global context of the calling JavaScript code. That
747 // is, the global context of the top-most JavaScript frame.
748 Handle<Context> GetCallingGlobalContext();
750 void RegisterTryCatchHandler(v8::TryCatch* that);
751 void UnregisterTryCatchHandler(v8::TryCatch* that);
// Thread archiving: serialize/restore this isolate's per-thread state.
753 char* ArchiveThread(char* to);
754 char* RestoreThread(char* from);
756 static const char* const kStackOverflowMessage;
758 static const int kUC16AlphabetSize = 256; // See StringSearchBase.
759 static const int kBMMaxShift = 250; // See StringSearchBase.
// Generates a typed getter/setter pair for every (type, name, initial_value)
// entry in ISOLATE_INIT_LIST; the ASSERT cross-checks the member's offset
// against a recorded debug offset.
762 #define GLOBAL_ACCESSOR(type, name, initialvalue) \
763 inline type name() const { \
764 ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
767 inline void set_##name(type value) { \
768 ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
771 ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
772 #undef GLOBAL_ACCESSOR
// Same pattern for the array-valued entries of ISOLATE_INIT_ARRAY_LIST.
774 #define GLOBAL_ARRAY_ACCESSOR(type, name, length) \
775 inline type* name() { \
776 ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
777 return &(name##_)[0]; \
779 ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
780 #undef GLOBAL_ARRAY_ACCESSOR
// Accessors that read fields out of the current global context.
782 #define GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name) \
783 Handle<type> name() { \
784 return Handle<type>(context()->global_context()->name()); \
786 GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSOR)
787 #undef GLOBAL_CONTEXT_FIELD_ACCESSOR
// Accessors for the isolate's subsystems.
789 Bootstrapper* bootstrapper() { return bootstrapper_; }
790 Counters* counters() {
791 // Call InitializeLoggingAndCounters() if logging is needed before
792 // the isolate is fully initialized.
793 ASSERT(counters_ != NULL);
796 CodeRange* code_range() { return code_range_; }
797 RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
798 CompilationCache* compilation_cache() { return compilation_cache_; }
800 // Call InitializeLoggingAndCounters() if logging is needed before
801 // the isolate is fully initialized.
802 ASSERT(logger_ != NULL);
805 StackGuard* stack_guard() { return &stack_guard_; }
806 Heap* heap() { return &heap_; }
807 StatsTable* stats_table();
808 StubCache* stub_cache() { return stub_cache_; }
809 DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
810 ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
812 TranscendentalCache* transcendental_cache() const {
813 return transcendental_cache_;
816 MemoryAllocator* memory_allocator() {
817 return memory_allocator_;
820 KeyedLookupCache* keyed_lookup_cache() {
821 return keyed_lookup_cache_;
824 ContextSlotCache* context_slot_cache() {
825 return context_slot_cache_;
828 DescriptorLookupCache* descriptor_lookup_cache() {
829 return descriptor_lookup_cache_;
832 v8::ImplementationUtilities::HandleScopeData* handle_scope_data() {
833 return &handle_scope_data_;
835 HandleScopeImplementer* handle_scope_implementer() {
836 ASSERT(handle_scope_implementer_);
837 return handle_scope_implementer_;
839 Zone* zone() { return &zone_; }
841 UnicodeCache* unicode_cache() {
842 return unicode_cache_;
845 InnerPointerToCodeCache* inner_pointer_to_code_cache() {
846 return inner_pointer_to_code_cache_;
849 StringInputBuffer* write_input_buffer() { return write_input_buffer_; }
851 GlobalHandles* global_handles() { return global_handles_; }
853 ThreadManager* thread_manager() { return thread_manager_; }
855 ContextSwitcher* context_switcher() { return context_switcher_; }
857 void set_context_switcher(ContextSwitcher* switcher) {
858 context_switcher_ = switcher;
861 StringTracker* string_tracker() { return string_tracker_; }
// Per-isolate unicode mapping tables used by the regexp engine.
863 unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
864 return &jsregexp_uncanonicalize_;
867 unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
868 return &jsregexp_canonrange_;
871 StringInputBuffer* objects_string_compare_buffer_a() {
872 return &objects_string_compare_buffer_a_;
875 StringInputBuffer* objects_string_compare_buffer_b() {
876 return &objects_string_compare_buffer_b_;
879 StaticResource<StringInputBuffer>* objects_string_input_buffer() {
880 return &objects_string_input_buffer_;
883 RuntimeState* runtime_state() { return &runtime_state_; }
885 void set_fp_stubs_generated(bool value) {
886 fp_stubs_generated_ = value;
889 bool fp_stubs_generated() { return fp_stubs_generated_; }
891 StaticResource<SafeStringInputBuffer>* compiler_safe_string_input_buffer() {
892 return &compiler_safe_string_input_buffer_;
895 Builtins* builtins() { return &builtins_; }
897 void NotifyExtensionInstalled() {
898 has_installed_extensions_ = true;
901 bool has_installed_extensions() { return has_installed_extensions_; }
// Per-isolate unicode canonicalization mapping used by the regexp macro
// assembler. (Fix: the '&' of the address-of operator had been corrupted into
// a '®' character by an HTML-entity decode — "&reg" -> "®".)
903 unibrow::Mapping<unibrow::Ecma262Canonicalize>*
904 regexp_macro_assembler_canonicalize() {
905 return &regexp_macro_assembler_canonicalize_;
908 RegExpStack* regexp_stack() { return regexp_stack_; }
910 unibrow::Mapping<unibrow::Ecma262Canonicalize>*
911 interp_canonicalize_mapping() {
912 return &interp_canonicalize_mapping_;
// Preallocated-storage allocation interface (see PreallocatedStorage).
915 void* PreallocatedStorageNew(size_t size);
916 void PreallocatedStorageDelete(void* p);
917 void PreallocatedStorageInit(size_t size);
919 #ifdef ENABLE_DEBUGGER_SUPPORT
// Lazily initializes the debugger on first access (atomic flag check).
920 Debugger* debugger() {
921 if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
925 if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
930 inline bool IsDebuggerActive();
931 inline bool DebuggerHasBreakPoints();
934 HistogramInfo* heap_histograms() { return heap_histograms_; }
936 JSObject::SpillInformation* js_spill_information() {
937 return &js_spill_information_;
940 int* code_kind_statistics() { return code_kind_statistics_; }
943 #if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
944 defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
945 bool simulator_initialized() { return simulator_initialized_; }
946 void set_simulator_initialized(bool initialized) {
947 simulator_initialized_ = initialized;
950 HashMap* simulator_i_cache() { return simulator_i_cache_; }
951 void set_simulator_i_cache(HashMap* hash_map) {
952 simulator_i_cache_ = hash_map;
955 Redirection* simulator_redirection() {
956 return simulator_redirection_;
958 void set_simulator_redirection(Redirection* redirection) {
959 simulator_redirection_ = redirection;
// NOTE(review): the cast is only valid if Factory adds no data members of
// its own on top of Isolate — confirm against factory.h.
963 Factory* factory() { return reinterpret_cast<Factory*>(this); }
965 // SerializerDeserializer state.
966 static const int kPartialSnapshotCacheCapacity = 1400;
968 static const int kJSRegexpStaticOffsetsVectorSize = 50;
970 Address external_callback() {
971 return thread_local_top_.external_callback_;
973 void set_external_callback(Address callback) {
974 thread_local_top_.external_callback_ = callback;
977 StateTag current_vm_state() {
978 return thread_local_top_.current_vm_state_;
// Records VM state transitions and notifies the runtime profiler on
// JS <-> non-JS boundary crossings.
981 void SetCurrentVMState(StateTag state) {
982 if (RuntimeProfiler::IsEnabled()) {
983 // Make sure thread local top is initialized.
984 ASSERT(thread_local_top_.isolate_ == this);
985 StateTag current_state = thread_local_top_.current_vm_state_;
986 if (current_state != JS && state == JS) {
987 // Non-JS -> JS transition.
988 RuntimeProfiler::IsolateEnteredJS(this);
989 } else if (current_state == JS && state != JS) {
990 // JS -> non-JS transition.
991 ASSERT(RuntimeProfiler::IsSomeIsolateInJS());
992 RuntimeProfiler::IsolateExitedJS(this);
994 // Other types of state transitions are not interesting to the
995 // runtime profiler, because they don't affect whether we're
997 ASSERT((current_state == JS) == (state == JS));
1000 thread_local_top_.current_vm_state_ = state;
// Opaque embedder-supplied pointer (exposed through the public API).
1003 void SetData(void* data) { embedder_data_ = data; }
1004 void* GetData() { return embedder_data_; }
1006 LookupResult* top_lookup_result() {
1007 return thread_local_top_.top_lookup_result_;
1009 void SetTopLookupResult(LookupResult* top) {
1010 thread_local_top_.top_lookup_result_ = top;
1013 bool context_exit_happened() {
1014 return context_exit_happened_;
1016 void set_context_exit_happened(bool context_exit_happened) {
1017 context_exit_happened_ = context_exit_happened;
// Wall-clock milliseconds since this isolate was initialized.
1020 double time_millis_since_init() {
1021 return OS::TimeCurrentMillis() - time_millis_at_init_;
1024 DateCache* date_cache() {
1028 void set_date_cache(DateCache* date_cache) {
1029 if (date_cache != date_cache_) {
1032 date_cache_ = date_cache;
1038 friend struct GlobalState;
1039 friend struct InitializeGlobalState;
// Isolate initialization states (part of an enum whose header is elided).
1042 UNINITIALIZED, // Some components may not have been allocated.
1043 INITIALIZED // All components are fully initialized.
1046 // These fields are accessed through the API, offsets must be kept in sync
1047 // with v8::internal::Internals (in include/v8.h) constants. This is also
1048 // verified in Isolate::Init() using runtime checks.
1049 State state_; // Will be padded to kApiPointerSize.
1050 void* embedder_data_;
1053 // The per-process lock should be acquired before the ThreadDataTable is
// modified.
// Process-wide table mapping (isolate, thread id) pairs to their
// PerIsolateThreadData entries.
1055 class ThreadDataTable {
// Finds the entry for the given isolate/thread pair.
1060 PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
1061 void Insert(PerIsolateThreadData* data);
// Removal by (isolate, thread id) key or by the entry itself.
1062 void Remove(Isolate* isolate, ThreadId thread_id);
1063 void Remove(PerIsolateThreadData* data);
// Removes every thread's entry belonging to the given isolate.
1064 void RemoveAllThreads(Isolate* isolate);
// Head of the list of entries.
1067 PerIsolateThreadData* list_;
1070 // These items form a stack synchronously with threads Enter'ing and Exit'ing
1071 // the Isolate. The top of the stack points to a thread which is currently
1072 // running the Isolate. When the stack is empty, the Isolate is considered
1073 // not entered by any thread and can be Disposed.
// If the same thread enters the Isolate more than once, the entry_count_
// is incremented rather than a new item pushed to the stack.
1076 class EntryStackItem {
// Captures the thread data, isolate, and stack item to restore when this
// entry is popped on Exit.
1078 EntryStackItem(PerIsolateThreadData* previous_thread_data,
1079 Isolate* previous_isolate,
1080 EntryStackItem* previous_item)
1082 previous_thread_data(previous_thread_data),
1083 previous_isolate(previous_isolate),
1084 previous_item(previous_item) { }
1087 PerIsolateThreadData* previous_thread_data;
1088 Isolate* previous_isolate;
1089 EntryStackItem* previous_item;
1092 DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
1095 // This mutex protects highest_thread_id_, thread_data_table_ and
1096 // default_isolate_.
1097 static Mutex* process_wide_mutex_;
// Thread-local-storage keys used to find, for the running thread, its
// PerIsolateThreadData, its current Isolate, and its ThreadId.
1099 static Thread::LocalStorageKey per_isolate_thread_data_key_;
1100 static Thread::LocalStorageKey isolate_key_;
1101 static Thread::LocalStorageKey thread_id_key_;
// Process-wide default isolate and thread-data table (guarded by
// process_wide_mutex_ per the comment above).
1102 static Isolate* default_isolate_;
1103 static ThreadDataTable* thread_data_table_;
// Installs the given isolate and per-thread data into this thread's TLS.
1107 static void SetIsolateThreadLocals(Isolate* isolate,
1108 PerIsolateThreadData* data);
1110 // Allocate and insert PerIsolateThreadData into the ThreadDataTable
1111 // (regardless of whether such data already exists).
1112 PerIsolateThreadData* AllocatePerIsolateThreadData(ThreadId thread_id);
1114 // Find the PerThread for this particular (isolate, thread) combination.
1115 // If one does not yet exist, allocate a new one.
1116 PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
1118 // PreInits and returns a default isolate. Needed when a new thread tries
1119 // to create a Locker for the first time (the lock itself is in the isolate).
1120 static Isolate* GetDefaultIsolateForLocking();
1122 // Initializes the current thread to run this Isolate.
1123 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1124 // at the same time, this should be prevented using external locking.
// Exits the current thread. The previously entered Isolate is restored.
1129 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1130 // at the same time, this should be prevented using external locking.
// Lifecycle hooks for the preallocated-memory thread singleton.
1133 void PreallocatedMemoryThreadStart();
1134 void PreallocatedMemoryThreadStop();
1135 void InitializeThreadLocal();
1137 void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
// GC hooks invoked around mark-compact for archived thread state.
1138 void MarkCompactPrologue(bool is_compacting,
1139 ThreadLocalTop* archived_thread_data);
1140 void MarkCompactEpilogue(bool is_compacting,
1141 ThreadLocalTop* archived_thread_data);
1145 void PropagatePendingExceptionToExternalTryCatch();
1147 void InitializeDebugger();
1149 // Traverse prototype chain to find out whether the object is derived from
1150 // the Error object.
1151 bool IsErrorObject(Handle<Object> obj);
// --- Per-isolate state. One instance of each of these subsystems exists
// per Isolate; pointers are owned by the Isolate unless noted otherwise.
1153 EntryStackItem* entry_stack_;
1154 int stack_trace_nesting_level_;
1155 StringStream* incomplete_message_;
1156 // The preallocated memory thread singleton.
1157 PreallocatedMemoryThread* preallocated_memory_thread_;
1158 Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT
1159 NoAllocationStringAllocator* preallocated_message_space_;
1160 Bootstrapper* bootstrapper_;
1161 RuntimeProfiler* runtime_profiler_;
1162 CompilationCache* compilation_cache_;
1163 Counters* counters_;
1164 CodeRange* code_range_;
// Lock taken by ExecutionAccess (declared below) around execution control.
1165 Mutex* break_access_;
1166 Atomic32 debugger_initialized_;
1167 Mutex* debugger_access_;
1169 StackGuard stack_guard_;
1170 StatsTable* stats_table_;
1171 StubCache* stub_cache_;
1172 DeoptimizerData* deoptimizer_data_;
1173 ThreadLocalTop thread_local_top_;
// Settings for stack-trace capture on uncaught exceptions.
1174 bool capture_stack_trace_for_uncaught_exceptions_;
1175 int stack_trace_for_uncaught_exceptions_frame_limit_;
1176 StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1177 TranscendentalCache* transcendental_cache_;
1178 MemoryAllocator* memory_allocator_;
// Per-isolate lookup caches.
1179 KeyedLookupCache* keyed_lookup_cache_;
1180 ContextSlotCache* context_slot_cache_;
1181 DescriptorLookupCache* descriptor_lookup_cache_;
1182 v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
1183 HandleScopeImplementer* handle_scope_implementer_;
1184 UnicodeCache* unicode_cache_;
// Preallocated-storage free/in-use lists.
1186 PreallocatedStorage in_use_list_;
1187 PreallocatedStorage free_list_;
1188 bool preallocated_storage_preallocated_;
1189 InnerPointerToCodeCache* inner_pointer_to_code_cache_;
1190 StringInputBuffer* write_input_buffer_;
1191 GlobalHandles* global_handles_;
1192 ContextSwitcher* context_switcher_;
1193 ThreadManager* thread_manager_;
1194 RuntimeState runtime_state_;
1195 bool fp_stubs_generated_;
1196 StaticResource<SafeStringInputBuffer> compiler_safe_string_input_buffer_;
1198 bool has_installed_extensions_;
1199 StringTracker* string_tracker_;
// Unicode canonicalization mappings and buffers used by the regexp
// engine and string comparison.
1200 unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
1201 unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
1202 StringInputBuffer objects_string_compare_buffer_a_;
1203 StringInputBuffer objects_string_compare_buffer_b_;
1204 StaticResource<StringInputBuffer> objects_string_input_buffer_;
1205 unibrow::Mapping<unibrow::Ecma262Canonicalize>
1206 regexp_macro_assembler_canonicalize_;
1207 RegExpStack* regexp_stack_;
1208 DateCache* date_cache_;
1209 unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
1211 // The garbage collector should be a little more aggressive when it knows
1212 // that a context was recently exited.
1213 bool context_exit_happened_;
1215 // Time stamp at initialization.
1216 double time_millis_at_init_;
// Simulator state, present only when building a simulator build (target
// architecture differs from the host architecture).
1218 #if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
1219 defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
1220 bool simulator_initialized_;
1221 HashMap* simulator_i_cache_;
1222 Redirection* simulator_redirection_;
1226 // A static array of histogram info for each type.
1227 HistogramInfo heap_histograms_[LAST_TYPE + 1];
1228 JSObject::SpillInformation js_spill_information_;
1229 int code_kind_statistics_[Code::NUMBER_OF_KINDS];
1232 #ifdef ENABLE_DEBUGGER_SUPPORT
1233 Debugger* debugger_;
// Expand ISOLATE_INIT_LIST / ISOLATE_INIT_ARRAY_LIST into the backing
// fields for the per-isolate "globals" declared via those lists.
1237 #define GLOBAL_BACKING_STORE(type, name, initialvalue) \
1239 ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
1240 #undef GLOBAL_BACKING_STORE
1242 #define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
1243 type name##_[length];
1244 ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
1245 #undef GLOBAL_ARRAY_BACKING_STORE
1248 // This class is huge and has a number of fields controlled by
1249 // preprocessor defines. Make sure the offsets of these fields agree
1250 // between compilation units.
1251 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \
1252 static const intptr_t name##_debug_offset_;
1253 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
1254 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
1255 #undef ISOLATE_FIELD_OFFSET
// Classes that need direct access to Isolate internals (e.g.
// ExecutionAccess uses break_access_, the API classes manage entry/exit).
1258 friend class ExecutionAccess;
1259 friend class IsolateInitializer;
1260 friend class ThreadManager;
1261 friend class Simulator;
1262 friend class StackGuard;
1263 friend class ThreadId;
1264 friend class TestMemoryAllocatorScope;
1265 friend class v8::Isolate;
1266 friend class v8::Locker;
1267 friend class v8::Unlocker;
1269 DISALLOW_COPY_AND_ASSIGN(Isolate);
1273 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the
1274 // class as a work around for a bug in the generated code found with these
1275 // versions of GCC. See V8 issue 122 for details.
// RAII scope that saves the isolate's current context (and save-context
// chain) on construction and restores both on destruction.
1276 class SaveContext BASE_EMBEDDED {
1278 inline explicit SaveContext(Isolate* isolate);
// Destructor body (signature elided in this view): restores the saved
// context, or clears it if none was saved.
1281 if (context_.is_null()) {
1282 Isolate* isolate = Isolate::Current();
1283 isolate->set_context(NULL);
1284 isolate->set_save_context(prev_);
1286 Isolate* isolate = context_->GetIsolate();
1287 isolate->set_context(*context_);
1288 isolate->set_save_context(prev_);
1292 Handle<Context> context() { return context_; }
1293 SaveContext* prev() { return prev_; }
1295 // Returns true if this save context is below a given JavaScript frame.
// A zero c_entry_fp_ means no C entry frame was recorded, which also
// counts as "below".
1296 bool IsBelowFrame(JavaScriptFrame* frame) {
1297 return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1301 Handle<Context> context_;
1302 #if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
// Padding field for the GCC 4.1/4.2 codegen bug described above.
1303 Handle<Context> dummy_;
// C entry frame pointer captured at construction, used by IsBelowFrame.
1306 Address c_entry_fp_;
// Debug-only scope guard: captures the current context on entry and
// ASSERTs on exit that it has not changed. The bare constructor at the
// bottom is presumably the non-debug (no-op) variant -- the preprocessor
// split between the two is elided from this view.
1310 class AssertNoContextChange BASE_EMBEDDED {
1313 AssertNoContextChange() :
1314 scope_(Isolate::Current()),
1315 context_(Isolate::Current()->context(), Isolate::Current()) {
1318 ~AssertNoContextChange() {
1319 ASSERT(Isolate::Current()->context() == *context_);
1324 Handle<Context> context_;
// No-op variant.
1327 AssertNoContextChange() { }
// RAII guard for the isolate's break_access_ mutex; unlocks on scope exit.
// Static Lock/Unlock/TryLock allow manual control with the same mutex.
1332 class ExecutionAccess BASE_EMBEDDED {
1334 explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
1337 ~ExecutionAccess() { Unlock(isolate_); }
1339 static void Lock(Isolate* isolate) { isolate->break_access_->Lock(); }
1340 static void Unlock(Isolate* isolate) { isolate->break_access_->Unlock(); }
1342 static bool TryLock(Isolate* isolate) {
1343 return isolate->break_access_->TryLock();
1351 // Support for checking for stack-overflows in C++ code.
1352 class StackLimitCheck BASE_EMBEDDED {
1354 explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
1356 bool HasOverflowed() const {
1357 StackGuard* stack_guard = isolate_->stack_guard();
1358 // Stack has overflowed in C++ code only if stack pointer exceeds the C++
1359 // stack guard and the limits are not set to interrupt values.
// TODO(214): Stack overflows are ignored if an interrupt is pending. This
1361 // code should probably always use the initial C++ limit.
// The address of this stack-allocated object approximates the current
// stack pointer for the limit comparison.
1362 return (reinterpret_cast<uintptr_t>(this) < stack_guard->climit()) &&
1363 stack_guard->IsStackOverflow();
1370 // Support for temporarily postponing interrupts. When the outermost
1371 // postpone scope is left the interrupts will be re-enabled and any
1372 // interrupts that occurred while in the scope will be taken into
// account.
1374 class PostponeInterruptsScope BASE_EMBEDDED {
// Entering a scope bumps the nesting count and disables interrupts.
1376 explicit PostponeInterruptsScope(Isolate* isolate)
1377 : stack_guard_(isolate->stack_guard()) {
1378 stack_guard_->thread_local_.postpone_interrupts_nesting_++;
1379 stack_guard_->DisableInterrupts();
// Leaving the outermost scope (nesting reaches zero) re-enables them.
1382 ~PostponeInterruptsScope() {
1383 if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
1384 stack_guard_->EnableInterrupts();
1388 StackGuard* stack_guard_;
1392 // Temporary macros for accessing current isolate and its subobjects.
1393 // They provide better readability, especially when used a lot in the code.
// Note: each expansion calls Isolate::Current(), so a current isolate
// must exist on the calling thread.
1394 #define HEAP (v8::internal::Isolate::Current()->heap())
1395 #define FACTORY (v8::internal::Isolate::Current()->factory())
1396 #define ISOLATE (v8::internal::Isolate::Current())
1397 #define ZONE (v8::internal::Isolate::Current()->zone())
1398 #define LOGGER (v8::internal::Isolate::Current()->logger())
1401 // Tells whether the global context is marked with out of memory.
1402 inline bool Context::has_out_of_memory() {
1403 return global_context()->out_of_memory()->IsTrue();
1407 // Mark the global context with out of memory.
// Uses the HEAP macro above, i.e. requires a current isolate.
1408 inline void Context::mark_out_of_memory() {
1409 global_context()->set_out_of_memory(HEAP->true_value());
1413 } } // namespace v8::internal
1415 #endif // V8_ISOLATE_H_