deps: update v8 to 4.3.61.21
deps/v8/src/isolate.h (from platform/upstream/nodejs.git)
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_ISOLATE_H_
6 #define V8_ISOLATE_H_
7
8 #include <queue>
9 #include "include/v8-debug.h"
10 #include "src/allocation.h"
11 #include "src/assert-scope.h"
12 #include "src/base/atomicops.h"
13 #include "src/builtins.h"
14 #include "src/contexts.h"
15 #include "src/date.h"
16 #include "src/execution.h"
17 #include "src/frames.h"
18 #include "src/global-handles.h"
19 #include "src/handles.h"
20 #include "src/hashmap.h"
21 #include "src/heap/heap.h"
22 #include "src/optimizing-compiler-thread.h"
23 #include "src/regexp-stack.h"
24 #include "src/runtime/runtime.h"
25 #include "src/runtime-profiler.h"
26 #include "src/zone.h"
27
28 namespace v8 {
29
30 namespace base {
31 class RandomNumberGenerator;
32 }
33
34 namespace internal {
35
36 class BasicBlockProfiler;
37 class Bootstrapper;
38 class CallInterfaceDescriptorData;
39 class CodeGenerator;
40 class CodeRange;
41 class CodeStubDescriptor;
42 class CodeTracer;
43 class CompilationCache;
44 class CompilationStatistics;
45 class ContextSlotCache;
46 class Counters;
47 class CpuFeatures;
48 class CpuProfiler;
49 class DeoptimizerData;
50 class Deserializer;
51 class EmptyStatement;
52 class ExternalCallbackScope;
53 class ExternalReferenceTable;
54 class Factory;
55 class FunctionInfoListener;
56 class HandleScopeImplementer;
57 class HeapProfiler;
58 class HStatistics;
59 class HTracer;
60 class InlineRuntimeFunctionsTable;
61 class InnerPointerToCodeCache;
62 class MaterializedObjectStore;
63 class CodeAgingHelper;
64 class RegExpStack;
65 class SaveContext;
66 class StringTracker;
67 class StubCache;
68 class SweeperThread;
69 class ThreadManager;
70 class ThreadState;
71 class ThreadVisitor;  // Defined in v8threads.h
72 class UnicodeCache;
73 template <StateTag Tag> class VMState;
74
75 // 'void function pointer', used to roundtrip the
76 // ExternalReference::ExternalReferenceRedirector since we cannot include
77 // assembler.h, where it is defined, here.
78 typedef void* ExternalReferenceRedirectorPointer();
79
80
81 class Debug;
82 class Debugger;
83 class PromiseOnStack;
84
85 #if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
86     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
87     !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
88     !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
89     !defined(__mips__) && V8_TARGET_ARCH_MIPS64
90 class Redirection;
91 class Simulator;
92 #endif
93
94
95 // Static indirection table for handles to constants.  If a frame
96 // element represents a constant, the data contains an index into
97 // this table of handles to the actual constants.
98 // Likewise, if a Result represents a constant, its data contains
99 // an index into this same table of handles to the actual
100 // constants.
101 typedef ZoneList<Handle<Object> > ZoneObjectList;
102
103 #define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate)    \
104   do {                                                    \
105     Isolate* __isolate__ = (isolate);                     \
106     if (__isolate__->has_scheduled_exception()) {         \
107       return __isolate__->PromoteScheduledException();    \
108     }                                                     \
109   } while (false)
110
111 // Macros for MaybeHandle.
112
113 #define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
114   do {                                                      \
115     Isolate* __isolate__ = (isolate);                       \
116     if (__isolate__->has_scheduled_exception()) {           \
117       __isolate__->PromoteScheduledException();             \
118       return value;                                         \
119     }                                                       \
120   } while (false)
121
122 #define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \
123   RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<T>())
124
125 #define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)  \
126   do {                                                               \
127     if (!(call).ToHandle(&dst)) {                                    \
128       DCHECK((isolate)->has_pending_exception());                    \
129       return value;                                                  \
130     }                                                                \
131   } while (false)
132
133 #define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)  \
134   ASSIGN_RETURN_ON_EXCEPTION_VALUE(                             \
135       isolate, dst, call, isolate->heap()->exception())
136
137 #define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)  \
138   ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())
139
140 #define THROW_NEW_ERROR(isolate, call, T)               \
141   do {                                                  \
142     return isolate->Throw<T>(isolate->factory()->call); \
143   } while (false)
144
145 #define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call) \
146   do {                                                \
147     return isolate->Throw(*isolate->factory()->call); \
148   } while (false)
149
150 #define RETURN_ON_EXCEPTION_VALUE(isolate, call, value)            \
151   do {                                                             \
152     if ((call).is_null()) {                                        \
153       DCHECK((isolate)->has_pending_exception());                  \
154       return value;                                                \
155     }                                                              \
156   } while (false)
157
158 #define RETURN_FAILURE_ON_EXCEPTION(isolate, call)  \
159   RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())
160
161 #define RETURN_ON_EXCEPTION(isolate, call, T)  \
162   RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
163
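// Usage sketch (illustrative only) for the MaybeHandle macros above.
// DoIllustrativeOp and MaybeProduceString are hypothetical helpers invented
// for this sketch; they are not part of V8.
//
//   MaybeHandle<String> DoIllustrativeOp(Isolate* isolate,
//                                        Handle<Object> input) {
//     Handle<String> str;
//     // On failure, propagates the pending exception by returning
//     // MaybeHandle<String>().
//     ASSIGN_RETURN_ON_EXCEPTION(isolate, str,
//                                MaybeProduceString(isolate, input), String);
//     // Propagates a failure from a call whose result is not needed here.
//     RETURN_ON_EXCEPTION(isolate, MaybeProduceString(isolate, input), String);
//     return str;
//   }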
164
165 #define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
166   C(Handler, handler)                                   \
167   C(CEntryFP, c_entry_fp)                               \
168   C(CFunction, c_function)                              \
169   C(Context, context)                                   \
170   C(PendingException, pending_exception)                \
171   C(PendingHandlerContext, pending_handler_context)     \
172   C(PendingHandlerCode, pending_handler_code)           \
173   C(PendingHandlerOffset, pending_handler_offset)       \
174   C(PendingHandlerFP, pending_handler_fp)               \
175   C(PendingHandlerSP, pending_handler_sp)               \
176   C(ExternalCaughtException, external_caught_exception) \
177   C(JSEntrySP, js_entry_sp)
178
179
180 // Platform-independent, reliable thread identifier.
181 class ThreadId {
182  public:
183   // Creates an invalid ThreadId.
184   ThreadId() { base::NoBarrier_Store(&id_, kInvalidId); }
185
186   ThreadId& operator=(const ThreadId& other) {
187     base::NoBarrier_Store(&id_, base::NoBarrier_Load(&other.id_));
188     return *this;
189   }
190
191   // Returns ThreadId for current thread.
192   static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
193
194   // Returns invalid ThreadId (guaranteed not to be equal to any thread).
195   static ThreadId Invalid() { return ThreadId(kInvalidId); }
196
197   // Compares ThreadIds for equality.
198   INLINE(bool Equals(const ThreadId& other) const) {
199     return base::NoBarrier_Load(&id_) == base::NoBarrier_Load(&other.id_);
200   }
201
202   // Checks whether this ThreadId refers to any thread.
203   INLINE(bool IsValid() const) {
204     return base::NoBarrier_Load(&id_) != kInvalidId;
205   }
206
207   // Converts ThreadId to an integer representation
208   // (required for public API: v8::V8::GetCurrentThreadId).
209   int ToInteger() const { return static_cast<int>(base::NoBarrier_Load(&id_)); }
210
211   // Converts an integer representation back into a ThreadId
212   // (required for public API: v8::V8::TerminateExecution).
213   static ThreadId FromInteger(int id) { return ThreadId(id); }
214
215  private:
216   static const int kInvalidId = -1;
217
218   explicit ThreadId(int id) { base::NoBarrier_Store(&id_, id); }
219
220   static int AllocateThreadId();
221
222   static int GetCurrentThreadId();
223
224   base::Atomic32 id_;
225
226   static base::Atomic32 highest_thread_id_;
227
228   friend class Isolate;
229 };
230
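// Illustrative sketch: ThreadId values are compared with Equals() rather
// than operator==.  owner_id is a hypothetical value stored by the caller.
//
//   bool IsOwnerThread(ThreadId owner_id) {
//     return owner_id.IsValid() && ThreadId::Current().Equals(owner_id);
//   }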
231
232 #define FIELD_ACCESSOR(type, name)                 \
233   inline void set_##name(type v) { name##_ = v; }  \
234   inline type name() const { return name##_; }
235
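// For illustration, FIELD_ACCESSOR(uintptr_t, stack_limit) (used by
// PerIsolateThreadData below) expands to this setter/getter pair:
//
//   inline void set_stack_limit(uintptr_t v) { stack_limit_ = v; }
//   inline uintptr_t stack_limit() const { return stack_limit_; }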
236
237 class ThreadLocalTop BASE_EMBEDDED {
238  public:
239   // Does early low-level initialization that does not depend on the
240   // isolate being present.
241   ThreadLocalTop();
242
243   // Initialize the thread data.
244   void Initialize();
245
246   // Get the top C++ try catch handler or NULL if none are registered.
247   //
248   // This method is not guaranteed to return an address that can be
249   // used for comparison with addresses into the JS stack.  If such an
250   // address is needed, use try_catch_handler_address.
251   FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)
252
253   // Get the address of the top C++ try catch handler or NULL if
254   // none are registered.
255   //
256   // This method always returns an address that can be compared to
257   // pointers into the JavaScript stack.  When running on actual
258   // hardware, try_catch_handler_address and TryCatchHandler return
259   // the same pointer.  When running on a simulator with a separate JS
260   // stack, try_catch_handler_address returns a JS stack address that
261   // corresponds to the place on the JS stack where the C++ handler
262   // would have been if the stack were not separate.
263   Address try_catch_handler_address() {
264     return reinterpret_cast<Address>(
265         v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
266   }
267
268   void Free();
269
270   Isolate* isolate_;
271   // The context where the current execution method is created and for variable
272   // lookups.
273   Context* context_;
274   ThreadId thread_id_;
275   Object* pending_exception_;
276
277   // Communication channel between Isolate::FindHandler and the CEntryStub.
278   Context* pending_handler_context_;
279   Code* pending_handler_code_;
280   intptr_t pending_handler_offset_;
281   Address pending_handler_fp_;
282   Address pending_handler_sp_;
283
284   // Communication channel between Isolate::Throw and message consumers.
285   bool rethrowing_message_;
286   Object* pending_message_obj_;
287
288   // Use a separate value for scheduled exceptions to preserve the
289   // invariants that hold about pending_exception.  We may want to
290   // unify them later.
291   Object* scheduled_exception_;
292   bool external_caught_exception_;
293   SaveContext* save_context_;
294
295   // Stack.
296   Address c_entry_fp_;  // the frame pointer of the top c entry frame
297   Address handler_;     // try-blocks are chained through the stack
298   Address c_function_;  // C function that was called at c entry.
299
300   // Throwing an exception may cause a Promise rejection.  For this purpose
301   // we keep track of a stack of nested promises and the corresponding
302   // try-catch handlers.
303   PromiseOnStack* promise_on_stack_;
304
305 #ifdef USE_SIMULATOR
306   Simulator* simulator_;
307 #endif
308
309   Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
310   // the external callback we're currently in
311   ExternalCallbackScope* external_callback_scope_;
312   StateTag current_vm_state_;
313
314   // Call back function to report unsafe JS accesses.
315   v8::FailedAccessCheckCallback failed_access_check_callback_;
316
317  private:
318   void InitializeInternal();
319
320   v8::TryCatch* try_catch_handler_;
321 };
322
323
324 #if V8_TARGET_ARCH_ARM && !defined(__arm__) ||       \
325     V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
326     V8_TARGET_ARCH_PPC && !defined(__PPC__) ||       \
327     V8_TARGET_ARCH_MIPS && !defined(__mips__) ||     \
328     V8_TARGET_ARCH_MIPS64 && !defined(__mips__)
329
330 #define ISOLATE_INIT_SIMULATOR_LIST(V)                                         \
331   V(bool, simulator_initialized, false)                                        \
332   V(HashMap*, simulator_i_cache, NULL)                                         \
333   V(Redirection*, simulator_redirection, NULL)
334 #else
335
336 #define ISOLATE_INIT_SIMULATOR_LIST(V)
337
338 #endif
339
340
341 #ifdef DEBUG
342
343 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                                       \
344   V(CommentStatistic, paged_space_comments_statistics,                         \
345       CommentStatistic::kMaxComments + 1)                                      \
346   V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
347 #else
348
349 #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
350
351 #endif
352
353 #define ISOLATE_INIT_ARRAY_LIST(V)                                             \
354   /* SerializerDeserializer state. */                                          \
355   V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
356   V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
357   V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
358   V(int, suffix_table, (kBMMaxShift + 1))                                      \
359   V(uint32_t, private_random_seed, 2)                                          \
360   ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
361
362 typedef List<HeapObject*> DebugObjectCache;
363
364 #define ISOLATE_INIT_LIST(V)                                                   \
365   /* Assembler state. */                                                       \
366   V(FatalErrorCallback, exception_behavior, NULL)                              \
367   V(LogEventCallback, event_logger, NULL)                                      \
368   V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
369   /* To distinguish the function templates, so that we can find them in the */ \
370   /* function cache of the native context. */                                  \
371   V(int, next_serial_number, 0)                                                \
372   V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
373   /* Part of the state of liveedit. */                                         \
374   V(FunctionInfoListener*, active_function_info_listener, NULL)                \
375   /* State for Relocatable. */                                                 \
376   V(Relocatable*, relocatable_top, NULL)                                       \
377   V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
378   V(Object*, string_stream_current_security_token, NULL)                       \
379   V(ExternalReferenceTable*, external_reference_table, NULL)                   \
380   V(HashMap*, external_reference_map, NULL)                                    \
381   V(HashMap*, root_index_map, NULL)                                            \
382   V(int, pending_microtask_count, 0)                                           \
383   V(bool, autorun_microtasks, true)                                            \
384   V(HStatistics*, hstatistics, NULL)                                           \
385   V(CompilationStatistics*, turbo_statistics, NULL)                            \
386   V(HTracer*, htracer, NULL)                                                   \
387   V(CodeTracer*, code_tracer, NULL)                                            \
388   V(bool, fp_stubs_generated, false)                                           \
389   V(int, max_available_threads, 0)                                             \
390   V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
391   V(PromiseRejectCallback, promise_reject_callback, NULL)                      \
392   V(const v8::StartupData*, snapshot_blob, NULL)                               \
393   ISOLATE_INIT_SIMULATOR_LIST(V)
394
395 #define THREAD_LOCAL_TOP_ACCESSOR(type, name)                        \
396   inline void set_##name(type v) { thread_local_top_.name##_ = v; }  \
397   inline type name() const { return thread_local_top_.name##_; }
398
399 #define THREAD_LOCAL_TOP_ADDRESS(type, name) \
400   type* name##_address() { return &thread_local_top_.name##_; }
401
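// For illustration, THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id) and
// THREAD_LOCAL_TOP_ADDRESS(Object*, pending_exception), both used inside
// Isolate below, expand to:
//
//   inline void set_thread_id(ThreadId v) { thread_local_top_.thread_id_ = v; }
//   inline ThreadId thread_id() const { return thread_local_top_.thread_id_; }
//
//   Object** pending_exception_address() {
//     return &thread_local_top_.pending_exception_;
//   }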
402
403 class Isolate {
404   // These forward declarations are required to make the friend declarations in
405   // PerIsolateThreadData work on some older versions of gcc.
406   class ThreadDataTable;
407   class EntryStackItem;
408  public:
409   ~Isolate();
410
411   // A thread has a PerIsolateThreadData instance for each isolate that it has
412   // entered. That instance is allocated when the isolate is initially entered
413   // and reused on subsequent entries.
414   class PerIsolateThreadData {
415    public:
416     PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
417         : isolate_(isolate),
418           thread_id_(thread_id),
419           stack_limit_(0),
420           thread_state_(NULL),
421 #if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
422     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
423     !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
424     !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
425     !defined(__mips__) && V8_TARGET_ARCH_MIPS64
426           simulator_(NULL),
427 #endif
428           next_(NULL),
429           prev_(NULL) { }
430     ~PerIsolateThreadData();
431     Isolate* isolate() const { return isolate_; }
432     ThreadId thread_id() const { return thread_id_; }
433
434     FIELD_ACCESSOR(uintptr_t, stack_limit)
435     FIELD_ACCESSOR(ThreadState*, thread_state)
436
437 #if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
438     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
439     !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
440     !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
441     !defined(__mips__) && V8_TARGET_ARCH_MIPS64
442     FIELD_ACCESSOR(Simulator*, simulator)
443 #endif
444
445     bool Matches(Isolate* isolate, ThreadId thread_id) const {
446       return isolate_ == isolate && thread_id_.Equals(thread_id);
447     }
448
449    private:
450     Isolate* isolate_;
451     ThreadId thread_id_;
452     uintptr_t stack_limit_;
453     ThreadState* thread_state_;
454
455 #if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
456     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
457     !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
458     !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
459     !defined(__mips__) && V8_TARGET_ARCH_MIPS64
460     Simulator* simulator_;
461 #endif
462
463     PerIsolateThreadData* next_;
464     PerIsolateThreadData* prev_;
465
466     friend class Isolate;
467     friend class ThreadDataTable;
468     friend class EntryStackItem;
469
470     DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
471   };
472
473
474   enum AddressId {
475 #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
476     FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
477 #undef DECLARE_ENUM
478     kIsolateAddressCount
479   };
480
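  // For illustration, the DECLARE_ENUM expansion above yields enumerators of
  // the form kHandlerAddress, kCEntryFPAddress, ..., kJSEntrySPAddress,
  // terminated by kIsolateAddressCount.  A typical (illustrative) lookup:
  //
  //   Address handler_addr =
  //       isolate->get_address_from_id(Isolate::kHandlerAddress);
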
481   static void InitializeOncePerProcess();
482
483   // Returns the PerIsolateThreadData for the current thread (or NULL if one is
484   // not currently set).
485   static PerIsolateThreadData* CurrentPerIsolateThreadData() {
486     return reinterpret_cast<PerIsolateThreadData*>(
487         base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
488   }
489
490   // Returns the isolate inside which the current thread is running.
491   INLINE(static Isolate* Current()) {
492     DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
493     Isolate* isolate = reinterpret_cast<Isolate*>(
494         base::Thread::GetExistingThreadLocal(isolate_key_));
495     DCHECK(isolate != NULL);
496     return isolate;
497   }
498
499   INLINE(static Isolate* UncheckedCurrent()) {
500     DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
501     return reinterpret_cast<Isolate*>(
502         base::Thread::GetThreadLocal(isolate_key_));
503   }
504
505   // Like UncheckedCurrent, but skips the check that |isolate_key_| was
506   // initialized. Callers have to ensure that themselves.
507   INLINE(static Isolate* UnsafeCurrent()) {
508     return reinterpret_cast<Isolate*>(
509         base::Thread::GetThreadLocal(isolate_key_));
510   }
511
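  // Usage sketch (illustrative only): fetching the isolate and per-thread
  // data for the running thread.
  //
  //   Isolate* isolate = Isolate::Current();  // DCHECKs that it is set
  //   Isolate::PerIsolateThreadData* data =
  //       Isolate::CurrentPerIsolateThreadData();  // may be NULL
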
512   // Usually called by Init(), but can be called early, e.g. to allow
513   // testing components that require logging but not the whole
514   // isolate.
515   //
516   // Safe to call more than once.
517   void InitializeLoggingAndCounters();
518
519   bool Init(Deserializer* des);
520
521   // True if at least one thread Enter'ed this isolate.
522   bool IsInUse() { return entry_stack_ != NULL; }
523
524   // Destroys the non-default isolates.
525   // Sets default isolate into "has_been_disposed" state rather than destroying,
526   // for legacy API reasons.
527   void TearDown();
528
529   static void GlobalTearDown();
530
531   void ClearSerializerData();
532
533   // Find the PerThread for this particular (isolate, thread) combination.
534   // If one does not yet exist, return null.
535   PerIsolateThreadData* FindPerThreadDataForThisThread();
536
537   // Find the PerThread for the given (isolate, thread) combination.
538   // If one does not yet exist, return null.
539   PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
540
541   // Returns the key used to store the pointer to the current isolate.
542   // Used internally for V8 threads that do not execute JavaScript but still
543   // are part of the domain of an isolate (like the context switcher).
544   static base::Thread::LocalStorageKey isolate_key() {
545     return isolate_key_;
546   }
547
548   // Returns the key used to store process-wide thread IDs.
549   static base::Thread::LocalStorageKey thread_id_key() {
550     return thread_id_key_;
551   }
552
553   static base::Thread::LocalStorageKey per_isolate_thread_data_key();
554
555   // Mutex for serializing access to break control structures.
556   base::RecursiveMutex* break_access() { return &break_access_; }
557
558   Address get_address_from_id(AddressId id);
559
560   // Access to top context (where the current function object was created).
561   Context* context() { return thread_local_top_.context_; }
562   void set_context(Context* context) {
563     DCHECK(context == NULL || context->IsContext());
564     thread_local_top_.context_ = context;
565   }
566   Context** context_address() { return &thread_local_top_.context_; }
567
568   THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
569
570   // Access to current thread id.
571   THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
572
573   // Interface to pending exception.
574   Object* pending_exception() {
575     DCHECK(has_pending_exception());
576     DCHECK(!thread_local_top_.pending_exception_->IsException());
577     return thread_local_top_.pending_exception_;
578   }
579
580   void set_pending_exception(Object* exception_obj) {
581     DCHECK(!exception_obj->IsException());
582     thread_local_top_.pending_exception_ = exception_obj;
583   }
584
585   void clear_pending_exception() {
586     DCHECK(!thread_local_top_.pending_exception_->IsException());
587     thread_local_top_.pending_exception_ = heap_.the_hole_value();
588   }
589
590   THREAD_LOCAL_TOP_ADDRESS(Object*, pending_exception)
591
592   bool has_pending_exception() {
593     DCHECK(!thread_local_top_.pending_exception_->IsException());
594     return !thread_local_top_.pending_exception_->IsTheHole();
595   }
596
597   THREAD_LOCAL_TOP_ADDRESS(Context*, pending_handler_context)
598   THREAD_LOCAL_TOP_ADDRESS(Code*, pending_handler_code)
599   THREAD_LOCAL_TOP_ADDRESS(intptr_t, pending_handler_offset)
600   THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_fp)
601   THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_sp)
602
603   THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
604
605   void clear_pending_message() {
606     thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
607   }
608   v8::TryCatch* try_catch_handler() {
609     return thread_local_top_.try_catch_handler();
610   }
611   bool* external_caught_exception_address() {
612     return &thread_local_top_.external_caught_exception_;
613   }
614
615   THREAD_LOCAL_TOP_ADDRESS(Object*, scheduled_exception)
616
617   Address pending_message_obj_address() {
618     return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
619   }
620
621   Object* scheduled_exception() {
622     DCHECK(has_scheduled_exception());
623     DCHECK(!thread_local_top_.scheduled_exception_->IsException());
624     return thread_local_top_.scheduled_exception_;
625   }
626   bool has_scheduled_exception() {
627     DCHECK(!thread_local_top_.scheduled_exception_->IsException());
628     return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
629   }
630   void clear_scheduled_exception() {
631     DCHECK(!thread_local_top_.scheduled_exception_->IsException());
632     thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
633   }
634
635   bool IsJavaScriptHandlerOnTop(Object* exception);
636   bool IsExternalHandlerOnTop(Object* exception);
637
638   bool is_catchable_by_javascript(Object* exception) {
639     return exception != heap()->termination_exception();
640   }
641
642   // JS execution stack (see frames.h).
643   static Address c_entry_fp(ThreadLocalTop* thread) {
644     return thread->c_entry_fp_;
645   }
646   static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
647   Address c_function() { return thread_local_top_.c_function_; }
648
649   inline Address* c_entry_fp_address() {
650     return &thread_local_top_.c_entry_fp_;
651   }
652   inline Address* handler_address() { return &thread_local_top_.handler_; }
653   inline Address* c_function_address() {
654     return &thread_local_top_.c_function_;
655   }
656
657   // Bottom JS entry.
658   Address js_entry_sp() {
659     return thread_local_top_.js_entry_sp_;
660   }
661   inline Address* js_entry_sp_address() {
662     return &thread_local_top_.js_entry_sp_;
663   }
664
665   // Returns the global object of the current context. It could be
666   // a builtin object, or a JS global object.
667   Handle<GlobalObject> global_object() {
668     return Handle<GlobalObject>(context()->global_object());
669   }
670
671   // Returns the global proxy object of the current context.
672   JSObject* global_proxy() {
673     return context()->global_proxy();
674   }
675
676   Handle<JSBuiltinsObject> js_builtins_object() {
677     return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
678   }
679
680   static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
681   void FreeThreadResources() { thread_local_top_.Free(); }
682
683   // This method is called by the API after operations that may throw
684   // exceptions.  If an exception was thrown and not handled by an external
685   // handler, the exception is scheduled to be rethrown when we return to
686   // running JavaScript code.  If an exception is scheduled, true is returned.
687   bool OptionalRescheduleException(bool is_bottom_call);
688
689   // Push and pop a promise and the current try-catch handler.
690   void PushPromise(Handle<JSObject> promise, Handle<JSFunction> function);
691   void PopPromise();
692   Handle<Object> GetPromiseOnStackOnThrow();
693
694   class ExceptionScope {
695    public:
696     // Scope currently can only be used for regular exceptions,
697     // not termination exception.
698     explicit ExceptionScope(Isolate* isolate)
699         : isolate_(isolate),
700           pending_exception_(isolate_->pending_exception(), isolate_) {}
701
702     ~ExceptionScope() {
703       isolate_->set_pending_exception(*pending_exception_);
704     }
705
706    private:
707     Isolate* isolate_;
708     Handle<Object> pending_exception_;
709   };
710
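  // Usage sketch (illustrative only): preserving a pending exception across
  // code that may clobber it.  DoCleanup is a hypothetical helper.
  //
  //   {
  //     Isolate::ExceptionScope scope(isolate);  // saves pending_exception()
  //     isolate->clear_pending_exception();
  //     DoCleanup(isolate);
  //   }  // destructor restores the saved pending exception
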
711   void SetCaptureStackTraceForUncaughtExceptions(
712       bool capture,
713       int frame_limit,
714       StackTrace::StackTraceOptions options);
715
716   void PrintCurrentStackTrace(FILE* out);
717   void PrintStack(StringStream* accumulator);
718   void PrintStack(FILE* out);
719   Handle<String> StackTraceString();
720   NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
721                                       Object* object,
722                                       Map* map,
723                                       unsigned int magic2));
724   Handle<JSArray> CaptureCurrentStackTrace(
725       int frame_limit,
726       StackTrace::StackTraceOptions options);
727   Handle<Object> CaptureSimpleStackTrace(Handle<JSObject> error_object,
728                                          Handle<Object> caller);
729   void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
730   void CaptureAndSetSimpleStackTrace(Handle<JSObject> error_object,
731                                      Handle<Object> caller);
732   Handle<JSArray> GetDetailedStackTrace(Handle<JSObject> error_object);
733   Handle<JSArray> GetDetailedFromSimpleStackTrace(
734       Handle<JSObject> error_object);
735
736   // Returns whether the top context may access the given global object. If
737   // the result is false, the pending exception is guaranteed to be
738   // set.
739
740   bool MayAccess(Handle<JSObject> receiver);
741   bool IsInternallyUsedPropertyName(Handle<Object> name);
742   bool IsInternallyUsedPropertyName(Object* name);
743
744   void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
745   void ReportFailedAccessCheck(Handle<JSObject> receiver);
746
747   // Exception throwing support. The caller should use the result
748   // of Throw() as its return value.
749   Object* Throw(Object* exception, MessageLocation* location = NULL);
750   Object* ThrowIllegalOperation();
751
752   template <typename T>
753   MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
754                                        MessageLocation* location = NULL) {
755     Throw(*exception, location);
756     return MaybeHandle<T>();
757   }
758
759   // Re-throw an exception.  This involves no error reporting since error
760   // reporting was handled when the exception was thrown originally.
761   Object* ReThrow(Object* exception);
762
763   // Find the correct handler for the current pending exception. This also
764   // clears and returns the current pending exception.
765   Object* FindHandler();
766
767   // Tries to predict whether an exception will be caught. Note that this can
768   // only produce an estimate, because it is undecidable whether a finally
769   // clause will consume or re-throw an exception. We conservatively assume any
770   // finally clause will behave as if the exception were consumed.
771   enum CatchType { NOT_CAUGHT, CAUGHT_BY_JAVASCRIPT, CAUGHT_BY_EXTERNAL };
772   CatchType PredictExceptionCatcher();
773
774   void ScheduleThrow(Object* exception);
775   // Re-set pending message, script and positions reported to the TryCatch
776   // back to the TLS for re-use when rethrowing.
777   void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
778   // Un-schedule an exception that was caught by a TryCatch handler.
779   void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
780   void ReportPendingMessages();
781   // Returns the pending message location, if any, or an unfilled structure.
782   MessageLocation GetMessageLocation();
783
784   // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
785   Object* PromoteScheduledException();
786
787   // Attempts to compute the current source location, storing the
788   // result in the target out parameter.
789   void ComputeLocation(MessageLocation* target);
790   bool ComputeLocationFromException(MessageLocation* target,
791                                     Handle<Object> exception);
792   bool ComputeLocationFromStackTrace(MessageLocation* target,
793                                      Handle<Object> exception);
794
795   Handle<JSMessageObject> CreateMessage(Handle<Object> exception,
796                                         MessageLocation* location);
797
798   // Out of resource exception helpers.
799   Object* StackOverflow();
800   Object* TerminateExecution();
801   void CancelTerminateExecution();
802
803   void RequestInterrupt(InterruptCallback callback, void* data);
804   void InvokeApiInterruptCallbacks();
805
806   // Administration
807   void Iterate(ObjectVisitor* v);
808   void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
809   char* Iterate(ObjectVisitor* v, char* t);
810   void IterateThread(ThreadVisitor* v, char* t);
811
812   // Returns the current native context.
813   Handle<Context> native_context();
814
815   // Returns the native context of the calling JavaScript code.  That
816   // is, the native context of the top-most JavaScript frame.
817   Handle<Context> GetCallingNativeContext();
818
819   void RegisterTryCatchHandler(v8::TryCatch* that);
820   void UnregisterTryCatchHandler(v8::TryCatch* that);
821
822   char* ArchiveThread(char* to);
823   char* RestoreThread(char* from);
824
825   static const char* const kStackOverflowMessage;
826
827   static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
828   static const int kBMMaxShift = 250;        // See StringSearchBase.
829
830   // Accessors.
831 #define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
832   inline type name() const {                                            \
833     DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
834     return name##_;                                                     \
835   }                                                                     \
836   inline void set_##name(type value) {                                  \
837     DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
838     name##_ = value;                                                    \
839   }
840   ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
841 #undef GLOBAL_ACCESSOR
842
843 #define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
844   inline type* name() {                                                 \
845     DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
846     return &(name##_)[0];                                               \
847   }
848   ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
849 #undef GLOBAL_ARRAY_ACCESSOR
850
851 #define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)            \
852   Handle<type> name() {                                             \
853     return Handle<type>(native_context()->name(), this);            \
854   }                                                                 \
855   bool is_##name(type* value) {                                     \
856     return native_context()->is_##name(value);                      \
857   }
858   NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
859 #undef NATIVE_CONTEXT_FIELD_ACCESSOR
860
861   Bootstrapper* bootstrapper() { return bootstrapper_; }
862   Counters* counters() {
863     // Call InitializeLoggingAndCounters() if logging is needed before
864     // the isolate is fully initialized.
865     DCHECK(counters_ != NULL);
866     return counters_;
867   }
868   CodeRange* code_range() { return code_range_; }
869   RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
870   CompilationCache* compilation_cache() { return compilation_cache_; }
871   Logger* logger() {
872     // Call InitializeLoggingAndCounters() if logging is needed before
873     // the isolate is fully initialized.
874     DCHECK(logger_ != NULL);
875     return logger_;
876   }
877   StackGuard* stack_guard() { return &stack_guard_; }
878   Heap* heap() { return &heap_; }
879   StatsTable* stats_table();
880   StubCache* stub_cache() { return stub_cache_; }
881   CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
882   DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
883   ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
884   MaterializedObjectStore* materialized_object_store() {
885     return materialized_object_store_;
886   }
887
888   MemoryAllocator* memory_allocator() {
889     return memory_allocator_;
890   }
891
892   KeyedLookupCache* keyed_lookup_cache() {
893     return keyed_lookup_cache_;
894   }
895
896   ContextSlotCache* context_slot_cache() {
897     return context_slot_cache_;
898   }
899
900   DescriptorLookupCache* descriptor_lookup_cache() {
901     return descriptor_lookup_cache_;
902   }
903
904   HandleScopeData* handle_scope_data() { return &handle_scope_data_; }
905
906   HandleScopeImplementer* handle_scope_implementer() {
907     DCHECK(handle_scope_implementer_);
908     return handle_scope_implementer_;
909   }
910   Zone* runtime_zone() { return &runtime_zone_; }
911
912   UnicodeCache* unicode_cache() {
913     return unicode_cache_;
914   }
915
916   InnerPointerToCodeCache* inner_pointer_to_code_cache() {
917     return inner_pointer_to_code_cache_;
918   }
919
920   GlobalHandles* global_handles() { return global_handles_; }
921
922   EternalHandles* eternal_handles() { return eternal_handles_; }
923
924   ThreadManager* thread_manager() { return thread_manager_; }
925
926   StringTracker* string_tracker() { return string_tracker_; }
927
928   unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
929     return &jsregexp_uncanonicalize_;
930   }
931
932   unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
933     return &jsregexp_canonrange_;
934   }
935
936   RuntimeState* runtime_state() { return &runtime_state_; }
937
938   Builtins* builtins() { return &builtins_; }
939
940   void NotifyExtensionInstalled() {
941     has_installed_extensions_ = true;
942   }
943
944   bool has_installed_extensions() { return has_installed_extensions_; }
945
946   unibrow::Mapping<unibrow::Ecma262Canonicalize>*
947       regexp_macro_assembler_canonicalize() {
948     return &regexp_macro_assembler_canonicalize_;
949   }
950
951   RegExpStack* regexp_stack() { return regexp_stack_; }
952
953   unibrow::Mapping<unibrow::Ecma262Canonicalize>*
954       interp_canonicalize_mapping() {
955     return &interp_canonicalize_mapping_;
956   }
957
958   Debug* debug() { return debug_; }
959
960   CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
961   HeapProfiler* heap_profiler() const { return heap_profiler_; }
962
963 #ifdef DEBUG
964   HistogramInfo* heap_histograms() { return heap_histograms_; }
965
966   JSObject::SpillInformation* js_spill_information() {
967     return &js_spill_information_;
968   }
969 #endif
970
971   Factory* factory() { return reinterpret_cast<Factory*>(this); }
972
973   static const int kJSRegexpStaticOffsetsVectorSize = 128;
974
975   THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
976
977   THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
978
979   void SetData(uint32_t slot, void* data) {
980     DCHECK(slot < Internals::kNumIsolateDataSlots);
981     embedder_data_[slot] = data;
982   }
983   void* GetData(uint32_t slot) {
984     DCHECK(slot < Internals::kNumIsolateDataSlots);
985     return embedder_data_[slot];
986   }
987
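  // Usage sketch (illustrative only) for the embedder-data slots above;
  // EmbedderState and kSlot are hypothetical names.
  //
  //   struct EmbedderState { int flags; };
  //   static const uint32_t kSlot = 0;  // must be < kNumIsolateDataSlots
  //   EmbedderState state = { 0 };
  //   isolate->SetData(kSlot, &state);
  //   EmbedderState* restored =
  //       static_cast<EmbedderState*>(isolate->GetData(kSlot));
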
988   bool serializer_enabled() const { return serializer_enabled_; }
989   bool snapshot_available() const { return snapshot_blob_ != NULL; }
990
991   bool IsDead() { return has_fatal_error_; }
992   void SignalFatalError() { has_fatal_error_ = true; }
993
994   bool use_crankshaft() const;
995
996   bool initialized_from_snapshot() { return initialized_from_snapshot_; }
997
998   double time_millis_since_init() {
999     return base::OS::TimeCurrentMillis() - time_millis_at_init_;
1000   }
1001
1002   DateCache* date_cache() {
1003     return date_cache_;
1004   }
1005
1006   void set_date_cache(DateCache* date_cache) {
1007     if (date_cache != date_cache_) {
1008       delete date_cache_;
1009     }
1010     date_cache_ = date_cache;
1011   }
1012
1013   Map* get_initial_js_array_map(ElementsKind kind);
1014
1015   bool IsFastArrayConstructorPrototypeChainIntact();
1016
1017   CallInterfaceDescriptorData* call_descriptor_data(int index);
1018
1019   void IterateDeferredHandles(ObjectVisitor* visitor);
1020   void LinkDeferredHandles(DeferredHandles* deferred_handles);
1021   void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
1022
1023 #ifdef DEBUG
1024   bool IsDeferredHandle(Object** location);
1025 #endif  // DEBUG
1026
1027   bool concurrent_recompilation_enabled() {
1028     // Thread is only available with flag enabled.
1029     DCHECK(optimizing_compiler_thread_ == NULL ||
1030            FLAG_concurrent_recompilation);
1031     return optimizing_compiler_thread_ != NULL;
1032   }
1033
1034   bool concurrent_osr_enabled() const {
1035     // Thread is only available with flag enabled.
1036     DCHECK(optimizing_compiler_thread_ == NULL ||
1037            FLAG_concurrent_recompilation);
1038     return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
1039   }
1040
1041   OptimizingCompilerThread* optimizing_compiler_thread() {
1042     return optimizing_compiler_thread_;
1043   }
1044
1045   int id() const { return static_cast<int>(id_); }
1046
1047   HStatistics* GetHStatistics();
1048   CompilationStatistics* GetTurboStatistics();
1049   HTracer* GetHTracer();
1050   CodeTracer* GetCodeTracer();
1051
1052   void DumpAndResetCompilationStats();
1053
1054   FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
1055   void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
1056     function_entry_hook_ = function_entry_hook;
1057   }
1058
1059   void* stress_deopt_count_address() { return &stress_deopt_count_; }
1060
1061   inline base::RandomNumberGenerator* random_number_generator();
1062
1063   // Given an address occupied by a live code object, return that object.
1064   Object* FindCodeObject(Address a);
1065
1066   int NextOptimizationId() {
1067     int id = next_optimization_id_++;
1068     if (!Smi::IsValid(next_optimization_id_)) {
1069       next_optimization_id_ = 0;
1070     }
1071     return id;
1072   }
1073
1074   // Get (and lazily initialize) the registry for per-isolate symbols.
1075   Handle<JSObject> GetSymbolRegistry();
1076
1077   void AddCallCompletedCallback(CallCompletedCallback callback);
1078   void RemoveCallCompletedCallback(CallCompletedCallback callback);
1079   void FireCallCompletedCallback();
1080
1081   void SetPromiseRejectCallback(PromiseRejectCallback callback);
1082   void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
1083                            v8::PromiseRejectEvent event);
1084
1085   void EnqueueMicrotask(Handle<Object> microtask);
1086   void RunMicrotasks();
1087
1088   void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
1089   void CountUsage(v8::Isolate::UseCounterFeature feature);
1090
1091   BasicBlockProfiler* GetOrCreateBasicBlockProfiler();
1092   BasicBlockProfiler* basic_block_profiler() { return basic_block_profiler_; }
1093
1094   static Isolate* NewForTesting() { return new Isolate(false); }
1095
1096   std::string GetTurboCfgFileName();
1097
1098 #if TRACE_MAPS
1099   int GetNextUniqueSharedFunctionInfoId() { return next_unique_sfi_id_++; }
1100 #endif
1101
1102   void set_store_buffer_hash_set_1_address(
1103       uintptr_t* store_buffer_hash_set_1_address) {
1104     store_buffer_hash_set_1_address_ = store_buffer_hash_set_1_address;
1105   }
1106
1107   uintptr_t* store_buffer_hash_set_1_address() {
1108     return store_buffer_hash_set_1_address_;
1109   }
1110
1111   void set_store_buffer_hash_set_2_address(
1112       uintptr_t* store_buffer_hash_set_2_address) {
1113     store_buffer_hash_set_2_address_ = store_buffer_hash_set_2_address;
1114   }
1115
1116   uintptr_t* store_buffer_hash_set_2_address() {
1117     return store_buffer_hash_set_2_address_;
1118   }
1119
1120   void AddDetachedContext(Handle<Context> context);
1121   void CheckDetachedContextsAfterGC();
1122
1123   List<Object*>* partial_snapshot_cache() { return &partial_snapshot_cache_; }
1124
1125  protected:
1126   explicit Isolate(bool enable_serializer);
1127
1128  private:
1129   friend struct GlobalState;
1130   friend struct InitializeGlobalState;
1131
1132   // These fields are accessed through the API; their offsets must be kept in
1133   // sync with the v8::internal::Internals constants (in include/v8.h). This is
1134   // also verified in Isolate::Init() using runtime checks.
1135   void* embedder_data_[Internals::kNumIsolateDataSlots];
1136   Heap heap_;
1137
1138   // The per-process lock should be acquired before the ThreadDataTable is
1139   // modified.
1140   class ThreadDataTable {
1141    public:
1142     ThreadDataTable();
1143     ~ThreadDataTable();
1144
1145     PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
1146     void Insert(PerIsolateThreadData* data);
1147     void Remove(PerIsolateThreadData* data);
1148     void RemoveAllThreads(Isolate* isolate);
1149
1150    private:
1151     PerIsolateThreadData* list_;
1152   };
1153
1154   // These items form a stack in sync with threads Enter'ing and Exit'ing
1155   // the Isolate. The top of the stack points to a thread which is currently
1156   // running the Isolate. When the stack is empty, the Isolate is considered
1157   // not entered by any thread and can be Disposed.
1158   // If the same thread enters the Isolate more than once, the entry_count_
1159   // is incremented rather than a new item being pushed to the stack.
1160   class EntryStackItem {
1161    public:
1162     EntryStackItem(PerIsolateThreadData* previous_thread_data,
1163                    Isolate* previous_isolate,
1164                    EntryStackItem* previous_item)
1165         : entry_count(1),
1166           previous_thread_data(previous_thread_data),
1167           previous_isolate(previous_isolate),
1168           previous_item(previous_item) { }
1169
1170     int entry_count;
1171     PerIsolateThreadData* previous_thread_data;
1172     Isolate* previous_isolate;
1173     EntryStackItem* previous_item;
1174
1175    private:
1176     DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
1177   };
1178
1179   static base::LazyMutex thread_data_table_mutex_;
1180
1181   static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
1182   static base::Thread::LocalStorageKey isolate_key_;
1183   static base::Thread::LocalStorageKey thread_id_key_;
1184   static ThreadDataTable* thread_data_table_;
1185
1186   // A global counter for all generated Isolates, might overflow.
1187   static base::Atomic32 isolate_counter_;
1188
1189 #if DEBUG
1190   static base::Atomic32 isolate_key_created_;
1191 #endif
1192
1193   void Deinit();
1194
1195   static void SetIsolateThreadLocals(Isolate* isolate,
1196                                      PerIsolateThreadData* data);
1197
1198   // Find the PerThread for this particular (isolate, thread) combination.
1199   // If one does not yet exist, allocate a new one.
1200   PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
1201
1202   // Initializes the current thread to run this Isolate.
1203   // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1204   // at the same time; this should be prevented using external locking.
1205   void Enter();
1206
1207   // Exits the current thread. The previously entered Isolate is restored
1208   // for the thread.
1209   // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1210   // at the same time; this should be prevented using external locking.
1211   void Exit();
1212
1213   void InitializeThreadLocal();
1214
1215   void MarkCompactPrologue(bool is_compacting,
1216                            ThreadLocalTop* archived_thread_data);
1217   void MarkCompactEpilogue(bool is_compacting,
1218                            ThreadLocalTop* archived_thread_data);
1219
1220   void FillCache();
1221
1222   // Propagate pending exception message to the v8::TryCatch.
1223   // If there is no external try-catch or message was successfully propagated,
1224   // then return true.
1225   bool PropagatePendingExceptionToExternalTryCatch();
1226
1227   // Traverse prototype chain to find out whether the object is derived from
1228   // the Error object.
1229   bool IsErrorObject(Handle<Object> obj);
1230
1231   base::Atomic32 id_;
1232   EntryStackItem* entry_stack_;
1233   int stack_trace_nesting_level_;
1234   StringStream* incomplete_message_;
1235   Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
1236   Bootstrapper* bootstrapper_;
1237   RuntimeProfiler* runtime_profiler_;
1238   CompilationCache* compilation_cache_;
1239   Counters* counters_;
1240   CodeRange* code_range_;
1241   base::RecursiveMutex break_access_;
1242   Logger* logger_;
1243   StackGuard stack_guard_;
1244   StatsTable* stats_table_;
1245   StubCache* stub_cache_;
1246   CodeAgingHelper* code_aging_helper_;
1247   DeoptimizerData* deoptimizer_data_;
1248   MaterializedObjectStore* materialized_object_store_;
1249   ThreadLocalTop thread_local_top_;
1250   bool capture_stack_trace_for_uncaught_exceptions_;
1251   int stack_trace_for_uncaught_exceptions_frame_limit_;
1252   StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1253   MemoryAllocator* memory_allocator_;
1254   KeyedLookupCache* keyed_lookup_cache_;
1255   ContextSlotCache* context_slot_cache_;
1256   DescriptorLookupCache* descriptor_lookup_cache_;
1257   HandleScopeData handle_scope_data_;
1258   HandleScopeImplementer* handle_scope_implementer_;
1259   UnicodeCache* unicode_cache_;
1260   Zone runtime_zone_;
1261   InnerPointerToCodeCache* inner_pointer_to_code_cache_;
1262   GlobalHandles* global_handles_;
1263   EternalHandles* eternal_handles_;
1264   ThreadManager* thread_manager_;
1265   RuntimeState runtime_state_;
1266   Builtins builtins_;
1267   bool has_installed_extensions_;
1268   StringTracker* string_tracker_;
1269   unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
1270   unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
1271   unibrow::Mapping<unibrow::Ecma262Canonicalize>
1272       regexp_macro_assembler_canonicalize_;
1273   RegExpStack* regexp_stack_;
1274   DateCache* date_cache_;
1275   unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
1276   CallInterfaceDescriptorData* call_descriptor_data_;
1277   base::RandomNumberGenerator* random_number_generator_;
1278   // TODO(hpayer): Remove the following store buffer addresses.
1279   uintptr_t* store_buffer_hash_set_1_address_;
1280   uintptr_t* store_buffer_hash_set_2_address_;
1281
1282   // Whether the isolate has been created for snapshotting.
1283   bool serializer_enabled_;
1284
1285   // True if fatal error has been signaled for this isolate.
1286   bool has_fatal_error_;
1287
1288   // True if this isolate was initialized from a snapshot.
1289   bool initialized_from_snapshot_;
1290
1291   // Time stamp at initialization.
1292   double time_millis_at_init_;
1293
1294 #ifdef DEBUG
1295   // A static array of histogram info for each type.
1296   HistogramInfo heap_histograms_[LAST_TYPE + 1];
1297   JSObject::SpillInformation js_spill_information_;
1298 #endif
1299
1300   Debug* debug_;
1301   CpuProfiler* cpu_profiler_;
1302   HeapProfiler* heap_profiler_;
1303   FunctionEntryHook function_entry_hook_;
1304
1305   typedef std::pair<InterruptCallback, void*> InterruptEntry;
1306   std::queue<InterruptEntry> api_interrupts_queue_;
1307
1308 #define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
1309   type name##_;
1310   ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
1311 #undef GLOBAL_BACKING_STORE
1312
1313 #define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
1314   type name##_[length];
1315   ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
1316 #undef GLOBAL_ARRAY_BACKING_STORE
1317
1318 #ifdef DEBUG
1319   // This class is huge and has a number of fields controlled by
1320   // preprocessor defines. Make sure the offsets of these fields agree
1321   // between compilation units.
1322 #define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
1323   static const intptr_t name##_debug_offset_;
1324   ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
1325   ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
1326 #undef ISOLATE_FIELD_OFFSET
1327 #endif
1328
1329   DeferredHandles* deferred_handles_head_;
1330   OptimizingCompilerThread* optimizing_compiler_thread_;
1331
1332   // Counts deopt points if deopt_every_n_times is enabled.
1333   unsigned int stress_deopt_count_;
1334
1335   int next_optimization_id_;
1336
1337 #if TRACE_MAPS
1338   int next_unique_sfi_id_;
1339 #endif
1340
1341   // List of callbacks when a Call completes.
1342   List<CallCompletedCallback> call_completed_callbacks_;
1343
1344   v8::Isolate::UseCounterCallback use_counter_callback_;
1345   BasicBlockProfiler* basic_block_profiler_;
1346
1347   List<Object*> partial_snapshot_cache_;
1348
1349   friend class ExecutionAccess;
1350   friend class HandleScopeImplementer;
1351   friend class OptimizingCompilerThread;
1352   friend class SweeperThread;
1353   friend class ThreadManager;
1354   friend class Simulator;
1355   friend class StackGuard;
1356   friend class ThreadId;
1357   friend class TestMemoryAllocatorScope;
1358   friend class TestCodeRangeScope;
1359   friend class v8::Isolate;
1360   friend class v8::Locker;
1361   friend class v8::Unlocker;
1362   friend v8::StartupData v8::V8::CreateSnapshotDataBlob(const char*);
1363
1364   DISALLOW_COPY_AND_ASSIGN(Isolate);
1365 };
1366
1367
1368 #undef FIELD_ACCESSOR
1369 #undef THREAD_LOCAL_TOP_ACCESSOR
1370
1371
1372 class PromiseOnStack {
1373  public:
1374   PromiseOnStack(Handle<JSFunction> function, Handle<JSObject> promise,
1375                  PromiseOnStack* prev)
1376       : function_(function), promise_(promise), prev_(prev) {}
1377   Handle<JSFunction> function() { return function_; }
1378   Handle<JSObject> promise() { return promise_; }
1379   PromiseOnStack* prev() { return prev_; }
1380
1381  private:
1382   Handle<JSFunction> function_;
1383   Handle<JSObject> promise_;
1384   PromiseOnStack* prev_;
1385 };
1386
1387
1388 // If the GCC version is 4.1.x or 4.2.x, an additional field is added to the
1389 // class as a workaround for a bug in the generated code found with these
1390 // versions of GCC. See V8 issue 122 for details.
1391 class SaveContext BASE_EMBEDDED {
1392  public:
1393   inline explicit SaveContext(Isolate* isolate);
1394
1395   ~SaveContext() {
1396     isolate_->set_context(context_.is_null() ? NULL : *context_);
1397     isolate_->set_save_context(prev_);
1398   }
1399
1400   Handle<Context> context() { return context_; }
1401   SaveContext* prev() { return prev_; }
1402
1403   // Returns true if this save context is below a given JavaScript frame.
1404   bool IsBelowFrame(JavaScriptFrame* frame) {
1405     return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1406   }
1407
1408  private:
1409   Isolate* isolate_;
1410   Handle<Context> context_;
1411   SaveContext* prev_;
1412   Address c_entry_fp_;
1413 };
1414
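// Usage sketch (illustrative only): SaveContext restores the isolate's
// context and the save-context chain when the scope ends.  other_context is
// a hypothetical Handle<Context>.
//
//   {
//     SaveContext save(isolate);
//     isolate->set_context(*other_context);
//     // ... run code that expects other_context to be the current context ...
//   }  // previous context and save_context() chain restored here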
1415
1416 class AssertNoContextChange BASE_EMBEDDED {
1417 #ifdef DEBUG
1418  public:
1419   explicit AssertNoContextChange(Isolate* isolate)
1420     : isolate_(isolate),
1421       context_(isolate->context(), isolate) { }
1422   ~AssertNoContextChange() {
1423     DCHECK(isolate_->context() == *context_);
1424   }
1425
1426  private:
1427   Isolate* isolate_;
1428   Handle<Context> context_;
1429 #else
1430  public:
1431   explicit AssertNoContextChange(Isolate* isolate) { }
1432 #endif
1433 };
1434
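// Usage sketch (illustrative only): guarding a block that must not switch
// the current context (the check is active in debug builds only).
//
//   {
//     AssertNoContextChange no_context_change(isolate);
//     // ... code that must leave isolate->context() unchanged ...
//   }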
1435
1436 class ExecutionAccess BASE_EMBEDDED {
1437  public:
1438   explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
1439     Lock(isolate);
1440   }
1441   ~ExecutionAccess() { Unlock(isolate_); }
1442
1443   static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
1444   static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
1445
1446   static bool TryLock(Isolate* isolate) {
1447     return isolate->break_access()->TryLock();
1448   }
1449
1450  private:
1451   Isolate* isolate_;
1452 };
1453
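// Illustrative sketch: ExecutionAccess is a scoped lock around
// Isolate::break_access().
//
//   {
//     ExecutionAccess access(isolate);  // acquires break_access()
//     // ... inspect or modify break control structures ...
//   }  // released when the scope exits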
1454
1455 // Support for checking for stack-overflows.
1456 class StackLimitCheck BASE_EMBEDDED {
1457  public:
1458   explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
1459
1460   // Use this to check for stack-overflows in C++ code.
1461   inline bool HasOverflowed() const {
1462     StackGuard* stack_guard = isolate_->stack_guard();
1463     return GetCurrentStackPosition() < stack_guard->real_climit();
1464   }
1465
1466   // Use this to check for stack-overflow when entering runtime from JS code.
1467   bool JsHasOverflowed() const;
1468
1469  private:
1470   Isolate* isolate_;
1471 };
1472
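// Illustrative sketch: the common stack-overflow guard pattern in C++ code.
//
//   StackLimitCheck check(isolate);
//   if (check.HasOverflowed()) return isolate->StackOverflow();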
1473
1474 // Support for temporarily postponing interrupts. When the outermost
1475 // postpone scope is left, the interrupts will be re-enabled and any
1476 // interrupts that occurred while in the scope will be taken into
1477 // account.
1478 class PostponeInterruptsScope BASE_EMBEDDED {
1479  public:
1480   PostponeInterruptsScope(Isolate* isolate,
1481                           int intercept_mask = StackGuard::ALL_INTERRUPTS)
1482       : stack_guard_(isolate->stack_guard()),
1483         intercept_mask_(intercept_mask),
1484         intercepted_flags_(0) {
1485     stack_guard_->PushPostponeInterruptsScope(this);
1486   }
1487
1488   ~PostponeInterruptsScope() {
1489     stack_guard_->PopPostponeInterruptsScope();
1490   }
1491
1492   // Find the bottom-most scope that intercepts this interrupt.
1493   // Return whether the interrupt has been intercepted.
1494   bool Intercept(StackGuard::InterruptFlag flag);
1495
1496  private:
1497   StackGuard* stack_guard_;
1498   int intercept_mask_;
1499   int intercepted_flags_;
1500   PostponeInterruptsScope* prev_;
1501
1502   friend class StackGuard;
1503 };
1504
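// Illustrative sketch: deferring interrupts around a section that must not
// be interrupted.
//
//   {
//     PostponeInterruptsScope postpone(isolate);  // all interrupts by default
//     // ... non-interruptible work ...
//   }  // postponed interrupts take effect once the outermost scope is left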
1505
1506 class CodeTracer FINAL : public Malloced {
1507  public:
1508   explicit CodeTracer(int isolate_id)
1509       : file_(NULL),
1510         scope_depth_(0) {
1511     if (!ShouldRedirect()) {
1512       file_ = stdout;
1513       return;
1514     }
1515
1516     if (FLAG_redirect_code_traces_to == NULL) {
1517       SNPrintF(filename_,
1518                "code-%d-%d.asm",
1519                base::OS::GetCurrentProcessId(),
1520                isolate_id);
1521     } else {
1522       StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
1523     }
1524
1525     WriteChars(filename_.start(), "", 0, false);
1526   }
1527
1528   class Scope {
1529    public:
1530     explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
1531     ~Scope() { tracer_->CloseFile();  }
1532
1533     FILE* file() const { return tracer_->file(); }
1534
1535    private:
1536     CodeTracer* tracer_;
1537   };
1538
1539   void OpenFile() {
1540     if (!ShouldRedirect()) {
1541       return;
1542     }
1543
1544     if (file_ == NULL) {
1545       file_ = base::OS::FOpen(filename_.start(), "ab");
1546     }
1547
1548     scope_depth_++;
1549   }
1550
1551   void CloseFile() {
1552     if (!ShouldRedirect()) {
1553       return;
1554     }
1555
1556     if (--scope_depth_ == 0) {
1557       fclose(file_);
1558       file_ = NULL;
1559     }
1560   }
1561
1562   FILE* file() const { return file_; }
1563
1564  private:
1565   static bool ShouldRedirect() {
1566     return FLAG_redirect_code_traces;
1567   }
1568
1569   EmbeddedVector<char, 128> filename_;
1570   FILE* file_;
1571   int scope_depth_;
1572 };
1573
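// Usage sketch (illustrative only): CodeTracer::Scope opens and closes the
// trace file around a block of output; PrintF is V8's printf-style helper.
//
//   CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
//   PrintF(tracing_scope.file(), "--- example trace output ---\n");
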
1574 } }  // namespace v8::internal
1575
1576 #endif  // V8_ISOLATE_H_