// deps/v8/src/isolate.h — vendored from platform/upstream/nodejs.git
// (commit fdd183288897f4721d8aa96b5435db48b2a73b95).
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_ISOLATE_H_
6 #define V8_ISOLATE_H_
7
8 #include <queue>
9 #include "include/v8-debug.h"
10 #include "src/allocation.h"
11 #include "src/assert-scope.h"
12 #include "src/base/atomicops.h"
13 #include "src/builtins.h"
14 #include "src/contexts.h"
15 #include "src/date.h"
16 #include "src/execution.h"
17 #include "src/frames.h"
18 #include "src/global-handles.h"
19 #include "src/handles.h"
20 #include "src/hashmap.h"
21 #include "src/heap/heap.h"
22 #include "src/optimizing-compiler-thread.h"
23 #include "src/regexp-stack.h"
24 #include "src/runtime/runtime.h"
25 #include "src/runtime-profiler.h"
26 #include "src/zone.h"
27
28 namespace v8 {
29
30 namespace base {
31 class RandomNumberGenerator;
32 }
33
34 namespace internal {
35
36 class BasicBlockProfiler;
37 class Bootstrapper;
38 class CallInterfaceDescriptorData;
39 class CodeGenerator;
40 class CodeRange;
41 class CodeStubDescriptor;
42 class CodeTracer;
43 class CompilationCache;
44 class CompilationStatistics;
45 class ContextSlotCache;
46 class Counters;
47 class CpuFeatures;
48 class CpuProfiler;
49 class DeoptimizerData;
50 class Deserializer;
51 class EmptyStatement;
52 class ExternalCallbackScope;
53 class ExternalReferenceTable;
54 class Factory;
55 class FunctionInfoListener;
56 class HandleScopeImplementer;
57 class HeapProfiler;
58 class HStatistics;
59 class HTracer;
60 class InlineRuntimeFunctionsTable;
61 class InnerPointerToCodeCache;
62 class MaterializedObjectStore;
63 class CodeAgingHelper;
64 class RegExpStack;
65 class SaveContext;
66 class StringTracker;
67 class StubCache;
68 class SweeperThread;
69 class ThreadManager;
70 class ThreadState;
71 class ThreadVisitor;  // Defined in v8threads.h
72 class UnicodeCache;
73 template <StateTag Tag> class VMState;
74
75 // 'void function pointer', used to roundtrip the
76 // ExternalReference::ExternalReferenceRedirector since we can not include
77 // assembler.h, where it is defined, here.
78 typedef void* ExternalReferenceRedirectorPointer();
79
80
81 class Debug;
82 class Debugger;
83 class PromiseOnStack;
84
85 #if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
86     !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
87     !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
88     !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
89     !defined(__mips__) && V8_TARGET_ARCH_MIPS64
90 class Redirection;
91 class Simulator;
92 #endif
93
94
// Static indirection table for handles to constants.  If a frame
// element or a Result represents a constant, the data contains an
// index into this table of handles to the actual constants.
101 typedef ZoneList<Handle<Object> > ZoneObjectList;
102
// Checks for a scheduled exception and, if one is present, promotes it to a
// pending exception and returns the exception object from the current
// function.  For use in code that returns a raw failure Object*.
#define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate)    \
  do {                                                    \
    Isolate* __isolate__ = (isolate);                     \
    if (__isolate__->has_scheduled_exception()) {         \
      return __isolate__->PromoteScheduledException();    \
    }                                                     \
  } while (false)

// Macros for MaybeHandle.

// As above, but returns the caller-supplied |value| instead of the promoted
// exception object.
#define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
  do {                                                      \
    Isolate* __isolate__ = (isolate);                       \
    if (__isolate__->has_scheduled_exception()) {           \
      __isolate__->PromoteScheduledException();             \
      return value;                                         \
    }                                                       \
  } while (false)

// As above, returning an empty MaybeHandle<T>.
#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \
  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<T>())

// Evaluates |call| (which must yield a MaybeHandle) and assigns the result to
// |dst|; if the call failed, returns |value|.  A pending exception must
// already be set on failure (DCHECKed).
#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)  \
  do {                                                               \
    if (!(call).ToHandle(&dst)) {                                    \
      DCHECK((isolate)->has_pending_exception());                    \
      return value;                                                  \
    }                                                                \
  } while (false)

// As above, returning the heap's exception sentinel on failure.
#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(                             \
      isolate, dst, call, isolate->heap()->exception())

// As above, returning an empty MaybeHandle<T> on failure.
#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())

// Constructs an error object via |isolate->factory()->call|, throws it, and
// returns MaybeHandle<T> from the current function.
#define THROW_NEW_ERROR(isolate, call, T)                                    \
  do {                                                                       \
    Handle<Object> __error__;                                                \
    ASSIGN_RETURN_ON_EXCEPTION(isolate, __error__, isolate->factory()->call, \
                               T);                                           \
    return isolate->Throw<T>(__error__);                                     \
  } while (false)

// As above, for functions that return a raw failure Object*.
#define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call)             \
  do {                                                            \
    Handle<Object> __error__;                                     \
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, __error__,        \
                                       isolate->factory()->call); \
    return isolate->Throw(*__error__);                            \
  } while (false)

// Returns |value| if |call| produced an empty (failed) handle; a pending
// exception must already be set in that case (DCHECKed).
#define RETURN_ON_EXCEPTION_VALUE(isolate, call, value)            \
  do {                                                             \
    if ((call).is_null()) {                                        \
      DCHECK((isolate)->has_pending_exception());                  \
      return value;                                                \
    }                                                              \
  } while (false)

// As above, returning the heap's exception sentinel.
#define RETURN_FAILURE_ON_EXCEPTION(isolate, call)  \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())

// As above, returning an empty MaybeHandle<T>.
#define RETURN_ON_EXCEPTION(isolate, call, T)  \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
169
170
// (CamelName, hacker_name) pairs for the isolate-internal addresses that are
// exposed by name: each pair becomes a k<CamelName>Address enumerator in
// Isolate::AddressId and is resolvable via Isolate::get_address_from_id().
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(CFunction, c_function)                              \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)
179
180
181 // Platform-independent, reliable thread identifier.
182 class ThreadId {
183  public:
184   // Creates an invalid ThreadId.
185   ThreadId() { base::NoBarrier_Store(&id_, kInvalidId); }
186
187   ThreadId& operator=(const ThreadId& other) {
188     base::NoBarrier_Store(&id_, base::NoBarrier_Load(&other.id_));
189     return *this;
190   }
191
192   // Returns ThreadId for current thread.
193   static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
194
195   // Returns invalid ThreadId (guaranteed not to be equal to any thread).
196   static ThreadId Invalid() { return ThreadId(kInvalidId); }
197
198   // Compares ThreadIds for equality.
199   INLINE(bool Equals(const ThreadId& other) const) {
200     return base::NoBarrier_Load(&id_) == base::NoBarrier_Load(&other.id_);
201   }
202
203   // Checks whether this ThreadId refers to any thread.
204   INLINE(bool IsValid() const) {
205     return base::NoBarrier_Load(&id_) != kInvalidId;
206   }
207
208   // Converts ThreadId to an integer representation
209   // (required for public API: V8::V8::GetCurrentThreadId).
210   int ToInteger() const { return static_cast<int>(base::NoBarrier_Load(&id_)); }
211
212   // Converts ThreadId to an integer representation
213   // (required for public API: V8::V8::TerminateExecution).
214   static ThreadId FromInteger(int id) { return ThreadId(id); }
215
216  private:
217   static const int kInvalidId = -1;
218
219   explicit ThreadId(int id) { base::NoBarrier_Store(&id_, id); }
220
221   static int AllocateThreadId();
222
223   static int GetCurrentThreadId();
224
225   base::Atomic32 id_;
226
227   static base::Atomic32 highest_thread_id_;
228
229   friend class Isolate;
230 };
231
232
// Defines a trivial inline setter/getter pair for a `name##_` member of the
// enclosing class.
#define FIELD_ACCESSOR(type, name)                 \
  inline void set_##name(type v) { name##_ = v; }  \
  inline type name() const { return name##_; }
236
237
// Per-thread execution state for an isolate (current context, pending
// exception, stack markers, ...).  NOTE(review): several fields are exposed
// by raw address through Isolate accessors (e.g. c_entry_fp_address(),
// pending_exception_address()), so field order/layout should not be changed
// without checking those users.
class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack.  If such an
  // address is needed, use try_catch_handler_address.
  FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack.  When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer.  When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  Address try_catch_handler_address() {
    return reinterpret_cast<Address>(
        v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
  }

  // Releases resources held by this thread state.
  void Free();

  // The isolate this thread state belongs to.
  Isolate* isolate_;
  // The context where the current execution method is created and for variable
  // lookups.
  Context* context_;
  // Id of the thread this state belongs to.
  ThreadId thread_id_;
  // Pending exception object; holds the hole value when no exception is
  // pending (see Isolate::has_pending_exception()).
  Object* pending_exception_;
  bool has_pending_message_;
  bool rethrowing_message_;
  // Message object/script associated with the pending exception; hole when
  // cleared (see Isolate::clear_pending_message()).
  Object* pending_message_obj_;
  Object* pending_message_script_;
  int pending_message_start_pos_;
  int pending_message_end_pos_;
  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception.  We may want to
  // unify them later.
  Object* scheduled_exception_;
  bool external_caught_exception_;
  // Innermost saved-context scope (see SaveContext).
  SaveContext* save_context_;
  // The external TryCatch handler that caught the current exception, if any.
  v8::TryCatch* catcher_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;   // try-blocks are chained through the stack
  Address c_function_;  // C function that was called at c entry.

  // Throwing an exception may cause a Promise rejection.  For this purpose
  // we keep track of a stack of nested promises and the corresponding
  // try-catch handlers.
  PromiseOnStack* promise_on_stack_;

#ifdef USE_SIMULATOR
  Simulator* simulator_;
#endif

  Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
  // the external callback we're currently in
  ExternalCallbackScope* external_callback_scope_;
  // Current VM state tag (see VMState).
  StateTag current_vm_state_;

  // Generated code scratch locations.
  int32_t formal_count_;

  // Call back function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

 private:
  // Shared initialization used by Initialize(); does not depend on the
  // isolate being present.
  void InitializeInternal();

  // Top of the TryCatch chain; access via the FIELD_ACCESSOR above.
  v8::TryCatch* try_catch_handler_;
};
321
322
// Extra per-isolate state that only exists when V8 is built for a target
// architecture different from the host (i.e. when it runs on the simulator).
// The condition mirrors the one guarding the Redirection/Simulator forward
// declarations near the top of this file.
#if V8_TARGET_ARCH_ARM && !defined(__arm__) ||       \
    V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
    V8_TARGET_ARCH_PPC && !defined(__PPC__) ||       \
    V8_TARGET_ARCH_MIPS && !defined(__mips__) ||     \
    V8_TARGET_ARCH_MIPS64 && !defined(__mips__)

#define ISOLATE_INIT_SIMULATOR_LIST(V)                                         \
  V(bool, simulator_initialized, false)                                        \
  V(HashMap*, simulator_i_cache, NULL)                                         \
  V(Redirection*, simulator_redirection, NULL)
#else

#define ISOLATE_INIT_SIMULATOR_LIST(V)

#endif
338
339
#ifdef DEBUG

// Debug-only array-valued per-isolate state (element counts are compile-time
// constants).
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                                       \
  V(CommentStatistic, paged_space_comments_statistics,                         \
      CommentStatistic::kMaxComments + 1)                                      \
  V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
#else

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif

// Array-valued per-isolate state: each V(type, name, length) entry declares
// an embedded array field of |length| elements.
#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
  /* Scratch tables for the regexp engine and string search. */                \
  V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
  V(int, suffix_table, (kBMMaxShift + 1))                                      \
  V(uint32_t, private_random_seed, 2)                                          \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

// List of heap objects used while building debug string streams
// (see string_stream_debug_object_cache in ISOLATE_INIT_LIST).
typedef List<HeapObject*> DebugObjectCache;
362
// Scalar per-isolate state: each V(type, name, initial_value) entry declares
// a field with the given default plus accessors on Isolate.
#define ISOLATE_INIT_LIST(V)                                                   \
  /* SerializerDeserializer state. */                                          \
  V(int, serialize_partial_snapshot_cache_length, 0)                           \
  V(int, serialize_partial_snapshot_cache_capacity, 0)                         \
  V(Object**, serialize_partial_snapshot_cache, NULL)                          \
  /* Assembler state. */                                                       \
  V(FatalErrorCallback, exception_behavior, NULL)                              \
  V(LogEventCallback, event_logger, NULL)                                      \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(int, next_serial_number, 0)                                                \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
  /* Part of the state of liveedit. */                                         \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
  /* State for Relocatable. */                                                 \
  V(Relocatable*, relocatable_top, NULL)                                       \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
  V(Object*, string_stream_current_security_token, NULL)                       \
  /* Serializer state. */                                                      \
  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
  V(int, pending_microtask_count, 0)                                           \
  V(bool, autorun_microtasks, true)                                            \
  V(HStatistics*, hstatistics, NULL)                                           \
  V(CompilationStatistics*, turbo_statistics, NULL)                            \
  V(HTracer*, htracer, NULL)                                                   \
  V(CodeTracer*, code_tracer, NULL)                                            \
  V(bool, fp_stubs_generated, false)                                           \
  V(int, max_available_threads, 0)                                             \
  V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
  V(PromiseRejectCallback, promise_reject_callback, NULL)                      \
  ISOLATE_INIT_SIMULATOR_LIST(V)
395
// Defines a setter/getter pair on Isolate that forwards to the corresponding
// `name##_` field of its embedded ThreadLocalTop.
#define THREAD_LOCAL_TOP_ACCESSOR(type, name)                        \
  inline void set_##name(type v) { thread_local_top_.name##_ = v; }  \
  inline type name() const { return thread_local_top_.name##_; }
399
400
401 class Isolate {
402   // These forward declarations are required to make the friend declarations in
403   // PerIsolateThreadData work on some older versions of gcc.
404   class ThreadDataTable;
405   class EntryStackItem;
406  public:
407   ~Isolate();
408
  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
// Simulator-only member, present when target and host architectures differ
// (same condition as ISOLATE_INIT_SIMULATOR_LIST).
#if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
    !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS64
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
    ~PerIsolateThreadData();
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }

    FIELD_ACCESSOR(uintptr_t, stack_limit)
    FIELD_ACCESSOR(ThreadState*, thread_state)

#if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
    !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS64
    FIELD_ACCESSOR(Simulator*, simulator)
#endif

    // True if this entry belongs to the given (isolate, thread) pair.
    bool Matches(Isolate* isolate, ThreadId thread_id) const {
      return isolate_ == isolate && thread_id_.Equals(thread_id);
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

#if !defined(__arm__) && V8_TARGET_ARCH_ARM ||       \
    !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
    !defined(__PPC__) && V8_TARGET_ARCH_PPC ||       \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS ||     \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS64
    Simulator* simulator_;
#endif

    // Intrusive list links; NOTE(review): presumably maintained by the
    // ThreadDataTable friend below -- verify in isolate.cc.
    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };
470
471
472   enum AddressId {
473 #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
474     FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
475 #undef DECLARE_ENUM
476     kIsolateAddressCount
477   };
478
479   static void InitializeOncePerProcess();
480
481   // Returns the PerIsolateThreadData for the current thread (or NULL if one is
482   // not currently set).
483   static PerIsolateThreadData* CurrentPerIsolateThreadData() {
484     return reinterpret_cast<PerIsolateThreadData*>(
485         base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
486   }
487
488   // Returns the isolate inside which the current thread is running.
489   INLINE(static Isolate* Current()) {
490     DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
491     Isolate* isolate = reinterpret_cast<Isolate*>(
492         base::Thread::GetExistingThreadLocal(isolate_key_));
493     DCHECK(isolate != NULL);
494     return isolate;
495   }
496
497   INLINE(static Isolate* UncheckedCurrent()) {
498     DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
499     return reinterpret_cast<Isolate*>(
500         base::Thread::GetThreadLocal(isolate_key_));
501   }
502
503   // Like UncheckedCurrent, but skips the check that |isolate_key_| was
504   // initialized. Callers have to ensure that themselves.
505   INLINE(static Isolate* UnsafeCurrent()) {
506     return reinterpret_cast<Isolate*>(
507         base::Thread::GetThreadLocal(isolate_key_));
508   }
509
510   // Usually called by Init(), but can be called early e.g. to allow
511   // testing components that require logging but not the whole
512   // isolate.
513   //
514   // Safe to call more than once.
515   void InitializeLoggingAndCounters();
516
517   bool Init(Deserializer* des);
518
519   // True if at least one thread Enter'ed this isolate.
520   bool IsInUse() { return entry_stack_ != NULL; }
521
522   // Destroys the non-default isolates.
523   // Sets default isolate into "has_been_disposed" state rather then destroying,
524   // for legacy API reasons.
525   void TearDown();
526
527   static void GlobalTearDown();
528
529   // Find the PerThread for this particular (isolate, thread) combination
530   // If one does not yet exist, return null.
531   PerIsolateThreadData* FindPerThreadDataForThisThread();
532
533   // Find the PerThread for given (isolate, thread) combination
534   // If one does not yet exist, return null.
535   PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
536
537   // Returns the key used to store the pointer to the current isolate.
538   // Used internally for V8 threads that do not execute JavaScript but still
539   // are part of the domain of an isolate (like the context switcher).
540   static base::Thread::LocalStorageKey isolate_key() {
541     return isolate_key_;
542   }
543
544   // Returns the key used to store process-wide thread IDs.
545   static base::Thread::LocalStorageKey thread_id_key() {
546     return thread_id_key_;
547   }
548
549   static base::Thread::LocalStorageKey per_isolate_thread_data_key();
550
551   // Mutex for serializing access to break control structures.
552   base::RecursiveMutex* break_access() { return &break_access_; }
553
554   Address get_address_from_id(AddressId id);
555
556   // Access to top context (where the current function object was created).
557   Context* context() { return thread_local_top_.context_; }
558   void set_context(Context* context) {
559     DCHECK(context == NULL || context->IsContext());
560     thread_local_top_.context_ = context;
561   }
562   Context** context_address() { return &thread_local_top_.context_; }
563
564   THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
565
566   // Access to current thread id.
567   THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
568
569   // Interface to pending exception.
570   Object* pending_exception() {
571     DCHECK(has_pending_exception());
572     DCHECK(!thread_local_top_.pending_exception_->IsException());
573     return thread_local_top_.pending_exception_;
574   }
575
576   void set_pending_exception(Object* exception_obj) {
577     DCHECK(!exception_obj->IsException());
578     thread_local_top_.pending_exception_ = exception_obj;
579   }
580
581   void clear_pending_exception() {
582     DCHECK(!thread_local_top_.pending_exception_->IsException());
583     thread_local_top_.pending_exception_ = heap_.the_hole_value();
584   }
585
586   Object** pending_exception_address() {
587     return &thread_local_top_.pending_exception_;
588   }
589
590   bool has_pending_exception() {
591     DCHECK(!thread_local_top_.pending_exception_->IsException());
592     return !thread_local_top_.pending_exception_->IsTheHole();
593   }
594
595   THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
596
597   void clear_pending_message() {
598     thread_local_top_.has_pending_message_ = false;
599     thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
600     thread_local_top_.pending_message_script_ = heap_.the_hole_value();
601   }
602   v8::TryCatch* try_catch_handler() {
603     return thread_local_top_.try_catch_handler();
604   }
605   Address try_catch_handler_address() {
606     return thread_local_top_.try_catch_handler_address();
607   }
608   bool* external_caught_exception_address() {
609     return &thread_local_top_.external_caught_exception_;
610   }
611
612   THREAD_LOCAL_TOP_ACCESSOR(v8::TryCatch*, catcher)
613
614   Object** scheduled_exception_address() {
615     return &thread_local_top_.scheduled_exception_;
616   }
617
618   Address pending_message_obj_address() {
619     return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
620   }
621
622   Address has_pending_message_address() {
623     return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
624   }
625
626   Address pending_message_script_address() {
627     return reinterpret_cast<Address>(
628         &thread_local_top_.pending_message_script_);
629   }
630
631   Object* scheduled_exception() {
632     DCHECK(has_scheduled_exception());
633     DCHECK(!thread_local_top_.scheduled_exception_->IsException());
634     return thread_local_top_.scheduled_exception_;
635   }
636   bool has_scheduled_exception() {
637     DCHECK(!thread_local_top_.scheduled_exception_->IsException());
638     return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
639   }
640   void clear_scheduled_exception() {
641     DCHECK(!thread_local_top_.scheduled_exception_->IsException());
642     thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
643   }
644
645   bool HasExternalTryCatch();
646   bool IsFinallyOnTop();
647
648   bool is_catchable_by_javascript(Object* exception) {
649     return exception != heap()->termination_exception();
650   }
651
652   // Serializer.
653   void PushToPartialSnapshotCache(Object* obj);
654
655   // JS execution stack (see frames.h).
656   static Address c_entry_fp(ThreadLocalTop* thread) {
657     return thread->c_entry_fp_;
658   }
659   static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
660   Address c_function() { return thread_local_top_.c_function_; }
661
662   inline Address* c_entry_fp_address() {
663     return &thread_local_top_.c_entry_fp_;
664   }
665   inline Address* handler_address() { return &thread_local_top_.handler_; }
666   inline Address* c_function_address() {
667     return &thread_local_top_.c_function_;
668   }
669
670   // Bottom JS entry.
671   Address js_entry_sp() {
672     return thread_local_top_.js_entry_sp_;
673   }
674   inline Address* js_entry_sp_address() {
675     return &thread_local_top_.js_entry_sp_;
676   }
677
678   // Generated code scratch locations.
679   void* formal_count_address() { return &thread_local_top_.formal_count_; }
680
681   // Returns the global object of the current context. It could be
682   // a builtin object, or a JS global object.
683   Handle<GlobalObject> global_object() {
684     return Handle<GlobalObject>(context()->global_object());
685   }
686
687   // Returns the global proxy object of the current context.
688   JSObject* global_proxy() {
689     return context()->global_proxy();
690   }
691
692   Handle<JSBuiltinsObject> js_builtins_object() {
693     return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
694   }
695
696   static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
697   void FreeThreadResources() { thread_local_top_.Free(); }
698
699   // This method is called by the api after operations that may throw
700   // exceptions.  If an exception was thrown and not handled by an external
701   // handler the exception is scheduled to be rethrown when we return to running
702   // JavaScript code.  If an exception is scheduled true is returned.
703   bool OptionalRescheduleException(bool is_bottom_call);
704
705   // Push and pop a promise and the current try-catch handler.
706   void PushPromise(Handle<JSObject> promise);
707   void PopPromise();
708   Handle<Object> GetPromiseOnStackOnThrow();
709
710   class ExceptionScope {
711    public:
712     explicit ExceptionScope(Isolate* isolate) :
713       // Scope currently can only be used for regular exceptions,
714       // not termination exception.
715       isolate_(isolate),
716       pending_exception_(isolate_->pending_exception(), isolate_),
717       catcher_(isolate_->catcher())
718     { }
719
720     ~ExceptionScope() {
721       isolate_->set_catcher(catcher_);
722       isolate_->set_pending_exception(*pending_exception_);
723     }
724
725    private:
726     Isolate* isolate_;
727     Handle<Object> pending_exception_;
728     v8::TryCatch* catcher_;
729   };
730
731   void SetCaptureStackTraceForUncaughtExceptions(
732       bool capture,
733       int frame_limit,
734       StackTrace::StackTraceOptions options);
735
736   void PrintCurrentStackTrace(FILE* out);
737   void PrintStack(StringStream* accumulator);
738   void PrintStack(FILE* out);
739   Handle<String> StackTraceString();
740   NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
741                                       Object* object,
742                                       Map* map,
743                                       unsigned int magic2));
744   Handle<JSArray> CaptureCurrentStackTrace(
745       int frame_limit,
746       StackTrace::StackTraceOptions options);
  // Stack trace capture and retrieval helpers for error objects.
  Handle<Object> CaptureSimpleStackTrace(Handle<JSObject> error_object,
                                         Handle<Object> caller);
  void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
  void CaptureAndSetSimpleStackTrace(Handle<JSObject> error_object,
                                     Handle<Object> caller);
  Handle<JSArray> GetDetailedStackTrace(Handle<JSObject> error_object);
  Handle<JSArray> GetDetailedFromSimpleStackTrace(
      Handle<JSObject> error_object);

  // Returns if the top context may access the given global object. If
  // the result is false, the pending exception is guaranteed to be
  // set.

  bool MayNamedAccess(Handle<JSObject> receiver,
                      Handle<Object> key,
                      v8::AccessType type);
  bool MayIndexedAccess(Handle<JSObject> receiver,
                        uint32_t index,
                        v8::AccessType type);
  bool IsInternallyUsedPropertyName(Handle<Object> name);
  bool IsInternallyUsedPropertyName(Object* name);

  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
  void ReportFailedAccessCheck(Handle<JSObject> receiver, v8::AccessType type);

  // Exception throwing support. The caller should use the result
  // of Throw() as its return value.
  Object* Throw(Object* exception, MessageLocation* location = NULL);

  // Typed convenience wrapper: throws and returns an empty MaybeHandle<T>,
  // so callers can simply write `return isolate->Throw<T>(...)`.
  template <typename T>
  MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
                                       MessageLocation* location = NULL) {
    Throw(*exception, location);
    return MaybeHandle<T>();
  }

  // Re-throw an exception.  This involves no error reporting since
  // error reporting was handled when the exception was thrown
  // originally.
  Object* ReThrow(Object* exception);
  void ScheduleThrow(Object* exception);
  // Re-set pending message, script and positions reported to the TryCatch
  // back to the TLS for re-use when rethrowing.
  void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
  // Un-schedule an exception that was caught by a TryCatch handler.
  void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
  void ReportPendingMessages();
  // Return pending location if any or unfilled structure.
  MessageLocation GetMessageLocation();
  Object* ThrowIllegalOperation();

  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
  Object* PromoteScheduledException();
  void DoThrow(Object* exception, MessageLocation* location);
  // Checks if exception should be reported and finds out if it's
  // caught externally.
  bool ShouldReportException(bool* can_be_caught_externally,
                             bool catchable_by_javascript);

  // Attempts to compute the current source location, storing the
  // result in the target out parameter.
  void ComputeLocation(MessageLocation* target);
  bool ComputeLocationFromException(MessageLocation* target,
                                    Handle<Object> exception);
  bool ComputeLocationFromStackTrace(MessageLocation* target,
                                     Handle<Object> exception);

  Handle<JSMessageObject> CreateMessage(Handle<Object> exception,
                                        MessageLocation* location);

  // Out of resource exception helpers.
  Object* StackOverflow();
  Object* TerminateExecution();
  void CancelTerminateExecution();

  void RequestInterrupt(InterruptCallback callback, void* data);
  void InvokeApiInterruptCallbacks();

  // Administration: GC root / thread-state iteration support.
  void Iterate(ObjectVisitor* v);
  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
  char* Iterate(ObjectVisitor* v, char* t);
  void IterateThread(ThreadVisitor* v, char* t);
830
831
  // Returns the current native context.
  Handle<Context> native_context();

  // Returns the native context of the calling JavaScript code.  That
  // is, the native context of the top-most JavaScript frame.
  Handle<Context> GetCallingNativeContext();

  void RegisterTryCatchHandler(v8::TryCatch* that);
  void UnregisterTryCatchHandler(v8::TryCatch* that);

  char* ArchiveThread(char* to);
  char* RestoreThread(char* from);

  static const char* const kStackOverflowMessage;

  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
  static const int kBMMaxShift = 250;        // See StringSearchBase.

  // Accessors.
  // Generates an inline getter/setter pair for every field in
  // ISOLATE_INIT_LIST. The DCHECK verifies the field's actual offset matches
  // the per-field debug offset recorded via ISOLATE_FIELD_OFFSET below, so
  // all compilation units agree on the layout of this class.
#define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
  inline type name() const {                                            \
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return name##_;                                                     \
  }                                                                     \
  inline void set_##name(type value) {                                  \
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    name##_ = value;                                                    \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR

  // Same as GLOBAL_ACCESSOR, but returns a pointer to the first element of
  // each array field in ISOLATE_INIT_ARRAY_LIST.
#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
  inline type* name() {                                                 \
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return &(name##_)[0];                                               \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

  // Forwarding accessors for every field of the current native context.
#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)            \
  Handle<type> name() {                                             \
    return Handle<type>(native_context()->name(), this);            \
  }                                                                 \
  bool is_##name(type* value) {                                     \
    return native_context()->is_##name(value);                      \
  }
  NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
#undef NATIVE_CONTEXT_FIELD_ACCESSOR

  Bootstrapper* bootstrapper() { return bootstrapper_; }
  Counters* counters() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    DCHECK(counters_ != NULL);
    return counters_;
  }
  CodeRange* code_range() { return code_range_; }
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
  Logger* logger() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    DCHECK(logger_ != NULL);
    return logger_;
  }
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
  StatsTable* stats_table();
  StubCache* stub_cache() { return stub_cache_; }
  CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
  MaterializedObjectStore* materialized_object_store() {
    return materialized_object_store_;
  }

  MemoryAllocator* memory_allocator() {
    return memory_allocator_;
  }

  KeyedLookupCache* keyed_lookup_cache() {
    return keyed_lookup_cache_;
  }

  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

  HandleScopeData* handle_scope_data() { return &handle_scope_data_; }

  HandleScopeImplementer* handle_scope_implementer() {
    DCHECK(handle_scope_implementer_);
    return handle_scope_implementer_;
  }
  Zone* runtime_zone() { return &runtime_zone_; }

  UnicodeCache* unicode_cache() {
    return unicode_cache_;
  }

  InnerPointerToCodeCache* inner_pointer_to_code_cache() {
    return inner_pointer_to_code_cache_;
  }

  GlobalHandles* global_handles() { return global_handles_; }

  EternalHandles* eternal_handles() { return eternal_handles_; }

  ThreadManager* thread_manager() { return thread_manager_; }

  StringTracker* string_tracker() { return string_tracker_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  RuntimeState* runtime_state() { return &runtime_state_; }

  Builtins* builtins() { return &builtins_; }

  void NotifyExtensionInstalled() {
    has_installed_extensions_ = true;
  }

  bool has_installed_extensions() { return has_installed_extensions_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
    return &interp_canonicalize_mapping_;
  }

  Debug* debug() { return debug_; }

  CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
  HeapProfiler* heap_profiler() const { return heap_profiler_; }

#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }
#endif

  // The Factory is a stateless view over this Isolate; the cast relies on
  // Factory adding no data members of its own.
  Factory* factory() { return reinterpret_cast<Factory*>(this); }

  static const int kJSRegexpStaticOffsetsVectorSize = 128;

  THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)

  THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)

  // Embedder data slots, exposed through v8::Isolate::Set/GetData().
  void SetData(uint32_t slot, void* data) {
    DCHECK(slot < Internals::kNumIsolateDataSlots);
    embedder_data_[slot] = data;
  }
  void* GetData(uint32_t slot) {
    DCHECK(slot < Internals::kNumIsolateDataSlots);
    return embedder_data_[slot];
  }

  bool serializer_enabled() const { return serializer_enabled_; }

  bool IsDead() { return has_fatal_error_; }
  void SignalFatalError() { has_fatal_error_ = true; }

  bool use_crankshaft() const;

  bool initialized_from_snapshot() { return initialized_from_snapshot_; }

  double time_millis_since_init() {
    return base::OS::TimeCurrentMillis() - time_millis_at_init_;
  }

  DateCache* date_cache() {
    return date_cache_;
  }

  // Replaces the date cache, deleting the previous one unless the caller
  // passed the same object back.
  void set_date_cache(DateCache* date_cache) {
    if (date_cache != date_cache_) {
      delete date_cache_;
    }
    date_cache_ = date_cache;
  }

  Map* get_initial_js_array_map(ElementsKind kind);

  bool IsFastArrayConstructorPrototypeChainIntact();

  CallInterfaceDescriptorData* call_descriptor_data(int index);

  void IterateDeferredHandles(ObjectVisitor* visitor);
  void LinkDeferredHandles(DeferredHandles* deferred_handles);
  void UnlinkDeferredHandles(DeferredHandles* deferred_handles);

#ifdef DEBUG
  bool IsDeferredHandle(Object** location);
#endif  // DEBUG

  bool concurrent_recompilation_enabled() {
    // Thread is only available with flag enabled.
    DCHECK(optimizing_compiler_thread_ == NULL ||
           FLAG_concurrent_recompilation);
    return optimizing_compiler_thread_ != NULL;
  }

  bool concurrent_osr_enabled() const {
    // Thread is only available with flag enabled.
    DCHECK(optimizing_compiler_thread_ == NULL ||
           FLAG_concurrent_recompilation);
    return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
  }

  OptimizingCompilerThread* optimizing_compiler_thread() {
    return optimizing_compiler_thread_;
  }

  int id() const { return static_cast<int>(id_); }

  HStatistics* GetHStatistics();
  CompilationStatistics* GetTurboStatistics();
  HTracer* GetHTracer();
  CodeTracer* GetCodeTracer();

  void DumpAndResetCompilationStats();

  FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
  void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
    function_entry_hook_ = function_entry_hook;
  }

  void* stress_deopt_count_address() { return &stress_deopt_count_; }

  inline base::RandomNumberGenerator* random_number_generator();

  // Given an address occupied by a live code object, return that object.
  Object* FindCodeObject(Address a);

  // Returns a monotonically increasing id, wrapping back to 0 so the next
  // value always stays within Smi range.
  int NextOptimizationId() {
    int id = next_optimization_id_++;
    if (!Smi::IsValid(next_optimization_id_)) {
      next_optimization_id_ = 0;
    }
    return id;
  }

  // Get (and lazily initialize) the registry for per-isolate symbols.
  Handle<JSObject> GetSymbolRegistry();

  void AddCallCompletedCallback(CallCompletedCallback callback);
  void RemoveCallCompletedCallback(CallCompletedCallback callback);
  void FireCallCompletedCallback();

  void SetPromiseRejectCallback(PromiseRejectCallback callback);
  void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
                           v8::PromiseRejectEvent event);

  void EnqueueMicrotask(Handle<Object> microtask);
  void RunMicrotasks();

  void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
  void CountUsage(v8::Isolate::UseCounterFeature feature);

  BasicBlockProfiler* GetOrCreateBasicBlockProfiler();
  BasicBlockProfiler* basic_block_profiler() { return basic_block_profiler_; }

  // Creates an isolate with the serializer disabled; for tests only.
  static Isolate* NewForTesting() { return new Isolate(false); }

  std::string GetTurboCfgFileName();

#if TRACE_MAPS
  int GetNextUniqueSharedFunctionInfoId() { return next_unique_sfi_id_++; }
#endif

  void set_store_buffer_hash_set_1_address(
      uintptr_t* store_buffer_hash_set_1_address) {
    store_buffer_hash_set_1_address_ = store_buffer_hash_set_1_address;
  }

  uintptr_t* store_buffer_hash_set_1_address() {
    return store_buffer_hash_set_1_address_;
  }

  void set_store_buffer_hash_set_2_address(
      uintptr_t* store_buffer_hash_set_2_address) {
    store_buffer_hash_set_2_address_ = store_buffer_hash_set_2_address;
  }

  uintptr_t* store_buffer_hash_set_2_address() {
    return store_buffer_hash_set_2_address_;
  }

  void AddDetachedContext(Handle<Context> context);
  void CheckDetachedContextsAfterGC();
 private:
  explicit Isolate(bool enable_serializer);

  friend struct GlobalState;
  friend struct InitializeGlobalState;

  // These fields are accessed through the API, offsets must be kept in sync
  // with v8::internal::Internals (in include/v8.h) constants. This is also
  // verified in Isolate::Init() using runtime checks.
  void* embedder_data_[Internals::kNumIsolateDataSlots];
  Heap heap_;

  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(PerIsolateThreadData* data);
    void RemoveAllThreads(Isolate* isolate);

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack synchronously with threads Enter'ing and Exit'ing
  // the Isolate. The top of the stack points to a thread which is currently
  // running the Isolate. When the stack is empty, the Isolate is considered
  // not entered by any thread and can be Disposed.
  // If the same thread enters the Isolate more than once, the entry_count_
  // is incremented rather than a new item pushed to the stack.
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

   private:
    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  static base::LazyMutex thread_data_table_mutex_;

  static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
  static base::Thread::LocalStorageKey isolate_key_;
  static base::Thread::LocalStorageKey thread_id_key_;
  static ThreadDataTable* thread_data_table_;

  // A global counter for all generated Isolates, might overflow.
  static base::Atomic32 isolate_counter_;

#if DEBUG
  static base::Atomic32 isolate_key_created_;
#endif

  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previously entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Exit();

  void InitializeThreadLocal();

  void MarkCompactPrologue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);
  void MarkCompactEpilogue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);

  void FillCache();

  // Propagate pending exception message to the v8::TryCatch.
  // If there is no external try-catch or message was successfully propagated,
  // then return true.
  bool PropagatePendingExceptionToExternalTryCatch();

  // Traverse prototype chain to find out whether the object is derived from
  // the Error object.
  bool IsErrorObject(Handle<Object> obj);

  base::Atomic32 id_;
  EntryStackItem* entry_stack_;
  int stack_trace_nesting_level_;
  StringStream* incomplete_message_;
  Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
  Bootstrapper* bootstrapper_;
  RuntimeProfiler* runtime_profiler_;
  CompilationCache* compilation_cache_;
  Counters* counters_;
  CodeRange* code_range_;
  base::RecursiveMutex break_access_;
  Logger* logger_;
  StackGuard stack_guard_;
  StatsTable* stats_table_;
  StubCache* stub_cache_;
  CodeAgingHelper* code_aging_helper_;
  DeoptimizerData* deoptimizer_data_;
  MaterializedObjectStore* materialized_object_store_;
  ThreadLocalTop thread_local_top_;
  bool capture_stack_trace_for_uncaught_exceptions_;
  int stack_trace_for_uncaught_exceptions_frame_limit_;
  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
  MemoryAllocator* memory_allocator_;
  KeyedLookupCache* keyed_lookup_cache_;
  ContextSlotCache* context_slot_cache_;
  DescriptorLookupCache* descriptor_lookup_cache_;
  HandleScopeData handle_scope_data_;
  HandleScopeImplementer* handle_scope_implementer_;
  UnicodeCache* unicode_cache_;
  Zone runtime_zone_;
  InnerPointerToCodeCache* inner_pointer_to_code_cache_;
  GlobalHandles* global_handles_;
  EternalHandles* eternal_handles_;
  ThreadManager* thread_manager_;
  RuntimeState runtime_state_;
  Builtins builtins_;
  bool has_installed_extensions_;
  StringTracker* string_tracker_;
  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize>
      regexp_macro_assembler_canonicalize_;
  RegExpStack* regexp_stack_;
  DateCache* date_cache_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
  CallInterfaceDescriptorData* call_descriptor_data_;
  base::RandomNumberGenerator* random_number_generator_;
  // TODO(hpayer): Remove the following store buffer addresses.
  uintptr_t* store_buffer_hash_set_1_address_;
  uintptr_t* store_buffer_hash_set_2_address_;

  // Whether the isolate has been created for snapshotting.
  bool serializer_enabled_;

  // True if fatal error has been signaled for this isolate.
  bool has_fatal_error_;

  // True if this isolate was initialized from a snapshot.
  bool initialized_from_snapshot_;

  // Time stamp at initialization.
  double time_millis_at_init_;

#ifdef DEBUG
  // A static array of histogram info for each type.
  HistogramInfo heap_histograms_[LAST_TYPE + 1];
  JSObject::SpillInformation js_spill_information_;
#endif

  Debug* debug_;
  CpuProfiler* cpu_profiler_;
  HeapProfiler* heap_profiler_;
  FunctionEntryHook function_entry_hook_;

  typedef std::pair<InterruptCallback, void*> InterruptEntry;
  std::queue<InterruptEntry> api_interrupts_queue_;

  // Backing storage for the fields exposed via GLOBAL_ACCESSOR above.
#define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
  type name##_;
  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
#undef GLOBAL_BACKING_STORE

#define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
  type name##_[length];
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
#undef GLOBAL_ARRAY_BACKING_STORE

#ifdef DEBUG
  // This class is huge and has a number of fields controlled by
  // preprocessor defines. Make sure the offsets of these fields agree
  // between compilation units.
#define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
  static const intptr_t name##_debug_offset_;
  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

  DeferredHandles* deferred_handles_head_;
  OptimizingCompilerThread* optimizing_compiler_thread_;

  // Counts deopt points if deopt_every_n_times is enabled.
  unsigned int stress_deopt_count_;

  int next_optimization_id_;

#if TRACE_MAPS
  int next_unique_sfi_id_;
#endif

  // List of callbacks when a Call completes.
  List<CallCompletedCallback> call_completed_callbacks_;

  v8::Isolate::UseCounterCallback use_counter_callback_;
  BasicBlockProfiler* basic_block_profiler_;


  friend class ExecutionAccess;
  friend class HandleScopeImplementer;
  friend class OptimizingCompilerThread;
  friend class SweeperThread;
  friend class ThreadManager;
  friend class Simulator;
  friend class StackGuard;
  friend class ThreadId;
  friend class TestMemoryAllocatorScope;
  friend class TestCodeRangeScope;
  friend class v8::Isolate;
  friend class v8::Locker;
  friend class v8::Unlocker;

  DISALLOW_COPY_AND_ASSIGN(Isolate);
};
1380
1381
1382 #undef FIELD_ACCESSOR
1383 #undef THREAD_LOCAL_TOP_ACCESSOR
1384
1385
1386 class PromiseOnStack {
1387  public:
1388   PromiseOnStack(StackHandler* handler, Handle<JSObject> promise,
1389                  PromiseOnStack* prev)
1390       : handler_(handler), promise_(promise), prev_(prev) {}
1391   StackHandler* handler() { return handler_; }
1392   Handle<JSObject> promise() { return promise_; }
1393   PromiseOnStack* prev() { return prev_; }
1394
1395  private:
1396   StackHandler* handler_;
1397   Handle<JSObject> promise_;
1398   PromiseOnStack* prev_;
1399 };
1400
1401
1402 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the
1403 // class as a work around for a bug in the generated code found with these
1404 // versions of GCC. See V8 issue 122 for details.
1405 class SaveContext BASE_EMBEDDED {
1406  public:
1407   inline explicit SaveContext(Isolate* isolate);
1408
1409   ~SaveContext() {
1410     isolate_->set_context(context_.is_null() ? NULL : *context_);
1411     isolate_->set_save_context(prev_);
1412   }
1413
1414   Handle<Context> context() { return context_; }
1415   SaveContext* prev() { return prev_; }
1416
1417   // Returns true if this save context is below a given JavaScript frame.
1418   bool IsBelowFrame(JavaScriptFrame* frame) {
1419     return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1420   }
1421
1422  private:
1423   Isolate* isolate_;
1424   Handle<Context> context_;
1425   SaveContext* prev_;
1426   Address c_entry_fp_;
1427 };
1428
1429
// Debug-only scope asserting that the isolate's current context at
// destruction is the same one observed at construction. In non-DEBUG
// builds this is an empty object with no behavior.
class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  explicit AssertNoContextChange(Isolate* isolate)
    : isolate_(isolate),
      context_(isolate->context(), isolate) { }
  ~AssertNoContextChange() {
    DCHECK(isolate_->context() == *context_);
  }

 private:
  Isolate* isolate_;
  Handle<Context> context_;
#else
 public:
  explicit AssertNoContextChange(Isolate* isolate) { }
#endif
};
1448
1449
1450 class ExecutionAccess BASE_EMBEDDED {
1451  public:
1452   explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
1453     Lock(isolate);
1454   }
1455   ~ExecutionAccess() { Unlock(isolate_); }
1456
1457   static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
1458   static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
1459
1460   static bool TryLock(Isolate* isolate) {
1461     return isolate->break_access()->TryLock();
1462   }
1463
1464  private:
1465   Isolate* isolate_;
1466 };
1467
1468
1469 // Support for checking for stack-overflows.
1470 class StackLimitCheck BASE_EMBEDDED {
1471  public:
1472   explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
1473
1474   // Use this to check for stack-overflows in C++ code.
1475   inline bool HasOverflowed() const {
1476     StackGuard* stack_guard = isolate_->stack_guard();
1477     return GetCurrentStackPosition() < stack_guard->real_climit();
1478   }
1479
1480   // Use this to check for stack-overflow when entering runtime from JS code.
1481   bool JsHasOverflowed() const;
1482
1483  private:
1484   Isolate* isolate_;
1485 };
1486
1487
1488 // Support for temporarily postponing interrupts. When the outermost
1489 // postpone scope is left the interrupts will be re-enabled and any
1490 // interrupts that occurred while in the scope will be taken into
1491 // account.
1492 class PostponeInterruptsScope BASE_EMBEDDED {
1493  public:
1494   PostponeInterruptsScope(Isolate* isolate,
1495                           int intercept_mask = StackGuard::ALL_INTERRUPTS)
1496       : stack_guard_(isolate->stack_guard()),
1497         intercept_mask_(intercept_mask),
1498         intercepted_flags_(0) {
1499     stack_guard_->PushPostponeInterruptsScope(this);
1500   }
1501
1502   ~PostponeInterruptsScope() {
1503     stack_guard_->PopPostponeInterruptsScope();
1504   }
1505
1506   // Find the bottom-most scope that intercepts this interrupt.
1507   // Return whether the interrupt has been intercepted.
1508   bool Intercept(StackGuard::InterruptFlag flag);
1509
1510  private:
1511   StackGuard* stack_guard_;
1512   int intercept_mask_;
1513   int intercepted_flags_;
1514   PostponeInterruptsScope* prev_;
1515
1516   friend class StackGuard;
1517 };
1518
1519
1520 class CodeTracer FINAL : public Malloced {
1521  public:
1522   explicit CodeTracer(int isolate_id)
1523       : file_(NULL),
1524         scope_depth_(0) {
1525     if (!ShouldRedirect()) {
1526       file_ = stdout;
1527       return;
1528     }
1529
1530     if (FLAG_redirect_code_traces_to == NULL) {
1531       SNPrintF(filename_,
1532                "code-%d-%d.asm",
1533                base::OS::GetCurrentProcessId(),
1534                isolate_id);
1535     } else {
1536       StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
1537     }
1538
1539     WriteChars(filename_.start(), "", 0, false);
1540   }
1541
1542   class Scope {
1543    public:
1544     explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
1545     ~Scope() { tracer_->CloseFile();  }
1546
1547     FILE* file() const { return tracer_->file(); }
1548
1549    private:
1550     CodeTracer* tracer_;
1551   };
1552
1553   void OpenFile() {
1554     if (!ShouldRedirect()) {
1555       return;
1556     }
1557
1558     if (file_ == NULL) {
1559       file_ = base::OS::FOpen(filename_.start(), "ab");
1560     }
1561
1562     scope_depth_++;
1563   }
1564
1565   void CloseFile() {
1566     if (!ShouldRedirect()) {
1567       return;
1568     }
1569
1570     if (--scope_depth_ == 0) {
1571       fclose(file_);
1572       file_ = NULL;
1573     }
1574   }
1575
1576   FILE* file() const { return file_; }
1577
1578  private:
1579   static bool ShouldRedirect() {
1580     return FLAG_redirect_code_traces;
1581   }
1582
1583   EmbeddedVector<char, 128> filename_;
1584   FILE* file_;
1585   int scope_depth_;
1586 };
1587
1588 } }  // namespace v8::internal
1589
1590 #endif  // V8_ISOLATE_H_