1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "bootstrapper.h"
35 #include "compilation-cache.h"
37 #include "deoptimizer.h"
38 #include "heap-profiler.h"
41 #include "lithium-allocator.h"
43 #include "marking-thread.h"
46 #include "regexp-stack.h"
47 #include "runtime-profiler.h"
48 #include "scopeinfo.h"
49 #include "serialize.h"
50 #include "simulator.h"
52 #include "stub-cache.h"
53 #include "sweeper-thread.h"
55 #include "vm-state-inl.h"
61 Atomic32 ThreadId::highest_thread_id_ = 0;
63 int ThreadId::AllocateThreadId() {
64 int new_id = NoBarrier_AtomicIncrement(&highest_thread_id_, 1);
69 int ThreadId::GetCurrentThreadId() {
70 int thread_id = Thread::GetThreadLocalInt(Isolate::thread_id_key_);
72 thread_id = AllocateThreadId();
73 Thread::SetThreadLocalInt(Isolate::thread_id_key_, thread_id);
79 ThreadLocalTop::ThreadLocalTop() {
81 // This flag may be set using v8::V8::IgnoreOutOfMemoryException()
82 // before an isolate is initialized. The initialize methods below do
83 // not touch it to preserve its value.
84 ignore_out_of_memory_ = false;
88 void ThreadLocalTop::InitializeInternal() {
95 external_callback_ = NULL;
96 current_vm_state_ = EXTERNAL;
97 try_catch_handler_address_ = NULL;
99 thread_id_ = ThreadId::Invalid();
100 external_caught_exception_ = false;
101 failed_access_check_callback_ = NULL;
102 save_context_ = NULL;
104 top_lookup_result_ = NULL;
106 // These members are re-initialized later after deserialization
108 pending_exception_ = NULL;
109 has_pending_message_ = false;
110 pending_message_obj_ = NULL;
111 pending_message_script_ = NULL;
112 scheduled_exception_ = NULL;
116 void ThreadLocalTop::Initialize() {
117 InitializeInternal();
119 #ifdef V8_TARGET_ARCH_ARM
120 simulator_ = Simulator::current(isolate_);
121 #elif V8_TARGET_ARCH_MIPS
122 simulator_ = Simulator::current(isolate_);
125 thread_id_ = ThreadId::Current();
129 v8::TryCatch* ThreadLocalTop::TryCatchHandler() {
130 return TRY_CATCH_FROM_ADDRESS(try_catch_handler_address());
134 int SystemThreadManager::NumberOfParallelSystemThreads(
135 ParallelSystemComponent type) {
136 int number_of_threads = Min(OS::NumberOfCores(), kMaxThreads);
137 ASSERT(number_of_threads > 0);
138 if (number_of_threads == 1) {
141 if (type == PARALLEL_SWEEPING) {
142 return number_of_threads;
143 } else if (type == CONCURRENT_SWEEPING) {
144 return number_of_threads - 1;
145 } else if (type == PARALLEL_MARKING) {
146 return number_of_threads;
152 // Create a dummy thread that will wait forever on a semaphore. The only
153 // purpose for this thread is to have some stack area to save essential data
154 // into for use by a stacks only core dump (aka minidump).
155 class PreallocatedMemoryThread: public Thread {
158 if (data_ready_semaphore_ != NULL) {
159 // Initial access is guarded until the data has been published.
160 data_ready_semaphore_->Wait();
161 delete data_ready_semaphore_;
162 data_ready_semaphore_ = NULL;
168 if (data_ready_semaphore_ != NULL) {
169 // Initial access is guarded until the data has been published.
170 data_ready_semaphore_->Wait();
171 delete data_ready_semaphore_;
172 data_ready_semaphore_ = NULL;
177 // Stop the PreallocatedMemoryThread and release its resources.
179 keep_running_ = false;
180 wait_for_ever_semaphore_->Signal();
182 // Wait for the thread to terminate.
185 if (data_ready_semaphore_ != NULL) {
186 delete data_ready_semaphore_;
187 data_ready_semaphore_ = NULL;
190 delete wait_for_ever_semaphore_;
191 wait_for_ever_semaphore_ = NULL;
195 // When the thread starts running it will allocate a fixed number of bytes
196 // on the stack and publish the location of this memory for others to use.
198 EmbeddedVector<char, 15 * 1024> local_buffer;
200 // Initialize the buffer with a known good value.
201 OS::StrNCpy(local_buffer, "Trace data was not generated.\n",
202 local_buffer.length());
204 // Publish the local buffer and signal its availability.
205 data_ = local_buffer.start();
206 length_ = local_buffer.length();
207 data_ready_semaphore_->Signal();
209 while (keep_running_) {
210 // This thread will wait here until the end of time.
211 wait_for_ever_semaphore_->Wait();
214 // Make sure we access the buffer after the wait to remove all possibility
215 // of it being optimized away.
216 OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n",
217 local_buffer.length());
222 PreallocatedMemoryThread()
223 : Thread("v8:PreallocMem"),
225 wait_for_ever_semaphore_(OS::CreateSemaphore(0)),
226 data_ready_semaphore_(OS::CreateSemaphore(0)),
231 // Used to make sure that the thread keeps looping even for spurious wakeups.
234 // This semaphore is used by the PreallocatedMemoryThread to wait for ever.
235 Semaphore* wait_for_ever_semaphore_;
236 // Semaphore to signal that the data has been initialized.
237 Semaphore* data_ready_semaphore_;
239 // Location and size of the preallocated memory block.
243 friend class Isolate;
245 DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread);
249 void Isolate::PreallocatedMemoryThreadStart() {
250 if (preallocated_memory_thread_ != NULL) return;
251 preallocated_memory_thread_ = new PreallocatedMemoryThread();
252 preallocated_memory_thread_->Start();
256 void Isolate::PreallocatedMemoryThreadStop() {
257 if (preallocated_memory_thread_ == NULL) return;
258 preallocated_memory_thread_->StopThread();
259 // Done with the thread entirely.
260 delete preallocated_memory_thread_;
261 preallocated_memory_thread_ = NULL;
265 void Isolate::PreallocatedStorageInit(size_t size) {
266 ASSERT(free_list_.next_ == &free_list_);
267 ASSERT(free_list_.previous_ == &free_list_);
268 PreallocatedStorage* free_chunk =
269 reinterpret_cast<PreallocatedStorage*>(new char[size]);
270 free_list_.next_ = free_list_.previous_ = free_chunk;
271 free_chunk->next_ = free_chunk->previous_ = &free_list_;
272 free_chunk->size_ = size - sizeof(PreallocatedStorage);
273 preallocated_storage_preallocated_ = true;
277 void* Isolate::PreallocatedStorageNew(size_t size) {
278 if (!preallocated_storage_preallocated_) {
279 return FreeStoreAllocationPolicy().New(size);
281 ASSERT(free_list_.next_ != &free_list_);
282 ASSERT(free_list_.previous_ != &free_list_);
284 size = (size + kPointerSize - 1) & ~(kPointerSize - 1);
285 // Search for exact fit.
286 for (PreallocatedStorage* storage = free_list_.next_;
287 storage != &free_list_;
288 storage = storage->next_) {
289 if (storage->size_ == size) {
291 storage->LinkTo(&in_use_list_);
292 return reinterpret_cast<void*>(storage + 1);
295 // Search for first fit.
296 for (PreallocatedStorage* storage = free_list_.next_;
297 storage != &free_list_;
298 storage = storage->next_) {
299 if (storage->size_ >= size + sizeof(PreallocatedStorage)) {
301 storage->LinkTo(&in_use_list_);
302 PreallocatedStorage* left_over =
303 reinterpret_cast<PreallocatedStorage*>(
304 reinterpret_cast<char*>(storage + 1) + size);
305 left_over->size_ = storage->size_ - size - sizeof(PreallocatedStorage);
306 ASSERT(size + left_over->size_ + sizeof(PreallocatedStorage) ==
308 storage->size_ = size;
309 left_over->LinkTo(&free_list_);
310 return reinterpret_cast<void*>(storage + 1);
313 // Allocation failure.
319 // We don't attempt to coalesce.
320 void Isolate::PreallocatedStorageDelete(void* p) {
324 if (!preallocated_storage_preallocated_) {
325 FreeStoreAllocationPolicy::Delete(p);
328 PreallocatedStorage* storage = reinterpret_cast<PreallocatedStorage*>(p) - 1;
329 ASSERT(storage->next_->previous_ == storage);
330 ASSERT(storage->previous_->next_ == storage);
332 storage->LinkTo(&free_list_);
335 Isolate* Isolate::default_isolate_ = NULL;
336 Thread::LocalStorageKey Isolate::isolate_key_;
337 Thread::LocalStorageKey Isolate::thread_id_key_;
338 Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
339 Mutex* Isolate::process_wide_mutex_ = OS::CreateMutex();
340 Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL;
341 Atomic32 Isolate::isolate_counter_ = 0;
343 Isolate::PerIsolateThreadData* Isolate::AllocatePerIsolateThreadData(
344 ThreadId thread_id) {
345 ASSERT(!thread_id.Equals(ThreadId::Invalid()));
346 PerIsolateThreadData* per_thread = new PerIsolateThreadData(this, thread_id);
348 ScopedLock lock(process_wide_mutex_);
349 ASSERT(thread_data_table_->Lookup(this, thread_id) == NULL);
350 thread_data_table_->Insert(per_thread);
351 ASSERT(thread_data_table_->Lookup(this, thread_id) == per_thread);
357 Isolate::PerIsolateThreadData*
358 Isolate::FindOrAllocatePerThreadDataForThisThread() {
359 ThreadId thread_id = ThreadId::Current();
360 PerIsolateThreadData* per_thread = NULL;
362 ScopedLock lock(process_wide_mutex_);
363 per_thread = thread_data_table_->Lookup(this, thread_id);
364 if (per_thread == NULL) {
365 per_thread = AllocatePerIsolateThreadData(thread_id);
372 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
373 ThreadId thread_id = ThreadId::Current();
374 return FindPerThreadDataForThread(thread_id);
378 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
379 ThreadId thread_id) {
380 PerIsolateThreadData* per_thread = NULL;
382 ScopedLock lock(process_wide_mutex_);
383 per_thread = thread_data_table_->Lookup(this, thread_id);
389 void Isolate::EnsureDefaultIsolate() {
390 ScopedLock lock(process_wide_mutex_);
391 if (default_isolate_ == NULL) {
392 isolate_key_ = Thread::CreateThreadLocalKey();
393 thread_id_key_ = Thread::CreateThreadLocalKey();
394 per_isolate_thread_data_key_ = Thread::CreateThreadLocalKey();
395 thread_data_table_ = new Isolate::ThreadDataTable();
396 default_isolate_ = new Isolate();
398 // Can't use SetIsolateThreadLocals(default_isolate_, NULL) here
399 // because a non-null thread data may be already set.
400 if (Thread::GetThreadLocal(isolate_key_) == NULL) {
401 Thread::SetThreadLocal(isolate_key_, default_isolate_);
405 struct StaticInitializer {
406 StaticInitializer() {
407 Isolate::EnsureDefaultIsolate();
409 } static_initializer;
411 #ifdef ENABLE_DEBUGGER_SUPPORT
412 Debugger* Isolate::GetDefaultIsolateDebugger() {
413 EnsureDefaultIsolate();
414 return default_isolate_->debugger();
419 StackGuard* Isolate::GetDefaultIsolateStackGuard() {
420 EnsureDefaultIsolate();
421 return default_isolate_->stack_guard();
425 void Isolate::EnterDefaultIsolate() {
426 EnsureDefaultIsolate();
427 ASSERT(default_isolate_ != NULL);
429 PerIsolateThreadData* data = CurrentPerIsolateThreadData();
430 // If not yet in default isolate - enter it.
431 if (data == NULL || data->isolate() != default_isolate_) {
432 default_isolate_->Enter();
437 v8::Isolate* Isolate::GetDefaultIsolateForLocking() {
438 EnsureDefaultIsolate();
439 return reinterpret_cast<v8::Isolate*>(default_isolate_);
443 Address Isolate::get_address_from_id(Isolate::AddressId id) {
444 return isolate_addresses_[id];
448 char* Isolate::Iterate(ObjectVisitor* v, char* thread_storage) {
449 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
451 return thread_storage + sizeof(ThreadLocalTop);
455 void Isolate::IterateThread(ThreadVisitor* v, char* t) {
456 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
457 v->VisitThread(this, thread);
461 void Isolate::Iterate(ObjectVisitor* v, ThreadLocalTop* thread) {
462 // Visit the roots from the top for a given thread.
464 // The pending exception can sometimes be a failure. We can't show
465 // that to the GC, which only understands objects.
466 if (thread->pending_exception_->ToObject(&pending)) {
467 v->VisitPointer(&pending);
468 thread->pending_exception_ = pending; // In case GC updated it.
470 v->VisitPointer(&(thread->pending_message_obj_));
471 v->VisitPointer(BitCast<Object**>(&(thread->pending_message_script_)));
472 v->VisitPointer(BitCast<Object**>(&(thread->context_)));
474 if (thread->scheduled_exception_->ToObject(&scheduled)) {
475 v->VisitPointer(&scheduled);
476 thread->scheduled_exception_ = scheduled;
479 for (v8::TryCatch* block = thread->TryCatchHandler();
481 block = TRY_CATCH_FROM_ADDRESS(block->next_)) {
482 v->VisitPointer(BitCast<Object**>(&(block->exception_)));
483 v->VisitPointer(BitCast<Object**>(&(block->message_)));
486 // Iterate over pointers on native execution stack.
487 for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
488 it.frame()->Iterate(v);
491 // Iterate pointers in live lookup results.
492 thread->top_lookup_result_->Iterate(v);
496 void Isolate::Iterate(ObjectVisitor* v) {
497 ThreadLocalTop* current_t = thread_local_top();
498 Iterate(v, current_t);
501 void Isolate::IterateDeferredHandles(ObjectVisitor* visitor) {
502 for (DeferredHandles* deferred = deferred_handles_head_;
504 deferred = deferred->next_) {
505 deferred->Iterate(visitor);
511 bool Isolate::IsDeferredHandle(Object** handle) {
512 // Each DeferredHandles instance keeps the handles to one job in the
513 // parallel recompilation queue, containing a list of blocks. Each block
514 // contains kHandleBlockSize handles except for the first block, which may
515 // not be fully filled.
516 // We iterate through all the blocks to see whether the argument handle
517 // belongs to one of the blocks. If so, it is deferred.
518 for (DeferredHandles* deferred = deferred_handles_head_;
520 deferred = deferred->next_) {
521 List<Object**>* blocks = &deferred->blocks_;
522 for (int i = 0; i < blocks->length(); i++) {
523 Object** block_limit = (i == 0) ? deferred->first_block_limit_
524 : blocks->at(i) + kHandleBlockSize;
525 if (blocks->at(i) <= handle && handle < block_limit) return true;
533 void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
534 // The ARM simulator has a separate JS stack. We therefore register
535 // the C++ try catch handler with the simulator and get back an
536 // address that can be used for comparisons with addresses into the
537 // JS stack. When running without the simulator, the address
538 // returned will be the address of the C++ try catch handler itself.
539 Address address = reinterpret_cast<Address>(
540 SimulatorStack::RegisterCTryCatch(reinterpret_cast<uintptr_t>(that)));
541 thread_local_top()->set_try_catch_handler_address(address);
545 void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
546 ASSERT(thread_local_top()->TryCatchHandler() == that);
547 thread_local_top()->set_try_catch_handler_address(
548 reinterpret_cast<Address>(that->next_));
549 thread_local_top()->catcher_ = NULL;
550 SimulatorStack::UnregisterCTryCatch();
554 Handle<String> Isolate::StackTraceString() {
555 if (stack_trace_nesting_level_ == 0) {
556 stack_trace_nesting_level_++;
557 HeapStringAllocator allocator;
558 StringStream::ClearMentionedObjectCache();
559 StringStream accumulator(&allocator);
560 incomplete_message_ = &accumulator;
561 PrintStack(&accumulator);
562 Handle<String> stack_trace = accumulator.ToString();
563 incomplete_message_ = NULL;
564 stack_trace_nesting_level_ = 0;
566 } else if (stack_trace_nesting_level_ == 1) {
567 stack_trace_nesting_level_++;
569 "\n\nAttempt to print stack while printing stack (double fault)\n");
571 "If you are lucky you may find a partial stack dump on stdout.\n\n");
572 incomplete_message_->OutputToStdOut();
573 return factory()->empty_string();
577 return factory()->empty_string();
582 void Isolate::PushStackTraceAndDie(unsigned int magic,
585 unsigned int magic2) {
586 const int kMaxStackTraceSize = 8192;
587 Handle<String> trace = StackTraceString();
588 uint8_t buffer[kMaxStackTraceSize];
589 int length = Min(kMaxStackTraceSize - 1, trace->length());
590 String::WriteToFlat(*trace, buffer, 0, length);
591 buffer[length] = '\0';
592 // TODO(dcarney): convert buffer to utf8?
593 OS::PrintError("Stacktrace (%x-%x) %p %p: %s\n",
595 static_cast<void*>(object), static_cast<void*>(map),
596 reinterpret_cast<char*>(buffer));
601 // Determines whether the given stack frame should be displayed in
602 // a stack trace. The caller is the error constructor that asked
603 // for the stack trace to be collected. The first time a construct
604 // call to this function is encountered it is skipped. The seen_caller
605 // in/out parameter is used to remember if the caller has been seen
607 static bool IsVisibleInStackTrace(StackFrame* raw_frame,
610 // Only display JS frames.
611 if (!raw_frame->is_java_script()) return false;
612 JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
613 Object* raw_fun = frame->function();
614 // Not sure when this can happen but skip it just in case.
615 if (!raw_fun->IsJSFunction()) return false;
616 if ((raw_fun == caller) && !(*seen_caller)) {
620 // Skip all frames until we've seen the caller.
621 if (!(*seen_caller)) return false;
622 // Also, skip non-visible built-in functions and any call with the builtins
623 // object as receiver, so as to not reveal either the builtins object or
624 // an internal function.
625 // The --builtins-in-stack-traces command line flag allows including
626 // internal call sites in the stack trace for debugging purposes.
627 if (!FLAG_builtins_in_stack_traces) {
628 JSFunction* fun = JSFunction::cast(raw_fun);
629 if (frame->receiver()->IsJSBuiltinsObject() ||
630 (fun->IsBuiltin() && !fun->shared()->native())) {
638 Handle<JSArray> Isolate::CaptureSimpleStackTrace(Handle<JSObject> error_object,
639 Handle<Object> caller,
641 limit = Max(limit, 0); // Ensure that limit is not negative.
642 int initial_size = Min(limit, 10);
643 Handle<FixedArray> elements =
644 factory()->NewFixedArrayWithHoles(initial_size * 4 + 1);
646 // If the caller parameter is a function we skip frames until we're
647 // under it before starting to collect.
648 bool seen_caller = !caller->IsJSFunction();
649 // First element is reserved to store the number of non-strict frames.
652 int non_strict_frames = 0;
653 bool encountered_strict_function = false;
654 for (StackFrameIterator iter(this);
655 !iter.done() && frames_seen < limit;
657 StackFrame* raw_frame = iter.frame();
658 if (IsVisibleInStackTrace(raw_frame, *caller, &seen_caller)) {
660 JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
661 // Set initial size to the maximum inlining level + 1 for the outermost
663 List<FrameSummary> frames(Compiler::kMaxInliningLevels + 1);
664 frame->Summarize(&frames);
665 for (int i = frames.length() - 1; i >= 0; i--) {
666 if (cursor + 4 > elements->length()) {
667 int new_capacity = JSObject::NewElementsCapacity(elements->length());
668 Handle<FixedArray> new_elements =
669 factory()->NewFixedArrayWithHoles(new_capacity);
670 for (int i = 0; i < cursor; i++) {
671 new_elements->set(i, elements->get(i));
673 elements = new_elements;
675 ASSERT(cursor + 4 <= elements->length());
677 Handle<Object> recv = frames[i].receiver();
678 Handle<JSFunction> fun = frames[i].function();
679 Handle<Code> code = frames[i].code();
680 Handle<Smi> offset(Smi::FromInt(frames[i].offset()), this);
681 // The stack trace API should not expose receivers and function
682 // objects on frames deeper than the top-most one with a strict
683 // mode function. The number of non-strict frames is stored as
684 // first element in the result array.
685 if (!encountered_strict_function) {
686 if (!fun->shared()->is_classic_mode()) {
687 encountered_strict_function = true;
692 elements->set(cursor++, *recv);
693 elements->set(cursor++, *fun);
694 elements->set(cursor++, *code);
695 elements->set(cursor++, *offset);
699 elements->set(0, Smi::FromInt(non_strict_frames));
700 Handle<JSArray> result = factory()->NewJSArrayWithElements(elements);
701 result->set_length(Smi::FromInt(cursor));
706 void Isolate::CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object) {
707 if (capture_stack_trace_for_uncaught_exceptions_) {
708 // Capture stack trace for a detailed exception message.
709 Handle<String> key = factory()->hidden_stack_trace_string();
710 Handle<JSArray> stack_trace = CaptureCurrentStackTrace(
711 stack_trace_for_uncaught_exceptions_frame_limit_,
712 stack_trace_for_uncaught_exceptions_options_);
713 JSObject::SetHiddenProperty(error_object, key, stack_trace);
718 Handle<JSArray> Isolate::CaptureCurrentStackTrace(
719 int frame_limit, StackTrace::StackTraceOptions options) {
720 // Ensure no negative values.
721 int limit = Max(frame_limit, 0);
722 Handle<JSArray> stack_trace = factory()->NewJSArray(frame_limit);
724 Handle<String> column_key =
725 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("column"));
726 Handle<String> line_key =
727 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("lineNumber"));
728 Handle<String> script_key =
729 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("scriptName"));
730 Handle<String> script_name_or_source_url_key =
731 factory()->InternalizeOneByteString(
732 STATIC_ASCII_VECTOR("scriptNameOrSourceURL"));
733 Handle<String> function_key =
734 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("functionName"));
735 Handle<String> eval_key =
736 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("isEval"));
737 Handle<String> constructor_key =
738 factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("isConstructor"));
740 StackTraceFrameIterator it(this);
742 while (!it.done() && (frames_seen < limit)) {
743 JavaScriptFrame* frame = it.frame();
744 // Set initial size to the maximum inlining level + 1 for the outermost
746 List<FrameSummary> frames(Compiler::kMaxInliningLevels + 1);
747 frame->Summarize(&frames);
748 for (int i = frames.length() - 1; i >= 0 && frames_seen < limit; i--) {
749 // Create a JSObject to hold the information for the StackFrame.
750 Handle<JSObject> stack_frame = factory()->NewJSObject(object_function());
752 Handle<JSFunction> fun = frames[i].function();
753 Handle<Script> script(Script::cast(fun->shared()->script()));
755 if (options & StackTrace::kLineNumber) {
756 int script_line_offset = script->line_offset()->value();
757 int position = frames[i].code()->SourcePosition(frames[i].pc());
758 int line_number = GetScriptLineNumber(script, position);
759 // line_number is already shifted by the script_line_offset.
760 int relative_line_number = line_number - script_line_offset;
761 if (options & StackTrace::kColumnOffset && relative_line_number >= 0) {
762 Handle<FixedArray> line_ends(FixedArray::cast(script->line_ends()));
763 int start = (relative_line_number == 0) ? 0 :
764 Smi::cast(line_ends->get(relative_line_number - 1))->value() + 1;
765 int column_offset = position - start;
766 if (relative_line_number == 0) {
767 // For the case where the code is on the same line as the script
769 column_offset += script->column_offset()->value();
771 CHECK_NOT_EMPTY_HANDLE(
773 JSObject::SetLocalPropertyIgnoreAttributes(
774 stack_frame, column_key,
775 Handle<Smi>(Smi::FromInt(column_offset + 1), this), NONE));
777 CHECK_NOT_EMPTY_HANDLE(
779 JSObject::SetLocalPropertyIgnoreAttributes(
780 stack_frame, line_key,
781 Handle<Smi>(Smi::FromInt(line_number + 1), this), NONE));
784 if (options & StackTrace::kScriptName) {
785 Handle<Object> script_name(script->name(), this);
786 CHECK_NOT_EMPTY_HANDLE(this,
787 JSObject::SetLocalPropertyIgnoreAttributes(
788 stack_frame, script_key, script_name, NONE));
791 if (options & StackTrace::kScriptNameOrSourceURL) {
792 Handle<Object> result = GetScriptNameOrSourceURL(script);
793 CHECK_NOT_EMPTY_HANDLE(this,
794 JSObject::SetLocalPropertyIgnoreAttributes(
795 stack_frame, script_name_or_source_url_key,
799 if (options & StackTrace::kFunctionName) {
800 Handle<Object> fun_name(fun->shared()->name(), this);
801 if (!fun_name->BooleanValue()) {
802 fun_name = Handle<Object>(fun->shared()->inferred_name(), this);
804 CHECK_NOT_EMPTY_HANDLE(this,
805 JSObject::SetLocalPropertyIgnoreAttributes(
806 stack_frame, function_key, fun_name, NONE));
809 if (options & StackTrace::kIsEval) {
810 int type = Smi::cast(script->compilation_type())->value();
811 Handle<Object> is_eval = (type == Script::COMPILATION_TYPE_EVAL) ?
812 factory()->true_value() : factory()->false_value();
813 CHECK_NOT_EMPTY_HANDLE(this,
814 JSObject::SetLocalPropertyIgnoreAttributes(
815 stack_frame, eval_key, is_eval, NONE));
818 if (options & StackTrace::kIsConstructor) {
819 Handle<Object> is_constructor = (frames[i].is_constructor()) ?
820 factory()->true_value() : factory()->false_value();
821 CHECK_NOT_EMPTY_HANDLE(this,
822 JSObject::SetLocalPropertyIgnoreAttributes(
823 stack_frame, constructor_key,
824 is_constructor, NONE));
827 FixedArray::cast(stack_trace->elements())->set(frames_seen, *stack_frame);
833 stack_trace->set_length(Smi::FromInt(frames_seen));
838 void Isolate::PrintStack() {
839 if (stack_trace_nesting_level_ == 0) {
840 stack_trace_nesting_level_++;
842 StringAllocator* allocator;
843 if (preallocated_message_space_ == NULL) {
844 allocator = new HeapStringAllocator();
846 allocator = preallocated_message_space_;
849 StringStream::ClearMentionedObjectCache();
850 StringStream accumulator(allocator);
851 incomplete_message_ = &accumulator;
852 PrintStack(&accumulator);
853 accumulator.OutputToStdOut();
854 InitializeLoggingAndCounters();
856 incomplete_message_ = NULL;
857 stack_trace_nesting_level_ = 0;
858 if (preallocated_message_space_ == NULL) {
859 // Remove the HeapStringAllocator created above.
862 } else if (stack_trace_nesting_level_ == 1) {
863 stack_trace_nesting_level_++;
865 "\n\nAttempt to print stack while printing stack (double fault)\n");
867 "If you are lucky you may find a partial stack dump on stdout.\n\n");
868 incomplete_message_->OutputToStdOut();
873 static void PrintFrames(Isolate* isolate,
874 StringStream* accumulator,
875 StackFrame::PrintMode mode) {
876 StackFrameIterator it(isolate);
877 for (int i = 0; !it.done(); it.Advance()) {
878 it.frame()->Print(accumulator, mode, i++);
883 void Isolate::PrintStack(StringStream* accumulator) {
884 if (!IsInitialized()) {
886 "\n==== JS stack trace is not available =======================\n\n");
888 "\n==== Isolate for the thread is not initialized =============\n\n");
891 // The MentionedObjectCache is not GC-proof at the moment.
892 AssertNoAllocation nogc;
893 ASSERT(StringStream::IsMentionedObjectCacheClear());
895 // Avoid printing anything if there are no frames.
896 if (c_entry_fp(thread_local_top()) == 0) return;
899 "\n==== JS stack trace =========================================\n\n");
900 PrintFrames(this, accumulator, StackFrame::OVERVIEW);
903 "\n==== Details ================================================\n\n");
904 PrintFrames(this, accumulator, StackFrame::DETAILS);
906 accumulator->PrintMentionedObjectCache();
907 accumulator->Add("=====================\n\n");
911 void Isolate::SetFailedAccessCheckCallback(
912 v8::FailedAccessCheckCallback callback) {
913 thread_local_top()->failed_access_check_callback_ = callback;
917 void Isolate::ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type) {
918 if (!thread_local_top()->failed_access_check_callback_) return;
920 ASSERT(receiver->IsAccessCheckNeeded());
923 // Get the data object from access check info.
924 JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
925 if (!constructor->shared()->IsApiFunction()) return;
927 constructor->shared()->get_api_func_data()->access_check_info();
928 if (data_obj == heap_.undefined_value()) return;
930 HandleScope scope(this);
931 Handle<JSObject> receiver_handle(receiver);
932 Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
933 { VMState<EXTERNAL> state(this);
934 thread_local_top()->failed_access_check_callback_(
935 v8::Utils::ToLocal(receiver_handle),
937 v8::Utils::ToLocal(data));
942 enum MayAccessDecision {
947 static MayAccessDecision MayAccessPreCheck(Isolate* isolate,
949 v8::AccessType type) {
950 // During bootstrapping, callback functions are not enabled yet.
951 if (isolate->bootstrapper()->IsActive()) return YES;
953 if (receiver->IsJSGlobalProxy()) {
954 Object* receiver_context = JSGlobalProxy::cast(receiver)->native_context();
955 if (!receiver_context->IsContext()) return NO;
957 // Get the native context of current top context.
958 // avoid using Isolate::native_context() because it uses Handle.
959 Context* native_context =
960 isolate->context()->global_object()->native_context();
961 if (receiver_context == native_context) return YES;
963 if (Context::cast(receiver_context)->security_token() ==
964 native_context->security_token())
972 bool Isolate::MayNamedAccess(JSObject* receiver, Object* key,
973 v8::AccessType type) {
974 ASSERT(receiver->IsAccessCheckNeeded());
976 // The callers of this method are not expecting a GC.
977 AssertNoAllocation no_gc;
979 // Skip checks for hidden properties access. Note, we do not
980 // require existence of a context in this case.
981 if (key == heap_.hidden_string()) return true;
983 // Check for compatibility between the security tokens in the
984 // current lexical context and the accessed object.
987 MayAccessDecision decision = MayAccessPreCheck(this, receiver, type);
988 if (decision != UNKNOWN) return decision == YES;
990 // Get named access check callback
991 JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
992 if (!constructor->shared()->IsApiFunction()) return false;
995 constructor->shared()->get_api_func_data()->access_check_info();
996 if (data_obj == heap_.undefined_value()) return false;
998 Object* fun_obj = AccessCheckInfo::cast(data_obj)->named_callback();
999 v8::NamedSecurityCallback callback =
1000 v8::ToCData<v8::NamedSecurityCallback>(fun_obj);
1002 if (!callback) return false;
1004 HandleScope scope(this);
1005 Handle<JSObject> receiver_handle(receiver, this);
1006 Handle<Object> key_handle(key, this);
1007 Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
1008 LOG(this, ApiNamedSecurityCheck(key));
1009 bool result = false;
1011 // Leaving JavaScript.
1012 VMState<EXTERNAL> state(this);
1013 result = callback(v8::Utils::ToLocal(receiver_handle),
1014 v8::Utils::ToLocal(key_handle),
1016 v8::Utils::ToLocal(data));
1022 bool Isolate::MayIndexedAccess(JSObject* receiver,
1024 v8::AccessType type) {
1025 ASSERT(receiver->IsAccessCheckNeeded());
1026 // Check for compatibility between the security tokens in the
1027 // current lexical context and the accessed object.
1030 MayAccessDecision decision = MayAccessPreCheck(this, receiver, type);
1031 if (decision != UNKNOWN) return decision == YES;
1033 // Get indexed access check callback
1034 JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
1035 if (!constructor->shared()->IsApiFunction()) return false;
1038 constructor->shared()->get_api_func_data()->access_check_info();
1039 if (data_obj == heap_.undefined_value()) return false;
1041 Object* fun_obj = AccessCheckInfo::cast(data_obj)->indexed_callback();
1042 v8::IndexedSecurityCallback callback =
1043 v8::ToCData<v8::IndexedSecurityCallback>(fun_obj);
1045 if (!callback) return false;
1047 HandleScope scope(this);
1048 Handle<JSObject> receiver_handle(receiver, this);
1049 Handle<Object> data(AccessCheckInfo::cast(data_obj)->data(), this);
1050 LOG(this, ApiIndexedSecurityCheck(index));
1051 bool result = false;
1053 // Leaving JavaScript.
1054 VMState<EXTERNAL> state(this);
1055 result = callback(v8::Utils::ToLocal(receiver_handle),
1058 v8::Utils::ToLocal(data));
1064 const char* const Isolate::kStackOverflowMessage =
1065 "Uncaught RangeError: Maximum call stack size exceeded";
1068 Failure* Isolate::StackOverflow() {
1069 HandleScope scope(this);
1070 // At this point we cannot create an Error object using its javascript
1071 // constructor. Instead, we copy the pre-constructed boilerplate and
1072 // attach the stack trace as a hidden property.
1073 Handle<String> key = factory()->stack_overflow_string();
1074 Handle<JSObject> boilerplate =
1075 Handle<JSObject>::cast(GetProperty(this, js_builtins_object(), key));
1076 Handle<JSObject> exception = Copy(boilerplate);
1077 DoThrow(*exception, NULL);
1079 // Get stack trace limit.
1080 Handle<Object> error = GetProperty(js_builtins_object(), "$Error");
1081 if (!error->IsJSObject()) return Failure::Exception();
1082 Handle<Object> stack_trace_limit =
1083 GetProperty(Handle<JSObject>::cast(error), "stackTraceLimit");
1084 if (!stack_trace_limit->IsNumber()) return Failure::Exception();
1085 double dlimit = stack_trace_limit->Number();
1086 int limit = std::isnan(dlimit) ? 0 : static_cast<int>(dlimit);
1088 Handle<JSArray> stack_trace = CaptureSimpleStackTrace(
1089 exception, factory()->undefined_value(), limit);
1090 JSObject::SetHiddenProperty(exception,
1091 factory()->hidden_stack_trace_string(),
1093 return Failure::Exception();
1097 Failure* Isolate::TerminateExecution() {
1098 DoThrow(heap_.termination_exception(), NULL);
1099 return Failure::Exception();
1103 void Isolate::CancelTerminateExecution() {
1104 if (try_catch_handler()) {
1105 try_catch_handler()->has_terminated_ = false;
1107 if (has_pending_exception() &&
1108 pending_exception() == heap_.termination_exception()) {
1109 thread_local_top()->external_caught_exception_ = false;
1110 clear_pending_exception();
1112 if (has_scheduled_exception() &&
1113 scheduled_exception() == heap_.termination_exception()) {
1114 thread_local_top()->external_caught_exception_ = false;
1115 clear_scheduled_exception();
1120 Failure* Isolate::Throw(Object* exception, MessageLocation* location) {
1121 DoThrow(exception, location);
1122 return Failure::Exception();
1126 Failure* Isolate::ReThrow(MaybeObject* exception) {
1127 bool can_be_caught_externally = false;
1128 bool catchable_by_javascript = is_catchable_by_javascript(exception);
1129 ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
1131 thread_local_top()->catcher_ = can_be_caught_externally ?
1132 try_catch_handler() : NULL;
1134 // Set the exception being re-thrown.
1135 set_pending_exception(exception);
1136 if (exception->IsFailure()) return exception->ToFailureUnchecked();
1137 return Failure::Exception();
1141 Failure* Isolate::ThrowIllegalOperation() {
1142 return Throw(heap_.illegal_access_string());
1146 void Isolate::ScheduleThrow(Object* exception) {
1147 // When scheduling a throw we first throw the exception to get the
1148 // error reporting if it is uncaught before rescheduling it.
1150 PropagatePendingExceptionToExternalTryCatch();
1151 if (has_pending_exception()) {
1152 thread_local_top()->scheduled_exception_ = pending_exception();
1153 thread_local_top()->external_caught_exception_ = false;
1154 clear_pending_exception();
1159 Failure* Isolate::PromoteScheduledException() {
1160 MaybeObject* thrown = scheduled_exception();
1161 clear_scheduled_exception();
1162 // Re-throw the exception to avoid getting repeated error reporting.
1163 return ReThrow(thrown);
1167 void Isolate::PrintCurrentStackTrace(FILE* out) {
1168 StackTraceFrameIterator it(this);
1169 while (!it.done()) {
1170 HandleScope scope(this);
1171 // Find code position if recorded in relocation info.
1172 JavaScriptFrame* frame = it.frame();
1173 int pos = frame->LookupCode()->SourcePosition(frame->pc());
1174 Handle<Object> pos_obj(Smi::FromInt(pos), this);
1175 // Fetch function and receiver.
1176 Handle<JSFunction> fun(JSFunction::cast(frame->function()));
1177 Handle<Object> recv(frame->receiver(), this);
1178 // Advance to the next JavaScript frame and determine if the
1179 // current frame is the top-level frame.
1181 Handle<Object> is_top_level = it.done()
1182 ? factory()->true_value()
1183 : factory()->false_value();
1184 // Generate and print stack trace line.
1185 Handle<String> line =
1186 Execution::GetStackTraceLine(recv, fun, pos_obj, is_top_level);
1187 if (line->length() > 0) {
1195 void Isolate::ComputeLocation(MessageLocation* target) {
1196 *target = MessageLocation(Handle<Script>(heap_.empty_script()), -1, -1);
1197 StackTraceFrameIterator it(this);
1199 JavaScriptFrame* frame = it.frame();
1200 JSFunction* fun = JSFunction::cast(frame->function());
1201 Object* script = fun->shared()->script();
1202 if (script->IsScript() &&
1203 !(Script::cast(script)->source()->IsUndefined())) {
1204 int pos = frame->LookupCode()->SourcePosition(frame->pc());
1205 // Compute the location from the function and the reloc info.
1206 Handle<Script> casted_script(Script::cast(script));
1207 *target = MessageLocation(casted_script, pos, pos + 1);
1213 bool Isolate::ShouldReportException(bool* can_be_caught_externally,
1214 bool catchable_by_javascript) {
1215 // Find the top-most try-catch handler.
1216 StackHandler* handler =
1217 StackHandler::FromAddress(Isolate::handler(thread_local_top()));
1218 while (handler != NULL && !handler->is_catch()) {
1219 handler = handler->next();
1222 // Get the address of the external handler so we can compare the address to
1223 // determine which one is closer to the top of the stack.
1224 Address external_handler_address =
1225 thread_local_top()->try_catch_handler_address();
1227 // The exception has been externally caught if and only if there is
1228 // an external handler which is on top of the top-most try-catch
1230 *can_be_caught_externally = external_handler_address != NULL &&
1231 (handler == NULL || handler->address() > external_handler_address ||
1232 !catchable_by_javascript);
1234 if (*can_be_caught_externally) {
1235 // Only report the exception if the external handler is verbose.
1236 return try_catch_handler()->is_verbose_;
1238 // Report the exception if it isn't caught by JavaScript code.
1239 return handler == NULL;
1244 bool Isolate::IsErrorObject(Handle<Object> obj) {
1245 if (!obj->IsJSObject()) return false;
1248 *(factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("$Error")));
1249 Object* error_constructor =
1250 js_builtins_object()->GetPropertyNoExceptionThrown(error_key);
1252 for (Object* prototype = *obj; !prototype->IsNull();
1253 prototype = prototype->GetPrototype(this)) {
1254 if (!prototype->IsJSObject()) return false;
1255 if (JSObject::cast(prototype)->map()->constructor() == error_constructor) {
1262 static int fatal_exception_depth = 0;
1264 void Isolate::DoThrow(Object* exception, MessageLocation* location) {
1265 ASSERT(!has_pending_exception());
1267 HandleScope scope(this);
1268 Handle<Object> exception_handle(exception, this);
1270 // Determine reporting and whether the exception is caught externally.
1271 bool catchable_by_javascript = is_catchable_by_javascript(exception);
1272 bool can_be_caught_externally = false;
1273 bool should_report_exception =
1274 ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
1275 bool report_exception = catchable_by_javascript && should_report_exception;
1276 bool try_catch_needs_message =
1277 can_be_caught_externally && try_catch_handler()->capture_message_;
1278 bool bootstrapping = bootstrapper()->IsActive();
1280 #ifdef ENABLE_DEBUGGER_SUPPORT
1281 // Notify debugger of exception.
1282 if (catchable_by_javascript) {
1283 debugger_->OnException(exception_handle, report_exception);
1287 // Generate the message if required.
1288 if (report_exception || try_catch_needs_message) {
1289 MessageLocation potential_computed_location;
1290 if (location == NULL) {
1291 // If no location was specified we use a computed one instead.
1292 ComputeLocation(&potential_computed_location);
1293 location = &potential_computed_location;
1295 // It's not safe to try to make message objects or collect stack traces
1296 // while the bootstrapper is active since the infrastructure may not have
1297 // been properly initialized.
1298 if (!bootstrapping) {
1299 Handle<String> stack_trace;
1300 if (FLAG_trace_exception) stack_trace = StackTraceString();
1301 Handle<JSArray> stack_trace_object;
1302 if (capture_stack_trace_for_uncaught_exceptions_) {
1303 if (IsErrorObject(exception_handle)) {
1304 // We fetch the stack trace that corresponds to this error object.
1305 String* key = heap()->hidden_stack_trace_string();
1306 Object* stack_property =
1307 JSObject::cast(*exception_handle)->GetHiddenProperty(key);
1308 // Property lookup may have failed. In this case it's probably not
1309 // a valid Error object.
1310 if (stack_property->IsJSArray()) {
1311 stack_trace_object = Handle<JSArray>(JSArray::cast(stack_property));
1314 if (stack_trace_object.is_null()) {
1315 // Not an error object, we capture at throw site.
1316 stack_trace_object = CaptureCurrentStackTrace(
1317 stack_trace_for_uncaught_exceptions_frame_limit_,
1318 stack_trace_for_uncaught_exceptions_options_);
1322 Handle<Object> exception_arg = exception_handle;
1323 // If the exception argument is a custom object, turn it into a string
1324 // before throwing as uncaught exception. Note that the pending
1325 // exception object to be set later must not be turned into a string.
1326 if (exception_arg->IsJSObject() && !IsErrorObject(exception_arg)) {
1327 bool failed = false;
1328 exception_arg = Execution::ToDetailString(exception_arg, &failed);
1330 exception_arg = factory()->InternalizeOneByteString(
1331 STATIC_ASCII_VECTOR("exception"));
1334 Handle<Object> message_obj = MessageHandler::MakeMessageObject(
1335 "uncaught_exception",
1337 HandleVector<Object>(&exception_arg, 1),
1339 stack_trace_object);
1340 thread_local_top()->pending_message_obj_ = *message_obj;
1341 if (location != NULL) {
1342 thread_local_top()->pending_message_script_ = *location->script();
1343 thread_local_top()->pending_message_start_pos_ = location->start_pos();
1344 thread_local_top()->pending_message_end_pos_ = location->end_pos();
1347 // If the abort-on-uncaught-exception flag is specified, abort on any
1348 // exception not caught by JavaScript, even when an external handler is
1349 // present. This flag is intended for use by JavaScript developers, so
1350 // print a user-friendly stack trace (not an internal one).
1351 if (fatal_exception_depth == 0 &&
1352 FLAG_abort_on_uncaught_exception &&
1353 (report_exception || can_be_caught_externally)) {
1354 fatal_exception_depth++;
1357 *MessageHandler::GetLocalizedMessage(this, message_obj));
1358 PrintCurrentStackTrace(stderr);
1361 } else if (location != NULL && !location->script().is_null()) {
1362 // We are bootstrapping and caught an error where the location is set
1363 // and we have a script for the location.
1364 // In this case we could have an extension (or an internal error
1365 // somewhere) and we print out the line number at which the error occured
1366 // to the console for easier debugging.
1367 int line_number = GetScriptLineNumberSafe(location->script(),
1368 location->start_pos());
1369 if (exception->IsString()) {
1371 "Extension or internal compilation error: %s in %s at line %d.\n",
1372 *String::cast(exception)->ToCString(),
1373 *String::cast(location->script()->name())->ToCString(),
1377 "Extension or internal compilation error in %s at line %d.\n",
1378 *String::cast(location->script()->name())->ToCString(),
1384 // Save the message for reporting if the the exception remains uncaught.
1385 thread_local_top()->has_pending_message_ = report_exception;
1387 // Do not forget to clean catcher_ if currently thrown exception cannot
1388 // be caught. If necessary, ReThrow will update the catcher.
1389 thread_local_top()->catcher_ = can_be_caught_externally ?
1390 try_catch_handler() : NULL;
1392 set_pending_exception(*exception_handle);
1396 bool Isolate::IsExternallyCaught() {
1397 ASSERT(has_pending_exception());
1399 if ((thread_local_top()->catcher_ == NULL) ||
1400 (try_catch_handler() != thread_local_top()->catcher_)) {
1401 // When throwing the exception, we found no v8::TryCatch
1402 // which should care about this exception.
1406 if (!is_catchable_by_javascript(pending_exception())) {
1410 // Get the address of the external handler so we can compare the address to
1411 // determine which one is closer to the top of the stack.
1412 Address external_handler_address =
1413 thread_local_top()->try_catch_handler_address();
1414 ASSERT(external_handler_address != NULL);
1416 // The exception has been externally caught if and only if there is
1417 // an external handler which is on top of the top-most try-finally
1419 // There should be no try-catch blocks as they would prohibit us from
1420 // finding external catcher in the first place (see catcher_ check above).
1422 // Note, that finally clause would rethrow an exception unless it's
1423 // aborted by jumps in control flow like return, break, etc. and we'll
1424 // have another chances to set proper v8::TryCatch.
1425 StackHandler* handler =
1426 StackHandler::FromAddress(Isolate::handler(thread_local_top()));
1427 while (handler != NULL && handler->address() < external_handler_address) {
1428 ASSERT(!handler->is_catch());
1429 if (handler->is_finally()) return false;
1431 handler = handler->next();
1438 void Isolate::ReportPendingMessages() {
1439 ASSERT(has_pending_exception());
1440 PropagatePendingExceptionToExternalTryCatch();
1442 // If the pending exception is OutOfMemoryException set out_of_memory in
1443 // the native context. Note: We have to mark the native context here
1444 // since the GenerateThrowOutOfMemory stub cannot make a RuntimeCall to
1446 HandleScope scope(this);
1447 if (thread_local_top_.pending_exception_->IsOutOfMemory()) {
1448 context()->mark_out_of_memory();
1449 } else if (thread_local_top_.pending_exception_ ==
1450 heap()->termination_exception()) {
1451 // Do nothing: if needed, the exception has been already propagated to
1454 if (thread_local_top_.has_pending_message_) {
1455 thread_local_top_.has_pending_message_ = false;
1456 if (!thread_local_top_.pending_message_obj_->IsTheHole()) {
1457 HandleScope scope(this);
1458 Handle<Object> message_obj(thread_local_top_.pending_message_obj_,
1460 if (thread_local_top_.pending_message_script_ != NULL) {
1461 Handle<Script> script(thread_local_top_.pending_message_script_);
1462 int start_pos = thread_local_top_.pending_message_start_pos_;
1463 int end_pos = thread_local_top_.pending_message_end_pos_;
1464 MessageLocation location(script, start_pos, end_pos);
1465 MessageHandler::ReportMessage(this, &location, message_obj);
1467 MessageHandler::ReportMessage(this, NULL, message_obj);
1472 clear_pending_message();
1476 MessageLocation Isolate::GetMessageLocation() {
1477 ASSERT(has_pending_exception());
1479 if (!thread_local_top_.pending_exception_->IsOutOfMemory() &&
1480 thread_local_top_.pending_exception_ != heap()->termination_exception() &&
1481 thread_local_top_.has_pending_message_ &&
1482 !thread_local_top_.pending_message_obj_->IsTheHole() &&
1483 thread_local_top_.pending_message_script_ != NULL) {
1484 Handle<Script> script(thread_local_top_.pending_message_script_);
1485 int start_pos = thread_local_top_.pending_message_start_pos_;
1486 int end_pos = thread_local_top_.pending_message_end_pos_;
1487 return MessageLocation(script, start_pos, end_pos);
1490 return MessageLocation();
1494 void Isolate::TraceException(bool flag) {
1495 FLAG_trace_exception = flag; // TODO(isolates): This is an unfortunate use.
1499 bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
1500 ASSERT(has_pending_exception());
1501 PropagatePendingExceptionToExternalTryCatch();
1503 // Always reschedule out of memory exceptions.
1504 if (!is_out_of_memory()) {
1505 bool is_termination_exception =
1506 pending_exception() == heap_.termination_exception();
1508 // Do not reschedule the exception if this is the bottom call.
1509 bool clear_exception = is_bottom_call;
1511 if (is_termination_exception) {
1512 if (is_bottom_call) {
1513 thread_local_top()->external_caught_exception_ = false;
1514 clear_pending_exception();
1517 } else if (thread_local_top()->external_caught_exception_) {
1518 // If the exception is externally caught, clear it if there are no
1519 // JavaScript frames on the way to the C++ frame that has the
1520 // external handler.
1521 ASSERT(thread_local_top()->try_catch_handler_address() != NULL);
1522 Address external_handler_address =
1523 thread_local_top()->try_catch_handler_address();
1524 JavaScriptFrameIterator it(this);
1525 if (it.done() || (it.frame()->sp() > external_handler_address)) {
1526 clear_exception = true;
1530 // Clear the exception if needed.
1531 if (clear_exception) {
1532 thread_local_top()->external_caught_exception_ = false;
1533 clear_pending_exception();
1538 // Reschedule the exception.
1539 thread_local_top()->scheduled_exception_ = pending_exception();
1540 clear_pending_exception();
1545 void Isolate::SetCaptureStackTraceForUncaughtExceptions(
1548 StackTrace::StackTraceOptions options) {
1549 capture_stack_trace_for_uncaught_exceptions_ = capture;
1550 stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
1551 stack_trace_for_uncaught_exceptions_options_ = options;
1555 bool Isolate::is_out_of_memory() {
1556 if (has_pending_exception()) {
1557 MaybeObject* e = pending_exception();
1558 if (e->IsFailure() && Failure::cast(e)->IsOutOfMemoryException()) {
1562 if (has_scheduled_exception()) {
1563 MaybeObject* e = scheduled_exception();
1564 if (e->IsFailure() && Failure::cast(e)->IsOutOfMemoryException()) {
1572 Handle<Context> Isolate::native_context() {
1573 return Handle<Context>(context()->global_object()->native_context());
1577 Handle<Context> Isolate::global_context() {
1578 return Handle<Context>(context()->global_object()->global_context());
1582 Handle<Context> Isolate::GetCallingNativeContext() {
1583 JavaScriptFrameIterator it(this);
1584 #ifdef ENABLE_DEBUGGER_SUPPORT
1585 if (debug_->InDebugger()) {
1586 while (!it.done()) {
1587 JavaScriptFrame* frame = it.frame();
1588 Context* context = Context::cast(frame->context());
1589 if (context->native_context() == *debug_->debug_context()) {
1596 #endif // ENABLE_DEBUGGER_SUPPORT
1597 if (it.done()) return Handle<Context>::null();
1598 JavaScriptFrame* frame = it.frame();
1599 Context* context = Context::cast(frame->context());
1600 return Handle<Context>(context->native_context());
1604 char* Isolate::ArchiveThread(char* to) {
1605 OS::MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
1606 sizeof(ThreadLocalTop));
1607 InitializeThreadLocal();
1608 clear_pending_exception();
1609 clear_pending_message();
1610 clear_scheduled_exception();
1611 return to + sizeof(ThreadLocalTop);
1615 char* Isolate::RestoreThread(char* from) {
1616 OS::MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
1617 sizeof(ThreadLocalTop));
1618 // This might be just paranoia, but it seems to be needed in case a
1619 // thread_local_top_ is restored on a separate OS thread.
1620 #ifdef USE_SIMULATOR
1621 #ifdef V8_TARGET_ARCH_ARM
1622 thread_local_top()->simulator_ = Simulator::current(this);
1623 #elif V8_TARGET_ARCH_MIPS
1624 thread_local_top()->simulator_ = Simulator::current(this);
1627 ASSERT(context() == NULL || context()->IsContext());
1628 return from + sizeof(ThreadLocalTop);
1632 Isolate::ThreadDataTable::ThreadDataTable()
1637 Isolate::ThreadDataTable::~ThreadDataTable() {
1638 // TODO(svenpanne) The assertion below would fire if an embedder does not
1639 // cleanly dispose all Isolates before disposing v8, so we are conservative
1640 // and leave it out for now.
1641 // ASSERT_EQ(NULL, list_);
1645 Isolate::PerIsolateThreadData*
1646 Isolate::ThreadDataTable::Lookup(Isolate* isolate,
1647 ThreadId thread_id) {
1648 for (PerIsolateThreadData* data = list_; data != NULL; data = data->next_) {
1649 if (data->Matches(isolate, thread_id)) return data;
1655 void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
1656 if (list_ != NULL) list_->prev_ = data;
1657 data->next_ = list_;
1662 void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
1663 if (list_ == data) list_ = data->next_;
1664 if (data->next_ != NULL) data->next_->prev_ = data->prev_;
1665 if (data->prev_ != NULL) data->prev_->next_ = data->next_;
1670 void Isolate::ThreadDataTable::Remove(Isolate* isolate,
1671 ThreadId thread_id) {
1672 PerIsolateThreadData* data = Lookup(isolate, thread_id);
1679 void Isolate::ThreadDataTable::RemoveAllThreads(Isolate* isolate) {
1680 PerIsolateThreadData* data = list_;
1681 while (data != NULL) {
1682 PerIsolateThreadData* next = data->next_;
1683 if (data->isolate() == isolate) Remove(data);
1690 #define TRACE_ISOLATE(tag) \
1692 if (FLAG_trace_isolates) { \
1693 PrintF("Isolate %p (id %d)" #tag "\n", \
1694 reinterpret_cast<void*>(this), id()); \
1698 #define TRACE_ISOLATE(tag)
1703 : state_(UNINITIALIZED),
1704 embedder_data_(NULL),
1706 stack_trace_nesting_level_(0),
1707 incomplete_message_(NULL),
1708 preallocated_memory_thread_(NULL),
1709 preallocated_message_space_(NULL),
1710 bootstrapper_(NULL),
1711 runtime_profiler_(NULL),
1712 compilation_cache_(NULL),
1715 // Must be initialized early to allow v8::SetResourceConstraints calls.
1716 break_access_(OS::CreateMutex()),
1717 debugger_initialized_(false),
1718 // Must be initialized early to allow v8::Debug calls.
1719 debugger_access_(OS::CreateMutex()),
1723 deoptimizer_data_(NULL),
1724 capture_stack_trace_for_uncaught_exceptions_(false),
1725 stack_trace_for_uncaught_exceptions_frame_limit_(0),
1726 stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
1727 transcendental_cache_(NULL),
1728 memory_allocator_(NULL),
1729 keyed_lookup_cache_(NULL),
1730 context_slot_cache_(NULL),
1731 descriptor_lookup_cache_(NULL),
1732 handle_scope_implementer_(NULL),
1733 unicode_cache_(NULL),
1734 runtime_zone_(this),
1737 preallocated_storage_preallocated_(false),
1738 inner_pointer_to_code_cache_(NULL),
1739 write_iterator_(NULL),
1740 global_handles_(NULL),
1741 context_switcher_(NULL),
1742 thread_manager_(NULL),
1743 fp_stubs_generated_(false),
1744 has_installed_extensions_(false),
1745 string_tracker_(NULL),
1746 regexp_stack_(NULL),
1748 code_stub_interface_descriptors_(NULL),
1749 context_exit_happened_(false),
1750 cpu_profiler_(NULL),
1751 heap_profiler_(NULL),
1752 deferred_handles_head_(NULL),
1753 optimizing_compiler_thread_(this),
1754 marking_thread_(NULL),
1755 sweeper_thread_(NULL) {
1756 id_ = NoBarrier_AtomicIncrement(&isolate_counter_, 1);
1757 TRACE_ISOLATE(constructor);
1759 memset(isolate_addresses_, 0,
1760 sizeof(isolate_addresses_[0]) * (kIsolateAddressCount + 1));
1762 heap_.isolate_ = this;
1763 stack_guard_.isolate_ = this;
1765 // ThreadManager is initialized early to support locking an isolate
1766 // before it is entered.
1767 thread_manager_ = new ThreadManager();
1768 thread_manager_->isolate_ = this;
1770 #if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
1771 defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
1772 simulator_initialized_ = false;
1773 simulator_i_cache_ = NULL;
1774 simulator_redirection_ = NULL;
1778 // heap_histograms_ initializes itself.
1779 memset(&js_spill_information_, 0, sizeof(js_spill_information_));
1780 memset(code_kind_statistics_, 0,
1781 sizeof(code_kind_statistics_[0]) * Code::NUMBER_OF_KINDS);
1783 compiler_thread_handle_deref_state_ = HandleDereferenceGuard::ALLOW;
1784 execution_thread_handle_deref_state_ = HandleDereferenceGuard::ALLOW;
1787 #ifdef ENABLE_DEBUGGER_SUPPORT
1792 handle_scope_data_.Initialize();
1794 #define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
1795 name##_ = (initial_value);
1796 ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
1797 #undef ISOLATE_INIT_EXECUTE
1799 #define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
1800 memset(name##_, 0, sizeof(type) * length);
1801 ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
1802 #undef ISOLATE_INIT_ARRAY_EXECUTE
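//
// The two ISOLATE_INIT blocks above use the X-macro pattern: each list macro
// invokes the supplied macro once per declared field. A minimal sketch for a
// hypothetical scalar entry V(int, foo_counter, 0) and a hypothetical array
// entry V(int, foo_array, 4) (neither is a real Isolate field):
//
//   ISOLATE_INIT_EXECUTE(int, foo_counter, 0)
//       // expands to:  foo_counter_ = (0);
//   ISOLATE_INIT_ARRAY_EXECUTE(int, foo_array, 4)
//       // expands to:  memset(foo_array_, 0, sizeof(int) * 4);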
1806 void Isolate::TearDown() {
1807 TRACE_ISOLATE(tear_down);
1809 // Temporarily set this isolate as current so that various parts of
1810 // the isolate can access it in their destructors without having a
1811 // direct pointer. We don't use Enter/Exit here to avoid
1812 // initializing the thread data.
1813 PerIsolateThreadData* saved_data = CurrentPerIsolateThreadData();
1814 Isolate* saved_isolate = UncheckedCurrent();
1815 SetIsolateThreadLocals(this, NULL);
1819 { ScopedLock lock(process_wide_mutex_);
1820 thread_data_table_->RemoveAllThreads(this);
1823 if (serialize_partial_snapshot_cache_ != NULL) {
1824 delete[] serialize_partial_snapshot_cache_;
1825 serialize_partial_snapshot_cache_ = NULL;
1828 if (!IsDefaultIsolate()) {
1832 // Restore the previous current isolate.
1833 SetIsolateThreadLocals(saved_isolate, saved_data);
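//
// A minimal embedder-side sketch of the shutdown sequence that ends up in
// TearDown(), assuming the public v8::Isolate API of this era (Dispose() is
// expected to route into TearDown() for non-default isolates):
//
//   v8::Isolate* isolate = v8::Isolate::New();
//   isolate->Enter();
//   // ... create contexts, run scripts ...
//   isolate->Exit();
//   isolate->Dispose();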
1837 void Isolate::GlobalTearDown() {
1838 delete thread_data_table_;
1842 void Isolate::Deinit() {
1843 if (state_ == INITIALIZED) {
1844 TRACE_ISOLATE(deinit);
1846 if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Stop();
1848 if (FLAG_sweeper_threads > 0) {
1849 for (int i = 0; i < FLAG_sweeper_threads; i++) {
1850 sweeper_thread_[i]->Stop();
1851 delete sweeper_thread_[i];
1853 delete[] sweeper_thread_;
1856 if (FLAG_marking_threads > 0) {
1857 for (int i = 0; i < FLAG_marking_threads; i++) {
1858 marking_thread_[i]->Stop();
1859 delete marking_thread_[i];
1861 delete[] marking_thread_;
1864 if (FLAG_hydrogen_stats) GetHStatistics()->Print();
1866 // We must stop the logger before we tear down other components.
1867 Sampler* sampler = logger_->sampler();
1868 if (sampler && sampler->IsActive()) sampler->Stop();
1870 delete deoptimizer_data_;
1871 deoptimizer_data_ = NULL;
1872 if (FLAG_preemption) {
1873 v8::Locker locker(reinterpret_cast<v8::Isolate*>(this));
1874 v8::Locker::StopPreemption();
1876 builtins_.TearDown();
1877 bootstrapper_->TearDown();
1879 // Remove the external reference to the preallocated stack memory.
1880 delete preallocated_message_space_;
1881 preallocated_message_space_ = NULL;
1882 PreallocatedMemoryThreadStop();
1884 if (runtime_profiler_ != NULL) {
1885 runtime_profiler_->TearDown();
1886 delete runtime_profiler_;
1887 runtime_profiler_ = NULL;
1890 logger_->TearDown();
1892 delete heap_profiler_;
1893 heap_profiler_ = NULL;
1894 delete cpu_profiler_;
1895 cpu_profiler_ = NULL;
1897 // The default isolate is re-initializable due to legacy API.
1898 state_ = UNINITIALIZED;
1903 void Isolate::PushToPartialSnapshotCache(Object* obj) {
1904 int length = serialize_partial_snapshot_cache_length();
1905 int capacity = serialize_partial_snapshot_cache_capacity();
1907 if (length >= capacity) {
1908 int new_capacity = static_cast<int>((capacity + 10) * 1.2);
1909 Object** new_array = new Object*[new_capacity];
1910 for (int i = 0; i < length; i++) {
1911 new_array[i] = serialize_partial_snapshot_cache()[i];
1913 if (capacity != 0) delete[] serialize_partial_snapshot_cache();
1914 set_serialize_partial_snapshot_cache(new_array);
1915 set_serialize_partial_snapshot_cache_capacity(new_capacity);
1918 serialize_partial_snapshot_cache()[length] = obj;
1919 set_serialize_partial_snapshot_cache_length(length + 1);
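//
// Growth policy: new_capacity = (capacity + 10) * 1.2, truncated to int.
// Starting from an empty cache (capacity 0) the successive capacities are
// 12, 26, 43, 63, 87, 116, ... - roughly 20% growth plus a constant headroom
// of 10, so small caches still grow by a useful amount.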
1923 void Isolate::SetIsolateThreadLocals(Isolate* isolate,
1924 PerIsolateThreadData* data) {
1925 Thread::SetThreadLocal(isolate_key_, isolate);
1926 Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
1930 Isolate::~Isolate() {
1931 TRACE_ISOLATE(destructor);
1933 // Has to be called while counters_ are still alive.
1934 runtime_zone_.DeleteKeptSegment();
1936 delete[] assembler_spare_buffer_;
1937 assembler_spare_buffer_ = NULL;
1939 delete unicode_cache_;
1940 unicode_cache_ = NULL;
1945 delete[] code_stub_interface_descriptors_;
1946 code_stub_interface_descriptors_ = NULL;
1948 delete regexp_stack_;
1949 regexp_stack_ = NULL;
1951 delete descriptor_lookup_cache_;
1952 descriptor_lookup_cache_ = NULL;
1953 delete context_slot_cache_;
1954 context_slot_cache_ = NULL;
1955 delete keyed_lookup_cache_;
1956 keyed_lookup_cache_ = NULL;
1958 delete transcendental_cache_;
1959 transcendental_cache_ = NULL;
1962 delete stats_table_;
1963 stats_table_ = NULL;
1971 delete handle_scope_implementer_;
1972 handle_scope_implementer_ = NULL;
1973 delete break_access_;
1974 break_access_ = NULL;
1975 delete debugger_access_;
1976 debugger_access_ = NULL;
1978 delete compilation_cache_;
1979 compilation_cache_ = NULL;
1980 delete bootstrapper_;
1981 bootstrapper_ = NULL;
1982 delete inner_pointer_to_code_cache_;
1983 inner_pointer_to_code_cache_ = NULL;
1984 delete write_iterator_;
1985 write_iterator_ = NULL;
1987 delete context_switcher_;
1988 context_switcher_ = NULL;
1989 delete thread_manager_;
1990 thread_manager_ = NULL;
1992 delete string_tracker_;
1993 string_tracker_ = NULL;
1995 delete memory_allocator_;
1996 memory_allocator_ = NULL;
1999 delete global_handles_;
2000 global_handles_ = NULL;
2002 delete external_reference_table_;
2003 external_reference_table_ = NULL;
2005 #ifdef ENABLE_DEBUGGER_SUPPORT
2014 void Isolate::InitializeThreadLocal() {
2015 thread_local_top_.isolate_ = this;
2016 thread_local_top_.Initialize();
2020 void Isolate::PropagatePendingExceptionToExternalTryCatch() {
2021 ASSERT(has_pending_exception());
2023 bool external_caught = IsExternallyCaught();
2024 thread_local_top_.external_caught_exception_ = external_caught;
2026 if (!external_caught) return;
2028 if (thread_local_top_.pending_exception_->IsOutOfMemory()) {
2029 // Do not propagate the OOM exception: the VM should be killed as soon as possible.
2030 } else if (thread_local_top_.pending_exception_ ==
2031 heap()->termination_exception()) {
2032 try_catch_handler()->can_continue_ = false;
2033 try_catch_handler()->has_terminated_ = true;
2034 try_catch_handler()->exception_ = heap()->null_value();
2036 // At this point all non-object (failure) exceptions have
2037 // been dealt with so this shouldn't fail.
2038 ASSERT(!pending_exception()->IsFailure());
2039 try_catch_handler()->can_continue_ = true;
2040 try_catch_handler()->has_terminated_ = false;
2041 try_catch_handler()->exception_ = pending_exception();
2042 if (!thread_local_top_.pending_message_obj_->IsTheHole()) {
2043 try_catch_handler()->message_ = thread_local_top_.pending_message_obj_;
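//
// A minimal embedder-side sketch of what this propagation feeds, assuming the
// public v8::TryCatch API of this era (script is a previously compiled
// v8::Script handle):
//
//   v8::TryCatch try_catch;
//   v8::Local<v8::Value> result = script->Run();
//   if (try_catch.HasCaught()) {
//     if (!try_catch.CanContinue() && try_catch.HasTerminated()) {
//       // Execution was terminated; the exception slot was set to null above.
//     } else {
//       v8::Local<v8::Value> exception = try_catch.Exception();
//       v8::Local<v8::Message> message = try_catch.Message();
//       // Report the error using exception and message.
//     }
//   }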
2049 void Isolate::InitializeLoggingAndCounters() {
2050 if (logger_ == NULL) {
2051 logger_ = new Logger(this);
2053 if (counters_ == NULL) {
2054 counters_ = new Counters(this);
2059 void Isolate::InitializeDebugger() {
2060 #ifdef ENABLE_DEBUGGER_SUPPORT
2061 ScopedLock lock(debugger_access_);
2062 if (NoBarrier_Load(&debugger_initialized_)) return;
2063 InitializeLoggingAndCounters();
2064 debug_ = new Debug(this);
2065 debugger_ = new Debugger(this);
2066 Release_Store(&debugger_initialized_, true);
2071 bool Isolate::Init(Deserializer* des) {
2072 ASSERT(state_ != INITIALIZED);
2073 ASSERT(Isolate::Current() == this);
2074 TRACE_ISOLATE(init);
2076 // The initialization process does not handle memory exhaustion.
2077 DisallowAllocationFailure disallow_allocation_failure;
2079 InitializeLoggingAndCounters();
2081 InitializeDebugger();
2083 memory_allocator_ = new MemoryAllocator(this);
2084 code_range_ = new CodeRange(this);
2086 // Safe after setting Heap::isolate_, initializing StackGuard and
2087 // ensuring that Isolate::Current() == this.
2088 heap_.SetStackLimits();
2090 #define ASSIGN_ELEMENT(CamelName, hacker_name) \
2091 isolate_addresses_[Isolate::k##CamelName##Address] = \
2092 reinterpret_cast<Address>(hacker_name##_address());
2093 FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
2096 string_tracker_ = new StringTracker();
2097 string_tracker_->isolate_ = this;
2098 compilation_cache_ = new CompilationCache(this);
2099 transcendental_cache_ = new TranscendentalCache();
2100 keyed_lookup_cache_ = new KeyedLookupCache();
2101 context_slot_cache_ = new ContextSlotCache();
2102 descriptor_lookup_cache_ = new DescriptorLookupCache();
2103 unicode_cache_ = new UnicodeCache();
2104 inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
2105 write_iterator_ = new ConsStringIteratorOp();
2106 global_handles_ = new GlobalHandles(this);
2107 bootstrapper_ = new Bootstrapper(this);
2108 handle_scope_implementer_ = new HandleScopeImplementer(this);
2109 stub_cache_ = new StubCache(this, runtime_zone());
2110 regexp_stack_ = new RegExpStack();
2111 regexp_stack_->isolate_ = this;
2112 date_cache_ = new DateCache();
2113 code_stub_interface_descriptors_ =
2114 new CodeStubInterfaceDescriptor[CodeStub::NUMBER_OF_IDS];
2115 cpu_profiler_ = new CpuProfiler(this);
2116 heap_profiler_ = new HeapProfiler(heap());
2118 // Enable logging before setting up the heap
2119 logger_->SetUp(this);
2121 // Initialize other runtime facilities
2122 #if defined(USE_SIMULATOR)
2123 #if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
2124 Simulator::Initialize(this);
2129 // Ensure that the thread has a valid stack guard. The v8::Locker object
2130 // will ensure this too, but we don't have to use lockers if we are only
2131 // using one thread.
2132 ExecutionAccess lock(this);
2133 stack_guard_.InitThread(lock);
2136 // SetUp the object heap.
2137 ASSERT(!heap_.HasBeenSetUp());
2138 if (!heap_.SetUp()) {
2139 V8::FatalProcessOutOfMemory("heap setup");
2143 deoptimizer_data_ = new DeoptimizerData(memory_allocator_);
2145 const bool create_heap_objects = (des == NULL);
2146 if (create_heap_objects && !heap_.CreateHeapObjects()) {
2147 V8::FatalProcessOutOfMemory("heap object creation");
2151 if (create_heap_objects) {
2152 // Terminate the cache array with the sentinel so we can iterate.
2153 PushToPartialSnapshotCache(heap_.undefined_value());
2156 InitializeThreadLocal();
2158 bootstrapper_->Initialize(create_heap_objects);
2159 builtins_.SetUp(create_heap_objects);
2161 // Only preallocate on the first initialization.
2162 if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) {
2163 // Start the thread which will set aside some memory.
2164 PreallocatedMemoryThreadStart();
2165 preallocated_message_space_ =
2166 new NoAllocationStringAllocator(
2167 preallocated_memory_thread_->data(),
2168 preallocated_memory_thread_->length());
2169 PreallocatedStorageInit(preallocated_memory_thread_->length() / 4);
2172 if (FLAG_preemption) {
2173 v8::Locker locker(reinterpret_cast<v8::Isolate*>(this));
2174 v8::Locker::StartPreemption(100);
2177 #ifdef ENABLE_DEBUGGER_SUPPORT
2178 debug_->SetUp(create_heap_objects);
2181 // If we are deserializing, read the state into the now-empty heap.
2182 if (!create_heap_objects) {
2185 stub_cache_->Initialize();
2187 // Finish initialization of ThreadLocal after deserialization is done.
2188 clear_pending_exception();
2189 clear_pending_message();
2190 clear_scheduled_exception();
2192 // Deserializing may put strange things in the root array's copy of the stack guard, so restore the stack limits.
2194 heap_.SetStackLimits();
2196 // Quiet the heap NaN if needed on target platform.
2197 if (!create_heap_objects) Assembler::QuietNaN(heap_.nan_value());
2199 runtime_profiler_ = new RuntimeProfiler(this);
2200 runtime_profiler_->SetUp();
2202 // If we are deserializing, log non-function code objects and compiled
2203 // functions found in the snapshot.
2204 if (!create_heap_objects &&
2205 (FLAG_log_code || FLAG_ll_prof || logger_->is_logging_code_events())) {
2206 HandleScope scope(this);
2207 LOG(this, LogCodeObjects());
2208 LOG(this, LogCompiledFunctions());
2211 CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, state_)),
2212 Internals::kIsolateStateOffset);
2213 CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
2214 Internals::kIsolateEmbedderDataOffset);
2215 CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.roots_)),
2216 Internals::kIsolateRootsOffset);
2218 state_ = INITIALIZED;
2219 time_millis_at_init_ = OS::TimeCurrentMillis();
2221 if (!create_heap_objects) {
2222 // Now that the heap is consistent, it's OK to generate the code for the
2223 // deopt entry table that might have been referred to by optimized code in the snapshot.
2225 HandleScope scope(this);
2226 Deoptimizer::EnsureCodeForDeoptimizationEntry(
2229 kDeoptTableSerializeEntryCount - 1);
2232 if (!Serializer::enabled()) {
2233 // Ensure that all stubs which need to be generated ahead of time, but
2234 // cannot be serialized into the snapshot, have been generated.
2235 HandleScope scope(this);
2236 CodeStub::GenerateFPStubs(this);
2237 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(this);
2238 StubFailureTrampolineStub::GenerateAheadOfTime(this);
2239 // TODO(mstarzinger): The following is an ugly hack to make sure the
2240 // interface descriptor is initialized even when stubs have been
2241 // deserialized out of the snapshot without the graph builder.
2242 FastCloneShallowArrayStub stub(FastCloneShallowArrayStub::CLONE_ELEMENTS,
2243 DONT_TRACK_ALLOCATION_SITE, 0);
2244 stub.InitializeInterfaceDescriptor(
2245 this, code_stub_interface_descriptor(CodeStub::FastCloneShallowArray));
2246 CompareNilICStub::InitializeForIsolate(this);
2247 ArrayConstructorStubBase::InstallDescriptors(this);
2250 if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Start();
2252 if (FLAG_parallel_marking && FLAG_marking_threads == 0) {
2253 FLAG_marking_threads = SystemThreadManager::
2254 NumberOfParallelSystemThreads(
2255 SystemThreadManager::PARALLEL_MARKING);
2257 if (FLAG_marking_threads > 0) {
2258 marking_thread_ = new MarkingThread*[FLAG_marking_threads];
2259 for (int i = 0; i < FLAG_marking_threads; i++) {
2260 marking_thread_[i] = new MarkingThread(this);
2261 marking_thread_[i]->Start();
2264 FLAG_parallel_marking = false;
2267 if (FLAG_sweeper_threads == 0) {
2268 if (FLAG_concurrent_sweeping) {
2269 FLAG_sweeper_threads = SystemThreadManager::
2270 NumberOfParallelSystemThreads(
2271 SystemThreadManager::CONCURRENT_SWEEPING);
2272 } else if (FLAG_parallel_sweeping) {
2273 FLAG_sweeper_threads = SystemThreadManager::
2274 NumberOfParallelSystemThreads(
2275 SystemThreadManager::PARALLEL_SWEEPING);
2278 if (FLAG_sweeper_threads > 0) {
2279 sweeper_thread_ = new SweeperThread*[FLAG_sweeper_threads];
2280 for (int i = 0; i < FLAG_sweeper_threads; i++) {
2281 sweeper_thread_[i] = new SweeperThread(this);
2282 sweeper_thread_[i]->Start();
2285 FLAG_concurrent_sweeping = false;
2286 FLAG_parallel_sweeping = false;
2288 if (FLAG_parallel_recompilation &&
2289 SystemThreadManager::NumberOfParallelSystemThreads(
2290 SystemThreadManager::PARALLEL_RECOMPILATION) == 0) {
2291 FLAG_parallel_recompilation = false;
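//
// A sketch of how the thread counts above are typically driven from the
// command line, assuming the usual FLAG_foo <-> --foo flag mapping (d8 is
// the V8 shell):
//
//   d8 --parallel-marking --marking-threads=2 \
//      --concurrent-sweeping --sweeper-threads=2 \
//      --parallel-recompilation script.js
//
// When a count is left at 0, NumberOfParallelSystemThreads() supplies a
// platform-dependent default for the corresponding subsystem.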
2297 // Initialized lazily to allow early
2298 // v8::V8::SetAddHistogramSampleFunction calls.
2299 StatsTable* Isolate::stats_table() {
2300 if (stats_table_ == NULL) {
2301 stats_table_ = new StatsTable;
2303 return stats_table_;
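//
// A minimal sketch of the early calls this lazy initialization is meant to
// allow, assuming the v8::V8 statistics API of this era (the callback names
// below are hypothetical):
//
//   v8::V8::SetCounterFunction(LookupCounterCallback);
//   v8::V8::SetCreateHistogramFunction(CreateHistogramCallback);
//   v8::V8::SetAddHistogramSampleFunction(AddHistogramSampleCallback);
//
// The StatsTable is only created on the first stats_table() call, so the
// callbacks can be installed before the isolate is fully initialized.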
2307 void Isolate::Enter() {
2308 Isolate* current_isolate = NULL;
2309 PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
2310 if (current_data != NULL) {
2311 current_isolate = current_data->isolate_;
2312 ASSERT(current_isolate != NULL);
2313 if (current_isolate == this) {
2314 ASSERT(Current() == this);
2315 ASSERT(entry_stack_ != NULL);
2316 ASSERT(entry_stack_->previous_thread_data == NULL ||
2317 entry_stack_->previous_thread_data->thread_id().Equals(
2318 ThreadId::Current()));
2319 // The same thread re-enters the isolate; no need to re-initialize anything.
2320 entry_stack_->entry_count++;
2325 // Threads can have the default isolate set into TLS as Current but not yet
2326 // have a PerIsolateThreadData for it, since that requires a later phase of
2327 // initialization. For example, a thread might be the one the system used for
2328 // static initializers - in this case the default isolate is set in TLS but
2329 // the thread has not yet entered the isolate. If the PerIsolateThreadData is
2330 // not there, use the isolate set in TLS.
2331 if (current_isolate == NULL) {
2332 current_isolate = Isolate::UncheckedCurrent();
2335 PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
2336 ASSERT(data != NULL);
2337 ASSERT(data->isolate_ == this);
2339 EntryStackItem* item = new EntryStackItem(current_data,
2342 entry_stack_ = item;
2344 SetIsolateThreadLocals(this, data);
2346 // Set the thread id in case this is the first time this thread enters the isolate.
2347 set_thread_id(data->thread_id());
2351 void Isolate::Exit() {
2352 ASSERT(entry_stack_ != NULL);
2353 ASSERT(entry_stack_->previous_thread_data == NULL ||
2354 entry_stack_->previous_thread_data->thread_id().Equals(
2355 ThreadId::Current()));
2357 if (--entry_stack_->entry_count > 0) return;
2359 ASSERT(CurrentPerIsolateThreadData() != NULL);
2360 ASSERT(CurrentPerIsolateThreadData()->isolate_ == this);
2363 EntryStackItem* item = entry_stack_;
2364 entry_stack_ = item->previous_item;
2366 PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
2367 Isolate* previous_isolate = item->previous_isolate;
2371 // Re-initialize the current thread for the isolate it was running in before this one.
2372 SetIsolateThreadLocals(previous_isolate, previous_thread_data);
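//
// A minimal sketch of the nesting that the entry stack supports, assuming the
// public v8::Isolate Enter/Exit API of this era:
//
//   v8::Isolate* isolate = v8::Isolate::New();
//   isolate->Enter();   // pushes an EntryStackItem, entry_count == 1
//   isolate->Enter();   // same thread re-enters, entry_count == 2
//   isolate->Exit();    // entry_count back to 1, isolate still entered
//   isolate->Exit();    // pops the item and restores the previous isolate
//
// v8::Isolate::Scope is the RAII wrapper around this Enter/Exit pair.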
2376 void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
2377 deferred->next_ = deferred_handles_head_;
2378 if (deferred_handles_head_ != NULL) {
2379 deferred_handles_head_->previous_ = deferred;
2381 deferred_handles_head_ = deferred;
2385 void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
2387 // In debug mode, assert that the linked list is well-formed.
2388 DeferredHandles* deferred_iterator = deferred;
2389 while (deferred_iterator->previous_ != NULL) {
2390 deferred_iterator = deferred_iterator->previous_;
2392 ASSERT(deferred_handles_head_ == deferred_iterator);
2394 if (deferred_handles_head_ == deferred) {
2395 deferred_handles_head_ = deferred_handles_head_->next_;
2397 if (deferred->next_ != NULL) {
2398 deferred->next_->previous_ = deferred->previous_;
2400 if (deferred->previous_ != NULL) {
2401 deferred->previous_->next_ = deferred->next_;
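//
// The deferred handle blocks form a doubly-linked list headed by
// deferred_handles_head_; unlinking a block in the middle splices its
// neighbours together. Sketch:
//
//   head -> A <-> B <-> C      UnlinkDeferredHandles(B)
//   head -> A <-> C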
2407 HandleDereferenceGuard::State Isolate::HandleDereferenceGuardState() {
2408 if (execution_thread_handle_deref_state_ == HandleDereferenceGuard::ALLOW &&
2409 compiler_thread_handle_deref_state_ == HandleDereferenceGuard::ALLOW) {
2410 // Shortcut to avoid polling the thread id.
2411 return HandleDereferenceGuard::ALLOW;
2413 if (FLAG_parallel_recompilation &&
2414 optimizing_compiler_thread()->IsOptimizerThread()) {
2415 return compiler_thread_handle_deref_state_;
2417 return execution_thread_handle_deref_state_;
2422 void Isolate::SetHandleDereferenceGuardState(
2423 HandleDereferenceGuard::State state) {
2424 if (FLAG_parallel_recompilation &&
2425 optimizing_compiler_thread()->IsOptimizerThread()) {
2426 compiler_thread_handle_deref_state_ = state;
2428 execution_thread_handle_deref_state_ = state;
2434 HStatistics* Isolate::GetHStatistics() {
2435 if (hstatistics() == NULL) set_hstatistics(new HStatistics());
2436 return hstatistics();
2440 HTracer* Isolate::GetHTracer() {
2441 if (htracer() == NULL) set_htracer(new HTracer(id()));
2446 CodeStubInterfaceDescriptor*
2447 Isolate::code_stub_interface_descriptor(int index) {
2448 return code_stub_interface_descriptors_ + index;
2453 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \
2454 const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
2455 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
2456 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
2457 #undef ISOLATE_FIELD_OFFSET
2460 } } // namespace v8::internal