// Source snapshot: commit 0958da13a10ed550f9c5213442571d98af1e1dc9
// [platform/upstream/nodejs.git] / deps / v8 / src / runtime / runtime-compiler.cc
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #include "src/arguments.h"
8 #include "src/compiler.h"
9 #include "src/deoptimizer.h"
10 #include "src/frames.h"
11 #include "src/full-codegen.h"
12 #include "src/isolate-inl.h"
13 #include "src/runtime/runtime-utils.h"
14 #include "src/v8threads.h"
15 #include "src/vm-state-inl.h"
16
17 namespace v8 {
18 namespace internal {
19
// Entry point for lazy compilation: compiles |function| on first call and
// installs the resulting code object on the function.
RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
#ifdef DEBUG
  // Trace first-time (unoptimized) compilations when --trace-lazy is on.
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  // Compile the target function.
  DCHECK(function->shared()->allows_lazy_compilation());

  Handle<Code> code;
  // Propagates a pending exception (e.g. a syntax error) to the caller.
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
                                     Compiler::GetLazyCode(function));
  // Lazy compilation may also hand back optimized code (e.g. from the
  // optimized code map).
  DCHECK(code->kind() == Code::FUNCTION ||
         code->kind() == Code::OPTIMIZED_FUNCTION);
  function->ReplaceCode(*code);
  return *code;
}
43
44
// Requests optimized compilation of |function|, either synchronously or on
// the concurrent compiler thread, and installs whatever code results.
// args[0]: the function; args[1]: boolean, true for concurrent compilation.
RUNTIME_FUNCTION(Runtime_CompileOptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);
  DCHECK(isolate->use_crankshaft());

  Compiler::ConcurrencyMode mode =
      concurrent ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
  Handle<Code> code;
  Handle<Code> unoptimized(function->shared()->code());
  if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
    // Optimization succeeded, return optimized code.
    // NOTE: in CONCURRENT mode this may be the InOptimizationQueue builtin,
    // with the real optimized code installed later.
    function->ReplaceCode(*code);
  } else {
    // Optimization failed, get unoptimized code.
    if (isolate->has_pending_exception()) {  // Possible stack overflow.
      return isolate->heap()->exception();
    }
    code = Handle<Code>(function->shared()->code(), isolate);
    // The shared code may be a builtin stub (e.g. CompileLazy); in that
    // case produce real unoptimized code before installing it.
    if (code->kind() != Code::FUNCTION &&
        code->kind() != Code::OPTIMIZED_FUNCTION) {
      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
          isolate, code, Compiler::GetUnoptimizedCode(function));
    }
    function->ReplaceCode(*code);
  }

  // On exit the function must hold runnable code, or still be queued for
  // concurrent optimization.
  DCHECK(function->code()->kind() == Code::FUNCTION ||
         function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
         function->IsInOptimizationQueue());
  return function->code();
}
78
79
// Called when a code stub has deoptimized. The Deoptimizer object was
// stashed on the isolate by the deopt entry; grab and dispose of it here.
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 0);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  // Grab() re-enables allocation, which was forbidden during deopt.
  DCHECK(AllowHeapAllocation::IsAllowed());
  delete deoptimizer;
  return isolate->heap()->undefined_value();
}
88
89
90 class ActivationsFinder : public ThreadVisitor {
91  public:
92   Code* code_;
93   bool has_code_activations_;
94
95   explicit ActivationsFinder(Code* code)
96       : code_(code), has_code_activations_(false) {}
97
98   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
99     JavaScriptFrameIterator it(isolate, top);
100     VisitFrames(&it);
101   }
102
103   void VisitFrames(JavaScriptFrameIterator* it) {
104     for (; !it->done(); it->Advance()) {
105       JavaScriptFrame* frame = it->frame();
106       if (code_->contains(frame->pc())) has_code_activations_ = true;
107     }
108   }
109 };
110
111
// Finishes a deoptimization: materializes heap objects for the new
// unoptimized frames, then decides whether the old optimized code can be
// discarded. args[0]: smi-encoded Deoptimizer::BailoutType.
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  // The deopt entry stashed the Deoptimizer on the isolate; take ownership.
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  // |it| now points at the topmost JS frame, which belongs to the
  // deoptimized function.
  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Avoid doing too much work when running with --always-opt and keep
  // the optimized code around.
  if (FLAG_always_opt || type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    // No frame is still running the optimized code: safe to drop it.
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
      // Evict optimized code for this function from the cache so that it
      // doesn't get used for new closures.
      function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                    "notify deoptimized");
    }
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}
169
170
171 static bool IsSuitableForOnStackReplacement(Isolate* isolate,
172                                             Handle<JSFunction> function,
173                                             Handle<Code> current_code) {
174   // Keep track of whether we've succeeded in optimizing.
175   if (!current_code->optimizable()) return false;
176   // If we are trying to do OSR when there are already optimized
177   // activations of the function, it means (a) the function is directly or
178   // indirectly recursive and (b) an optimized invocation has been
179   // deoptimized so that we are currently in an unoptimized activation.
180   // Check for optimized activations of this function.
181   for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
182     JavaScriptFrame* frame = it.frame();
183     if (frame->is_optimized() && frame->function() == *function) return false;
184   }
185
186   return true;
187 }
188
189
// Attempts on-stack replacement: produce optimized code with an OSR entry
// for the loop back edge the caller is currently spinning on, and return it
// so the caller can jump into it. Returns NULL (not a heap object) when no
// usable OSR code is available, which the caller treats as "keep going
// unoptimized". args[0]: the function being OSR'd.
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info.  It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  // Offset of the back edge within the running unoptimized code; identifies
  // which loop we are OSR-ing out of.
  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG


  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  // Only large functions go through the concurrent OSR path; small ones
  // compile fast enough synchronously.
  Compiler::ConcurrencyMode mode =
      isolate->concurrent_osr_enabled() &&
              (function->shared()->ast_node_count() > 512)
          ? Compiler::CONCURRENT
          : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
    if (thread->IsQueuedForOSR(function, ast_id)) {
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      return NULL;
    }

    job = thread->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    // A previously queued concurrent OSR job has finished; collect its code.
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    MaybeHandle<Code> maybe_result =
        Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued.  Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    // A non-negative OsrPcOffset means the code actually has an OSR entry.
    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // TurboFanned OSR code cannot be installed into the function.
        // But the function is obviously hot, so optimize it next time.
        function->ReplaceCode(
            isolate->builtins()->builtin(Builtins::kCompileOptimized));
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  // Make sure the function does not keep pointing at a failed compile
  // artifact (e.g. the optimization-queue builtin).
  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}
314
315
316 RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
317   HandleScope scope(isolate);
318   DCHECK(args.length() == 1);
319   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
320
321   // First check if this is a real stack overflow.
322   StackLimitCheck check(isolate);
323   if (check.JsHasOverflowed()) {
324     SealHandleScope shs(isolate);
325     return isolate->StackOverflow();
326   }
327
328   isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
329   return (function->IsOptimized()) ? function->code()
330                                    : function->shared()->code();
331 }
332
333
334 bool CodeGenerationFromStringsAllowed(Isolate* isolate,
335                                       Handle<Context> context) {
336   DCHECK(context->allow_code_gen_from_strings()->IsFalse());
337   // Check with callback if set.
338   AllowCodeGenerationFromStringsCallback callback =
339       isolate->allow_code_gen_callback();
340   if (callback == NULL) {
341     // No callback set and code generation disallowed.
342     return false;
343   } else {
344     // Callback set. Let it decide if code generation is allowed.
345     VMState<EXTERNAL> state(isolate);
346     return callback(v8::Utils::ToLocal(context));
347   }
348 }
349
350
// Compiles a source string in the native context (used e.g. for the
// Function constructor). args[0]: source string; args[1]: true when only a
// single function literal is allowed; args[2]: smi offset of the source
// within the surrounding script, used to fix up line numbers.
RUNTIME_FUNCTION(Runtime_CompileString) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 3);
  CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(function_literal_only, 1);
  CONVERT_SMI_ARG_CHECKED(source_offset, 2);

  // Extract native context.
  Handle<Context> context(isolate->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, context)) {
    Handle<Object> error_message =
        context->ErrorMessageForCodeGenerationFromStrings();
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewEvalError("code_gen_from_strings",
                              HandleVector<Object>(&error_message, 1)));
  }

  // Compile source string in the native context.
  ParseRestriction restriction = function_literal_only
                                     ? ONLY_SINGLE_FUNCTION_LITERAL
                                     : NO_PARSE_RESTRICTION;
  Handle<SharedFunctionInfo> outer_info(context->closure()->shared(), isolate);
  Handle<JSFunction> fun;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, fun,
      Compiler::GetFunctionFromEval(source, outer_info, context, SLOPPY,
                                    restriction, RelocInfo::kNoPosition));
  if (function_literal_only) {
    // The actual body is wrapped, which shifts line numbers.
    // Compensate by giving the script a negative line offset so reported
    // line numbers match the original source. Only do this once
    // (line_offset still 0).
    Handle<Script> script(Script::cast(fun->shared()->script()), isolate);
    if (script->line_offset() == 0) {
      int line_num = Script::GetLineNumber(script, source_offset);
      script->set_line_offset(Smi::FromInt(-line_num));
    }
  }
  return *fun;
}
392
393
// Compiles an eval() source string in the current (local) context and
// returns the pair (compiled function, receiver). On failure the first
// element is the exception sentinel and the second is NULL, with the actual
// error scheduled on the isolate.
static ObjectPair CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                    Handle<SharedFunctionInfo> outer_info,
                                    Handle<Object> receiver,
                                    LanguageMode language_mode,
                                    int scope_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    // NewEvalError itself can fail (e.g. OOM); only throw when it produced
    // an error object.
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        "code_gen_from_strings", HandleVector<Object>(&error_message, 1));
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return MakePair(isolate->heap()->exception(), NULL);
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, outer_info, context, language_mode,
                                    restriction, scope_position),
      MakePair(isolate->heap()->exception(), NULL));
  return MakePair(*compiled, *receiver);
}
426
427
// Resolves a call that syntactically looks like direct eval. Returns a pair
// (function to call, receiver). args: 0 = callee, 1 = first argument,
// 2 = outer function, 3 = receiver, 4 = smi language mode, 5 = smi scope
// position.
RUNTIME_FUNCTION_RETURN_PAIR(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 6);

  Handle<Object> callee = args.at<Object>(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  // (And even if it is, but the first argument isn't a string, just let
  // execution default to an indirect call to eval, which will also return
  // the first argument without doing anything).
  if (*callee != isolate->native_context()->global_eval_fun() ||
      !args[1]->IsString()) {
    return MakePair(*callee, isolate->heap()->undefined_value());
  }

  DCHECK(args[4]->IsSmi());
  DCHECK(is_valid_language_mode(args.smi_at(4)));
  LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(4));
  DCHECK(args[5]->IsSmi());
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  // Genuine direct eval: compile the string in the current context.
  return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
                           args.at<Object>(3), language_mode, args.smi_at(5));
}
453 }
454 }  // namespace v8::internal