deps: update v8 to 4.3.61.21
[platform/upstream/nodejs.git] / deps / v8 / src / runtime / runtime-compiler.cc
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #include "src/arguments.h"
8 #include "src/compiler.h"
9 #include "src/deoptimizer.h"
10 #include "src/frames.h"
11 #include "src/full-codegen.h"
12 #include "src/isolate-inl.h"
13 #include "src/runtime/runtime-utils.h"
14 #include "src/v8threads.h"
15 #include "src/vm-state-inl.h"
16
17 namespace v8 {
18 namespace internal {
19
20 RUNTIME_FUNCTION(Runtime_CompileLazy) {
21   HandleScope scope(isolate);
22   DCHECK(args.length() == 1);
23   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
24 #ifdef DEBUG
25   if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
26     PrintF("[unoptimized: ");
27     function->PrintName();
28     PrintF("]\n");
29   }
30 #endif
31
32   // Compile the target function.
33   DCHECK(function->shared()->allows_lazy_compilation());
34
35   Handle<Code> code;
36   ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
37                                      Compiler::GetLazyCode(function));
38   DCHECK(code->kind() == Code::FUNCTION ||
39          code->kind() == Code::OPTIMIZED_FUNCTION);
40   function->ReplaceCode(*code);
41   return *code;
42 }
43
44
45 RUNTIME_FUNCTION(Runtime_CompileOptimized) {
46   HandleScope scope(isolate);
47   DCHECK(args.length() == 2);
48   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
49   CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);
50
51   Compiler::ConcurrencyMode mode =
52       concurrent ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
53   Handle<Code> code;
54   Handle<Code> unoptimized(function->shared()->code());
55   if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
56     // Optimization succeeded, return optimized code.
57     function->ReplaceCode(*code);
58   } else {
59     // Optimization failed, get unoptimized code.
60     if (isolate->has_pending_exception()) {  // Possible stack overflow.
61       return isolate->heap()->exception();
62     }
63     code = Handle<Code>(function->shared()->code(), isolate);
64     if (code->kind() != Code::FUNCTION &&
65         code->kind() != Code::OPTIMIZED_FUNCTION) {
66       ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
67           isolate, code, Compiler::GetUnoptimizedCode(function));
68     }
69     function->ReplaceCode(*code);
70   }
71
72   DCHECK(function->code()->kind() == Code::FUNCTION ||
73          function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
74          function->IsInOptimizationQueue());
75   return function->code();
76 }
77
78
79 RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
80   HandleScope scope(isolate);
81   DCHECK(args.length() == 0);
82   Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
83   DCHECK(AllowHeapAllocation::IsAllowed());
84   delete deoptimizer;
85   return isolate->heap()->undefined_value();
86 }
87
88
89 class ActivationsFinder : public ThreadVisitor {
90  public:
91   Code* code_;
92   bool has_code_activations_;
93
94   explicit ActivationsFinder(Code* code)
95       : code_(code), has_code_activations_(false) {}
96
97   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
98     JavaScriptFrameIterator it(isolate, top);
99     VisitFrames(&it);
100   }
101
102   void VisitFrames(JavaScriptFrameIterator* it) {
103     for (; !it->done(); it->Advance()) {
104       JavaScriptFrame* frame = it->frame();
105       if (code_->contains(frame->pc())) has_code_activations_ = true;
106     }
107   }
108 };
109
110
// Called by deoptimized code once its frame has been rebuilt.  Disposes of
// the Deoptimizer instance, and — unless the deopt was lazy or --always-opt
// is on — discards the optimized code when no other activation still uses
// it, so newly created closures pick up unoptimized code instead.
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  // The iterator now rests on the topmost JavaScript frame, which must
  // belong to the function that was just deoptimized.
  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Avoid doing too much work when running with --always-opt and keep
  // the optimized code around.
  if (FLAG_always_opt || type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    // No frame anywhere still runs the optimized code, so it is safe to
    // drop it from the function and from the optimized-code cache.
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
      // Evict optimized code for this function from the cache so that it
      // doesn't get used for new closures.
      function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                    "notify deoptimized");
    }
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}
168
169
170 static bool IsSuitableForOnStackReplacement(Isolate* isolate,
171                                             Handle<JSFunction> function,
172                                             Handle<Code> current_code) {
173   // Keep track of whether we've succeeded in optimizing.
174   if (!current_code->optimizable()) return false;
175   // If we are trying to do OSR when there are already optimized
176   // activations of the function, it means (a) the function is directly or
177   // indirectly recursive and (b) an optimized invocation has been
178   // deoptimized so that we are currently in an unoptimized activation.
179   // Check for optimized activations of this function.
180   for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
181     JavaScriptFrame* frame = it.frame();
182     if (frame->is_optimized() && frame->function() == *function) return false;
183   }
184
185   return true;
186 }
187
188
// Attempts to produce optimized code that can be entered mid-execution at a
// loop back edge (on-stack replacement).  Returns the optimized Code object
// on success, or NULL when no usable OSR code is available — NULL is the
// sentinel the generated back-edge code checks for (see uses of
// BackEdgeTable below); presumably the caller treats it as "keep running
// unoptimized" — confirm against the architecture-specific builtins.
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info.  It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  // Offset of the current PC inside the unoptimized caller code; used to
  // identify which back edge (loop) triggered this OSR request.
  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG


  // Map the PC offset back to the AST id of the loop so the compiler knows
  // where the OSR entry must be placed.
  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  // Compile concurrently only for sufficiently large functions (the 512
  // AST-node threshold is a heuristic) and when concurrent OSR is enabled.
  Compiler::ConcurrencyMode mode =
      isolate->concurrent_osr_enabled() &&
              (function->shared()->ast_node_count() > 512)
          ? Compiler::CONCURRENT
          : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
    if (thread->IsQueuedForOSR(function, ast_id)) {
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      return NULL;
    }

    job = thread->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    // A previously queued concurrent job finished: collect its code.
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
    // No ready job; start a (possibly concurrent) compile now.
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    MaybeHandle<Code> maybe_result =
        Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued.  Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    // A non-negative OsrPcOffset means the code has an OSR entry point.
    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // TurboFanned OSR code cannot be installed into the function.
        // But the function is obviously hot, so optimize it next time.
        function->ReplaceCode(
            isolate->builtins()->builtin(Builtins::kCompileOptimized));
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  // Ensure the function is left with runnable (unoptimized) code installed.
  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}
313
314
315 RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
316   HandleScope scope(isolate);
317   DCHECK(args.length() == 1);
318   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
319
320   // First check if this is a real stack overflow.
321   StackLimitCheck check(isolate);
322   if (check.JsHasOverflowed()) {
323     SealHandleScope shs(isolate);
324     return isolate->StackOverflow();
325   }
326
327   isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
328   return (function->IsOptimized()) ? function->code()
329                                    : function->shared()->code();
330 }
331
332
333 bool CodeGenerationFromStringsAllowed(Isolate* isolate,
334                                       Handle<Context> context) {
335   DCHECK(context->allow_code_gen_from_strings()->IsFalse());
336   // Check with callback if set.
337   AllowCodeGenerationFromStringsCallback callback =
338       isolate->allow_code_gen_callback();
339   if (callback == NULL) {
340     // No callback set and code generation disallowed.
341     return false;
342   } else {
343     // Callback set. Let it decide if code generation is allowed.
344     VMState<EXTERNAL> state(isolate);
345     return callback(v8::Utils::ToLocal(context));
346   }
347 }
348
349
350 RUNTIME_FUNCTION(Runtime_CompileString) {
351   HandleScope scope(isolate);
352   DCHECK(args.length() == 3);
353   CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
354   CONVERT_BOOLEAN_ARG_CHECKED(function_literal_only, 1);
355   CONVERT_SMI_ARG_CHECKED(source_offset, 2);
356
357   // Extract native context.
358   Handle<Context> context(isolate->native_context());
359
360   // Check if native context allows code generation from
361   // strings. Throw an exception if it doesn't.
362   if (context->allow_code_gen_from_strings()->IsFalse() &&
363       !CodeGenerationFromStringsAllowed(isolate, context)) {
364     Handle<Object> error_message =
365         context->ErrorMessageForCodeGenerationFromStrings();
366     THROW_NEW_ERROR_RETURN_FAILURE(
367         isolate, NewEvalError("code_gen_from_strings",
368                               HandleVector<Object>(&error_message, 1)));
369   }
370
371   // Compile source string in the native context.
372   ParseRestriction restriction = function_literal_only
373                                      ? ONLY_SINGLE_FUNCTION_LITERAL
374                                      : NO_PARSE_RESTRICTION;
375   Handle<SharedFunctionInfo> outer_info(context->closure()->shared(), isolate);
376   Handle<JSFunction> fun;
377   ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
378       isolate, fun,
379       Compiler::GetFunctionFromEval(source, outer_info, context, SLOPPY,
380                                     restriction, RelocInfo::kNoPosition));
381   if (function_literal_only) {
382     // The actual body is wrapped, which shifts line numbers.
383     Handle<Script> script(Script::cast(fun->shared()->script()), isolate);
384     if (script->line_offset() == 0) {
385       int line_num = Script::GetLineNumber(script, source_offset);
386       script->set_line_offset(Smi::FromInt(-line_num));
387     }
388   }
389   return *fun;
390 }
391
392
// Compiles a direct eval call in the current (local) context and returns the
// pair (compiled function, receiver).  On failure the pair carries the
// exception sentinel and a NULL receiver so the caller can dispatch on it.
static ObjectPair CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                    Handle<SharedFunctionInfo> outer_info,
                                    Handle<Object> receiver,
                                    LanguageMode language_mode,
                                    int scope_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    // Creating the error itself may fail (e.g. OOM); only throw when the
    // error object was actually produced.
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        "code_gen_from_strings", HandleVector<Object>(&error_message, 1));
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return MakePair(isolate->heap()->exception(), NULL);
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, outer_info, context, language_mode,
                                    restriction, scope_position),
      MakePair(isolate->heap()->exception(), NULL));
  return MakePair(*compiled, *receiver);
}
425
426
427 RUNTIME_FUNCTION_RETURN_PAIR(Runtime_ResolvePossiblyDirectEval) {
428   HandleScope scope(isolate);
429   DCHECK(args.length() == 6);
430
431   Handle<Object> callee = args.at<Object>(0);
432
433   // If "eval" didn't refer to the original GlobalEval, it's not a
434   // direct call to eval.
435   // (And even if it is, but the first argument isn't a string, just let
436   // execution default to an indirect call to eval, which will also return
437   // the first argument without doing anything).
438   if (*callee != isolate->native_context()->global_eval_fun() ||
439       !args[1]->IsString()) {
440     return MakePair(*callee, isolate->heap()->undefined_value());
441   }
442
443   DCHECK(args[4]->IsSmi());
444   DCHECK(is_valid_language_mode(args.smi_at(4)));
445   LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(4));
446   DCHECK(args[5]->IsSmi());
447   Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
448                                         isolate);
449   return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
450                            args.at<Object>(3), language_mode, args.smi_at(5));
451 }
452 }
453 }  // namespace v8::internal