Upstream version 11.40.271.0
[platform/framework/web/crosswalk.git] src/v8/src/runtime/runtime-compiler.cc
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/arguments.h"
#include "src/compiler.h"
#include "src/deoptimizer.h"
#include "src/frames.h"
#include "src/full-codegen.h"
#include "src/isolate-inl.h"
#include "src/runtime/runtime-utils.h"
#include "src/v8threads.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {
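
// Lazily compiles the target function the first time it is called and
// installs the resulting code object on the function before returning it.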
RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
#ifdef DEBUG
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  // Compile the target function.
  DCHECK(function->shared()->allows_lazy_compilation());

  Handle<Code> code;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
                                     Compiler::GetLazyCode(function));
  DCHECK(code->kind() == Code::FUNCTION ||
         code->kind() == Code::OPTIMIZED_FUNCTION);
  function->ReplaceCode(*code);
  return *code;
}
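
// Attempts to produce optimized code for the given function, optionally on
// the concurrent compiler thread. Falls back to the full-codegen code when
// Crankshaft is unavailable, optimization is disabled for the function, or
// the debugger has break points.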
RUNTIME_FUNCTION(Runtime_CompileOptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);

  Handle<Code> unoptimized(function->shared()->code());
  if (!isolate->use_crankshaft() ||
      function->shared()->optimization_disabled() ||
      isolate->DebuggerHasBreakPoints()) {
    // If the function is not optimizable or the debugger is active, continue
    // using the code from the full compiler.
    if (FLAG_trace_opt) {
      PrintF("[failed to optimize ");
      function->PrintName();
      PrintF(": is code optimizable: %s, is debugger enabled: %s]\n",
             function->shared()->optimization_disabled() ? "F" : "T",
             isolate->DebuggerHasBreakPoints() ? "T" : "F");
    }
    function->ReplaceCode(*unoptimized);
    return function->code();
  }

  Compiler::ConcurrencyMode mode =
      concurrent ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
  Handle<Code> code;
  if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
    function->ReplaceCode(*code);
  } else {
    function->ReplaceCode(function->shared()->code());
  }

  DCHECK(function->code()->kind() == Code::FUNCTION ||
         function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
         function->IsInOptimizationQueue());
  return function->code();
}
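
// Finalizes a bailout from a code stub: takes ownership of the Deoptimizer
// that was set up for the stub failure and releases it.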
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 0);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  delete deoptimizer;
  return isolate->heap()->undefined_value();
}
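
// Thread visitor that records whether any JavaScript frame on the visited
// threads is still executing the given code object.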
class ActivationsFinder : public ThreadVisitor {
 public:
  Code* code_;
  bool has_code_activations_;

  explicit ActivationsFinder(Code* code)
      : code_(code), has_code_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    JavaScriptFrameIterator it(isolate, top);
    VisitFrames(&it);
  }

  void VisitFrames(JavaScriptFrameIterator* it) {
    for (; !it->done(); it->Advance()) {
      JavaScriptFrame* frame = it->frame();
      if (code_->contains(frame->pc())) has_code_activations_ = true;
    }
  }
};
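
// Finalizes a deoptimization: materializes heap objects for the deoptimized
// frames and, unless running with --always-opt or the bailout was lazy,
// checks whether the optimized code still has activations. If there are no
// further activations, the function is reverted to its unoptimized code and
// the optimized code is evicted from the optimized code map; otherwise the
// function is deoptimized.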
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Avoid doing too much work when running with --always-opt and keep
  // the optimized code around.
  if (FLAG_always_opt || type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
      // Evict optimized code for this function from the cache so that it
      // doesn't get used for new closures.
      function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                    "notify deoptimized");
    }
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}
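
// Returns whether on-stack replacement should be attempted: it is rejected
// when Crankshaft is off, the current code is not optimizable, or an
// optimized activation of the function is already on the stack.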
static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                            Handle<JSFunction> function,
                                            Handle<Code> current_code) {
  // Bail out if Crankshaft is disabled or the current code is not optimizable.
  if (!isolate->use_crankshaft() || !current_code->optimizable()) return false;
  // If we are trying to do OSR when there are already optimized
  // activations of the function, it means (a) the function is directly or
  // indirectly recursive and (b) an optimized invocation has been
  // deoptimized so that we are currently in an unoptimized activation.
  // Check for optimized activations of this function.
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->is_optimized() && frame->function() == *function) return false;
  }

  return true;
}
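
// Compiles an optimized version of the caller's code for on-stack replacement
// at the back edge identified by the current pc. Depending on configuration
// this either polls and queues a concurrent compile job or compiles
// synchronously. The patched back edge table is always reverted, and the
// optimized code is installed and returned only if it has a usable OSR entry.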
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info.  It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG

  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  Compiler::ConcurrencyMode mode =
      isolate->concurrent_osr_enabled() &&
              (function->shared()->ast_node_count() > 512)
          ? Compiler::CONCURRENT
          : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
    if (thread->IsQueuedForOSR(function, ast_id)) {
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      return NULL;
    }

    job = thread->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    MaybeHandle<Code> maybe_result =
        Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued.  Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      // TODO(titzer): Do not install code into the function.
      function->ReplaceCode(*result);
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}
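
// Installs any optimized functions that the concurrent compiler thread has
// finished. If the calling JavaScript has actually overflowed its stack, a
// stack overflow exception is thrown instead. Returns the code the caller
// should continue with.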
RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // First check if this is a real stack overflow.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed()) {
    SealHandleScope shs(isolate);
    return isolate->StackOverflow();
  }

  isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
  return (function->IsOptimized()) ? function->code()
                                   : function->shared()->code();
}
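
// Consults the embedder's AllowCodeGenerationFromStringsCallback, if one is
// set, to decide whether code generation is permitted in a context that
// otherwise disallows compiling from strings.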
bool CodeGenerationFromStringsAllowed(Isolate* isolate,
                                      Handle<Context> context) {
  DCHECK(context->allow_code_gen_from_strings()->IsFalse());
  // Check with callback if set.
  AllowCodeGenerationFromStringsCallback callback =
      isolate->allow_code_gen_callback();
  if (callback == NULL) {
    // No callback set and code generation disallowed.
    return false;
  } else {
    // Callback set. Let it decide if code generation is allowed.
    VMState<EXTERNAL> state(isolate);
    return callback(v8::Utils::ToLocal(context));
  }
}
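
// Compiles a source string in the native context, optionally restricting the
// input to a single function literal. Throws an EvalError if neither the
// context nor the embedder's callback allows code generation from strings.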
RUNTIME_FUNCTION(Runtime_CompileString) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(function_literal_only, 1);

  // Extract native context.
  Handle<Context> context(isolate->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, context)) {
    Handle<Object> error_message =
        context->ErrorMessageForCodeGenerationFromStrings();
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewEvalError("code_gen_from_strings",
                              HandleVector<Object>(&error_message, 1)));
  }

  // Compile source string in the native context.
  ParseRestriction restriction = function_literal_only
                                     ? ONLY_SINGLE_FUNCTION_LITERAL
                                     : NO_PARSE_RESTRICTION;
  Handle<SharedFunctionInfo> outer_info(context->closure()->shared(), isolate);
  Handle<JSFunction> fun;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, fun,
      Compiler::GetFunctionFromEval(source, outer_info, context, SLOPPY,
                                    restriction, RelocInfo::kNoPosition));
  return *fun;
}
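
// Helper for direct eval: checks the native context's policy on code
// generation from strings, then compiles the eval source in the current
// context and returns the compiled function together with the receiver as an
// ObjectPair.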
static ObjectPair CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                    Handle<SharedFunctionInfo> outer_info,
                                    Handle<Object> receiver,
                                    StrictMode strict_mode,
                                    int scope_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        "code_gen_from_strings", HandleVector<Object>(&error_message, 1));
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return MakePair(isolate->heap()->exception(), NULL);
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, outer_info, context, strict_mode,
                                    restriction, scope_position),
      MakePair(isolate->heap()->exception(), NULL));
  return MakePair(*compiled, *receiver);
}
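
// Decides whether a call to eval is a direct eval of a string. If the callee
// is not the original GlobalEval function or the first argument is not a
// string, the callee is returned unchanged and the call proceeds as an
// indirect eval; otherwise the source is compiled via CompileGlobalEval.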
RUNTIME_FUNCTION_RETURN_PAIR(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 6);

  Handle<Object> callee = args.at<Object>(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  // (And even if it is, but the first argument isn't a string, just let
  // execution default to an indirect call to eval, which will also return
  // the first argument without doing anything).
  if (*callee != isolate->native_context()->global_eval_fun() ||
      !args[1]->IsString()) {
    return MakePair(*callee, isolate->heap()->undefined_value());
  }

  DCHECK(args[4]->IsSmi());
  DCHECK(args.smi_at(4) == SLOPPY || args.smi_at(4) == STRICT);
  StrictMode strict_mode = static_cast<StrictMode>(args.smi_at(4));
  DCHECK(args[5]->IsSmi());
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
                           args.at<Object>(3), strict_mode, args.smi_at(5));
}
}
}  // namespace v8::internal