From 68a0c56a7d9c26661245b06c7d1231625a0aa5ac Mon Sep 17 00:00:00 2001
From: isaacs
Date: Mon, 13 Feb 2012 10:04:53 -0800
Subject: [PATCH] Upgrade V8 to 3.9.5

---
 deps/v8/ChangeLog | 24 +
 deps/v8/SConstruct | 2 +-
 deps/v8/build/common.gypi | 26 +
 deps/v8/build/mipsu.gypi | 1 +
 deps/v8/include/v8-profiler.h | 4 +-
 deps/v8/src/api.cc | 9 +-
 deps/v8/src/arm/builtins-arm.cc | 38 +-
 deps/v8/src/arm/code-stubs-arm.cc | 4 +-
 deps/v8/src/arm/full-codegen-arm.cc | 35 +-
 deps/v8/src/arm/lithium-codegen-arm.cc | 1 -
 deps/v8/src/arm/macro-assembler-arm.cc | 25 +-
 deps/v8/src/arm/macro-assembler-arm.h | 7 +-
 deps/v8/src/arm/stub-cache-arm.cc | 68 +-
 deps/v8/src/ast.cc | 399 +++---
 deps/v8/src/ast.h | 1353 ++++++++++++++------
 deps/v8/src/builtins.cc | 2 +-
 deps/v8/src/codegen.cc | 13 +-
 deps/v8/src/compiler.cc | 20 +-
 deps/v8/src/compiler.h | 6 +
 deps/v8/src/cpu-profiler.cc | 4 +-
 deps/v8/src/d8.cc | 2 +-
 deps/v8/src/flag-definitions.h | 36 +-
 deps/v8/src/frames-inl.h | 14 +-
 deps/v8/src/frames.cc | 2 +-
 deps/v8/src/frames.h | 20 +-
 deps/v8/src/full-codegen.cc | 118 +-
 deps/v8/src/full-codegen.h | 6 +-
 deps/v8/src/handles.cc | 4 +-
 deps/v8/src/heap.cc | 13 +-
 deps/v8/src/heap.h | 5 +-
 deps/v8/src/hydrogen-instructions.cc | 7 +
 deps/v8/src/hydrogen-instructions.h | 42 +-
 deps/v8/src/hydrogen.cc | 312 ++++-
 deps/v8/src/hydrogen.h | 33 +-
 deps/v8/src/ia32/code-stubs-ia32.cc | 4 +-
 deps/v8/src/ia32/full-codegen-ia32.cc | 34 +-
 deps/v8/src/ia32/lithium-codegen-ia32.cc | 1 -
 deps/v8/src/ia32/macro-assembler-ia32.cc | 25 +-
 deps/v8/src/ia32/macro-assembler-ia32.h | 4 +-
 deps/v8/src/ia32/stub-cache-ia32.cc | 75 +-
 deps/v8/src/ic-inl.h | 5 +-
 deps/v8/src/ic.cc | 25 +
 deps/v8/src/ic.h | 1 +
 deps/v8/src/incremental-marking.cc | 3 +-
 deps/v8/src/isolate.cc | 114 +-
 deps/v8/src/isolate.h | 10 +-
 deps/v8/src/list-inl.h | 4 +-
 deps/v8/src/macro-assembler.h | 16 +-
 deps/v8/src/mark-compact.cc | 41 +-
 deps/v8/src/mark-compact.h | 7 +-
 deps/v8/src/messages.js | 6 +-
 deps/v8/src/mips/assembler-mips.cc | 4 +
 deps/v8/src/mips/builtins-mips.cc | 39 +-
 deps/v8/src/mips/code-stubs-mips.cc | 2 +-
 deps/v8/src/mips/ic-mips.cc | 62 +-
 deps/v8/src/mips/lithium-codegen-mips.cc | 1 -
 deps/v8/src/mips/macro-assembler-mips.cc | 77 +-
 deps/v8/src/mips/macro-assembler-mips.h | 23 +-
 deps/v8/src/mips/stub-cache-mips.cc | 68 +-
 deps/v8/src/objects-inl.h | 20 +-
 deps/v8/src/objects.cc | 116 +-
 deps/v8/src/objects.h | 53 +-
 deps/v8/src/parser.cc | 458 +++---
 deps/v8/src/parser.h | 96 +-
 deps/v8/src/platform-freebsd.cc | 2 +-
 deps/v8/src/platform-linux.cc | 2 +-
 deps/v8/src/platform-macos.cc | 2 +-
 deps/v8/src/platform-openbsd.cc | 2 +-
 deps/v8/src/platform-solaris.cc | 2 +-
 deps/v8/src/platform-win32.cc | 2 +-
 deps/v8/src/preparser.h | 10 +-
 deps/v8/src/prettyprinter.cc | 454 +------
 deps/v8/src/prettyprinter.h | 103 +-
 deps/v8/src/profile-generator.cc | 88 +-
 deps/v8/src/profile-generator.h | 11 +-
 deps/v8/src/property-details.h | 4 -
 deps/v8/src/property.h | 2 +-
 deps/v8/src/rewriter.cc | 40 +-
 deps/v8/src/runtime-profiler.cc | 116 +-
 deps/v8/src/runtime-profiler.h | 17 +-
 deps/v8/src/runtime.cc | 90 +-
 deps/v8/src/runtime.h | 2 +-
 deps/v8/src/scanner.cc | 18 +-
 deps/v8/src/scanner.h | 20 +-
 deps/v8/src/scopes.cc | 60 +-
 deps/v8/src/scopes.h | 42 +-
 deps/v8/src/spaces.h | 7 +-
 deps/v8/src/token.h | 3 +
 deps/v8/src/v8.cc | 5 +-
 deps/v8/src/version.cc | 2 +-
 deps/v8/src/x64/code-stubs-x64.cc | 4 +-
 deps/v8/src/x64/full-codegen-x64.cc | 35 +-
 deps/v8/src/x64/lithium-codegen-x64.cc | 5 +-
 deps/v8/src/x64/macro-assembler-x64.cc | 24 +-
 deps/v8/src/x64/macro-assembler-x64.h | 4 +-
 deps/v8/src/x64/stub-cache-x64.cc | 74 +-
 deps/v8/test/cctest/test-api.cc | 131 ++
 deps/v8/test/cctest/test-ast.cc | 5 +-
 deps/v8/test/cctest/test-heap-profiler.cc | 4 +-
 deps/v8/test/cctest/test-mark-compact.cc | 10 +-
 deps/v8/test/cctest/test-parsing.cc | 3 +-
 deps/v8/test/mjsunit/array-join.js | 10 +-
 deps/v8/test/mjsunit/compiler/math-floor-global.js | 161 +++
 deps/v8/test/mjsunit/compiler/math-floor-local.js | 161 +++
 deps/v8/test/mjsunit/d8-os.js | 57 +-
 .../test/mjsunit/elements-transition-hoisting.js | 168 +++
 deps/v8/test/mjsunit/mjsunit.status | 10 +-
 deps/v8/test/mjsunit/regress/regress-1924.js | 42 +
 .../mjsunit/regress/regress-smi-only-concat.js | 37 +
 deps/v8/test/mjsunit/string-replace-one-char.js | 4 +-
 deps/v8/tools/bash-completion.sh | 55 +
 deps/v8/tools/test.py | 2 +-
 112 files changed, 3880 insertions(+), 2281 deletions(-)
 create mode 100644 deps/v8/test/mjsunit/compiler/math-floor-global.js
 create mode 100644 deps/v8/test/mjsunit/compiler/math-floor-local.js
 create mode 100644 deps/v8/test/mjsunit/elements-transition-hoisting.js
 create mode 100644 deps/v8/test/mjsunit/regress/regress-1924.js
 create mode 100644 deps/v8/test/mjsunit/regress/regress-smi-only-concat.js
 create mode 100644 deps/v8/tools/bash-completion.sh

diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index 97174a8..482cca8 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,27 @@
+2012-02-09: Version 3.9.5
+
+        Removed unused command line flags.
+
+        Performance and stability improvements on all platforms.
+
+
+2012-02-08: Version 3.9.4
+
+        Properly initialize element-transitioning array literals on ARM.
+        (issue 1930)
+
+        Bug fixes on all platforms.
+
+
+2012-02-07: Version 3.9.3
+
+        When rethrowing an exception, print the stack trace of its original
+        site instead of rethrow site (Chromium issue 60240).
+
+        Increased size of small stacks from 32k to 64k to avoid hitting limits
+        in Chromium (Chromium issue 112843).
+
+
 2012-02-06: Version 3.9.2

         Add timestamp to --trace-gc output. (issue 1932)
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index 61e16ea..d4eaebe 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -128,7 +128,7 @@ LIBRARY_FLAGS = {
       'CPPDEFINES': ['__C99FEATURES__'],
       'CPPPATH' : [src_dir, '/usr/local/include'],
       'LIBPATH' : ['/usr/local/lib'],
-      'CCFLAGS': ['-ansi', '-fno-omit-frame-pointer'],
+      'CCFLAGS': ['-ansi'],
     },
     'os:netbsd': {
       'CPPPATH' : [src_dir, '/usr/pkg/include'],
diff --git a/deps/v8/build/common.gypi b/deps/v8/build/common.gypi
index 0023505..548df7b 100644
--- a/deps/v8/build/common.gypi
+++ b/deps/v8/build/common.gypi
@@ -169,6 +169,28 @@
           'V8_TARGET_ARCH_MIPS',
         ],
         'conditions': [
+          [ 'target_arch=="mips"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': ['-EL'],
+                'ldflags': ['-EL'],
+                'conditions': [
+                  [ 'v8_use_mips_abi_hardfloat=="true"', {
+                    'cflags': ['-mhard-float'],
+                    'ldflags': ['-mhard-float'],
+                  }, {
+                    'cflags': ['-msoft-float'],
+                    'ldflags': ['-msoft-float'],
+                  }],
+                  ['mips_arch_variant=="mips32r2"', {
+                    'cflags': ['-mips32r2', '-Wa,-mips32r2'],
+                  }, {
+                    'cflags': ['-mips32', '-Wa,-mips32'],
+                  }],
+                ],
+              }],
+            ],
+          }],
           [ 'v8_can_use_fpu_instructions=="true"', {
             'defines': [
               'CAN_USE_FPU_INSTRUCTIONS',
@@ -184,6 +206,9 @@
               '__mips_soft_float=1'
             ],
           }],
+          ['mips_arch_variant=="mips32r2"', {
+            'defines': ['_MIPS_ARCH_MIPS32R2',],
+          }],
           # The MIPS assembler assumes the host is 32 bits,
           # so force building 32-bit host tools.
           ['host_arch=="x64"', {
@@ -327,6 +352,7 @@
       }],  # OS=="mac"
       ['OS=="win"', {
         'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
           'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
           'CharacterSet': '1',
         },
diff --git a/deps/v8/build/mipsu.gypi b/deps/v8/build/mipsu.gypi
index 8dfea74..306f105 100644
--- a/deps/v8/build/mipsu.gypi
+++ b/deps/v8/build/mipsu.gypi
@@ -29,5 +29,6 @@
   'variables': {
     'target_arch': 'ia32',
     'v8_target_arch': 'mips',
+    'mips_arch_variant': 'mips32r2',
   },
 }
diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h
index bcca65e..5a3a40f 100644
--- a/deps/v8/include/v8-profiler.h
+++ b/deps/v8/include/v8-profiler.h
@@ -255,7 +255,9 @@ class V8EXPORT HeapGraphNode {
     kClosure = 5,     // Function closure.
     kRegExp = 6,      // RegExp.
     kHeapNumber = 7,  // Number stored in the heap.
-    kNative = 8       // Native object (not from V8 heap).
+    kNative = 8,      // Native object (not from V8 heap).
+    kSynthetic = 9    // Synthetic object, usually used for grouping
+                      // snapshot items together.
   };

   /** Returns node type (see HeapGraphNode::Type). */
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index 4a71539..2f8f1d1 100644
--- a/deps/v8/src/api.cc
+++ b/deps/v8/src/api.cc
@@ -6088,9 +6088,7 @@ static void SetFlagsFromString(const char* flags) {

 void Testing::PrepareStressRun(int run) {
   static const char* kLazyOptimizations =
-      "--prepare-always-opt --nolimit-inlining "
-      "--noalways-opt --noopt-eagerly";
-  static const char* kEagerOptimizations = "--opt-eagerly";
+      "--prepare-always-opt --nolimit-inlining --noalways-opt";
   static const char* kForcedOptimizations = "--always-opt";

   // If deoptimization stressed turn on frequent deoptimization. If no value
@@ -6107,15 +6105,12 @@ void Testing::PrepareStressRun(int run) {
   if (run == GetStressRuns() - 1) {
     SetFlagsFromString(kForcedOptimizations);
   } else {
-    SetFlagsFromString(kEagerOptimizations);
     SetFlagsFromString(kLazyOptimizations);
   }
 #else
   if (run == GetStressRuns() - 1) {
     SetFlagsFromString(kForcedOptimizations);
-  } else if (run == GetStressRuns() - 2) {
-    SetFlagsFromString(kEagerOptimizations);
-  } else {
+  } else if (run != GetStressRuns() - 2) {
     SetFlagsFromString(kLazyOptimizations);
   }
 #endif
diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc
index 186d06e..50b6bce 100644
--- a/deps/v8/src/arm/builtins-arm.cc
+++ b/deps/v8/src/arm/builtins-arm.cc
@@ -895,23 +895,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // r4: JSObject
     __ bind(&allocated);
     __ push(r4);
+    __ push(r4);

-    // Push the function and the allocated receiver from the stack.
-    // sp[0]: receiver (newly allocated object)
-    // sp[1]: constructor function
-    // sp[2]: number of arguments (smi-tagged)
-    __ ldr(r1, MemOperand(sp, kPointerSize));
-    __ push(r1);  // Constructor function.
-    __ push(r4);  // Receiver.
-
-    // Reload the number of arguments from the stack.
-    // r1: constructor function
+    // Reload the number of arguments and the constructor from the stack.
     // sp[0]: receiver
-    // sp[1]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
-    __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
+    // sp[1]: receiver
+    // sp[2]: constructor function
+    // sp[3]: number of arguments (smi-tagged)
+    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
+    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

     // Set up pointer to last argument.
     __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
@@ -921,14 +913,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,

     // Copy arguments and receiver to the expression stack.
     // r0: number of arguments
-    // r2: address of last argument (caller sp)
     // r1: constructor function
+    // r2: address of last argument (caller sp)
     // r3: number of arguments (smi-tagged)
     // sp[0]: receiver
-    // sp[1]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
+    // sp[1]: receiver
+    // sp[2]: constructor function
+    // sp[3]: number of arguments (smi-tagged)
     Label loop, entry;
     __ b(&entry);
     __ bind(&loop);
@@ -954,13 +945,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                       NullCallWrapper(), CALL_AS_METHOD);
     }

-    // Pop the function from the stack.
-    // sp[0]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
-    __ pop();
-
     // Restore context from the frame.
     // r0: result
     // sp[0]: receiver
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index 3763867..c65f5bd 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -3964,7 +3964,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // handler block in this code object, so its index is 0.
   __ bind(&invoke);
   // Must preserve r0-r4, r5-r7 are available.
-  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
   // If an exception not caught by another handler occurs, this handler
   // returns control to the code after the bl(&invoke) above, which
   // restores all kCalleeSaved registers (including cp and fp) to their
@@ -7358,7 +7358,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
   // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
   __ bind(&double_elements);
   __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
-  __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10,
+  __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r2,
                                  &slow_elements);
   __ Ret();
 }
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index a4fabe2..2adddef 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -139,6 +139,27 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   }
 #endif

+  // We can optionally optimize based on counters rather than statistical
+  // sampling.
+  if (info->ShouldSelfOptimize()) {
+    if (FLAG_trace_opt) {
+      PrintF("[adding self-optimization header to %s]\n",
+             *info->function()->debug_name()->ToCString());
+    }
+    MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+        Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+    JSGlobalPropertyCell* cell;
+    if (maybe_cell->To(&cell)) {
+      __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
+      __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+      __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
+      __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+      Handle<Code> compile_stub(
+          isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+      __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq);
+    }
+  }
+
   // Strict mode functions and builtins need to replace the receiver
   // with undefined when called as functions (without an explicit
   // receiver object). r5 is zero for method calls and non-zero for
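
The hunk above wires a counter-based "self-optimization header" into the function prologue: each eligible function gets a JSGlobalPropertyCell holding a smi countdown, the prologue decrements it, and when the subtraction hits zero control jumps to the lazy-recompile builtin. A minimal C++ sketch of that control flow, with illustrative names (CallCounter, Recompile) standing in for the cell and the builtin:

    #include <cstdio>

    struct CallCounter {          // plays the role of the JSGlobalPropertyCell
      int remaining;              // initialized from kCallsUntilPrimitiveOpt
    };

    static void Recompile() {     // stands in for Builtins::kLazyRecompile
      std::printf("jump to the lazy-recompile stub\n");
    }

    // The emitted prologue, in C++ terms: load the cell value, subtract one,
    // store it back, and branch to the stub when the result is zero (the
    // 'eq' condition on the sub/Jump pair above).
    static void Prologue(CallCounter* cell) {
      if (--cell->remaining == 0) Recompile();
    }

    int main() {
      CallCounter counter = {3};  // assume three calls until recompilation
      for (int i = 0; i < 4; ++i) Prologue(&counter);
      return 0;
    }

This is a sketch of the mechanism only; the real decision of whether a function gets the header lives in CompilationInfo::ShouldSelfOptimize.
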
@@ -265,11 +286,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
     // For named function expressions, declare the function name as a
     // constant.
     if (scope()->is_function_scope() && scope()->function() != NULL) {
-      int ignored = 0;
       VariableProxy* proxy = scope()->function();
       ASSERT(proxy->var()->mode() == CONST ||
              proxy->var()->mode() == CONST_HARMONY);
-      EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+      ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+      EmitDeclaration(proxy, proxy->var()->mode(), NULL);
     }
     VisitDeclarations(scope()->declarations());
   }
@@ -706,8 +727,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,

 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                         VariableMode mode,
-                                        FunctionLiteral* function,
-                                        int* global_count) {
+                                        FunctionLiteral* function) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
@@ -716,7 +736,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
       (mode == CONST || mode == CONST_HARMONY || mode == LET);
   switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++(*global_count);
+      ++global_count_;
       break;

     case Variable::PARAMETER:
@@ -801,9 +821,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
 }


-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   // The context is the first argument.
diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc
index 6f898fc..64ca1a3 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.cc
+++ b/deps/v8/src/arm/lithium-codegen-arm.cc
@@ -673,7 +673,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   int length = deoptimizations_.length();
   if (length == 0) return;
-  ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);
diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc
index c92b943..2f0e5fa 100644
--- a/deps/v8/src/arm/macro-assembler-arm.cc
+++ b/deps/v8/src/arm/macro-assembler-arm.cc
@@ -1188,8 +1188,7 @@ void MacroAssembler::DebugBreak() {
 #endif


-void MacroAssembler::PushTryHandler(CodeLocation try_location,
-                                    HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                     int handler_index) {
   // Adjust this code if not the case.
   STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@@ -1201,28 +1200,20 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
   // For the JSEntry handler, we must preserve r0-r4, r5-r7 are available.

   // We will build up the handler from the bottom by pushing on the stack.
-  // First compute the state.
-  unsigned state = StackHandler::OffsetField::encode(handler_index);
-  if (try_location == IN_JAVASCRIPT) {
-    state |= (type == TRY_CATCH_HANDLER)
-        ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
-        : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
-  } else {
-    ASSERT(try_location == IN_JS_ENTRY);
-    state |= StackHandler::KindField::encode(StackHandler::ENTRY);
-  }
-
   // Set up the code object (r5) and the state (r6) for pushing.
+  unsigned state =
+      StackHandler::IndexField::encode(handler_index) |
+      StackHandler::KindField::encode(kind);
   mov(r5, Operand(CodeObject()));
   mov(r6, Operand(state));

   // Push the frame pointer, context, state, and code object.
-  if (try_location == IN_JAVASCRIPT) {
-    stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
-  } else {
+  if (kind == StackHandler::JS_ENTRY) {
     mov(r7, Operand(Smi::FromInt(0)));  // Indicates no context.
     mov(ip, Operand(0, RelocInfo::NONE));  // NULL frame pointer.
     stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit());
+  } else {
+    stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
   }

   // Link the current handler as the next handler.
@@ -1330,7 +1321,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   ldr(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));

   bind(&check_kind);
-  STATIC_ASSERT(StackHandler::ENTRY == 0);
+  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
   ldr(r2, MemOperand(sp, StackHandlerConstants::kStateOffset));
   tst(r2, Operand(StackHandler::KindField::kMask));
   b(ne, &fetch_next);
diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h
index 368ca5c..45cca90 100644
--- a/deps/v8/src/arm/macro-assembler-arm.h
+++ b/deps/v8/src/arm/macro-assembler-arm.h
@@ -582,9 +582,7 @@ class MacroAssembler: public Assembler {
   // Exception handling

   // Push a new try handler and link into try handler chain.
-  void PushTryHandler(CodeLocation try_location,
-                      HandlerType type,
-                      int handler_index);
+  void PushTryHandler(StackHandler::Kind kind, int handler_index);

   // Unlink the stack handler on top of the stack from the try handler chain.
   // Must preserve the result register.
@@ -803,7 +801,8 @@ class MacroAssembler: public Assembler {

   // Check to see if maybe_number can be stored as a double in
   // FastDoubleElements. If it can, store it at the index specified by key in
-  // the FastDoubleElements array elements, otherwise jump to fail.
+  // the FastDoubleElements array elements. Otherwise jump to fail, in which
+  // case scratch2, scratch3 and scratch4 are unmodified.
   void StoreNumberToDoubleElements(Register value_reg,
                                    Register key_reg,
                                    Register receiver_reg,
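
For reference, the new PushTryHandler packs the handler index and handler kind into a single state word using V8's bit-field encoders (IndexField, KindField). A small self-contained sketch of that packing, assuming for illustration a 3-bit kind field in the low bits — the real field widths live in StackHandler and may differ:

    #include <cassert>
    #include <cstdint>

    enum Kind { JS_ENTRY = 0, CATCH = 1, FINALLY = 2 };  // JS_ENTRY must be 0

    const unsigned kKindBits = 3;                  // assumed width of KindField
    const uint32_t kKindMask = (1u << kKindBits) - 1;

    uint32_t EncodeState(unsigned handler_index, Kind kind) {
      return (handler_index << kKindBits) | static_cast<uint32_t>(kind);
    }

    int main() {
      uint32_t state = EncodeState(7, CATCH);
      assert((state & kKindMask) == CATCH);        // KindField::kMask test
      assert((state >> kKindBits) == 7);           // IndexField round-trips
      // Because JS_ENTRY encodes as 0, ThrowUncatchable can find the entry
      // handler with a single 'tst' against the kind mask, as in the hunk
      // above: a zero result means unwinding can stop.
      assert((EncodeState(7, JS_ENTRY) & kKindMask) == 0);
      return 0;
    }
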
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index 15c5f4e..2f2c5a8 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -1475,28 +1475,30 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
     __ Ret();
   } else {
     Label call_builtin;
-    Register elements = r3;
-    Register end_elements = r5;
-    // Get the elements array of the object.
-    __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
-
-    // Check that the elements are in fast mode and writable.
-    __ CheckMap(elements,
-                r0,
-                Heap::kFixedArrayMapRootIndex,
-                &call_builtin,
-                DONT_DO_SMI_CHECK);

     if (argc == 1) {  // Otherwise fall through to call the builtin.
       Label attempt_to_grow_elements;

+      Register elements = r6;
+      Register end_elements = r5;
+      // Get the elements array of the object.
+      __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+      // Check that the elements are in fast mode and writable.
+      __ CheckMap(elements,
+                  r0,
+                  Heap::kFixedArrayMapRootIndex,
+                  &call_builtin,
+                  DONT_DO_SMI_CHECK);
+
       // Get the array's length into r0 and calculate new length.
       __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
       STATIC_ASSERT(kSmiTagSize == 1);
       STATIC_ASSERT(kSmiTag == 0);
       __ add(r0, r0, Operand(Smi::FromInt(argc)));

-      // Get the element's length.
+      // Get the elements' length.
       __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

       // Check if we could survive without allocation.
@@ -1511,7 +1513,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       // Save new length.
       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

-      // Push the element.
+      // Store the value.
       // We may need a register containing the address end_elements below,
       // so write back the value in end_elements.
       __ add(end_elements, elements,
@@ -1526,13 +1528,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(

       __ bind(&with_write_barrier);

-      __ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
-      __ CheckFastObjectElements(r6, r6, &call_builtin);
+      __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
+
+      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+        Label fast_object, not_fast_object;
+        __ CheckFastObjectElements(r3, r7, &not_fast_object);
+        __ jmp(&fast_object);
+        // In case of fast smi-only, convert to fast object, otherwise bail out.
+        __ bind(&not_fast_object);
+        __ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
+        // edx: receiver
+        // r3: map
+        __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                               FAST_ELEMENTS,
+                                               r3,
+                                               r7,
+                                               &call_builtin);
+        __ mov(r2, receiver);
+        ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+        __ bind(&fast_object);
+      } else {
+        __ CheckFastObjectElements(r3, r3, &call_builtin);
+      }

       // Save new length.
       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

-      // Push the element.
+      // Store the value.
       // We may need a register containing the address end_elements below,
       // so write back the value in end_elements.
       __ add(end_elements, elements,
@@ -1578,25 +1600,25 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
              Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
       __ add(end_elements, end_elements, Operand(kEndElementsOffset));
       __ mov(r7, Operand(new_space_allocation_top));
-      __ ldr(r6, MemOperand(r7));
-      __ cmp(end_elements, r6);
+      __ ldr(r3, MemOperand(r7));
+      __ cmp(end_elements, r3);
       __ b(ne, &call_builtin);

       __ mov(r9, Operand(new_space_allocation_limit));
       __ ldr(r9, MemOperand(r9));
-      __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
-      __ cmp(r6, r9);
+      __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
+      __ cmp(r3, r9);
       __ b(hi, &call_builtin);

       // We fit and could grow elements.
       // Update new_space_allocation_top.
-      __ str(r6, MemOperand(r7));
+      __ str(r3, MemOperand(r7));
       // Push the argument.
       __ str(r2, MemOperand(end_elements));
       // Fill the rest with holes.
-      __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
+      __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
       for (int i = 1; i < kAllocationDelta; i++) {
-        __ str(r6, MemOperand(end_elements, i * kPointerSize));
+        __ str(r3, MemOperand(end_elements, i * kPointerSize));
       }

       // Update elements' and array's sizes.
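
The with_write_barrier path above is the substantive change: with --smi-only-arrays, an array whose map says FAST_SMI_ONLY_ELEMENTS must transition to FAST_ELEMENTS before a heap object can be pushed into it, otherwise the stub bails out to the builtin. A rough C++ model of the decision the generated code makes — enum values and helpers here are stand-ins, not V8's API:

    #include <cassert>

    enum ElementsKind {
      FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS, DICTIONARY_ELEMENTS
    };

    struct Array { ElementsKind kind; };

    // V8 tags small integers with a clear low bit; anything else is a pointer.
    bool IsSmi(long tagged) { return (tagged & 1) == 0; }

    // Returns false where the generated stub would bail out to the builtin.
    bool PushPrecheck(Array* array, long tagged_value) {
      if (array->kind == FAST_ELEMENTS) return true;
      if (array->kind == FAST_SMI_ONLY_ELEMENTS) {
        if (IsSmi(tagged_value)) return true;  // smis may be stored as-is
        // Heap object into a smi-only array: transition the map first, as
        // LoadTransitionedArrayMapConditional + GenerateSmiOnlyToObject do.
        array->kind = FAST_ELEMENTS;
        return true;
      }
      return false;  // dictionary etc.: call the builtin
    }

    int main() {
      Array a = {FAST_SMI_ONLY_ELEMENTS};
      assert(PushPrecheck(&a, 42L << 1));   // smi push keeps the smi-only kind
      assert(a.kind == FAST_SMI_ONLY_ELEMENTS);
      assert(PushPrecheck(&a, 0x1001));     // heap object forces a transition
      assert(a.kind == FAST_ELEMENTS);
      return 0;
    }
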
diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc
index 18eb90c..980dba6 100644
--- a/deps/v8/src/ast.cc
+++ b/deps/v8/src/ast.cc
@@ -126,18 +126,7 @@ Assignment::Assignment(Isolate* isolate,
       assignment_id_(GetNextId(isolate)),
       block_start_(false),
       block_end_(false),
-      is_monomorphic_(false) {
-  ASSERT(Token::IsAssignmentOp(op));
-  if (is_compound()) {
-    binary_operation_ =
-        new(isolate->zone()) BinaryOperation(isolate,
-                                             binary_op(),
-                                             target,
-                                             value,
-                                             pos + 1);
-    compound_load_id_ = GetNextId(isolate);
-  }
-}
+      is_monomorphic_(false) { }


 Token::Value Assignment::binary_op() const {
@@ -197,9 +186,7 @@ ObjectLiteral::Property::Property(Literal* key, Expression* value) {


 ObjectLiteral::Property::Property(bool is_getter, FunctionLiteral* value) {
-  Isolate* isolate = Isolate::Current();
   emit_store_ = true;
-  key_ = new(isolate->zone()) Literal(isolate, value->name());
   value_ = value;
   kind_ = is_getter ? GETTER : SETTER;
 }
@@ -427,224 +414,11 @@ bool CompareOperation::IsLiteralCompareNull(Expression** expr) {

 // Inlining support

 bool Declaration::IsInlineable() const {
-  return proxy()->var()->IsStackAllocated() && fun() == NULL;
-}
-
-
-bool TargetCollector::IsInlineable() const {
-  UNREACHABLE();
-  return false;
-}
-
-
-bool ForInStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool WithStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool SwitchStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool TryStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool TryCatchStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool TryFinallyStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool DebuggerStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool Throw::IsInlineable() const {
-  return exception()->IsInlineable();
-}
-
-
-bool MaterializedLiteral::IsInlineable() const {
-  // TODO(1322): Allow materialized literals.
-  return false;
-}
-
-
-bool FunctionLiteral::IsInlineable() const {
-  // TODO(1322): Allow materialized literals.
-  return false;
-}
-
-
-bool ThisFunction::IsInlineable() const {
-  return true;
-}
-
-
-bool SharedFunctionInfoLiteral::IsInlineable() const {
-  return false;
-}
-
-
-bool ForStatement::IsInlineable() const {
-  return (init() == NULL || init()->IsInlineable())
-      && (cond() == NULL || cond()->IsInlineable())
-      && (next() == NULL || next()->IsInlineable())
-      && body()->IsInlineable();
-}
-
-
-bool WhileStatement::IsInlineable() const {
-  return cond()->IsInlineable()
-      && body()->IsInlineable();
-}
-
-
-bool DoWhileStatement::IsInlineable() const {
-  return cond()->IsInlineable()
-      && body()->IsInlineable();
-}
-
-
-bool ContinueStatement::IsInlineable() const {
-  return true;
-}
-
-
-bool BreakStatement::IsInlineable() const {
-  return true;
-}
-
-
-bool EmptyStatement::IsInlineable() const {
-  return true;
-}
-
-
-bool Literal::IsInlineable() const {
-  return true;
-}
-
-
-bool Block::IsInlineable() const {
-  const int count = statements_.length();
-  for (int i = 0; i < count; ++i) {
-    if (!statements_[i]->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool ExpressionStatement::IsInlineable() const {
-  return expression()->IsInlineable();
-}
-
-
-bool IfStatement::IsInlineable() const {
-  return condition()->IsInlineable()
-      && then_statement()->IsInlineable()
-      && else_statement()->IsInlineable();
-}
-
-
-bool ReturnStatement::IsInlineable() const {
-  return expression()->IsInlineable();
-}
-
-
-bool Conditional::IsInlineable() const {
-  return condition()->IsInlineable() && then_expression()->IsInlineable() &&
-      else_expression()->IsInlineable();
+  return proxy()->var()->IsStackAllocated();
 }

-
-bool VariableProxy::IsInlineable() const {
-  return var()->IsUnallocated()
-      || var()->IsStackAllocated()
-      || var()->IsContextSlot();
-}
-
-
-bool Assignment::IsInlineable() const {
-  return target()->IsInlineable() && value()->IsInlineable();
-}
-
-
-bool Property::IsInlineable() const {
-  return obj()->IsInlineable() && key()->IsInlineable();
-}
-
-
-bool Call::IsInlineable() const {
-  if (!expression()->IsInlineable()) return false;
-  const int count = arguments()->length();
-  for (int i = 0; i < count; ++i) {
-    if (!arguments()->at(i)->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool CallNew::IsInlineable() const {
-  if (!expression()->IsInlineable()) return false;
-  const int count = arguments()->length();
-  for (int i = 0; i < count; ++i) {
-    if (!arguments()->at(i)->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool CallRuntime::IsInlineable() const {
-  // Don't try to inline JS runtime calls because we don't (currently) even
-  // optimize them.
-  if (is_jsruntime()) return false;
-  // Don't inline the %_ArgumentsLength or %_Arguments because their
-  // implementation will not work. There is no stack frame to get them
-  // from.
-  if (function()->intrinsic_type == Runtime::INLINE &&
-      (name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
-       name()->IsEqualTo(CStrVector("_Arguments")))) {
-    return false;
-  }
-  const int count = arguments()->length();
-  for (int i = 0; i < count; ++i) {
-    if (!arguments()->at(i)->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool UnaryOperation::IsInlineable() const {
-  return expression()->IsInlineable();
-}
-
-
-bool BinaryOperation::IsInlineable() const {
-  return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CompareOperation::IsInlineable() const {
-  return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CountOperation::IsInlineable() const {
-  return expression()->IsInlineable();
+bool VariableDeclaration::IsInlineable() const {
+  return Declaration::IsInlineable() && fun() == NULL;
 }

@@ -1214,4 +988,169 @@ CaseClause::CaseClause(Isolate* isolate,
       entry_id_(AstNode::GetNextId(isolate)) {
 }

+
+#define INCREASE_NODE_COUNT(NodeType) \
+  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+    increase_node_count(); \
+  }
+
+INCREASE_NODE_COUNT(VariableDeclaration)
+INCREASE_NODE_COUNT(ModuleDeclaration)
+INCREASE_NODE_COUNT(ModuleLiteral)
+INCREASE_NODE_COUNT(ModuleVariable)
+INCREASE_NODE_COUNT(ModulePath)
+INCREASE_NODE_COUNT(ModuleUrl)
+INCREASE_NODE_COUNT(Block)
+INCREASE_NODE_COUNT(ExpressionStatement)
+INCREASE_NODE_COUNT(EmptyStatement)
+INCREASE_NODE_COUNT(IfStatement)
+INCREASE_NODE_COUNT(ContinueStatement)
+INCREASE_NODE_COUNT(BreakStatement)
+INCREASE_NODE_COUNT(ReturnStatement)
+INCREASE_NODE_COUNT(Conditional)
+INCREASE_NODE_COUNT(Literal)
+INCREASE_NODE_COUNT(Assignment)
+INCREASE_NODE_COUNT(Throw)
+INCREASE_NODE_COUNT(Property)
+INCREASE_NODE_COUNT(UnaryOperation)
+INCREASE_NODE_COUNT(CountOperation)
+INCREASE_NODE_COUNT(BinaryOperation)
+INCREASE_NODE_COUNT(CompareOperation)
+INCREASE_NODE_COUNT(ThisFunction)
+
+#undef INCREASE_NODE_COUNT
+
+
+void AstConstructionVisitor::VisitWithStatement(WithStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) {
+  increase_node_count();
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForStatement(ForStatement* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitTryFinallyStatement(
+    TryFinallyStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSharedFunctionInfoLiteral(
+    SharedFunctionInfoLiteral* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) {
+  increase_node_count();
+  // In theory, we'd have to add:
+  // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); }
+  // However, node->var() is usually not bound yet at VariableProxy creation
+  // time, and LOOKUP variables only result from constructs that cannot
+  // be inlined anyway.
+}
+
+
+void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitObjectLiteral(ObjectLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitCall(Call* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitCallNew(CallNew* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+  if (node->is_jsruntime()) {
+    // Don't try to inline JS runtime calls because we don't (currently) even
+    // optimize them.
+    add_flag(kDontInline);
+  } else if (node->function()->intrinsic_type == Runtime::INLINE &&
+      (node->name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
+       node->name()->IsEqualTo(CStrVector("_Arguments")))) {
+    // Don't inline the %_ArgumentsLength or %_Arguments because their
+    // implementation will not work. There is no stack frame to get them
+    // from.
+    add_flag(kDontInline);
+  }
+}
+
 } }  // namespace v8::internal
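
Taken together, these visitors replace the per-node virtual IsInlineable() predicates deleted above: optimization-relevant properties are now accumulated once, while the AST is being built, instead of being recomputed by tree walks. A compact sketch of the pattern, with a simplified FlagSet standing in for the EnumSet template in utils.h:

    #include <cstdio>

    enum AstPropertiesFlag { kDontInline, kDontOptimize, kDontSelfOptimize };

    // Simplified stand-in for EnumSet<AstPropertiesFlag, int>.
    class FlagSet {
     public:
      void Add(AstPropertiesFlag f) { bits_ |= 1 << f; }
      bool Contains(AstPropertiesFlag f) const { return (bits_ >> f) & 1; }
     private:
      int bits_ = 0;
    };

    class AstConstructionVisitor {
     public:
      // The INCREASE_NODE_COUNT case: most node types only bump the counter.
      void VisitLiteral() { ++node_count_; }
      // Nodes that defeat optimizations also set flags, as in ast.cc above.
      void VisitForInStatement() {
        ++node_count_;
        flags_.Add(kDontOptimize);
        flags_.Add(kDontInline);
        flags_.Add(kDontSelfOptimize);
      }
      const FlagSet& flags() const { return flags_; }
      int node_count() const { return node_count_; }
     private:
      FlagSet flags_;
      int node_count_ = 0;
    };

    int main() {
      AstConstructionVisitor v;
      v.VisitLiteral();
      v.VisitForInStatement();
      // Later compiler phases just test the accumulated bits.
      std::printf("nodes=%d inlineable=%d\n", v.node_count(),
                  !v.flags().Contains(kDontInline));
      return 0;
    }
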
diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h
index 34fadab..7f81232 100644
--- a/deps/v8/src/ast.h
+++ b/deps/v8/src/ast.h
@@ -39,6 +39,7 @@
 #include "small-pointer-list.h"
 #include "smart-array-pointer.h"
 #include "token.h"
+#include "utils.h"
 #include "variables.h"
 #include "zone-inl.h"

@@ -58,6 +59,16 @@ namespace internal {
 // Nodes of the abstract syntax tree. Only concrete classes are
 // enumerated here.

+#define DECLARATION_NODE_LIST(V)                \
+  V(VariableDeclaration)                        \
+  V(ModuleDeclaration)                          \
+
+#define MODULE_NODE_LIST(V)                     \
+  V(ModuleLiteral)                              \
+  V(ModuleVariable)                             \
+  V(ModulePath)                                 \
+  V(ModuleUrl)
+
 #define STATEMENT_NODE_LIST(V)                  \
   V(Block)                                      \
   V(ExpressionStatement)                        \
@@ -98,12 +109,17 @@ namespace internal {
   V(ThisFunction)

 #define AST_NODE_LIST(V)                        \
-  V(Declaration)                                \
+  DECLARATION_NODE_LIST(V)                      \
+  MODULE_NODE_LIST(V)                           \
   STATEMENT_NODE_LIST(V)                        \
   EXPRESSION_NODE_LIST(V)

 // Forward declarations
+class AstConstructionVisitor;
+template<class> class AstNodeFactory;
 class AstVisitor;
+class Declaration;
+class Module;
 class BreakableStatement;
 class Expression;
 class IterationStatement;
@@ -136,6 +152,35 @@ typedef ZoneList<Handle<String> > ZoneStringList;
 typedef ZoneList<Handle<Object> > ZoneObjectList;


+#define DECLARE_NODE_TYPE(type)                                         \
+  virtual void Accept(AstVisitor* v);                                   \
+  virtual AstNode::Type node_type() const { return AstNode::k##type; }  \
+
+
+enum AstPropertiesFlag {
+  kDontInline,
+  kDontOptimize,
+  kDontSelfOptimize,
+  kDontSoftInline
+};
+
+
+class AstProperties BASE_EMBEDDED {
+ public:
+  class Flags : public EnumSet<AstPropertiesFlag, int> {};
+
+  AstProperties() : node_count_(0) { }
+
+  Flags* flags() { return &flags_; }
+  int node_count() { return node_count_; }
+  void add_node_count(int count) { node_count_ += count; }
+
+ private:
+  Flags flags_;
+  int node_count_;
+};
+
+
 class AstNode: public ZoneObject {
  public:
 #define DECLARE_TYPE_ENUM(type) k##type,
@@ -152,14 +197,11 @@ class AstNode: public ZoneObject {
   // that emit code (function declarations).
   static const int kDeclarationsId = 3;

-  // Override ZoneObject's new to count allocated AST nodes.
   void* operator new(size_t size, Zone* zone) {
-    Isolate* isolate = zone->isolate();
-    isolate->set_ast_node_count(isolate->ast_node_count() + 1);
     return zone->New(static_cast<int>(size));
   }

-  AstNode() {}
+  AstNode() { }

   virtual ~AstNode() { }

@@ -173,6 +215,7 @@ class AstNode: public ZoneObject {
   AST_NODE_LIST(DECLARE_NODE_FUNCTIONS)
 #undef DECLARE_NODE_FUNCTIONS

+  virtual Declaration* AsDeclaration() { return NULL; }
   virtual Statement* AsStatement() { return NULL; }
   virtual Expression* AsExpression() { return NULL; }
   virtual TargetCollector* AsTargetCollector() { return NULL; }
@@ -180,19 +223,15 @@ class AstNode: public ZoneObject {
   virtual IterationStatement* AsIterationStatement() { return NULL; }
   virtual MaterializedLiteral* AsMaterializedLiteral() { return NULL; }

-  // True if the node is simple enough for us to inline calls containing it.
-  virtual bool IsInlineable() const = 0;
-
-  static int Count() { return Isolate::Current()->ast_node_count(); }
   static void ResetIds() { Isolate::Current()->set_ast_node_id(0); }

  protected:
-  static unsigned GetNextId(Isolate* isolate) {
+  static int GetNextId(Isolate* isolate) {
     return ReserveIdRange(isolate, 1);
   }

-  static unsigned ReserveIdRange(Isolate* isolate, int n) {
-    unsigned tmp = isolate->ast_node_id();
+  static int ReserveIdRange(Isolate* isolate, int n) {
+    int tmp = isolate->ast_node_id();
     isolate->set_ast_node_id(tmp + n);
     return tmp;
   }
@@ -271,10 +310,6 @@ class Expression: public AstNode {
     kTest
   };

-  explicit Expression(Isolate* isolate)
-      : id_(GetNextId(isolate)),
-        test_id_(GetNextId(isolate)) {}
-
   virtual int position() const {
     UNREACHABLE();
     return 0;
@@ -325,9 +360,14 @@ class Expression: public AstNode {
   unsigned id() const { return id_; }
   unsigned test_id() const { return test_id_; }

+ protected:
+  explicit Expression(Isolate* isolate)
+      : id_(GetNextId(isolate)),
+        test_id_(GetNextId(isolate)) {}
+
  private:
-  unsigned id_;
-  unsigned test_id_;
+  int id_;
+  int test_id_;
 };
@@ -376,21 +416,8 @@ class BreakableStatement: public Statement {

 class Block: public BreakableStatement {
  public:
-  Block(Isolate* isolate,
-        ZoneStringList* labels,
-        int capacity,
-        bool is_initializer_block)
-      : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
-        statements_(capacity),
-        is_initializer_block_(is_initializer_block),
-        block_scope_(NULL) {
-  }
-
-
   DECLARE_NODE_TYPE(Block)

-  virtual bool IsInlineable() const;
-
   void AddStatement(Statement* statement) { statements_.Add(statement); }

   ZoneList<Statement*>* statements() { return &statements_; }
@@ -399,6 +426,19 @@ class Block: public BreakableStatement {
   Scope* block_scope() const { return block_scope_; }
   void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; }

+ protected:
+  template<class> friend class AstNodeFactory;
+
+  Block(Isolate* isolate,
+        ZoneStringList* labels,
+        int capacity,
+        bool is_initializer_block)
+      : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
+        statements_(capacity),
+        is_initializer_block_(is_initializer_block),
+        block_scope_(NULL) {
+  }
+
  private:
   ZoneList<Statement*> statements_;
   bool is_initializer_block_;
@@ -408,37 +448,162 @@ class Block: public BreakableStatement {

 class Declaration: public AstNode {
  public:
+  VariableProxy* proxy() const { return proxy_; }
+  VariableMode mode() const { return mode_; }
+  Scope* scope() const { return scope_; }
+  virtual bool IsInlineable() const;
+
+  virtual Declaration* AsDeclaration() { return this; }
+  virtual VariableDeclaration* AsVariableDeclaration() { return NULL; }
+
+ protected:
   Declaration(VariableProxy* proxy,
               VariableMode mode,
-              FunctionLiteral* fun,
               Scope* scope)
       : proxy_(proxy),
         mode_(mode),
-        fun_(fun),
         scope_(scope) {
     ASSERT(mode == VAR ||
            mode == CONST ||
            mode == CONST_HARMONY ||
            mode == LET);
-    // At the moment there are no "const functions"'s in JavaScript...
-    ASSERT(fun == NULL || mode == VAR || mode == LET);
   }

-  DECLARE_NODE_TYPE(Declaration)
+ private:
+  VariableProxy* proxy_;
+  VariableMode mode_;
+
+  // Nested scope from which the declaration originated.
+  Scope* scope_;
+};
+
+
+class VariableDeclaration: public Declaration {
+ public:
+  DECLARE_NODE_TYPE(VariableDeclaration)
+
+  virtual VariableDeclaration* AsVariableDeclaration() { return this; }

-  VariableProxy* proxy() const { return proxy_; }
-  VariableMode mode() const { return mode_; }
   FunctionLiteral* fun() const { return fun_; }  // may be NULL
   virtual bool IsInlineable() const;
-  Scope* scope() const { return scope_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  VariableDeclaration(VariableProxy* proxy,
+                      VariableMode mode,
+                      FunctionLiteral* fun,
+                      Scope* scope)
+      : Declaration(proxy, mode, scope),
+        fun_(fun) {
+    // At the moment there are no "const functions"'s in JavaScript...
+    ASSERT(fun == NULL || mode == VAR || mode == LET);
+  }

  private:
-  VariableProxy* proxy_;
-  VariableMode mode_;
   FunctionLiteral* fun_;
+};

-  // Nested scope from which the declaration originated.
-  Scope* scope_;
+
+class ModuleDeclaration: public Declaration {
+ public:
+  DECLARE_NODE_TYPE(ModuleDeclaration)
+
+  Module* module() const { return module_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  ModuleDeclaration(VariableProxy* proxy,
+                    Module* module,
+                    Scope* scope)
+      : Declaration(proxy, LET, scope),
+        module_(module) {
+  }
+
+ private:
+  Module* module_;
+};
+
+
+class Module: public AstNode {
+  // TODO(rossberg): stuff to come...
+ protected:
+  Module() {}
+};
+
+
+class ModuleLiteral: public Module {
+ public:
+  DECLARE_NODE_TYPE(ModuleLiteral)
+
+  Block* body() const { return body_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  explicit ModuleLiteral(Block* body)
+      : body_(body) {
+  }
+
+ private:
+  Block* body_;
+};
+
+
+class ModuleVariable: public Module {
+ public:
+  DECLARE_NODE_TYPE(ModuleVariable)
+
+  Variable* var() const { return var_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  explicit ModuleVariable(Variable* var)
+      : var_(var) {
+  }
+
+ private:
+  Variable* var_;
+};
+
+
+class ModulePath: public Module {
+ public:
+  DECLARE_NODE_TYPE(ModulePath)
+
+  Module* module() const { return module_; }
+  Handle<String> name() const { return name_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  ModulePath(Module* module, Handle<String> name)
+      : module_(module),
+        name_(name) {
+  }
+
+ private:
+  Module* module_;
+  Handle<String> name_;
+};
+
+
+class ModuleUrl: public Module {
+ public:
+  DECLARE_NODE_TYPE(ModuleUrl)
+
+  Handle<String> url() const { return url_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  explicit ModuleUrl(Handle<String> url) : url_(url) {
+  }
+
+ private:
+  Handle<String> url_;
 };
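
A pattern worth noting in this header: each node's constructor moves into a protected section next to a `template<class> friend class AstNodeFactory;` declaration, so nodes can only be created through a factory parameterized by a construction-time visitor. A stripped-down sketch of how such a factory threads a visitor through node creation (types simplified; V8 additionally zone-allocates the nodes rather than using plain `new`):

    #include <cstdio>

    template <typename Visitor> class AstNodeFactory;  // forward declaration

    class Node {
     protected:
      template <typename> friend class AstNodeFactory;
      Node() {}  // protected: only factories may construct nodes
    };

    class CountingVisitor {
     public:
      void VisitNode(Node*) { ++count_; }
      int count() const { return count_; }
     private:
      int count_ = 0;
    };

    template <typename Visitor>
    class AstNodeFactory {
     public:
      // Construct a node and immediately run the visitor over it, so
      // node counts and flags are collected while the parser builds the tree.
      Node* NewNode() {
        Node* node = new Node();  // a zone allocation in the real factory
        visitor_.VisitNode(node);
        return node;
      }
      Visitor* visitor() { return &visitor_; }
     private:
      Visitor visitor_;
    };

    int main() {
      AstNodeFactory<CountingVisitor> factory;
      factory.NewNode();
      factory.NewNode();  // leaked here; a zone frees everything en masse
      std::printf("nodes built: %d\n", factory.visitor()->count());
      return 0;
    }
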
@@ -477,14 +642,6 @@ class IterationStatement: public BreakableStatement {

 class DoWhileStatement: public IterationStatement {
  public:
-  DoWhileStatement(Isolate* isolate, ZoneStringList* labels)
-      : IterationStatement(isolate, labels),
-        cond_(NULL),
-        condition_position_(-1),
-        continue_id_(GetNextId(isolate)),
-        back_edge_id_(GetNextId(isolate)) {
-  }
-
   DECLARE_NODE_TYPE(DoWhileStatement)

   void Initialize(Expression* cond, Statement* body) {
@@ -504,7 +661,16 @@ class DoWhileStatement: public IterationStatement {
   virtual int StackCheckId() const { return back_edge_id_; }
   int BackEdgeId() const { return back_edge_id_; }

-  virtual bool IsInlineable() const;
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  DoWhileStatement(Isolate* isolate, ZoneStringList* labels)
+      : IterationStatement(isolate, labels),
+        cond_(NULL),
+        condition_position_(-1),
+        continue_id_(GetNextId(isolate)),
+        back_edge_id_(GetNextId(isolate)) {
+  }

  private:
   Expression* cond_;
@@ -516,13 +682,6 @@

 class WhileStatement: public IterationStatement {
  public:
-  WhileStatement(Isolate* isolate, ZoneStringList* labels)
-      : IterationStatement(isolate, labels),
-        cond_(NULL),
-        may_have_function_literal_(true),
-        body_id_(GetNextId(isolate)) {
-  }
-
   DECLARE_NODE_TYPE(WhileStatement)

   void Initialize(Expression* cond, Statement* body) {
@@ -537,13 +696,22 @@ class WhileStatement: public IterationStatement {
   void set_may_have_function_literal(bool value) {
     may_have_function_literal_ = value;
   }
-  virtual bool IsInlineable() const;

   // Bailout support.
   virtual int ContinueId() const { return EntryId(); }
   virtual int StackCheckId() const { return body_id_; }
   int BodyId() const { return body_id_; }

+ protected:
+  template<class> friend class AstNodeFactory;
+
+  WhileStatement(Isolate* isolate, ZoneStringList* labels)
+      : IterationStatement(isolate, labels),
+        cond_(NULL),
+        may_have_function_literal_(true),
+        body_id_(GetNextId(isolate)) {
+  }
+
  private:
   Expression* cond_;
   // True if there is a function literal subexpression in the condition.
@@ -554,17 +722,6 @@

 class ForStatement: public IterationStatement {
  public:
-  ForStatement(Isolate* isolate, ZoneStringList* labels)
-      : IterationStatement(isolate, labels),
-        init_(NULL),
-        cond_(NULL),
-        next_(NULL),
-        may_have_function_literal_(true),
-        loop_variable_(NULL),
-        continue_id_(GetNextId(isolate)),
-        body_id_(GetNextId(isolate)) {
-  }
-
   DECLARE_NODE_TYPE(ForStatement)

   void Initialize(Statement* init,
@@ -596,7 +753,20 @@ class ForStatement: public IterationStatement {
   bool is_fast_smi_loop() { return loop_variable_ != NULL; }
   Variable* loop_variable() { return loop_variable_; }
   void set_loop_variable(Variable* var) { loop_variable_ = var; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  ForStatement(Isolate* isolate, ZoneStringList* labels)
+      : IterationStatement(isolate, labels),
+        init_(NULL),
+        cond_(NULL),
+        next_(NULL),
+        may_have_function_literal_(true),
+        loop_variable_(NULL),
+        continue_id_(GetNextId(isolate)),
+        body_id_(GetNextId(isolate)) {
+  }

  private:
   Statement* init_;
@@ -612,13 +782,6 @@

 class ForInStatement: public IterationStatement {
  public:
-  ForInStatement(Isolate* isolate, ZoneStringList* labels)
-      : IterationStatement(isolate, labels),
-        each_(NULL),
-        enumerable_(NULL),
-        assignment_id_(GetNextId(isolate)) {
-  }
-
   DECLARE_NODE_TYPE(ForInStatement)

   void Initialize(Expression* each, Expression* enumerable, Statement* body) {
@@ -629,13 +792,22 @@ class ForInStatement: public IterationStatement {

   Expression* each() const { return each_; }
   Expression* enumerable() const { return enumerable_; }
-  virtual bool IsInlineable() const;

   // Bailout support.
   int AssignmentId() const { return assignment_id_; }
   virtual int ContinueId() const { return EntryId(); }
   virtual int StackCheckId() const { return EntryId(); }

+ protected:
+  template<class> friend class AstNodeFactory;
+
+  ForInStatement(Isolate* isolate, ZoneStringList* labels)
+      : IterationStatement(isolate, labels),
+        each_(NULL),
+        enumerable_(NULL),
+        assignment_id_(GetNextId(isolate)) {
+  }
+
  private:
   Expression* each_;
   Expression* enumerable_;
@@ -645,16 +817,17 @@

 class ExpressionStatement: public Statement {
  public:
-  explicit ExpressionStatement(Expression* expression)
-      : expression_(expression) { }
-
   DECLARE_NODE_TYPE(ExpressionStatement)

-  virtual bool IsInlineable() const;
-
   void set_expression(Expression* e) { expression_ = e; }
   Expression* expression() const { return expression_; }

+ protected:
+  template<class> friend class AstNodeFactory;
+
+  explicit ExpressionStatement(Expression* expression)
+      : expression_(expression) { }
+
  private:
   Expression* expression_;
 };
@@ -662,13 +835,15 @@

 class ContinueStatement: public Statement {
  public:
-  explicit ContinueStatement(IterationStatement* target)
-      : target_(target) { }
-
   DECLARE_NODE_TYPE(ContinueStatement)

   IterationStatement* target() const { return target_; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  explicit ContinueStatement(IterationStatement* target)
+      : target_(target) { }

  private:
   IterationStatement* target_;
@@ -677,13 +852,15 @@

 class BreakStatement: public Statement {
  public:
-  explicit BreakStatement(BreakableStatement* target)
-      : target_(target) { }
-
   DECLARE_NODE_TYPE(BreakStatement)

   BreakableStatement* target() const { return target_; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  explicit BreakStatement(BreakableStatement* target)
+      : target_(target) { }

  private:
   BreakableStatement* target_;
@@ -692,30 +869,34 @@

 class ReturnStatement: public Statement {
  public:
-  explicit ReturnStatement(Expression* expression)
-      : expression_(expression) { }
-
   DECLARE_NODE_TYPE(ReturnStatement)

   Expression* expression() const { return expression_; }
-  virtual bool IsInlineable() const;

-  private:
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  explicit ReturnStatement(Expression* expression)
+      : expression_(expression) { }
+
+ private:
   Expression* expression_;
 };


 class WithStatement: public Statement {
  public:
-  WithStatement(Expression* expression, Statement* statement)
-      : expression_(expression), statement_(statement) { }
-
   DECLARE_NODE_TYPE(WithStatement)

   Expression* expression() const { return expression_; }
   Statement* statement() const { return statement_; }
-  virtual bool IsInlineable() const;

+ protected:
+  template<class> friend class AstNodeFactory;
+
+  WithStatement(Expression* expression, Statement* statement)
+      : expression_(expression),
+        statement_(statement) { }

  private:
   Expression* expression_;
@@ -771,13 +952,6 @@ class CaseClause: public ZoneObject {

 class SwitchStatement: public BreakableStatement {
  public:
-  SwitchStatement(Isolate* isolate, ZoneStringList* labels)
-      : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
-        tag_(NULL),
-        cases_(NULL) {
-  }
-
-
   DECLARE_NODE_TYPE(SwitchStatement)

   void Initialize(Expression* tag, ZoneList<CaseClause*>* cases) {
     tag_ = tag;
     cases_ = cases;
   }
   Expression* tag() const { return tag_; }
   ZoneList<CaseClause*>* cases() const { return cases_; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  SwitchStatement(Isolate* isolate, ZoneStringList* labels)
+      : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
+        tag_(NULL),
+        cases_(NULL) { }

  private:
   Expression* tag_;
@@ -802,22 +983,8 @@
 // given if-statement has a then- or an else-part containing code.
 class IfStatement: public Statement {
  public:
-  IfStatement(Isolate* isolate,
-              Expression* condition,
-              Statement* then_statement,
-              Statement* else_statement)
-      : condition_(condition),
-        then_statement_(then_statement),
-        else_statement_(else_statement),
-        if_id_(GetNextId(isolate)),
-        then_id_(GetNextId(isolate)),
-        else_id_(GetNextId(isolate)) {
-  }
-
   DECLARE_NODE_TYPE(IfStatement)

-  virtual bool IsInlineable() const;
-
   bool HasThenStatement() const { return !then_statement()->IsEmpty(); }
   bool HasElseStatement() const { return !else_statement()->IsEmpty(); }

@@ -829,6 +996,21 @@ class IfStatement: public Statement {
   int ThenId() const { return then_id_; }
   int ElseId() const { return else_id_; }

+ protected:
+  template<class> friend class AstNodeFactory;
+
+  IfStatement(Isolate* isolate,
+              Expression* condition,
+              Statement* then_statement,
+              Statement* else_statement)
+      : condition_(condition),
+        then_statement_(then_statement),
+        else_statement_(else_statement),
+        if_id_(GetNextId(isolate)),
+        then_id_(GetNextId(isolate)),
+        else_id_(GetNextId(isolate)) {
+  }
+
  private:
   Expression* condition_;
   Statement* then_statement_;
@@ -843,7 +1025,7 @@ class IfStatement: public Statement {
 // stack in the compiler; this should probably be reworked.
 class TargetCollector: public AstNode {
  public:
-  TargetCollector(): targets_(0) { }
+  TargetCollector() : targets_(0) { }

   // Adds a jump target to the collector. The collector stores a pointer not
   // a copy of the target to make binding work, so make sure not to pass in
@@ -855,7 +1037,6 @@ class TargetCollector: public AstNode {
   virtual TargetCollector* AsTargetCollector() { return this; }

   ZoneList<Label*>* targets() { return &targets_; }
-  virtual bool IsInlineable() const;

  private:
   ZoneList<Label*> targets_;
@@ -864,12 +1045,6 @@ class TargetCollector: public AstNode {

 class TryStatement: public Statement {
  public:
-  explicit TryStatement(int index, Block* try_block)
-      : index_(index),
-        try_block_(try_block),
-        escaping_targets_(NULL) {
-  }
-
   void set_escaping_targets(ZoneList<Label*>* targets) {
     escaping_targets_ = targets;
   }
@@ -877,7 +1052,12 @@ class TryStatement: public Statement {
   int index() const { return index_; }
   Block* try_block() const { return try_block_; }
   ZoneList<Label*>* escaping_targets() const { return escaping_targets_; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  TryStatement(int index, Block* try_block)
+      : index_(index),
+        try_block_(try_block),
+        escaping_targets_(NULL) { }

  private:
   // Unique (per-function) index of this handler. This is not an AST ID.
@@ -890,6 +1070,15 @@

 class TryCatchStatement: public TryStatement {
  public:
+  DECLARE_NODE_TYPE(TryCatchStatement)
+
+  Scope* scope() { return scope_; }
+  Variable* variable() { return variable_; }
+  Block* catch_block() const { return catch_block_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
   TryCatchStatement(int index,
                     Block* try_block,
                     Scope* scope,
@@ -901,13 +1090,6 @@ class TryCatchStatement: public TryStatement {
         catch_block_(catch_block) {
   }

-  DECLARE_NODE_TYPE(TryCatchStatement)
-
-  Scope* scope() { return scope_; }
-  Variable* variable() { return variable_; }
-  Block* catch_block() const { return catch_block_; }
-  virtual bool IsInlineable() const;
-
  private:
   Scope* scope_;
   Variable* variable_;
@@ -917,14 +1099,16 @@ class TryCatchStatement: public TryStatement {

 class TryFinallyStatement: public TryStatement {
  public:
-  TryFinallyStatement(int index, Block* try_block, Block* finally_block)
-      : TryStatement(index, try_block),
-        finally_block_(finally_block) { }
-
   DECLARE_NODE_TYPE(TryFinallyStatement)

   Block* finally_block() const { return finally_block_; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  TryFinallyStatement(int index, Block* try_block, Block* finally_block)
+      : TryStatement(index, try_block),
+        finally_block_(finally_block) { }

  private:
   Block* finally_block_;
@@ -934,7 +1118,11 @@ class TryFinallyStatement: public TryStatement {
 class DebuggerStatement: public Statement {
  public:
   DECLARE_NODE_TYPE(DebuggerStatement)
-  virtual bool IsInlineable() const;
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  DebuggerStatement() {}
 };


@@ -942,15 +1130,15 @@ class DebuggerStatement: public Statement {
 class EmptyStatement: public Statement {
  public:
   DECLARE_NODE_TYPE(EmptyStatement)

-  virtual bool IsInlineable() const;
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  EmptyStatement() {}
 };


 class Literal: public Expression {
  public:
-  Literal(Isolate* isolate, Handle<Object> handle)
-      : Expression(isolate), handle_(handle) { }
-
   DECLARE_NODE_TYPE(Literal)

   // Check if this literal is identical to the other literal.
@@ -989,7 +1177,13 @@ class Literal: public Expression {
   }

   Handle<Object> handle() const { return handle_; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
+  Literal(Isolate* isolate, Handle<Object> handle)
+      : Expression(isolate),
+        handle_(handle) { }

  private:
   Handle<Object> handle_;
@@ -999,15 +1193,6 @@ class Literal: public Expression {
 // Base class for literals that needs space in the corresponding JSFunction.
 class MaterializedLiteral: public Expression {
  public:
-  MaterializedLiteral(Isolate* isolate,
-                      int literal_index,
-                      bool is_simple,
-                      int depth)
-      : Expression(isolate),
-        literal_index_(literal_index),
-        is_simple_(is_simple),
-        depth_(depth) {}
-
   virtual MaterializedLiteral* AsMaterializedLiteral() { return this; }

   int literal_index() { return literal_index_; }
@@ -1017,7 +1202,16 @@ class MaterializedLiteral: public Expression {
   bool is_simple() const { return is_simple_; }

   int depth() const { return depth_; }
-  virtual bool IsInlineable() const;
+
+ protected:
+  MaterializedLiteral(Isolate* isolate,
+                      int literal_index,
+                      bool is_simple,
+                      int depth)
+      : Expression(isolate),
+        literal_index_(literal_index),
+        is_simple_(is_simple),
+        depth_(depth) {}

  private:
   int literal_index_;
@@ -1044,7 +1238,6 @@ class ObjectLiteral: public MaterializedLiteral {
     };

     Property(Literal* key, Expression* value);
-    Property(bool is_getter, FunctionLiteral* value);

     Literal* key() { return key_; }
     Expression* value() { return value_; }
@@ -1055,6 +1248,12 @@ class ObjectLiteral: public MaterializedLiteral {
     void set_emit_store(bool emit_store);
     bool emit_store();

+   protected:
+    template<class> friend class AstNodeFactory;
+
+    Property(bool is_getter, FunctionLiteral* value);
+    void set_key(Literal* key) { key_ = key; }
+
    private:
     Literal* key_;
     Expression* value_;
@@ -1062,20 +1261,6 @@ class ObjectLiteral: public MaterializedLiteral {
     bool emit_store_;
   };

-  ObjectLiteral(Isolate* isolate,
-                Handle<FixedArray> constant_properties,
-                ZoneList<Property*>* properties,
-                int literal_index,
-                bool is_simple,
-                bool fast_elements,
-                int depth,
-                bool has_function)
-      : MaterializedLiteral(isolate, literal_index, is_simple, depth),
-        constant_properties_(constant_properties),
-        properties_(properties),
-        fast_elements_(fast_elements),
-        has_function_(has_function) {}
-
   DECLARE_NODE_TYPE(ObjectLiteral)

   Handle<FixedArray> constant_properties() const {
@@ -1098,6 +1283,23 @@ class ObjectLiteral: public MaterializedLiteral {
     kHasFunction = 1 << 1
   };

+ protected:
+  template<class> friend class AstNodeFactory;
+
+  ObjectLiteral(Isolate* isolate,
+                Handle<FixedArray> constant_properties,
+                ZoneList<Property*>* properties,
+                int literal_index,
+                bool is_simple,
+                bool fast_elements,
+                int depth,
+                bool has_function)
+      : MaterializedLiteral(isolate, literal_index, is_simple, depth),
+        constant_properties_(constant_properties),
+        properties_(properties),
+        fast_elements_(fast_elements),
+        has_function_(has_function) {}
+
  private:
   Handle<FixedArray> constant_properties_;
   ZoneList<Property*>* properties_;
@@ -1109,6 +1311,14 @@ class ObjectLiteral: public MaterializedLiteral {
 // Node for capturing a regexp literal.
 class RegExpLiteral: public MaterializedLiteral {
  public:
+  DECLARE_NODE_TYPE(RegExpLiteral)
+
+  Handle<String> pattern() const { return pattern_; }
+  Handle<String> flags() const { return flags_; }
+
+ protected:
+  template<class> friend class AstNodeFactory;
+
   RegExpLiteral(Isolate* isolate,
                 Handle<String> pattern,
                 Handle<String> flags,
@@ -1117,11 +1327,6 @@ class RegExpLiteral: public MaterializedLiteral {
         pattern_(pattern),
         flags_(flags) {}

-  DECLARE_NODE_TYPE(RegExpLiteral)
-
-  Handle<String> pattern() const { return pattern_; }
-  Handle<String> flags() const { return flags_; }
-
  private:
   Handle<String> pattern_;
   Handle<String> flags_;
@@ -1131,6 +1336,17 @@ class RegExpLiteral: public MaterializedLiteral {
 // for minimizing the work when constructing it at runtime.
class ArrayLiteral: public MaterializedLiteral { public: + DECLARE_NODE_TYPE(ArrayLiteral) + + Handle constant_elements() const { return constant_elements_; } + ZoneList* values() const { return values_; } + + // Return an AST id for an element that is used in simulate instructions. + int GetIdForElement(int i) { return first_element_id_ + i; } + + protected: + template friend class AstNodeFactory; + ArrayLiteral(Isolate* isolate, Handle constant_elements, ZoneList* values, @@ -1142,14 +1358,6 @@ class ArrayLiteral: public MaterializedLiteral { values_(values), first_element_id_(ReserveIdRange(isolate, values->length())) {} - DECLARE_NODE_TYPE(ArrayLiteral) - - Handle constant_elements() const { return constant_elements_; } - ZoneList* values() const { return values_; } - - // Return an AST id for an element that is used in simulate instructions. - int GetIdForElement(int i) { return first_element_id_ + i; } - private: Handle constant_elements_; ZoneList* values_; @@ -1159,21 +1367,12 @@ class ArrayLiteral: public MaterializedLiteral { class VariableProxy: public Expression { public: - VariableProxy(Isolate* isolate, Variable* var); - - VariableProxy(Isolate* isolate, - Handle name, - bool is_this, - int position = RelocInfo::kNoPosition); - DECLARE_NODE_TYPE(VariableProxy) virtual bool IsValidLeftHandSide() { return var_ == NULL ? true : var_->IsValidLeftHandSide(); } - virtual bool IsInlineable() const; - bool IsVariable(Handle n) { return !is_this() && name().is_identical_to(n); } @@ -1196,6 +1395,15 @@ class VariableProxy: public Expression { void BindTo(Variable* var); protected: + template friend class AstNodeFactory; + + VariableProxy(Isolate* isolate, Variable* var); + + VariableProxy(Isolate* isolate, + Handle name, + bool is_this, + int position); + Handle name_; Variable* var_; // resolved variable, or NULL bool is_this_; @@ -1209,24 +1417,9 @@ class VariableProxy: public Expression { class Property: public Expression { public: - Property(Isolate* isolate, - Expression* obj, - Expression* key, - int pos) - : Expression(isolate), - obj_(obj), - key_(key), - pos_(pos), - is_monomorphic_(false), - is_array_length_(false), - is_string_length_(false), - is_string_access_(false), - is_function_prototype_(false) { } - DECLARE_NODE_TYPE(Property) virtual bool IsValidLeftHandSide() { return true; } - virtual bool IsInlineable() const; Expression* obj() const { return obj_; } Expression* key() const { return key_; } @@ -1242,6 +1435,23 @@ class Property: public Expression { virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; } bool IsArrayLength() { return is_array_length_; } + protected: + template friend class AstNodeFactory; + + Property(Isolate* isolate, + Expression* obj, + Expression* key, + int pos) + : Expression(isolate), + obj_(obj), + key_(key), + pos_(pos), + is_monomorphic_(false), + is_array_length_(false), + is_string_length_(false), + is_string_access_(false), + is_function_prototype_(false) { } + private: Expression* obj_; Expression* key_; @@ -1258,23 +1468,8 @@ class Property: public Expression { class Call: public Expression { public: - Call(Isolate* isolate, - Expression* expression, - ZoneList* arguments, - int pos) - : Expression(isolate), - expression_(expression), - arguments_(arguments), - pos_(pos), - is_monomorphic_(false), - check_type_(RECEIVER_MAP_CHECK), - return_id_(GetNextId(isolate)) { - } - DECLARE_NODE_TYPE(Call) - virtual bool IsInlineable() const; - Expression* expression() const { return expression_; } ZoneList* arguments() const { 
return arguments_; } virtual int position() const { return pos_; } @@ -1299,6 +1494,21 @@ class Call: public Expression { bool return_is_recorded_; #endif + protected: + template friend class AstNodeFactory; + + Call(Isolate* isolate, + Expression* expression, + ZoneList* arguments, + int pos) + : Expression(isolate), + expression_(expression), + arguments_(arguments), + pos_(pos), + is_monomorphic_(false), + check_type_(RECEIVER_MAP_CHECK), + return_id_(GetNextId(isolate)) { } + private: Expression* expression_; ZoneList* arguments_; @@ -1317,6 +1527,15 @@ class Call: public Expression { class CallNew: public Expression { public: + DECLARE_NODE_TYPE(CallNew) + + Expression* expression() const { return expression_; } + ZoneList* arguments() const { return arguments_; } + virtual int position() const { return pos_; } + + protected: + template friend class AstNodeFactory; + CallNew(Isolate* isolate, Expression* expression, ZoneList* arguments, @@ -1326,14 +1545,6 @@ class CallNew: public Expression { arguments_(arguments), pos_(pos) { } - DECLARE_NODE_TYPE(CallNew) - - virtual bool IsInlineable() const; - - Expression* expression() const { return expression_; } - ZoneList* arguments() const { return arguments_; } - virtual int position() const { return pos_; } - private: Expression* expression_; ZoneList* arguments_; @@ -1347,6 +1558,16 @@ class CallNew: public Expression { // implemented in JavaScript (see "v8natives.js"). class CallRuntime: public Expression { public: + DECLARE_NODE_TYPE(CallRuntime) + + Handle name() const { return name_; } + const Runtime::Function* function() const { return function_; } + ZoneList* arguments() const { return arguments_; } + bool is_jsruntime() const { return function_ == NULL; } + + protected: + template friend class AstNodeFactory; + CallRuntime(Isolate* isolate, Handle name, const Runtime::Function* function, @@ -1356,15 +1577,6 @@ class CallRuntime: public Expression { function_(function), arguments_(arguments) { } - DECLARE_NODE_TYPE(CallRuntime) - - virtual bool IsInlineable() const; - - Handle name() const { return name_; } - const Runtime::Function* function() const { return function_; } - ZoneList* arguments() const { return arguments_; } - bool is_jsruntime() const { return function_ == NULL; } - private: Handle name_; const Runtime::Function* function_; @@ -1374,6 +1586,20 @@ class CallRuntime: public Expression { class UnaryOperation: public Expression { public: + DECLARE_NODE_TYPE(UnaryOperation) + + virtual bool ResultOverwriteAllowed(); + + Token::Value op() const { return op_; } + Expression* expression() const { return expression_; } + virtual int position() const { return pos_; } + + int MaterializeTrueId() { return materialize_true_id_; } + int MaterializeFalseId() { return materialize_false_id_; } + + protected: + template friend class AstNodeFactory; + UnaryOperation(Isolate* isolate, Token::Value op, Expression* expression, @@ -1391,19 +1617,6 @@ class UnaryOperation: public Expression { } } - DECLARE_NODE_TYPE(UnaryOperation) - - virtual bool IsInlineable() const; - - virtual bool ResultOverwriteAllowed(); - - Token::Value op() const { return op_; } - Expression* expression() const { return expression_; } - virtual int position() const { return pos_; } - - int MaterializeTrueId() { return materialize_true_id_; } - int MaterializeFalseId() { return materialize_false_id_; } - private: Token::Value op_; Expression* expression_; @@ -1418,22 +1631,8 @@ class UnaryOperation: public Expression { class BinaryOperation: public Expression { 
public: - BinaryOperation(Isolate* isolate, - Token::Value op, - Expression* left, - Expression* right, - int pos) - : Expression(isolate), op_(op), left_(left), right_(right), pos_(pos) { - ASSERT(Token::IsBinaryOp(op)); - right_id_ = (op == Token::AND || op == Token::OR) - ? static_cast(GetNextId(isolate)) - : AstNode::kNoNumber; - } - DECLARE_NODE_TYPE(BinaryOperation) - virtual bool IsInlineable() const; - virtual bool ResultOverwriteAllowed(); Token::Value op() const { return op_; } @@ -1444,6 +1643,21 @@ class BinaryOperation: public Expression { // Bailout support. int RightId() const { return right_id_; } + protected: + template friend class AstNodeFactory; + + BinaryOperation(Isolate* isolate, + Token::Value op, + Expression* left, + Expression* right, + int pos) + : Expression(isolate), op_(op), left_(left), right_(right), pos_(pos) { + ASSERT(Token::IsBinaryOp(op)); + right_id_ = (op == Token::AND || op == Token::OR) + ? GetNextId(isolate) + : AstNode::kNoNumber; + } + private: Token::Value op_; Expression* left_; @@ -1457,19 +1671,6 @@ class BinaryOperation: public Expression { class CountOperation: public Expression { public: - CountOperation(Isolate* isolate, - Token::Value op, - bool is_prefix, - Expression* expr, - int pos) - : Expression(isolate), - op_(op), - is_prefix_(is_prefix), - expression_(expr), - pos_(pos), - assignment_id_(GetNextId(isolate)), - count_id_(GetNextId(isolate)) {} - DECLARE_NODE_TYPE(CountOperation) bool is_prefix() const { return is_prefix_; } @@ -1485,8 +1686,6 @@ class CountOperation: public Expression { virtual void MarkAsStatement() { is_prefix_ = true; } - virtual bool IsInlineable() const; - void RecordTypeFeedback(TypeFeedbackOracle* oracle); virtual bool IsMonomorphic() { return is_monomorphic_; } virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; } @@ -1495,6 +1694,22 @@ class CountOperation: public Expression { int AssignmentId() const { return assignment_id_; } int CountId() const { return count_id_; } + protected: + template friend class AstNodeFactory; + + CountOperation(Isolate* isolate, + Token::Value op, + bool is_prefix, + Expression* expr, + int pos) + : Expression(isolate), + op_(op), + is_prefix_(is_prefix), + expression_(expr), + pos_(pos), + assignment_id_(GetNextId(isolate)), + count_id_(GetNextId(isolate)) {} + private: Token::Value op_; bool is_prefix_; @@ -1509,20 +1724,6 @@ class CountOperation: public Expression { class CompareOperation: public Expression { public: - CompareOperation(Isolate* isolate, - Token::Value op, - Expression* left, - Expression* right, - int pos) - : Expression(isolate), - op_(op), - left_(left), - right_(right), - pos_(pos), - compare_type_(NONE) { - ASSERT(Token::IsCompareOp(op)); - } - DECLARE_NODE_TYPE(CompareOperation) Token::Value op() const { return op_; } @@ -1530,8 +1731,6 @@ class CompareOperation: public Expression { Expression* right() const { return right_; } virtual int position() const { return pos_; } - virtual bool IsInlineable() const; - // Type feedback information. 
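Note how the protected BinaryOperation constructor above allocates right_id_ only for Token::AND and Token::OR: under short-circuit evaluation the right operand is a conditionally reached continuation point, so it needs its own bailout id at which optimized code can resume. A small stand-alone program (hypothetical Probe helper) making the evaluation order visible:

#include <cstdio>

bool Probe(const char* label, bool value) {
  std::printf("eval %s\n", label);
  return value;
}

int main() {
  // Only the left operand runs in both statements; the right operand is a
  // separate, conditionally reached program point.
  (void)(Probe("andL", false) && Probe("andR", true));  // prints: eval andL
  (void)(Probe("orL", true) || Probe("orR", true));     // prints: eval orL
  // Eager operators (+, *, <, ...) evaluate both operands unconditionally,
  // which is why they get AstNode::kNoNumber instead of a fresh id.
  return 0;
}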
void RecordTypeFeedback(TypeFeedbackOracle* oracle); bool IsSmiCompare() { return compare_type_ == SMI_ONLY; } @@ -1542,6 +1741,23 @@ class CompareOperation: public Expression { bool IsLiteralCompareUndefined(Expression** expr); bool IsLiteralCompareNull(Expression** expr); + protected: + template friend class AstNodeFactory; + + CompareOperation(Isolate* isolate, + Token::Value op, + Expression* left, + Expression* right, + int pos) + : Expression(isolate), + op_(op), + left_(left), + right_(right), + pos_(pos), + compare_type_(NONE) { + ASSERT(Token::IsCompareOp(op)); + } + private: Token::Value op_; Expression* left_; @@ -1555,6 +1771,21 @@ class CompareOperation: public Expression { class Conditional: public Expression { public: + DECLARE_NODE_TYPE(Conditional) + + Expression* condition() const { return condition_; } + Expression* then_expression() const { return then_expression_; } + Expression* else_expression() const { return else_expression_; } + + int then_expression_position() const { return then_expression_position_; } + int else_expression_position() const { return else_expression_position_; } + + int ThenId() const { return then_id_; } + int ElseId() const { return else_id_; } + + protected: + template friend class AstNodeFactory; + Conditional(Isolate* isolate, Expression* condition, Expression* then_expression, @@ -1568,22 +1799,7 @@ class Conditional: public Expression { then_expression_position_(then_expression_position), else_expression_position_(else_expression_position), then_id_(GetNextId(isolate)), - else_id_(GetNextId(isolate)) { - } - - DECLARE_NODE_TYPE(Conditional) - - virtual bool IsInlineable() const; - - Expression* condition() const { return condition_; } - Expression* then_expression() const { return then_expression_; } - Expression* else_expression() const { return else_expression_; } - - int then_expression_position() const { return then_expression_position_; } - int else_expression_position() const { return else_expression_position_; } - - int ThenId() const { return then_id_; } - int ElseId() const { return else_id_; } + else_id_(GetNextId(isolate)) { } private: Expression* condition_; @@ -1598,16 +1814,8 @@ class Conditional: public Expression { class Assignment: public Expression { public: - Assignment(Isolate* isolate, - Token::Value op, - Expression* target, - Expression* value, - int pos); - DECLARE_NODE_TYPE(Assignment) - virtual bool IsInlineable() const; - Assignment* AsSimpleAssignment() { return !is_compound() ? 
this : NULL; } Token::Value binary_op() const; @@ -1639,6 +1847,25 @@ class Assignment: public Expression { int CompoundLoadId() const { return compound_load_id_; } int AssignmentId() const { return assignment_id_; } + protected: + template friend class AstNodeFactory; + + Assignment(Isolate* isolate, + Token::Value op, + Expression* target, + Expression* value, + int pos); + + template + void Init(Isolate* isolate, AstNodeFactory* factory) { + ASSERT(Token::IsAssignmentOp(op_)); + if (is_compound()) { + binary_operation_ = + factory->NewBinaryOperation(binary_op(), target_, value_, pos_ + 1); + compound_load_id_ = GetNextId(isolate); + } + } + private: Token::Value op_; Expression* target_; @@ -1658,14 +1885,16 @@ class Assignment: public Expression { class Throw: public Expression { public: - Throw(Isolate* isolate, Expression* exception, int pos) - : Expression(isolate), exception_(exception), pos_(pos) {} - DECLARE_NODE_TYPE(Throw) Expression* exception() const { return exception_; } virtual int position() const { return pos_; } - virtual bool IsInlineable() const; + + protected: + template friend class AstNodeFactory; + + Throw(Isolate* isolate, Expression* exception, int pos) + : Expression(isolate), exception_(exception), pos_(pos) {} private: Expression* exception_; @@ -1681,38 +1910,6 @@ class FunctionLiteral: public Expression { DECLARATION }; - FunctionLiteral(Isolate* isolate, - Handle name, - Scope* scope, - ZoneList* body, - int materialized_literal_count, - int expected_property_count, - int handler_count, - bool has_only_simple_this_property_assignments, - Handle this_property_assignments, - int parameter_count, - Type type, - bool has_duplicate_parameters) - : Expression(isolate), - name_(name), - scope_(scope), - body_(body), - this_property_assignments_(this_property_assignments), - inferred_name_(isolate->factory()->empty_string()), - materialized_literal_count_(materialized_literal_count), - expected_property_count_(expected_property_count), - handler_count_(handler_count), - parameter_count_(parameter_count), - function_token_position_(RelocInfo::kNoPosition) { - bitfield_ = - HasOnlySimpleThisPropertyAssignments::encode( - has_only_simple_this_property_assignments) | - IsExpression::encode(type != DECLARATION) | - IsAnonymous::encode(type == ANONYMOUS_EXPRESSION) | - Pretenure::encode(false) | - HasDuplicateParameters::encode(has_duplicate_parameters); - } - DECLARE_NODE_TYPE(FunctionLiteral) Handle name() const { return name_; } @@ -1752,18 +1949,59 @@ class FunctionLiteral: public Expression { bool pretenure() { return Pretenure::decode(bitfield_); } void set_pretenure() { bitfield_ |= Pretenure::encode(true); } - virtual bool IsInlineable() const; bool has_duplicate_parameters() { return HasDuplicateParameters::decode(bitfield_); } + int ast_node_count() { return ast_properties_.node_count(); } + AstProperties::Flags* flags() { return ast_properties_.flags(); } + void set_ast_properties(AstProperties* ast_properties) { + ast_properties_ = *ast_properties; + } + + protected: + template friend class AstNodeFactory; + + FunctionLiteral(Isolate* isolate, + Handle name, + Scope* scope, + ZoneList* body, + int materialized_literal_count, + int expected_property_count, + int handler_count, + bool has_only_simple_this_property_assignments, + Handle this_property_assignments, + int parameter_count, + Type type, + bool has_duplicate_parameters) + : Expression(isolate), + name_(name), + scope_(scope), + body_(body), + this_property_assignments_(this_property_assignments), + 
inferred_name_(isolate->factory()->empty_string()), + materialized_literal_count_(materialized_literal_count), + expected_property_count_(expected_property_count), + handler_count_(handler_count), + parameter_count_(parameter_count), + function_token_position_(RelocInfo::kNoPosition) { + bitfield_ = + HasOnlySimpleThisPropertyAssignments::encode( + has_only_simple_this_property_assignments) | + IsExpression::encode(type != DECLARATION) | + IsAnonymous::encode(type == ANONYMOUS_EXPRESSION) | + Pretenure::encode(false) | + HasDuplicateParameters::encode(has_duplicate_parameters); + } + private: Handle name_; Scope* scope_; ZoneList* body_; Handle this_property_assignments_; Handle inferred_name_; + AstProperties ast_properties_; int materialized_literal_count_; int expected_property_count_; @@ -1782,17 +2020,20 @@ class FunctionLiteral: public Expression { class SharedFunctionInfoLiteral: public Expression { public: - SharedFunctionInfoLiteral( - Isolate* isolate, - Handle shared_function_info) - : Expression(isolate), shared_function_info_(shared_function_info) { } - DECLARE_NODE_TYPE(SharedFunctionInfoLiteral) Handle shared_function_info() const { return shared_function_info_; } - virtual bool IsInlineable() const; + + protected: + template friend class AstNodeFactory; + + SharedFunctionInfoLiteral( + Isolate* isolate, + Handle shared_function_info) + : Expression(isolate), + shared_function_info_(shared_function_info) { } private: Handle shared_function_info_; @@ -1801,9 +2042,12 @@ class SharedFunctionInfoLiteral: public Expression { class ThisFunction: public Expression { public: - explicit ThisFunction(Isolate* isolate) : Expression(isolate) {} DECLARE_NODE_TYPE(ThisFunction) - virtual bool IsInlineable() const; + + protected: + template friend class AstNodeFactory; + + explicit ThisFunction(Isolate* isolate): Expression(isolate) {} }; @@ -2207,6 +2451,371 @@ class AstVisitor BASE_EMBEDDED { }; +// ---------------------------------------------------------------------------- +// Construction time visitor. + +class AstConstructionVisitor BASE_EMBEDDED { + public: + AstConstructionVisitor() { } + + AstProperties* ast_properties() { return &properties_; } + + private: + template friend class AstNodeFactory; + + // Node visitors. +#define DEF_VISIT(type) \ + void Visit##type(type* node); + AST_NODE_LIST(DEF_VISIT) +#undef DEF_VISIT + + void increase_node_count() { properties_.add_node_count(1); } + void add_flag(AstPropertiesFlag flag) { properties_.flags()->Add(flag); } + + AstProperties properties_; +}; + + +class AstNullVisitor BASE_EMBEDDED { + public: + // Node visitors. 
+#define DEF_VISIT(type) \ + void Visit##type(type* node) {} + AST_NODE_LIST(DEF_VISIT) +#undef DEF_VISIT +}; + + + +// ---------------------------------------------------------------------------- +// AstNode factory + +template +class AstNodeFactory BASE_EMBEDDED { + public: + explicit AstNodeFactory(Isolate* isolate) + : isolate_(isolate), + zone_(isolate_->zone()) { } + + Visitor* visitor() { return &visitor_; } + +#define VISIT_AND_RETURN(NodeType, node) \ + visitor_.Visit##NodeType((node)); \ + return node; + + VariableDeclaration* NewVariableDeclaration(VariableProxy* proxy, + VariableMode mode, + FunctionLiteral* fun, + Scope* scope) { + VariableDeclaration* decl = + new(zone_) VariableDeclaration(proxy, mode, fun, scope); + VISIT_AND_RETURN(VariableDeclaration, decl) + } + + ModuleDeclaration* NewModuleDeclaration(VariableProxy* proxy, + Module* module, + Scope* scope) { + ModuleDeclaration* decl = + new(zone_) ModuleDeclaration(proxy, module, scope); + VISIT_AND_RETURN(ModuleDeclaration, decl) + } + + ModuleLiteral* NewModuleLiteral(Block* body) { + ModuleLiteral* module = new(zone_) ModuleLiteral(body); + VISIT_AND_RETURN(ModuleLiteral, module) + } + + ModuleVariable* NewModuleVariable(Variable* var) { + ModuleVariable* module = new(zone_) ModuleVariable(var); + VISIT_AND_RETURN(ModuleLiteral, module) + } + + ModulePath* NewModulePath(Module* origin, Handle name) { + ModulePath* module = new(zone_) ModulePath(origin, name); + VISIT_AND_RETURN(ModuleLiteral, module) + } + + ModuleUrl* NewModuleUrl(Handle url) { + ModuleUrl* module = new(zone_) ModuleUrl(url); + VISIT_AND_RETURN(ModuleLiteral, module) + } + + Block* NewBlock(ZoneStringList* labels, + int capacity, + bool is_initializer_block) { + Block* block = new(zone_) Block( + isolate_, labels, capacity, is_initializer_block); + VISIT_AND_RETURN(Block, block) + } + +#define STATEMENT_WITH_LABELS(NodeType) \ + NodeType* New##NodeType(ZoneStringList* labels) { \ + NodeType* stmt = new(zone_) NodeType(isolate_, labels); \ + VISIT_AND_RETURN(NodeType, stmt); \ + } + STATEMENT_WITH_LABELS(DoWhileStatement) + STATEMENT_WITH_LABELS(WhileStatement) + STATEMENT_WITH_LABELS(ForStatement) + STATEMENT_WITH_LABELS(ForInStatement) + STATEMENT_WITH_LABELS(SwitchStatement) +#undef STATEMENT_WITH_LABELS + + ExpressionStatement* NewExpressionStatement(Expression* expression) { + ExpressionStatement* stmt = new(zone_) ExpressionStatement(expression); + VISIT_AND_RETURN(ExpressionStatement, stmt) + } + + ContinueStatement* NewContinueStatement(IterationStatement* target) { + ContinueStatement* stmt = new(zone_) ContinueStatement(target); + VISIT_AND_RETURN(ContinueStatement, stmt) + } + + BreakStatement* NewBreakStatement(BreakableStatement* target) { + BreakStatement* stmt = new(zone_) BreakStatement(target); + VISIT_AND_RETURN(BreakStatement, stmt) + } + + ReturnStatement* NewReturnStatement(Expression* expression) { + ReturnStatement* stmt = new(zone_) ReturnStatement(expression); + VISIT_AND_RETURN(ReturnStatement, stmt) + } + + WithStatement* NewWithStatement(Expression* expression, + Statement* statement) { + WithStatement* stmt = new(zone_) WithStatement(expression, statement); + VISIT_AND_RETURN(WithStatement, stmt) + } + + IfStatement* NewIfStatement(Expression* condition, + Statement* then_statement, + Statement* else_statement) { + IfStatement* stmt = new(zone_) IfStatement( + isolate_, condition, then_statement, else_statement); + VISIT_AND_RETURN(IfStatement, stmt) + } + + TryCatchStatement* NewTryCatchStatement(int index, + Block* 
try_block, + Scope* scope, + Variable* variable, + Block* catch_block) { + TryCatchStatement* stmt = new(zone_) TryCatchStatement( + index, try_block, scope, variable, catch_block); + VISIT_AND_RETURN(TryCatchStatement, stmt) + } + + TryFinallyStatement* NewTryFinallyStatement(int index, + Block* try_block, + Block* finally_block) { + TryFinallyStatement* stmt = + new(zone_) TryFinallyStatement(index, try_block, finally_block); + VISIT_AND_RETURN(TryFinallyStatement, stmt) + } + + DebuggerStatement* NewDebuggerStatement() { + DebuggerStatement* stmt = new(zone_) DebuggerStatement(); + VISIT_AND_RETURN(DebuggerStatement, stmt) + } + + EmptyStatement* NewEmptyStatement() { + return new(zone_) EmptyStatement(); + } + + Literal* NewLiteral(Handle handle) { + Literal* lit = new(zone_) Literal(isolate_, handle); + VISIT_AND_RETURN(Literal, lit) + } + + Literal* NewNumberLiteral(double number) { + return NewLiteral(isolate_->factory()->NewNumber(number, TENURED)); + } + + ObjectLiteral* NewObjectLiteral( + Handle constant_properties, + ZoneList* properties, + int literal_index, + bool is_simple, + bool fast_elements, + int depth, + bool has_function) { + ObjectLiteral* lit = new(zone_) ObjectLiteral( + isolate_, constant_properties, properties, literal_index, + is_simple, fast_elements, depth, has_function); + VISIT_AND_RETURN(ObjectLiteral, lit) + } + + ObjectLiteral::Property* NewObjectLiteralProperty(bool is_getter, + FunctionLiteral* value) { + ObjectLiteral::Property* prop = + new(zone_) ObjectLiteral::Property(is_getter, value); + prop->set_key(NewLiteral(value->name())); + return prop; // Not an AST node, will not be visited. + } + + RegExpLiteral* NewRegExpLiteral(Handle pattern, + Handle flags, + int literal_index) { + RegExpLiteral* lit = + new(zone_) RegExpLiteral(isolate_, pattern, flags, literal_index); + VISIT_AND_RETURN(RegExpLiteral, lit); + } + + ArrayLiteral* NewArrayLiteral(Handle constant_elements, + ZoneList* values, + int literal_index, + bool is_simple, + int depth) { + ArrayLiteral* lit = new(zone_) ArrayLiteral( + isolate_, constant_elements, values, literal_index, is_simple, depth); + VISIT_AND_RETURN(ArrayLiteral, lit) + } + + VariableProxy* NewVariableProxy(Variable* var) { + VariableProxy* proxy = new(zone_) VariableProxy(isolate_, var); + VISIT_AND_RETURN(VariableProxy, proxy) + } + + VariableProxy* NewVariableProxy(Handle name, + bool is_this, + int position = RelocInfo::kNoPosition) { + VariableProxy* proxy = + new(zone_) VariableProxy(isolate_, name, is_this, position); + VISIT_AND_RETURN(VariableProxy, proxy) + } + + Property* NewProperty(Expression* obj, Expression* key, int pos) { + Property* prop = new(zone_) Property(isolate_, obj, key, pos); + VISIT_AND_RETURN(Property, prop) + } + + Call* NewCall(Expression* expression, + ZoneList* arguments, + int pos) { + Call* call = new(zone_) Call(isolate_, expression, arguments, pos); + VISIT_AND_RETURN(Call, call) + } + + CallNew* NewCallNew(Expression* expression, + ZoneList* arguments, + int pos) { + CallNew* call = new(zone_) CallNew(isolate_, expression, arguments, pos); + VISIT_AND_RETURN(CallNew, call) + } + + CallRuntime* NewCallRuntime(Handle name, + const Runtime::Function* function, + ZoneList* arguments) { + CallRuntime* call = + new(zone_) CallRuntime(isolate_, name, function, arguments); + VISIT_AND_RETURN(CallRuntime, call) + } + + UnaryOperation* NewUnaryOperation(Token::Value op, + Expression* expression, + int pos) { + UnaryOperation* node = + new(zone_) UnaryOperation(isolate_, op, expression, pos); 
+ VISIT_AND_RETURN(UnaryOperation, node) + } + + BinaryOperation* NewBinaryOperation(Token::Value op, + Expression* left, + Expression* right, + int pos) { + BinaryOperation* node = + new(zone_) BinaryOperation(isolate_, op, left, right, pos); + VISIT_AND_RETURN(BinaryOperation, node) + } + + CountOperation* NewCountOperation(Token::Value op, + bool is_prefix, + Expression* expr, + int pos) { + CountOperation* node = + new(zone_) CountOperation(isolate_, op, is_prefix, expr, pos); + VISIT_AND_RETURN(CountOperation, node) + } + + CompareOperation* NewCompareOperation(Token::Value op, + Expression* left, + Expression* right, + int pos) { + CompareOperation* node = + new(zone_) CompareOperation(isolate_, op, left, right, pos); + VISIT_AND_RETURN(CompareOperation, node) + } + + Conditional* NewConditional(Expression* condition, + Expression* then_expression, + Expression* else_expression, + int then_expression_position, + int else_expression_position) { + Conditional* cond = new(zone_) Conditional( + isolate_, condition, then_expression, else_expression, + then_expression_position, else_expression_position); + VISIT_AND_RETURN(Conditional, cond) + } + + Assignment* NewAssignment(Token::Value op, + Expression* target, + Expression* value, + int pos) { + Assignment* assign = + new(zone_) Assignment(isolate_, op, target, value, pos); + assign->Init(isolate_, this); + VISIT_AND_RETURN(Assignment, assign) + } + + Throw* NewThrow(Expression* exception, int pos) { + Throw* t = new(zone_) Throw(isolate_, exception, pos); + VISIT_AND_RETURN(Throw, t) + } + + FunctionLiteral* NewFunctionLiteral( + Handle name, + Scope* scope, + ZoneList* body, + int materialized_literal_count, + int expected_property_count, + int handler_count, + bool has_only_simple_this_property_assignments, + Handle this_property_assignments, + int parameter_count, + bool has_duplicate_parameters, + FunctionLiteral::Type type, + bool visit_with_visitor) { + FunctionLiteral* lit = new(zone_) FunctionLiteral( + isolate_, name, scope, body, + materialized_literal_count, expected_property_count, handler_count, + has_only_simple_this_property_assignments, this_property_assignments, + parameter_count, type, has_duplicate_parameters); + if (visit_with_visitor) { + visitor_.VisitFunctionLiteral(lit); + } + return lit; + } + + SharedFunctionInfoLiteral* NewSharedFunctionInfoLiteral( + Handle shared_function_info) { + SharedFunctionInfoLiteral* lit = + new(zone_) SharedFunctionInfoLiteral(isolate_, shared_function_info); + VISIT_AND_RETURN(SharedFunctionInfoLiteral, lit) + } + + ThisFunction* NewThisFunction() { + ThisFunction* fun = new(zone_) ThisFunction(isolate_); + VISIT_AND_RETURN(ThisFunction, fun) + } + +#undef VISIT_AND_RETURN + + private: + Isolate* isolate_; + Zone* zone_; + Visitor visitor_; +}; + + } } // namespace v8::internal #endif // V8_AST_H_ diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc index bcb1198..1badca7 100644 --- a/deps/v8/src/builtins.cc +++ b/deps/v8/src/builtins.cc @@ -978,7 +978,7 @@ BUILTIN(ArrayConcat) { return CallJsBuiltin(isolate, "ArrayConcat", args); } - if (!JSArray::cast(arg)->HasFastElements()) { + if (!JSArray::cast(arg)->HasFastSmiOnlyElements()) { elements_kind = FAST_ELEMENTS; } } diff --git a/deps/v8/src/codegen.cc b/deps/v8/src/codegen.cc index ceea7b9..3f65120 100644 --- a/deps/v8/src/codegen.cc +++ b/deps/v8/src/codegen.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -62,18 +62,15 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) { #ifdef DEBUG bool print_source = false; bool print_ast = false; - bool print_json_ast = false; const char* ftype; if (Isolate::Current()->bootstrapper()->IsActive()) { print_source = FLAG_print_builtin_source; print_ast = FLAG_print_builtin_ast; - print_json_ast = FLAG_print_builtin_json_ast; ftype = "builtin"; } else { print_source = FLAG_print_source; print_ast = FLAG_print_ast; - print_json_ast = FLAG_print_json_ast; Vector filter = CStrVector(FLAG_hydrogen_filter); if (print_source && !filter.is_empty()) { print_source = info->function()->name()->IsEqualTo(filter); @@ -81,9 +78,6 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) { if (print_ast && !filter.is_empty()) { print_ast = info->function()->name()->IsEqualTo(filter); } - if (print_json_ast && !filter.is_empty()) { - print_json_ast = info->function()->name()->IsEqualTo(filter); - } ftype = "user-defined"; } @@ -102,11 +96,6 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) { PrintF("--- AST ---\n%s\n", AstPrinter().PrintProgram(info->function())); } - - if (print_json_ast) { - JsonAstBuilder builder; - PrintF("%s", builder.BuildProgram(info->function())); - } #endif // DEBUG } diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc index ba4d202..aea889f 100644 --- a/deps/v8/src/compiler.cc +++ b/deps/v8/src/compiler.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -110,6 +110,18 @@ void CompilationInfo::DisableOptimization() { } +// Primitive functions are unlikely to be picked up by the stack-walking +// profiler, so they trigger their own optimization when they're called +// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time. +bool CompilationInfo::ShouldSelfOptimize() { + return FLAG_self_optimization && + FLAG_crankshaft && + !Serializer::enabled() && + !function()->flags()->Contains(kDontSelfOptimize) && + (shared_info().is_null() || !shared_info()->optimization_disabled()); +} + + void CompilationInfo::AbortOptimization() { Handle code(shared_info()->code()); SetCode(code); @@ -652,6 +664,9 @@ bool Compiler::CompileLazy(CompilationInfo* info) { // Check the function has compiled code. 
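ShouldSelfOptimize above is only the policy check; the mechanism it gates is a counter that makes a small "primitive" function request its own optimization after a fixed number of calls (kCallsUntilPrimitiveOpt, declared as 200 in the compiler.h hunk below) instead of waiting for the sampling profiler to notice it. A hedged, self-contained model of that gate, with hypothetical names:

struct FunctionState {
  int calls_until_opt = 200;   // mirrors Compiler::kCallsUntilPrimitiveOpt
  bool optimization_requested = false;
};

void OnFunctionEntry(FunctionState* fn) {
  // Hot but short functions rarely show up in stack samples, so count
  // calls directly and trigger once the budget is used up.
  if (!fn->optimization_requested && --fn->calls_until_opt <= 0) {
    fn->optimization_requested = true;   // stand-in for recompilation
  }
}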
ASSERT(shared->is_compiled()); shared->set_code_age(0); + shared->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize)); + shared->set_dont_inline(lit->flags()->Contains(kDontInline)); + shared->set_ast_node_count(lit->ast_node_count()); if (info->AllowOptimize() && !shared->optimization_disabled()) { // If we're asked to always optimize, we compile the optimized @@ -750,6 +765,9 @@ void Compiler::SetFunctionInfo(Handle function_info, function_info->set_language_mode(lit->language_mode()); function_info->set_uses_arguments(lit->scope()->arguments() != NULL); function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters()); + function_info->set_ast_node_count(lit->ast_node_count()); + function_info->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize)); + function_info->set_dont_inline(lit->flags()->Contains(kDontInline)); } diff --git a/deps/v8/src/compiler.h b/deps/v8/src/compiler.h index 47eaeea..3825287 100644 --- a/deps/v8/src/compiler.h +++ b/deps/v8/src/compiler.h @@ -168,6 +168,9 @@ class CompilationInfo BASE_EMBEDDED { return V8::UseCrankshaft() && !closure_.is_null(); } + // Determines whether or not to insert a self-optimization header. + bool ShouldSelfOptimize(); + // Disable all optimization attempts of this info for the rest of the // current compilation pipeline. void AbortOptimization(); @@ -280,6 +283,9 @@ class Compiler : public AllStatic { static const int kMaxInliningLevels = 3; + // Call count before primitive functions trigger their own optimization. + static const int kCallsUntilPrimitiveOpt = 200; + // All routines return a SharedFunctionInfo. // If an error occurs an exception is raised and the return handle // contains NULL. diff --git a/deps/v8/src/cpu-profiler.cc b/deps/v8/src/cpu-profiler.cc index 953952a..3cbac77 100644 --- a/deps/v8/src/cpu-profiler.cc +++ b/deps/v8/src/cpu-profiler.cc @@ -1,4 +1,4 @@ -// Copyright 2010 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -42,7 +42,7 @@ namespace internal { static const int kEventsBufferSize = 256 * KB; static const int kTickSamplesBufferChunkSize = 64 * KB; static const int kTickSamplesBufferChunksCount = 16; -static const int kProfilerStackSize = 32 * KB; +static const int kProfilerStackSize = 64 * KB; ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator) diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc index 94e3270..ad35af6 100644 --- a/deps/v8/src/d8.cc +++ b/deps/v8/src/d8.cc @@ -1288,7 +1288,7 @@ bool Shell::SetOptions(int argc, char* argv[]) { options.use_preemption = true; argv[i] = NULL; #endif // V8_SHARED - } else if (strcmp(argv[i], "--no-preemption") == 0) { + } else if (strcmp(argv[i], "--nopreemption") == 0) { #ifdef V8_SHARED printf("D8 with shared library does not support multi-threading\n"); return false; diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h index 9cdea06..59e54dd 100644 --- a/deps/v8/src/flag-definitions.h +++ b/deps/v8/src/flag-definitions.h @@ -109,11 +109,13 @@ private: // Flags for experimental language features. 
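The flag blocks below lean on DEFINE_implication(whenflag, thenflag), which forces thenflag on whenever whenflag is set; that is how --harmony turns on the individual harmony features and how --experimental_profiler enables both of its sub-flags. A rough model of what the implications boil down to after parsing (the real macro is re-expanded per FLAG_MODE pass in flag-definitions.h):

bool FLAG_experimental_profiler = false;
bool FLAG_watch_ic_patching = false;
bool FLAG_self_optimization = false;

// DEFINE_implication(experimental_profiler, watch_ic_patching) and
// DEFINE_implication(experimental_profiler, self_optimization), in effect:
void ComputeFlagImplications() {
  if (FLAG_experimental_profiler) FLAG_watch_ic_patching = true;
  if (FLAG_experimental_profiler) FLAG_self_optimization = true;
}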
DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false, "enable harmony block scoping") +DEFINE_bool(harmony_modules, false, "enable harmony modules") DEFINE_bool(harmony_proxies, false, "enable harmony proxies") DEFINE_bool(harmony_collections, false, "enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false, "enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) +DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) @@ -136,7 +138,6 @@ DEFINE_bool(use_gvn, true, "use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true, "use function inlining") DEFINE_bool(limit_inlining, true, "limit code size growth from inlining") -DEFINE_bool(eliminate_empty_blocks, true, "eliminate empty blocks") DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true, @@ -164,12 +165,19 @@ DEFINE_int(stress_runs, 0, "number of stress runs") DEFINE_bool(optimize_closures, true, "optimize closures") DEFINE_int(loop_weight, 1, "loop weight for representation inference") +// Experimental profiler changes. +DEFINE_bool(experimental_profiler, false, "enable all profiler experiments") +DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability") +DEFINE_bool(self_optimization, false, + "primitive functions trigger their own optimization") + +DEFINE_implication(experimental_profiler, watch_ic_patching) +DEFINE_implication(experimental_profiler, self_optimization) + // assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc DEFINE_bool(debug_code, false, "generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false, "emit comments in code disassembly") -DEFINE_bool(peephole_optimization, true, - "perform peephole optimizations in assembly code") DEFINE_bool(enable_sse2, true, "enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true, @@ -219,10 +227,8 @@ DEFINE_bool(lazy, true, "use lazy compilation") DEFINE_bool(trace_opt, false, "trace lazy optimization") DEFINE_bool(trace_opt_stats, false, "trace lazy optimization statistics") DEFINE_bool(opt, true, "use adaptive optimizations") -DEFINE_bool(opt_eagerly, false, "be more eager when adaptively optimizing") DEFINE_bool(always_opt, false, "always try to optimize functions") DEFINE_bool(prepare_always_opt, false, "prepare for turning on always opt") -DEFINE_bool(deopt, true, "support deoptimization") DEFINE_bool(trace_deopt, false, "trace deoptimization") // compiler.cc @@ -303,11 +309,10 @@ DEFINE_bool(native_code_counters, false, DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") -DEFINE_bool(cleanup_caches_in_maps_at_gc, true, - "Flush code caches in maps during mark compact cycle.") DEFINE_bool(never_compact, false, "Never perform compaction on full GC - testing only") -DEFINE_bool(compact_code_space, false, "Compact code space") +DEFINE_bool(compact_code_space, true, + "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and " "flush code caches in maps during mark compact cycle.") @@ -315,14 +320,6 @@ DEFINE_int(random_seed, 0, "Default seed 
for initializing random generator " "(0, the default, means to use system random).") -DEFINE_bool(canonicalize_object_literal_maps, true, - "Canonicalize maps for object literals.") - -DEFINE_int(max_map_space_pages, MapSpace::kMaxMapPageIndex - 1, - "Maximum number of pages in map space which still allows to encode " - "forwarding pointers. That's actually a constant, but it's useful " - "to control it with a flag for better testing.") - // objects.cc DEFINE_bool(use_verbose_printer, true, "allows verbose printing") @@ -443,9 +440,6 @@ DEFINE_bool(print_builtin_source, false, "pretty print source code for builtins") DEFINE_bool(print_ast, false, "print source AST") DEFINE_bool(print_builtin_ast, false, "print source AST for builtins") -DEFINE_bool(print_json_ast, false, "print source AST as JSON") -DEFINE_bool(print_builtin_json_ast, false, - "print source AST for builtins as JSON") DEFINE_string(stop_at, "", "function name where to insert a breakpoint") // compiler.cc @@ -475,10 +469,6 @@ DEFINE_bool(trace_normalization, // runtime.cc DEFINE_bool(trace_lazy, false, "trace lazy compilation") -// serialize.cc -DEFINE_bool(debug_serialization, false, - "write debug information into the snapshot.") - // spaces.cc DEFINE_bool(collect_heap_spill_statistics, false, "report heap spill statistics along with heap_stats " diff --git a/deps/v8/src/frames-inl.h b/deps/v8/src/frames-inl.h index af3ae3d..010233a 100644 --- a/deps/v8/src/frames-inl.h +++ b/deps/v8/src/frames-inl.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -77,18 +77,18 @@ inline StackHandler* StackHandler::FromAddress(Address address) { } -inline bool StackHandler::is_entry() const { - return kind() == ENTRY; +inline bool StackHandler::is_js_entry() const { + return kind() == JS_ENTRY; } -inline bool StackHandler::is_try_catch() const { - return kind() == TRY_CATCH; +inline bool StackHandler::is_catch() const { + return kind() == CATCH; } -inline bool StackHandler::is_try_finally() const { - return kind() == TRY_FINALLY; +inline bool StackHandler::is_finally() const { + return kind() == FINALLY; } diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc index 4402496..40df12c 100644 --- a/deps/v8/src/frames.cc +++ b/deps/v8/src/frames.cc @@ -1174,7 +1174,7 @@ void EntryFrame::Iterate(ObjectVisitor* v) const { StackHandlerIterator it(this, top_handler()); ASSERT(!it.done()); StackHandler* handler = it.handler(); - ASSERT(handler->is_entry()); + ASSERT(handler->is_js_entry()); handler->Iterate(v, LookupCode()); #ifdef DEBUG // Make sure that the entry frame does not contain more than one diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h index 2c5e571..e550f76 100644 --- a/deps/v8/src/frames.h +++ b/deps/v8/src/frames.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -85,15 +85,17 @@ class InnerPointerToCodeCache { class StackHandler BASE_EMBEDDED { public: enum Kind { - ENTRY, - TRY_CATCH, - TRY_FINALLY + JS_ENTRY, + CATCH, + FINALLY, + LAST_KIND = FINALLY }; static const int kKindWidth = 2; - static const int kOffsetWidth = 32 - kKindWidth; + STATIC_ASSERT(LAST_KIND < (1 << kKindWidth)); + static const int kIndexWidth = 32 - kKindWidth; class KindField: public BitField {}; - class OffsetField: public BitField {}; + class IndexField: public BitField {}; // Get the address of this stack handler. inline Address address() const; @@ -111,9 +113,9 @@ class StackHandler BASE_EMBEDDED { static inline StackHandler* FromAddress(Address address); // Testers - inline bool is_entry() const; - inline bool is_try_catch() const; - inline bool is_try_finally() const; + inline bool is_js_entry() const; + inline bool is_catch() const; + inline bool is_finally() const; private: // Accessors. diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc index 7282be1..5c5ba6b 100644 --- a/deps/v8/src/full-codegen.cc +++ b/deps/v8/src/full-codegen.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -51,7 +51,25 @@ void BreakableStatementChecker::Check(Expression* expr) { } -void BreakableStatementChecker::VisitDeclaration(Declaration* decl) { +void BreakableStatementChecker::VisitVariableDeclaration( + VariableDeclaration* decl) { +} + +void BreakableStatementChecker::VisitModuleDeclaration( + ModuleDeclaration* decl) { +} + + +void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) { +} + +void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) { +} + +void BreakableStatementChecker::VisitModulePath(ModulePath* module) { +} + +void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) { } @@ -297,6 +315,9 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { code->set_stack_check_table_offset(table_offset); CodeGenerator::PrintCode(code, info); info->SetCode(code); // May be an empty handle. + if (!code.is_null()) { + isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size()); + } #ifdef ENABLE_GDB_JIT_INTERFACE if (FLAG_gdbjit && !code.is_null()) { GDBJITLineInfo* lineinfo = @@ -380,7 +401,7 @@ void FullCodeGenerator::RecordJSReturnSite(Call* call) { void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) { // There's no need to prepare this code for bailouts from already optimized // code or code that can't be optimized. 
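Besides renaming the kinds to JS_ENTRY/CATCH/FINALLY, the frames.h hunk above repacks the handler's 32-bit state word: two low bits of kind (with a STATIC_ASSERT that every kind fits) and the remaining 30 bits now hold a handler index rather than an offset. The same packing written out as explicit shifts, assuming (as the BitField parameters above state) that the kind occupies the low bits:

#include <cassert>
#include <cstdint>

const int kKindWidth = 2;                 // JS_ENTRY, CATCH, FINALLY fit
const int kIndexWidth = 32 - kKindWidth;  // 30 bits left for the index

uint32_t EncodeState(uint32_t kind, uint32_t index) {
  assert(kind < (1u << kKindWidth));
  assert(index < (1u << kIndexWidth));
  return kind | (index << kKindWidth);
}

uint32_t KindOf(uint32_t state)  { return state & ((1u << kKindWidth) - 1); }
uint32_t IndexOf(uint32_t state) { return state >> kKindWidth; }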
- if (!FLAG_deopt || !info_->HasDeoptimizationSupport()) return; + if (!info_->HasDeoptimizationSupport()) return; unsigned pc_and_state = StateField::encode(state) | PcField::encode(masm_->pc_offset()); BailoutEntry entry = { id, pc_and_state }; @@ -525,39 +546,40 @@ void FullCodeGenerator::DoTest(const TestContext* context) { void FullCodeGenerator::VisitDeclarations( ZoneList* declarations) { - int length = declarations->length(); - int global_count = 0; - for (int i = 0; i < length; i++) { - Declaration* decl = declarations->at(i); - EmitDeclaration(decl->proxy(), decl->mode(), decl->fun(), &global_count); - } + int save_global_count = global_count_; + global_count_ = 0; + + AstVisitor::VisitDeclarations(declarations); // Batch declare global functions and variables. - if (global_count > 0) { + if (global_count_ > 0) { Handle array = - isolate()->factory()->NewFixedArray(2 * global_count, TENURED); + isolate()->factory()->NewFixedArray(2 * global_count_, TENURED); + int length = declarations->length(); for (int j = 0, i = 0; i < length; i++) { - Declaration* decl = declarations->at(i); - Variable* var = decl->proxy()->var(); - - if (var->IsUnallocated()) { - array->set(j++, *(var->name())); - if (decl->fun() == NULL) { - if (var->binding_needs_init()) { - // In case this binding needs initialization use the hole. - array->set_the_hole(j++); + VariableDeclaration* decl = declarations->at(i)->AsVariableDeclaration(); + if (decl != NULL) { + Variable* var = decl->proxy()->var(); + + if (var->IsUnallocated()) { + array->set(j++, *(var->name())); + if (decl->fun() == NULL) { + if (var->binding_needs_init()) { + // In case this binding needs initialization use the hole. + array->set_the_hole(j++); + } else { + array->set_undefined(j++); + } } else { - array->set_undefined(j++); + Handle function = + Compiler::BuildFunctionInfo(decl->fun(), script()); + // Check for stack-overflow exception. + if (function.is_null()) { + SetStackOverflow(); + return; + } + array->set(j++, *function); } - } else { - Handle function = - Compiler::BuildFunctionInfo(decl->fun(), script()); - // Check for stack-overflow exception. - if (function.is_null()) { - SetStackOverflow(); - return; - } - array->set(j++, *function); } } } @@ -565,6 +587,38 @@ void FullCodeGenerator::VisitDeclarations( // declaration the global functions and variables. DeclareGlobals(array); } + + global_count_ = save_global_count; +} + + +void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) { + EmitDeclaration(decl->proxy(), decl->mode(), decl->fun()); +} + + +void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) { + // TODO(rossberg) +} + + +void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) { + // TODO(rossberg) +} + + +void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) { + // TODO(rossberg) +} + + +void FullCodeGenerator::VisitModulePath(ModulePath* module) { + // TODO(rossberg) +} + + +void FullCodeGenerator::VisitModuleUrl(ModuleUrl* decl) { + // TODO(rossberg) } @@ -1147,7 +1201,7 @@ void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) { // Try block code. Sets up the exception handler chain. __ bind(&try_entry); - __ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER, stmt->index()); + __ PushTryHandler(StackHandler::CATCH, stmt->index()); { TryCatch try_body(this); Visit(stmt->try_block()); } @@ -1204,7 +1258,7 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) { // Set up try handler. 
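The rewritten VisitDeclarations above still batches globals the same way: it fills a FixedArray with 2 * global_count_ slots, name then value, where the value slot is the hole for bindings that need initialization, undefined for plain vars, and the compiled SharedFunctionInfo for function declarations. The layout, sketched with ordinary containers and an enum in place of real heap values:

#include <string>
#include <vector>

enum ValueSlot { kTheHole, kUndefined, kFunctionInfo };
struct Pair { std::string name; ValueSlot value; };

// Shape of the array handed to DeclareGlobals:
//   [name0, value0, name1, value1, ...]
std::vector<Pair> declarations = {
  {"x", kUndefined},      // var x;
  {"y", kTheHole},        // binding that must observe initialization
  {"f", kFunctionInfo},   // function f() {} -> SharedFunctionInfo
};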
__ bind(&try_entry); - __ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER, stmt->index()); + __ PushTryHandler(StackHandler::FINALLY, stmt->index()); { TryFinally try_body(this, &finally_entry); Visit(stmt->try_block()); } diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h index 02f18d3..f9b7c38 100644 --- a/deps/v8/src/full-codegen.h +++ b/deps/v8/src/full-codegen.h @@ -83,6 +83,7 @@ class FullCodeGenerator: public AstVisitor { scope_(NULL), nesting_stack_(NULL), loop_depth_(0), + global_count_(0), context_(NULL), bailout_entries_(0), stack_checks_(2), // There's always at least one. @@ -416,10 +417,10 @@ class FullCodeGenerator: public AstVisitor { // Platform-specific code for a variable, constant, or function // declaration. Functions have an initial value. + // Increments global_count_ for unallocated variables. void EmitDeclaration(VariableProxy* proxy, VariableMode mode, - FunctionLiteral* function, - int* global_count); + FunctionLiteral* function); // Platform-specific code for checking the stack limit at the back edge of // a loop. @@ -767,6 +768,7 @@ class FullCodeGenerator: public AstVisitor { Label return_label_; NestedStatement* nesting_stack_; int loop_depth_; + int global_count_; const ExpressionContext* context_; ZoneList bailout_entries_; ZoneList stack_checks_; diff --git a/deps/v8/src/handles.cc b/deps/v8/src/handles.cc index 34eaddb..943a1c0 100644 --- a/deps/v8/src/handles.cc +++ b/deps/v8/src/handles.cc @@ -711,7 +711,7 @@ Handle GetEnumPropertyKeys(Handle object, isolate); } isolate->counters()->enum_cache_misses()->Increment(); - int num_enum = object->NumberOfEnumProperties(); + int num_enum = object->NumberOfLocalProperties(DONT_ENUM); Handle storage = isolate->factory()->NewFixedArray(num_enum); Handle sort_array = isolate->factory()->NewFixedArray(num_enum); Handle descs = @@ -735,7 +735,7 @@ Handle GetEnumPropertyKeys(Handle object, ASSERT(storage->length() == index); return storage; } else { - int num_enum = object->NumberOfEnumProperties(); + int num_enum = object->NumberOfLocalProperties(DONT_ENUM); Handle storage = isolate->factory()->NewFixedArray(num_enum); Handle sort_array = isolate->factory()->NewFixedArray(num_enum); object->property_dictionary()->CopyEnumKeysTo(*storage, *sort_array); diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc index 4cea933..b082886 100644 --- a/deps/v8/src/heap.cc +++ b/deps/v8/src/heap.cc @@ -1201,7 +1201,9 @@ void Heap::Scavenge() { promotion_queue_.Destroy(); LiveObjectList::UpdateReferencesForScavengeGC(); - isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); + if (!FLAG_watch_ic_patching) { + isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); + } incremental_marking()->UpdateMarkingDequeAfterScavenge(); ASSERT(new_space_front == new_space_.top()); @@ -2865,7 +2867,9 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) { share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER); share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER); share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER); - share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times)); + share->set_deopt_counter(FLAG_deopt_every_n_times); + share->set_profiler_ticks(0); + share->set_ast_node_count(0); // Set integer fields (smi or int, depending on the architecture). share->set_length(0); @@ -5839,10 +5843,7 @@ bool Heap::SetUp(bool create_heap_objects) { if (!code_space_->SetUp()) return false; // Initialize map space. 
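The handles.cc change above drops the dedicated enum-property counter in favor of the general NumberOfLocalProperties(DONT_ENUM) filter: count the own properties whose attributes contain none of the filtered bits. The filtering idea, sketched stand-alone with attribute bit values as in v8::PropertyAttribute:

#include <cstdint>
#include <vector>

enum PropertyAttr : uint8_t {
  NONE = 0, READ_ONLY = 1, DONT_ENUM = 2, DONT_DELETE = 4
};

int NumberOfPropertiesWithFilter(const std::vector<uint8_t>& attrs,
                                 uint8_t filter) {
  int n = 0;
  for (uint8_t a : attrs) {
    if ((a & filter) == 0) ++n;  // keep properties with no filtered bit set
  }
  return n;
}
// NumberOfPropertiesWithFilter(attrs, DONT_ENUM) counts exactly the
// enumerable properties, matching the old NumberOfEnumProperties.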
- map_space_ = new MapSpace(this, - max_old_generation_size_, - FLAG_max_map_space_pages, - MAP_SPACE); + map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE); if (map_space_ == NULL) return false; if (!map_space_->SetUp()) return false; diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h index 67818d2..83e9b61 100644 --- a/deps/v8/src/heap.h +++ b/deps/v8/src/heap.h @@ -241,9 +241,10 @@ namespace internal { V(use_strict, "use strict") \ V(dot_symbol, ".") \ V(anonymous_function_symbol, "(anonymous function)") \ - V(compare_ic_symbol, ".compare_ic") \ + V(compare_ic_symbol, ".compare_ic") \ V(infinity_symbol, "Infinity") \ - V(minus_infinity_symbol, "-Infinity") + V(minus_infinity_symbol, "-Infinity") \ + V(hidden_stack_trace_symbol, "v8::hidden_stack_trace") // Forward declarations. class GCTracer; diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc index 4372c06..cdc3e23 100644 --- a/deps/v8/src/hydrogen-instructions.cc +++ b/deps/v8/src/hydrogen-instructions.cc @@ -893,6 +893,13 @@ void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) { void HCheckMap::PrintDataTo(StringStream* stream) { value()->PrintNameTo(stream); stream->Add(" %p", *map()); + if (mode() == REQUIRE_EXACT_MAP) { + stream->Add(" [EXACT]"); + } else if (!has_element_transitions_) { + stream->Add(" [EXACT*]"); + } else { + stream->Add(" [MATCH ELEMENTS]"); + } } diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h index b2b3a61..39e3950 100644 --- a/deps/v8/src/hydrogen-instructions.h +++ b/deps/v8/src/hydrogen-instructions.h @@ -186,6 +186,7 @@ class LChunkBuilder; V(InobjectFields) \ V(BackingStoreFields) \ V(ElementsKind) \ + V(ElementsPointer) \ V(ArrayElements) \ V(DoubleArrayElements) \ V(SpecializedArrayElements) \ @@ -646,6 +647,18 @@ class HValue: public ZoneObject { return gvn_flags_.ContainsAnyOf(AllObservableSideEffectsFlagSet()); } + GVNFlagSet DependsOnFlags() const { + GVNFlagSet result = gvn_flags_; + result.Intersect(AllDependsOnFlagSet()); + return result; + } + + GVNFlagSet SideEffectFlags() const { + GVNFlagSet result = gvn_flags_; + result.Intersect(AllSideEffectsFlagSet()); + return result; + } + GVNFlagSet ChangesFlags() const { GVNFlagSet result = gvn_flags_; result.Intersect(AllChangesFlagSet()); @@ -722,6 +735,15 @@ class HValue: public ZoneObject { representation_ = r; } + static GVNFlagSet AllDependsOnFlagSet() { + GVNFlagSet result; + // Create changes mask. +#define ADD_FLAG(type) result.Add(kDependsOn##type); + GVN_FLAG_LIST(ADD_FLAG) +#undef ADD_FLAG + return result; + } + static GVNFlagSet AllChangesFlagSet() { GVNFlagSet result; // Create changes mask. 
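The new DependsOnFlags/SideEffectFlags accessors above work because every GVN flag comes as a kChangesFoo / kDependsOnFoo pair held in one bitset, and masks like AllDependsOnFlagSet are just the union of one half of each pair. A toy layout that keeps each pair adjacent; the real flag order in hydrogen-instructions.h differs, and its ConvertChangesToDependsFlags shifts by the number of tracked effects rather than by one:

#include <cstdint>

enum : uint32_t {
  kChangesMaps       = 1u << 0,
  kDependsOnMaps     = 1u << 1,
  kChangesElements   = 1u << 2,
  kDependsOnElements = 1u << 3,
};

// In this toy layout each depends bit sits one above its changes bit.
uint32_t ChangesToDepends(uint32_t changes) { return changes << 1; }

// A map-writing instruction kills anything that depends on maps:
// (ChangesToDepends(kChangesMaps) & kDependsOnMaps) != 0, which is the
// test loop-invariant code motion uses to refuse hoisting a load past it.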
@@ -743,6 +765,8 @@ class HValue: public ZoneObject { static GVNFlagSet AllObservableSideEffectsFlagSet() { GVNFlagSet result = AllChangesFlagSet(); result.Remove(kChangesElementsKind); + result.Remove(kChangesElementsPointer); + result.Remove(kChangesMaps); return result; } @@ -1920,8 +1944,7 @@ class HLoadElements: public HUnaryOperation { explicit HLoadElements(HValue* value) : HUnaryOperation(value) { set_representation(Representation::Tagged()); SetFlag(kUseGVN); - SetGVNFlag(kDependsOnMaps); - SetGVNFlag(kDependsOnElementsKind); + SetGVNFlag(kDependsOnElementsPointer); } virtual Representation RequiredInputRepresentation(int index) { @@ -1972,6 +1995,11 @@ class HCheckMap: public HTemplateInstruction<2> { set_representation(Representation::Tagged()); SetFlag(kUseGVN); SetGVNFlag(kDependsOnMaps); + // If the map to check doesn't have the untransitioned elements, it must not + // be hoisted above TransitionElements instructions. + if (mode == REQUIRE_EXACT_MAP || !map->has_fast_smi_only_elements()) { + SetGVNFlag(kDependsOnElementsKind); + } has_element_transitions_ = map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL) != NULL || map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL) != NULL; @@ -4135,7 +4163,17 @@ class HTransitionElementsKind: public HTemplateInstruction<1> { transitioned_map_(transitioned_map) { SetOperandAt(0, object); SetFlag(kUseGVN); + SetGVNFlag(kDependsOnMaps); SetGVNFlag(kChangesElementsKind); + if (original_map->has_fast_double_elements()) { + SetGVNFlag(kChangesElementsPointer); + SetGVNFlag(kDependsOnElementsPointer); + SetGVNFlag(kDependsOnDoubleArrayElements); + } else if (transitioned_map->has_fast_double_elements()) { + SetGVNFlag(kChangesElementsPointer); + SetGVNFlag(kDependsOnElementsPointer); + SetGVNFlag(kDependsOnArrayElements); + } set_representation(Representation::Tagged()); } diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc index fdfadfa..e250587 100644 --- a/deps/v8/src/hydrogen.cc +++ b/deps/v8/src/hydrogen.cc @@ -70,7 +70,8 @@ HBasicBlock::HBasicBlock(HGraph* graph) deleted_phis_(4), parent_loop_header_(NULL), is_inline_return_target_(false), - is_deoptimizing_(false) { } + is_deoptimizing_(false), + dominates_loop_successors_(false) { } void HBasicBlock::AttachLoopInformation() { @@ -315,6 +316,62 @@ void HBasicBlock::AssignCommonDominator(HBasicBlock* other) { } +void HBasicBlock::AssignLoopSuccessorDominators() { + // Mark blocks that dominate all subsequent reachable blocks inside their + // loop. Exploit the fact that blocks are sorted in reverse post order. When + // the loop is visited in increasing block id order, if the number of + // non-loop-exiting successor edges at the dominator_candidate block doesn't + // exceed the number of previously encountered predecessor edges, there is no + // path from the loop header to any block with higher id that doesn't go + // through the dominator_candidate block. In this case, the + // dominator_candidate block is guaranteed to dominate all blocks reachable + // from it with higher ids. + HBasicBlock* last = loop_information()->GetLastBackEdge(); + int outstanding_successors = 1; // one edge from the pre-header + // Header always dominates everything. + MarkAsLoopSuccessorDominator(); + for (int j = block_id(); j <= last->block_id(); ++j) { + HBasicBlock* dominator_candidate = graph_->blocks()->at(j); + for (HPredecessorIterator it(dominator_candidate); !it.Done(); + it.Advance()) { + HBasicBlock* predecessor = it.Current(); + // Don't count back edges. 
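The long comment above states the edge-counting invariant informally; here it is as a tiny stand-alone routine over a toy CFG, with the simplifications called out: blocks 0..n-1 form a single loop body in reverse post order, block 0 is the header, the pre-header appears as predecessor -1, and nested loops are ignored.

#include <vector>

struct Block { std::vector<int> preds, succs; };

std::vector<bool> LoopSuccessorDominators(const std::vector<Block>& loop) {
  const int n = static_cast<int>(loop.size());
  std::vector<bool> dominates(n, false);
  dominates[0] = true;            // the header dominates the whole loop
  int outstanding = 1;            // one edge: pre-header -> header
  for (int j = 0; j < n; ++j) {
    for (int p : loop[j].preds) {
      if (p < j) --outstanding;   // consume forward edges; back edges are
    }                             // skipped, as in the filter above
    if (outstanding == 0) dominates[j] = true;
    for (int s : loop[j].succs) {
      if (s > j && s < n) ++outstanding;  // emit forward, in-loop edges
    }
  }
  return dominates;
}

// For the diamond 0 -> {1,2} -> 3 with back edge 3 -> 0, only blocks 0 and
// 3 end up marked: every path from the header to a later loop block runs
// through them, so they are the only safe homes for one-time side effects.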
+      if (predecessor->block_id() < dominator_candidate->block_id()) {
+        outstanding_successors--;
+      }
+    }
+
+    // If more successors than predecessors have been seen in the loop up to
+    // now, it's not possible to guarantee that the current block dominates
+    // all of the blocks with higher IDs. In this case, assume conservatively
+    // that those paths through the loop that don't go through the current
+    // block contain all of the loop's dependencies. Also be careful to record
+    // dominator information about the current loop that's being processed,
+    // and not nested loops, which will be processed when
+    // AssignLoopSuccessorDominators gets called on their header.
+    ASSERT(outstanding_successors >= 0);
+    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
+    if (outstanding_successors == 0 &&
+        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
+      dominator_candidate->MarkAsLoopSuccessorDominator();
+    }
+    HControlInstruction* end = dominator_candidate->end();
+    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
+      HBasicBlock* successor = it.Current();
+      // Only count successors that remain inside the loop and don't loop back
+      // to a loop header.
+      if (successor->block_id() > dominator_candidate->block_id() &&
+          successor->block_id() <= last->block_id()) {
+        // Backwards edges must land on loop headers.
+        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
+               successor->IsLoopHeader());
+        outstanding_successors++;
+      }
+    }
+  }
+}
+
+
 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
   for (int i = 0; i < predecessors_.length(); ++i) {
     if (predecessors_[i] == predecessor) return i;
@@ -646,9 +703,7 @@ Handle<Code> HGraph::Compile(CompilationInfo* info) {
   MacroAssembler assembler(info->isolate(), NULL, 0);
   LCodeGen generator(chunk, &assembler, info);
 
-  if (FLAG_eliminate_empty_blocks) {
-    chunk->MarkEmptyBlocks();
-  }
+  chunk->MarkEmptyBlocks();
 
   if (generator.GenerateCode()) {
     if (FLAG_trace_codegen) {
@@ -752,10 +807,12 @@ void HGraph::Postorder(HBasicBlock* block,
 void HGraph::AssignDominators() {
   HPhase phase("Assign dominators", this);
   for (int i = 0; i < blocks_.length(); ++i) {
-    if (blocks_[i]->IsLoopHeader()) {
+    HBasicBlock* block = blocks_[i];
+    if (block->IsLoopHeader()) {
       // Only the first predecessor of a loop header is from outside the loop.
       // All others are back edges, and thus cannot dominate the loop header.
-      blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
+      block->AssignCommonDominator(block->predecessors()->first());
+      block->AssignLoopSuccessorDominators();
    } else {
      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
@@ -1373,7 +1430,8 @@ class HGlobalValueNumberer BASE_EMBEDDED {
   void LoopInvariantCodeMotion();
   void ProcessLoopBlock(HBasicBlock* block,
                         HBasicBlock* before_loop,
-                        GVNFlagSet loop_kills);
+                        GVNFlagSet loop_kills,
+                        GVNFlagSet* accumulated_first_time_depends);
   bool AllowCodeMotion();
   bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
 
@@ -1398,6 +1456,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
 
 bool HGlobalValueNumberer::Analyze() {
+  removed_side_effects_ = false;
   ComputeBlockSideEffects();
   if (FLAG_loop_invariant_code_motion) {
     LoopInvariantCodeMotion();
   }
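// The edge-counting argument in AssignLoopSuccessorDominators() above can be
// exercised in isolation. A minimal sketch, under simplifying assumptions:
// blocks header..last are one loop body in reverse post order, succ[i] lists
// only forward, in-loop successors (back edges and loop exits omitted), and
// nested loops, which the real code filters via parent_loop_header(), are
// ignored. This is illustrative, not V8's HBasicBlock API:
#include <vector>

std::vector<bool> LoopSuccessorDominators(
    const std::vector<std::vector<int> >& succ, int header, int last) {
  std::vector<int> preds_seen(last + 1, 0);
  std::vector<bool> dominates(last + 1, false);
  // The pre-header edge is excluded from succ here, so the count starts at
  // zero (V8 starts at one and consumes that edge when visiting the header).
  int outstanding = 0;
  dominates[header] = true;  // The header dominates the whole loop body.
  for (int id = header; id <= last; ++id) {
    outstanding -= preds_seen[id];  // Consume edges landing on this block.
    // If every edge emitted so far has been consumed, no path from the
    // header can bypass this block on its way to a higher-numbered block.
    if (outstanding == 0) dominates[id] = true;
    for (size_t k = 0; k < succ[id].size(); ++k) {
      ++outstanding;                // Emit this block's forward edges.
      ++preds_seen[succ[id][k]];
    }
  }
  return dominates;
}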
@@ -1409,6 +1468,12 @@ bool HGlobalValueNumberer::Analyze() {
 
 
 void HGlobalValueNumberer::ComputeBlockSideEffects() {
+  // The Analyze phase of GVN can be called multiple times. Clear loop side
+  // effects before computing them to erase the contents from previous
+  // Analyze passes.
+  for (int i = 0; i < loop_side_effects_.length(); ++i) {
+    loop_side_effects_[i].RemoveAll();
+  }
   for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
     // Compute side effects for the block.
     HBasicBlock* block = graph_->blocks()->at(i);
@@ -1446,18 +1511,22 @@ void HGlobalValueNumberer::LoopInvariantCodeMotion() {
               block->block_id(),
               side_effects.ToIntegral());
 
+      GVNFlagSet accumulated_first_time_depends;
       HBasicBlock* last = block->loop_information()->GetLastBackEdge();
       for (int j = block->block_id(); j <= last->block_id(); ++j) {
-        ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects);
+        ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
+                         &accumulated_first_time_depends);
       }
     }
   }
 }
 
 
-void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
-                                            HBasicBlock* loop_header,
-                                            GVNFlagSet loop_kills) {
+void HGlobalValueNumberer::ProcessLoopBlock(
+    HBasicBlock* block,
+    HBasicBlock* loop_header,
+    GVNFlagSet loop_kills,
+    GVNFlagSet* accumulated_first_time_depends) {
   HBasicBlock* pre_header = loop_header->predecessors()->at(0);
   GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
   TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
@@ -1466,25 +1535,65 @@ void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
   HInstruction* instr = block->first();
   while (instr != NULL) {
     HInstruction* next = instr->next();
-    if (instr->CheckFlag(HValue::kUseGVN) &&
-        !instr->gvn_flags().ContainsAnyOf(depends_flags)) {
-      TraceGVN("Checking instruction %d (%s)\n",
+    bool hoisted = false;
+    if (instr->CheckFlag(HValue::kUseGVN)) {
+      TraceGVN("Checking instruction %d (%s) instruction GVN flags 0x%X, "
+               "loop kills 0x%X\n",
               instr->id(),
-               instr->Mnemonic());
-      bool inputs_loop_invariant = true;
-      for (int i = 0; i < instr->OperandCount(); ++i) {
-        if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
-          inputs_loop_invariant = false;
-        }
+               instr->Mnemonic(),
+               instr->gvn_flags().ToIntegral(),
+               depends_flags.ToIntegral());
+      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
+      if (!can_hoist && instr->IsTransitionElementsKind()) {
+        // It's only possible to hoist one-time side effects if there are no
+        // dependencies on their changes from the loop header to the current
+        // instruction.
+        GVNFlagSet converted_changes =
+            HValue::ConvertChangesToDependsFlags(instr->ChangesFlags());
+        TraceGVN("Checking dependencies on one-time instruction %d (%s) "
+                 "converted changes 0x%X, accumulated depends 0x%X\n",
+                 instr->id(),
+                 instr->Mnemonic(),
+                 converted_changes.ToIntegral(),
+                 accumulated_first_time_depends->ToIntegral());
+        // It's possible to hoist one-time side effects from the current loop
+        // only if they dominate all of the successor blocks in the same loop
+        // and there are no instructions with Changes/DependsOn flags that
+        // intervene between it and the beginning of the loop header.
+        bool in_nested_loop = block != loop_header &&
+            ((block->parent_loop_header() != loop_header) ||
+             block->IsLoopHeader());
+        can_hoist = !in_nested_loop &&
+            block->IsLoopSuccessorDominator() &&
+            !accumulated_first_time_depends->ContainsAnyOf(converted_changes);
       }
-      if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
-        TraceGVN("Found loop invariant instruction %d\n", instr->id());
-        // Move the instruction out of the loop.
- instr->Unlink(); - instr->InsertBefore(pre_header->end()); + if (can_hoist) { + bool inputs_loop_invariant = true; + for (int i = 0; i < instr->OperandCount(); ++i) { + if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) { + inputs_loop_invariant = false; + } + } + + if (inputs_loop_invariant && ShouldMove(instr, loop_header)) { + TraceGVN("Hoisting loop invariant instruction %d\n", instr->id()); + // Move the instruction out of the loop. + instr->Unlink(); + instr->InsertBefore(pre_header->end()); + if (instr->HasSideEffects()) removed_side_effects_ = true; + hoisted = true; + } } } + if (!hoisted) { + // If an instruction is not hoisted, we have to account for its side + // effects when hoisting later HTransitionElementsKind instructions. + accumulated_first_time_depends->Add(instr->DependsOnFlags()); + GVNFlagSet converted_changes = + HValue::ConvertChangesToDependsFlags(instr->SideEffectFlags()); + accumulated_first_time_depends->Add(converted_changes); + } instr = next; } } @@ -2334,7 +2443,7 @@ HGraph* HGraphBuilder::CreateGraph() { // Handle implicit declaration of the function name in named function // expressions before other declarations. if (scope->is_function_scope() && scope->function() != NULL) { - HandleDeclaration(scope->function(), CONST, NULL); + HandleVariableDeclaration(scope->function(), CONST, NULL); } VisitDeclarations(scope->declarations()); AddSimulate(AstNode::kDeclarationsId); @@ -2392,7 +2501,8 @@ HGraph* HGraphBuilder::CreateGraph() { // could only be discovered by removing side-effect-generating instructions // during the first pass. if (FLAG_smi_only_arrays && removed_side_effects) { - gvn.Analyze(); + removed_side_effects = gvn.Analyze(); + ASSERT(!removed_side_effects); } } @@ -4796,8 +4906,8 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { // Do a quick check on source code length to avoid parsing large // inlining candidates. - if ((FLAG_limit_inlining && target->shared()->SourceSize() > kMaxSourceSize) - || target->shared()->SourceSize() > kUnlimitedMaxSourceSize) { + if ((FLAG_limit_inlining && target_shared->SourceSize() > kMaxSourceSize) + || target_shared->SourceSize() > kUnlimitedMaxSourceSize) { TraceInline(target, caller, "target text too big"); return false; } @@ -4807,6 +4917,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { TraceInline(target, caller, "target not inlineable"); return false; } + if (target_shared->dont_inline() || target_shared->dont_crankshaft()) { + TraceInline(target, caller, "target contains unsupported syntax [early]"); + return false; + } + + int nodes_added = target_shared->ast_node_count(); + if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) || + nodes_added > kUnlimitedMaxInlinedSize) { + TraceInline(target, caller, "target AST is too large [early]"); + return false; + } #if !defined(V8_TARGET_ARCH_IA32) // Target must be able to use caller's context. @@ -4851,8 +4972,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { return false; } - int count_before = AstNode::Count(); - // Parse and allocate variables. CompilationInfo target_info(target); if (!ParserApi::Parse(&target_info, kNoParsingFlags) || @@ -4872,11 +4991,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { } FunctionLiteral* function = target_info.function(); - // Count the number of AST nodes added by inlining this call. 
- int nodes_added = AstNode::Count() - count_before; + // The following conditions must be checked again after re-parsing, because + // earlier the information might not have been complete due to lazy parsing. + nodes_added = function->ast_node_count(); if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) || nodes_added > kUnlimitedMaxInlinedSize) { - TraceInline(target, caller, "target AST is too large"); + TraceInline(target, caller, "target AST is too large [late]"); + return false; + } + AstProperties::Flags* flags(function->flags()); + if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) { + TraceInline(target, caller, "target contains unsupported syntax [late]"); return false; } @@ -4895,13 +5020,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { return false; } } - // All statements in the body must be inlineable. - for (int i = 0, count = function->body()->length(); i < count; ++i) { - if (!function->body()->at(i)->IsInlineable()) { - TraceInline(target, caller, "target contains unsupported syntax"); - return false; - } - } // Generate the deoptimization data for the unoptimized version of // the target function if we don't already have it. @@ -5050,10 +5168,41 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) { } -bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr, - HValue* receiver, - Handle receiver_map, - CheckType check_type) { +bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) { + if (!expr->target()->shared()->HasBuiltinFunctionId()) return false; + BuiltinFunctionId id = expr->target()->shared()->builtin_function_id(); + switch (id) { + case kMathRound: + case kMathFloor: + case kMathAbs: + case kMathSqrt: + case kMathLog: + case kMathSin: + case kMathCos: + if (expr->arguments()->length() == 1) { + HValue* argument = Pop(); + HValue* context = environment()->LookupContext(); + Drop(1); // Receiver. + HUnaryMathOperation* op = + new(zone()) HUnaryMathOperation(context, argument, id); + op->set_position(expr->position()); + if (drop_extra) Drop(1); // Optionally drop the function. + ast_context()->ReturnInstruction(op, expr->id()); + return true; + } + break; + default: + // Not supported for inlining yet. + break; + } + return false; +} + + +bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr, + HValue* receiver, + Handle receiver_map, + CheckType check_type) { ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null()); // Try to inline calls like Math.* as operations in the calling function. if (!expr->target()->shared()->HasBuiltinFunctionId()) return false; @@ -5147,7 +5296,7 @@ bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr, case kMathRandom: if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) { AddCheckConstantFunction(expr, receiver, receiver_map, true); - Drop(1); + Drop(1); // Receiver. HValue* context = environment()->LookupContext(); HGlobalObject* global_object = new(zone()) HGlobalObject(context); AddInstruction(global_object); @@ -5315,10 +5464,15 @@ void HGraphBuilder::VisitCall(Call* expr) { Handle receiver_map = (types == NULL || types->is_empty()) ? 
Handle::null() : types->first(); - if (TryInlineBuiltinFunction(expr, - receiver, - receiver_map, - expr->check_type())) { + if (TryInlineBuiltinMethodCall(expr, + receiver, + receiver_map, + expr->check_type())) { + if (FLAG_trace_inlining) { + PrintF("Inlining builtin "); + expr->target()->ShortPrint(); + PrintF("\n"); + } return; } @@ -5389,6 +5543,14 @@ void HGraphBuilder::VisitCall(Call* expr) { IsGlobalObject()); environment()->SetExpressionStackAt(receiver_index, global_receiver); + if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop. + if (FLAG_trace_inlining) { + PrintF("Inlining builtin "); + expr->target()->ShortPrint(); + PrintF("\n"); + } + return; + } if (TryInline(expr)) return; call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(), argument_count)); @@ -5415,6 +5577,16 @@ void HGraphBuilder::VisitCall(Call* expr) { PushAndAdd(receiver); CHECK_ALIVE(VisitExpressions(expr->arguments())); AddInstruction(new(zone()) HCheckFunction(function, expr->target())); + + if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function. + if (FLAG_trace_inlining) { + PrintF("Inlining builtin "); + expr->target()->ShortPrint(); + PrintF("\n"); + } + return; + } + if (TryInline(expr, true)) { // Drop function from environment. return; } else { @@ -6368,14 +6540,14 @@ void HGraphBuilder::VisitThisFunction(ThisFunction* expr) { } -void HGraphBuilder::VisitDeclaration(Declaration* decl) { - HandleDeclaration(decl->proxy(), decl->mode(), decl->fun()); +void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) { + HandleVariableDeclaration(decl->proxy(), decl->mode(), decl->fun()); } -void HGraphBuilder::HandleDeclaration(VariableProxy* proxy, - VariableMode mode, - FunctionLiteral* function) { +void HGraphBuilder::HandleVariableDeclaration(VariableProxy* proxy, + VariableMode mode, + FunctionLiteral* function) { Variable* var = proxy->var(); bool binding_needs_init = (mode == CONST || mode == CONST_HARMONY || mode == LET); @@ -6410,6 +6582,31 @@ void HGraphBuilder::HandleDeclaration(VariableProxy* proxy, } +void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) { + // TODO(rossberg) +} + + +void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) { + // TODO(rossberg) +} + + +void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) { + // TODO(rossberg) +} + + +void HGraphBuilder::VisitModulePath(ModulePath* module) { + // TODO(rossberg) +} + + +void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) { + // TODO(rossberg) +} + + // Generators for inline runtime functions. // Support for types. void HGraphBuilder::GenerateIsSmi(CallRuntime* call) { @@ -7200,7 +7397,10 @@ void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) { } PrintEmptyProperty("xhandlers"); - PrintEmptyProperty("flags"); + const char* flags = current->IsLoopSuccessorDominator() + ? 
"dom-loop-succ" + : ""; + PrintStringProperty("flags", flags); if (current->dominator() != NULL) { PrintBlockProperty("dominator", current->dominator()->block_id()); diff --git a/deps/v8/src/hydrogen.h b/deps/v8/src/hydrogen.h index 7e09505..bbd4841 100644 --- a/deps/v8/src/hydrogen.h +++ b/deps/v8/src/hydrogen.h @@ -126,6 +126,7 @@ class HBasicBlock: public ZoneObject { int PredecessorIndexOf(HBasicBlock* predecessor) const; void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); } void AssignCommonDominator(HBasicBlock* other); + void AssignLoopSuccessorDominators(); void FinishExitWithDeoptimization(HDeoptimize::UseEnvironment has_uses) { FinishExit(CreateDeoptimize(has_uses)); @@ -149,6 +150,13 @@ class HBasicBlock: public ZoneObject { bool IsDeoptimizing() const { return is_deoptimizing_; } void MarkAsDeoptimizing() { is_deoptimizing_ = true; } + bool IsLoopSuccessorDominator() const { + return dominates_loop_successors_; + } + void MarkAsLoopSuccessorDominator() { + dominates_loop_successors_ = true; + } + inline Zone* zone(); #ifdef DEBUG @@ -182,6 +190,22 @@ class HBasicBlock: public ZoneObject { HBasicBlock* parent_loop_header_; bool is_inline_return_target_; bool is_deoptimizing_; + bool dominates_loop_successors_; +}; + + +class HPredecessorIterator BASE_EMBEDDED { + public: + explicit HPredecessorIterator(HBasicBlock* block) + : predecessor_list_(block->predecessors()), current_(0) { } + + bool Done() { return current_ >= predecessor_list_->length(); } + HBasicBlock* Current() { return predecessor_list_->at(current_); } + void Advance() { current_++; } + + private: + const ZoneList* predecessor_list_; + int current_; }; @@ -815,9 +839,9 @@ class HGraphBuilder: public AstVisitor { INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_DECLARATION) #undef INLINE_FUNCTION_GENERATOR_DECLARATION - void HandleDeclaration(VariableProxy* proxy, - VariableMode mode, - FunctionLiteral* function); + void HandleVariableDeclaration(VariableProxy* proxy, + VariableMode mode, + FunctionLiteral* function); void VisitDelete(UnaryOperation* expr); void VisitVoid(UnaryOperation* expr); @@ -918,10 +942,11 @@ class HGraphBuilder: public AstVisitor { bool TryCallApply(Call* expr); bool TryInline(Call* expr, bool drop_extra = false); - bool TryInlineBuiltinFunction(Call* expr, + bool TryInlineBuiltinMethodCall(Call* expr, HValue* receiver, Handle receiver_map, CheckType check_type); + bool TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra); // If --trace-inlining, print a line of the inlining trace. Inlining // succeeded if the reason string is NULL and failed if there is a diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc index 77048cb..b3a0b95 100644 --- a/deps/v8/src/ia32/code-stubs-ia32.cc +++ b/deps/v8/src/ia32/code-stubs-ia32.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -5022,7 +5022,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Invoke: Link this frame into the handler chain. There's only one // handler block in this code object, so its index is 0. __ bind(&invoke); - __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0); + __ PushTryHandler(StackHandler::JS_ENTRY, 0); // Clear any pending exceptions. 
__ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc index 135db06..7bb4cff 100644 --- a/deps/v8/src/ia32/full-codegen-ia32.cc +++ b/deps/v8/src/ia32/full-codegen-ia32.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -129,6 +129,26 @@ void FullCodeGenerator::Generate(CompilationInfo* info) { } #endif + // We can optionally optimize based on counters rather than statistical + // sampling. + if (info->ShouldSelfOptimize()) { + if (FLAG_trace_opt) { + PrintF("[adding self-optimization header to %s]\n", + *info->function()->debug_name()->ToCString()); + } + MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell( + Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt)); + JSGlobalPropertyCell* cell; + if (maybe_cell->To(&cell)) { + __ sub(Operand::Cell(Handle(cell)), + Immediate(Smi::FromInt(1))); + Handle compile_stub( + isolate()->builtins()->builtin(Builtins::kLazyRecompile)); + STATIC_ASSERT(kSmiTag == 0); + __ j(zero, compile_stub); + } + } + // Strict mode functions and builtins need to replace the receiver // with undefined when called as functions (without an explicit // receiver object). ecx is zero for method calls and non-zero for @@ -261,11 +281,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) { // For named function expressions, declare the function name as a // constant. if (scope()->is_function_scope() && scope()->function() != NULL) { - int ignored = 0; VariableProxy* proxy = scope()->function(); ASSERT(proxy->var()->mode() == CONST || proxy->var()->mode() == CONST_HARMONY); - EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored); + ASSERT(proxy->var()->location() != Variable::UNALLOCATED); + EmitDeclaration(proxy, proxy->var()->mode(), NULL); } VisitDeclarations(scope()->declarations()); } @@ -681,8 +701,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, VariableMode mode, - FunctionLiteral* function, - int* global_count) { + FunctionLiteral* function) { // If it was not possible to allocate the variable at compile time, we // need to "declare" it at runtime to make sure it actually exists in the // local context. @@ -691,7 +710,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, (mode == CONST || mode == CONST_HARMONY || mode == LET); switch (variable->location()) { case Variable::UNALLOCATED: - ++(*global_count); + ++global_count_; break; case Variable::PARAMETER: @@ -771,9 +790,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, } -void FullCodeGenerator::VisitDeclaration(Declaration* decl) { } - - void FullCodeGenerator::DeclareGlobals(Handle pairs) { // Call the runtime to declare the globals. __ push(esi); // The context is the first argument. 
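// The self-optimization header emitted above amounts to the following
// per-call logic (a sketch only: the real counter lives in a
// JSGlobalPropertyCell as a Smi, and "recompile" means jumping to the
// kLazyRecompile builtin rather than returning a bool to C++):
struct SelfOptHeader {
  int calls_until_opt;  // Seeded with Compiler::kCallsUntilPrimitiveOpt.
};

bool FunctionEntry(SelfOptHeader* cell) {
  // Matches the generated "sub [cell], 1; j(zero, compile_stub)" pair:
  // decrement first, then trigger recompilation when the count hits zero.
  return --cell->calls_until_opt == 0;
}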
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc index a7683c4..5a276f4 100644 --- a/deps/v8/src/ia32/lithium-codegen-ia32.cc +++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc @@ -622,7 +622,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { void LCodeGen::PopulateDeoptimizationData(Handle code) { int length = deoptimizations_.length(); if (length == 0) return; - ASSERT(FLAG_deopt); Handle data = factory()->NewDeoptimizationInputData(length, TENURED); diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc index 6a20808..9986c3e 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/macro-assembler-ia32.cc @@ -764,8 +764,7 @@ void MacroAssembler::LeaveApiExitFrame() { } -void MacroAssembler::PushTryHandler(CodeLocation try_location, - HandlerType type, +void MacroAssembler::PushTryHandler(StackHandler::Kind kind, int handler_index) { // Adjust this code if not the case. STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); @@ -776,25 +775,21 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); // We will build up the handler from the bottom by pushing on the stack. - // First compute the state and push the frame pointer and context. - unsigned state = StackHandler::OffsetField::encode(handler_index); - if (try_location == IN_JAVASCRIPT) { - push(ebp); - push(esi); - state |= (type == TRY_CATCH_HANDLER) - ? StackHandler::KindField::encode(StackHandler::TRY_CATCH) - : StackHandler::KindField::encode(StackHandler::TRY_FINALLY); - } else { - ASSERT(try_location == IN_JS_ENTRY); + // First push the frame pointer and context. + if (kind == StackHandler::JS_ENTRY) { // The frame pointer does not point to a JS frame so we save NULL for // ebp. We expect the code throwing an exception to check ebp before // dereferencing it to restore the context. push(Immediate(0)); // NULL frame pointer. push(Immediate(Smi::FromInt(0))); // No context. - state |= StackHandler::KindField::encode(StackHandler::ENTRY); + } else { + push(ebp); + push(esi); } - // Push the state and the code object. + unsigned state = + StackHandler::IndexField::encode(handler_index) | + StackHandler::KindField::encode(kind); push(Immediate(state)); Push(CodeObject()); @@ -904,7 +899,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, mov(esp, Operand(esp, StackHandlerConstants::kNextOffset)); bind(&check_kind); - STATIC_ASSERT(StackHandler::ENTRY == 0); + STATIC_ASSERT(StackHandler::JS_ENTRY == 0); test(Operand(esp, StackHandlerConstants::kStateOffset), Immediate(StackHandler::KindField::kMask)); j(not_zero, &fetch_next); diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h index d1d40eb..b06d801 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.h +++ b/deps/v8/src/ia32/macro-assembler-ia32.h @@ -491,9 +491,7 @@ class MacroAssembler: public Assembler { // Exception handling // Push a new try handler and link it into try handler chain. - void PushTryHandler(CodeLocation try_location, - HandlerType type, - int handler_index); + void PushTryHandler(StackHandler::Kind kind, int handler_index); // Unlink the stack handler on top of the stack from the try handler chain. 
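// The handler "state" word pushed by the new PushTryHandler packs the
// handler's index and kind into a single unsigned value. A minimal sketch of
// such an encoding (field positions here are assumptions for illustration;
// V8 builds them with its BitField templates as StackHandler::IndexField and
// StackHandler::KindField):
enum HandlerKind { JS_ENTRY = 0, CATCH = 1, FINALLY = 2 };

const unsigned kKindBits = 2;
const unsigned kKindMask = (1u << kKindBits) - 1;

unsigned EncodeHandlerState(unsigned handler_index, HandlerKind kind) {
  return (handler_index << kKindBits) | static_cast<unsigned>(kind);
}

// ThrowUncatchable's loop relies on JS_ENTRY == 0: after masking with the
// kind mask, only a JS entry handler yields zero.
bool IsJSEntry(unsigned state) { return (state & kKindMask) == 0; }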
void PopTryHandler(); diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc index f6f4241..9717869 100644 --- a/deps/v8/src/ia32/stub-cache-ia32.cc +++ b/deps/v8/src/ia32/stub-cache-ia32.cc @@ -1345,25 +1345,25 @@ Handle CallStubCompiler::CompileArrayPushCall( } else { Label call_builtin; - // Get the elements array of the object. - __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset)); - - // Check that the elements are in fast mode and writable. - __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), - Immediate(factory()->fixed_array_map())); - __ j(not_equal, &call_builtin); - if (argc == 1) { // Otherwise fall through to call builtin. Label attempt_to_grow_elements, with_write_barrier; + // Get the elements array of the object. + __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset)); + + // Check that the elements are in fast mode and writable. + __ cmp(FieldOperand(edi, HeapObject::kMapOffset), + Immediate(factory()->fixed_array_map())); + __ j(not_equal, &call_builtin); + // Get the array's length into eax and calculate new length. __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); STATIC_ASSERT(kSmiTagSize == 1); STATIC_ASSERT(kSmiTag == 0); __ add(eax, Immediate(Smi::FromInt(argc))); - // Get the element's length into ecx. - __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset)); + // Get the elements' length into ecx. + __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // Check if we could survive without allocation. __ cmp(eax, ecx); @@ -1376,29 +1376,52 @@ Handle CallStubCompiler::CompileArrayPushCall( // Save new length. __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); - // Push the element. - __ lea(edx, FieldOperand(ebx, - eax, times_half_pointer_size, - FixedArray::kHeaderSize - argc * kPointerSize)); - __ mov(Operand(edx, 0), ecx); + // Store the value. + __ mov(FieldOperand(edi, + eax, + times_half_pointer_size, + FixedArray::kHeaderSize - argc * kPointerSize), + ecx); __ ret((argc + 1) * kPointerSize); __ bind(&with_write_barrier); - __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); - __ CheckFastObjectElements(edi, &call_builtin); + __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); + + if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { + Label fast_object, not_fast_object; + __ CheckFastObjectElements(ebx, ¬_fast_object, Label::kNear); + __ jmp(&fast_object); + // In case of fast smi-only, convert to fast object, otherwise bail out. + __ bind(¬_fast_object); + __ CheckFastSmiOnlyElements(ebx, &call_builtin); + // edi: elements array + // edx: receiver + // ebx: map + __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, + FAST_ELEMENTS, + ebx, + edi, + &call_builtin); + ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm()); + // Restore edi. + __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset)); + __ bind(&fast_object); + } else { + __ CheckFastObjectElements(ebx, &call_builtin); + } // Save new length. __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); - // Push the element. - __ lea(edx, FieldOperand(ebx, + // Store the value. 
+ __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size, FixedArray::kHeaderSize - argc * kPointerSize)); __ mov(Operand(edx, 0), ecx); - __ RecordWrite(ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, + __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); __ ret((argc + 1) * kPointerSize); @@ -1408,11 +1431,11 @@ Handle CallStubCompiler::CompileArrayPushCall( __ jmp(&call_builtin); } - __ mov(edi, Operand(esp, argc * kPointerSize)); + __ mov(ebx, Operand(esp, argc * kPointerSize)); // Growing elements that are SMI-only requires special handling in case // the new element is non-Smi. For now, delegate to the builtin. Label no_fast_elements_check; - __ JumpIfSmi(edi, &no_fast_elements_check); + __ JumpIfSmi(ebx, &no_fast_elements_check); __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar); __ bind(&no_fast_elements_check); @@ -1431,7 +1454,7 @@ Handle CallStubCompiler::CompileArrayPushCall( __ mov(ecx, Operand::StaticVariable(new_space_allocation_top)); // Check if it's the end of elements. - __ lea(edx, FieldOperand(ebx, + __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size, FixedArray::kHeaderSize - argc * kPointerSize)); __ cmp(edx, ecx); @@ -1444,7 +1467,7 @@ Handle CallStubCompiler::CompileArrayPushCall( __ mov(Operand::StaticVariable(new_space_allocation_top), ecx); // Push the argument... - __ mov(Operand(edx, 0), edi); + __ mov(Operand(edx, 0), ebx); // ... and fill the rest with holes. for (int i = 1; i < kAllocationDelta; i++) { __ mov(Operand(edx, i * kPointerSize), @@ -1456,13 +1479,13 @@ Handle CallStubCompiler::CompileArrayPushCall( // tell the incremental marker to rescan the object that we just grew. We // don't need to worry about the holes because they are in old space and // already marked black. - __ RecordWrite(ebx, edx, edi, kDontSaveFPRegs, OMIT_REMEMBERED_SET); + __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET); // Restore receiver to edx as finish sequence assumes it's here. __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); // Increment element's and array's sizes. - __ add(FieldOperand(ebx, FixedArray::kLengthOffset), + __ add(FieldOperand(edi, FixedArray::kLengthOffset), Immediate(Smi::FromInt(kAllocationDelta))); // NOTE: This only happen in new-space, where we don't diff --git a/deps/v8/src/ic-inl.h b/deps/v8/src/ic-inl.h index 56cea81..4daf944 100644 --- a/deps/v8/src/ic-inl.h +++ b/deps/v8/src/ic-inl.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -29,6 +29,8 @@ #define V8_IC_INL_H_ #include "ic.h" + +#include "compiler.h" #include "debug.h" #include "macro-assembler.h" @@ -89,6 +91,7 @@ void IC::SetTargetAtAddress(Address address, Code* target) { Assembler::set_target_address_at(address, target->instruction_start()); target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address, target); + PostPatching(); } diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc index b084109..9846984 100644 --- a/deps/v8/src/ic.cc +++ b/deps/v8/src/ic.cc @@ -292,6 +292,31 @@ Failure* IC::ReferenceError(const char* type, Handle name) { } +void IC::PostPatching() { + if (FLAG_watch_ic_patching) { + Isolate::Current()->runtime_profiler()->NotifyICChanged(); + // We do not want to optimize until the ICs have settled down, + // so when they are patched, we postpone optimization for the + // current function and the functions above it on the stack that + // might want to inline this one. + StackFrameIterator it; + if (it.done()) return; + it.Advance(); + static const int kStackFramesToMark = Compiler::kMaxInliningLevels - 1; + for (int i = 0; i < kStackFramesToMark; ++i) { + if (it.done()) return; + StackFrame* raw_frame = it.frame(); + if (raw_frame->is_java_script()) { + JSFunction* function = + JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function()); + function->shared()->set_profiler_ticks(0); + } + it.Advance(); + } + } +} + + void IC::Clear(Address address) { Code* target = GetTargetAtAddress(address); diff --git a/deps/v8/src/ic.h b/deps/v8/src/ic.h index 94e83dc..d2c98c0 100644 --- a/deps/v8/src/ic.h +++ b/deps/v8/src/ic.h @@ -165,6 +165,7 @@ class IC { // Access the target code for the given IC address. static inline Code* GetTargetAtAddress(Address address); static inline void SetTargetAtAddress(Address address, Code* target); + static void PostPatching(); private: // Frame pointer for the frame that uses (calls) the IC. diff --git a/deps/v8/src/incremental-marking.cc b/deps/v8/src/incremental-marking.cc index 6248524..d034617 100644 --- a/deps/v8/src/incremental-marking.cc +++ b/deps/v8/src/incremental-marking.cc @@ -505,7 +505,8 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) { } is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) && - heap_->mark_compact_collector()->StartCompaction(); + heap_->mark_compact_collector()->StartCompaction( + MarkCompactCollector::INCREMENTAL_COMPACTION); state_ = MARKING; diff --git a/deps/v8/src/isolate.cc b/deps/v8/src/isolate.cc index 893a344..96c45b1 100644 --- a/deps/v8/src/isolate.cc +++ b/deps/v8/src/isolate.cc @@ -542,6 +542,18 @@ Handle Isolate::StackTraceString() { } +void Isolate::CaptureAndSetCurrentStackTraceFor(Handle error_object) { + if (capture_stack_trace_for_uncaught_exceptions_) { + // Capture stack trace for a detailed exception message. + Handle key = factory()->hidden_stack_trace_symbol(); + Handle stack_trace = CaptureCurrentStackTrace( + stack_trace_for_uncaught_exceptions_frame_limit_, + stack_trace_for_uncaught_exceptions_options_); + JSObject::SetHiddenProperty(error_object, key, stack_trace); + } +} + + Handle Isolate::CaptureCurrentStackTrace( int frame_limit, StackTrace::StackTraceOptions options) { // Ensure no negative values. @@ -1011,7 +1023,7 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally, // Find the top-most try-catch handler. 
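// IC::PostPatching() above throttles optimization after IC churn. Reduced to
// plain C++ over stand-in types, the stack walk looks like this (the frame
// layout and types are illustrative; V8 iterates real stack frames):
#include <vector>

struct FrameInfo {
  bool is_java_script;
  int* profiler_ticks;  // Points at the function's tick counter.
};

// frames[0] is the frame that triggered the patch; it is skipped, mirroring
// the initial it.Advance(). Up to max_inlining_levels - 1 caller frames are
// then cooled down so none of them optimizes against still-unstable ICs.
void ResetTicksAfterICChange(const std::vector<FrameInfo>& frames,
                             int max_inlining_levels) {
  const size_t to_mark = static_cast<size_t>(max_inlining_levels - 1);
  for (size_t i = 1; i < frames.size() && i <= to_mark; ++i) {
    if (frames[i].is_java_script) *frames[i].profiler_ticks = 0;
  }
}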
StackHandler* handler = StackHandler::FromAddress(Isolate::handler(thread_local_top())); - while (handler != NULL && !handler->is_try_catch()) { + while (handler != NULL && !handler->is_catch()) { handler = handler->next(); } @@ -1037,22 +1049,39 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally, } -void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) { +bool Isolate::IsErrorObject(Handle obj) { + if (!obj->IsJSObject()) return false; + + String* error_key = *(factory()->LookupAsciiSymbol("$Error")); + Object* error_constructor = + js_builtins_object()->GetPropertyNoExceptionThrown(error_key); + + for (Object* prototype = *obj; !prototype->IsNull(); + prototype = prototype->GetPrototype()) { + if (!prototype->IsJSObject()) return false; + if (JSObject::cast(prototype)->map()->constructor() == error_constructor) { + return true; + } + } + return false; +} + + +void Isolate::DoThrow(Object* exception, MessageLocation* location) { ASSERT(!has_pending_exception()); HandleScope scope; - Object* exception_object = Smi::FromInt(0); - bool is_object = exception->ToObject(&exception_object); - Handle exception_handle(exception_object); + Handle exception_handle(exception); // Determine reporting and whether the exception is caught externally. bool catchable_by_javascript = is_catchable_by_javascript(exception); - // Only real objects can be caught by JS. - ASSERT(!catchable_by_javascript || is_object); bool can_be_caught_externally = false; bool should_report_exception = ShouldReportException(&can_be_caught_externally, catchable_by_javascript); bool report_exception = catchable_by_javascript && should_report_exception; + bool try_catch_needs_message = + can_be_caught_externally && try_catch_handler()->capture_message_; + bool bootstrapping = bootstrapper()->IsActive(); #ifdef ENABLE_DEBUGGER_SUPPORT // Notify debugger of exception. @@ -1061,34 +1090,52 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) { } #endif - // Generate the message. - Handle message_obj; - MessageLocation potential_computed_location; - bool try_catch_needs_message = - can_be_caught_externally && - try_catch_handler()->capture_message_; + // Generate the message if required. if (report_exception || try_catch_needs_message) { + MessageLocation potential_computed_location; if (location == NULL) { - // If no location was specified we use a computed one instead + // If no location was specified we use a computed one instead. ComputeLocation(&potential_computed_location); location = &potential_computed_location; } - if (!bootstrapper()->IsActive()) { - // It's not safe to try to make message objects or collect stack - // traces while the bootstrapper is active since the infrastructure - // may not have been properly initialized. + // It's not safe to try to make message objects or collect stack traces + // while the bootstrapper is active since the infrastructure may not have + // been properly initialized. + if (!bootstrapping) { Handle stack_trace; if (FLAG_trace_exception) stack_trace = StackTraceString(); Handle stack_trace_object; - if (report_exception && capture_stack_trace_for_uncaught_exceptions_) { + if (capture_stack_trace_for_uncaught_exceptions_) { + if (IsErrorObject(exception_handle)) { + // We fetch the stack trace that corresponds to this error object. + String* key = heap()->hidden_stack_trace_symbol(); + Object* stack_property = + JSObject::cast(*exception_handle)->GetHiddenProperty(key); + // Property lookup may have failed. 
In this case it's probably not
+        // a valid Error object.
+        if (stack_property->IsJSArray()) {
+          stack_trace_object = Handle<JSArray>(JSArray::cast(stack_property));
+        }
+      }
+      if (stack_trace_object.is_null()) {
+        // Not an error object; capture the stack trace at the throw site.
         stack_trace_object = CaptureCurrentStackTrace(
             stack_trace_for_uncaught_exceptions_frame_limit_,
             stack_trace_for_uncaught_exceptions_options_);
+      }
     }
-    ASSERT(is_object);  // Can't use the handle unless there's a real object.
-    message_obj = MessageHandler::MakeMessageObject("uncaught_exception",
-        location, HandleVector(&exception_handle, 1), stack_trace,
+    Handle message_obj = MessageHandler::MakeMessageObject(
+        "uncaught_exception",
+        location,
+        HandleVector(&exception_handle, 1),
+        stack_trace,
         stack_trace_object);
+    thread_local_top()->pending_message_obj_ = *message_obj;
+    if (location != NULL) {
+      thread_local_top()->pending_message_script_ = *location->script();
+      thread_local_top()->pending_message_start_pos_ = location->start_pos();
+      thread_local_top()->pending_message_end_pos_ = location->end_pos();
+    }
   } else if (location != NULL && !location->script().is_null()) {
     // We are bootstrapping and caught an error where the location is set
     // and we have a script for the location.
@@ -1104,30 +1151,13 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
   // Save the message for reporting if the exception remains uncaught.
   thread_local_top()->has_pending_message_ = report_exception;
-  if (!message_obj.is_null()) {
-    thread_local_top()->pending_message_obj_ = *message_obj;
-    if (location != NULL) {
-      thread_local_top()->pending_message_script_ = *location->script();
-      thread_local_top()->pending_message_start_pos_ = location->start_pos();
-      thread_local_top()->pending_message_end_pos_ = location->end_pos();
-    }
-  }
 
   // Do not forget to clean catcher_ if currently thrown exception cannot
   // be caught. If necessary, ReThrow will update the catcher.
   thread_local_top()->catcher_ = can_be_caught_externally ?
       try_catch_handler() : NULL;
 
-  // NOTE: Notifying the debugger or generating the message
-  // may have caused new exceptions. For now, we just ignore
-  // that and set the pending exception to the original one.
-  if (is_object) {
-    set_pending_exception(*exception_handle);
-  } else {
-    // Failures are not on the heap so they neither need nor work with handles.
-    ASSERT(exception_handle->IsFailure());
-    set_pending_exception(exception);
-  }
+  set_pending_exception(*exception_handle);
 }
 
 
@@ -1163,8 +1193,8 @@ bool Isolate::IsExternallyCaught() {
   StackHandler* handler =
       StackHandler::FromAddress(Isolate::handler(thread_local_top()));
   while (handler != NULL && handler->address() < external_handler_address) {
-    ASSERT(!handler->is_try_catch());
-    if (handler->is_try_finally()) return false;
+    ASSERT(!handler->is_catch());
+    if (handler->is_finally()) return false;
 
     handler = handler->next();
   }
 
diff --git a/deps/v8/src/isolate.h b/deps/v8/src/isolate.h
index 0e59903..5612630 100644
--- a/deps/v8/src/isolate.h
+++ b/deps/v8/src/isolate.h
@@ -362,7 +362,7 @@ typedef List DebugObjectCache;
   /* Serializer state. */ \
   V(ExternalReferenceTable*, external_reference_table, NULL) \
   /* AstNode state. */ \
-  V(unsigned, ast_node_id, 0) \
+  V(int, ast_node_id, 0) \
   V(unsigned, ast_node_count, 0) \
   /* SafeStackFrameIterator activations count.
*/ \ V(int, safe_stack_iterator_counter, 0) \ @@ -703,6 +703,8 @@ class Isolate { int frame_limit, StackTrace::StackTraceOptions options); + void CaptureAndSetCurrentStackTraceFor(Handle error_object); + // Returns if the top context may access the given global object. If // the result is false, the pending exception is guaranteed to be // set. @@ -729,7 +731,7 @@ class Isolate { // Promote a scheduled exception to pending. Asserts has_scheduled_exception. Failure* PromoteScheduledException(); - void DoThrow(MaybeObject* exception, MessageLocation* location); + void DoThrow(Object* exception, MessageLocation* location); // Checks if exception should be reported and finds out if it's // caught externally. bool ShouldReportException(bool* can_be_caught_externally, @@ -1141,6 +1143,10 @@ class Isolate { void InitializeDebugger(); + // Traverse prototype chain to find out whether the object is derived from + // the Error object. + bool IsErrorObject(Handle obj); + int stack_trace_nesting_level_; StringStream* incomplete_message_; // The preallocated memory thread singleton. diff --git a/deps/v8/src/list-inl.h b/deps/v8/src/list-inl.h index e2c358c..7c2c83f 100644 --- a/deps/v8/src/list-inl.h +++ b/deps/v8/src/list-inl.h @@ -72,9 +72,9 @@ void List::ResizeAdd(const T& element) { template void List::ResizeAddInternal(const T& element) { ASSERT(length_ >= capacity_); - // Grow the list capacity by 50%, but make sure to let it grow + // Grow the list capacity by 100%, but make sure to let it grow // even when the capacity is zero (possible initial case). - int new_capacity = 1 + capacity_ + (capacity_ >> 1); + int new_capacity = 1 + 2 * capacity_; // Since the element reference could be an element of the list, copy // it out of the old backing storage before resizing. T temp = element; diff --git a/deps/v8/src/macro-assembler.h b/deps/v8/src/macro-assembler.h index 364fdb6..7d4bbbc 100644 --- a/deps/v8/src/macro-assembler.h +++ b/deps/v8/src/macro-assembler.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -36,20 +36,6 @@ enum InvokeFlag { }; -enum CodeLocation { - IN_JAVASCRIPT, - IN_JS_ENTRY, - IN_C_ENTRY -}; - - -enum HandlerType { - TRY_CATCH_HANDLER, - TRY_FINALLY_HANDLER, - JS_ENTRY_HANDLER -}; - - // Types of uncatchable exceptions. enum UncatchableExceptionType { OUT_OF_MEMORY, diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc index ac2465e..1adb747 100644 --- a/deps/v8/src/mark-compact.cc +++ b/deps/v8/src/mark-compact.cc @@ -242,14 +242,14 @@ static void TraceFragmentation(PagedSpace* space) { } -bool MarkCompactCollector::StartCompaction() { +bool MarkCompactCollector::StartCompaction(CompactionMode mode) { if (!compacting_) { ASSERT(evacuation_candidates_.length() == 0); CollectEvacuationCandidates(heap()->old_pointer_space()); CollectEvacuationCandidates(heap()->old_data_space()); - if (FLAG_compact_code_space) { + if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) { CollectEvacuationCandidates(heap()->code_space()); } else if (FLAG_trace_fragmentation) { TraceFragmentation(heap()->code_space()); @@ -697,7 +697,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) { // Don't start compaction if we are in the middle of incremental // marking cycle. We did not collect any slots. 
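// For the List growth change above: both policies are geometric, but the
// doubling policy reaches a given capacity in noticeably fewer reallocations.
// A quick check of the two recurrences from List::ResizeAddInternal:
#include <cstdio>

int main() {
  int old_cap = 0;  // new_capacity = 1 + capacity + (capacity >> 1)   (~50%)
  int new_cap = 0;  // new_capacity = 1 + 2 * capacity                 (100%)
  for (int step = 1; step <= 6; ++step) {
    old_cap = 1 + old_cap + (old_cap >> 1);
    new_cap = 1 + 2 * new_cap;
    std::printf("resize %d: 50%% policy -> %2d, 100%% policy -> %2d\n",
                step, old_cap, new_cap);
  }
  return 0;  // Prints 1, 2, 4, 7, 11, 17 versus 1, 3, 7, 15, 31, 63.
}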
if (!FLAG_never_compact && !was_marked_incrementally_) { - StartCompaction(); + StartCompaction(NON_INCREMENTAL_COMPACTION); } PagedSpaces spaces; @@ -809,6 +809,8 @@ class CodeFlusher { isolate_->heap()->mark_compact_collector()-> RecordCodeEntrySlot(slot, target); + RecordSharedFunctionInfoCodeSlot(shared); + candidate = next_candidate; } @@ -831,12 +833,21 @@ class CodeFlusher { candidate->set_code(lazy_compile); } + RecordSharedFunctionInfoCodeSlot(candidate); + candidate = next_candidate; } shared_function_info_candidates_head_ = NULL; } + void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) { + Object** slot = HeapObject::RawField(shared, + SharedFunctionInfo::kCodeOffset); + isolate_->heap()->mark_compact_collector()-> + RecordSlot(slot, slot, HeapObject::cast(*slot)); + } + static JSFunction** GetNextCandidateField(JSFunction* candidate) { return reinterpret_cast( candidate->address() + JSFunction::kCodeEntryOffset); @@ -1314,6 +1325,16 @@ class StaticMarkingVisitor : public StaticVisitorBase { re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii), code, heap); + + // Saving a copy might create a pointer into compaction candidate + // that was not observed by marker. This might happen if JSRegExp data + // was marked through the compilation cache before marker reached JSRegExp + // object. + FixedArray* data = FixedArray::cast(re->data()); + Object** slot = data->data_start() + JSRegExp::saved_code_index(is_ascii); + heap->mark_compact_collector()-> + RecordSlot(slot, slot, code); + // Set a number in the 0-255 range to guarantee no smi overflow. re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii), Smi::FromInt(heap->sweep_generation() & 0xff), @@ -2352,8 +2373,10 @@ void MarkCompactCollector::AfterMarking() { code_flusher_->ProcessCandidates(); } - // Clean up dead objects from the runtime profiler. - heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); + if (!FLAG_watch_ic_patching) { + // Clean up dead objects from the runtime profiler. + heap()->isolate()->runtime_profiler()->RemoveDeadSamples(); + } } @@ -3360,9 +3383,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { heap_->UpdateReferencesInExternalStringTable( &UpdateReferenceInExternalStringTableEntry); - // Update JSFunction pointers from the runtime profiler. - heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( - &updating_visitor); + if (!FLAG_watch_ic_patching) { + // Update JSFunction pointers from the runtime profiler. + heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( + &updating_visitor); + } EvacuationWeakObjectRetainer evacuation_object_retainer; heap()->ProcessWeakReferences(&evacuation_object_retainer); diff --git a/deps/v8/src/mark-compact.h b/deps/v8/src/mark-compact.h index 135f220..dc4bcee 100644 --- a/deps/v8/src/mark-compact.h +++ b/deps/v8/src/mark-compact.h @@ -441,7 +441,12 @@ class MarkCompactCollector { // Performs a global garbage collection. void CollectGarbage(); - bool StartCompaction(); + enum CompactionMode { + INCREMENTAL_COMPACTION, + NON_INCREMENTAL_COMPACTION + }; + + bool StartCompaction(CompactionMode mode); void AbortCompaction(); diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js index 5310938..cd4add4 100644 --- a/deps/v8/src/messages.js +++ b/deps/v8/src/messages.js @@ -1078,9 +1078,9 @@ function captureStackTrace(obj, cons_opt) { if (stackTraceLimit < 0 || stackTraceLimit > 10000) { stackTraceLimit = 10000; } - var raw_stack = %CollectStackTrace(cons_opt - ? 
cons_opt - : captureStackTrace, stackTraceLimit); + var raw_stack = %CollectStackTrace(obj, + cons_opt ? cons_opt : captureStackTrace, + stackTraceLimit); DefineOneShotAccessor(obj, 'stack', function (obj) { return FormatRawStackTrace(obj, raw_stack); }); diff --git a/deps/v8/src/mips/assembler-mips.cc b/deps/v8/src/mips/assembler-mips.cc index 85b6ed8..9f803d9 100644 --- a/deps/v8/src/mips/assembler-mips.cc +++ b/deps/v8/src/mips/assembler-mips.cc @@ -1245,6 +1245,7 @@ void Assembler::and_(Register rd, Register rs, Register rt) { void Assembler::andi(Register rt, Register rs, int32_t j) { + ASSERT(is_uint16(j)); GenInstrImmediate(ANDI, rs, rt, j); } @@ -1255,6 +1256,7 @@ void Assembler::or_(Register rd, Register rs, Register rt) { void Assembler::ori(Register rt, Register rs, int32_t j) { + ASSERT(is_uint16(j)); GenInstrImmediate(ORI, rs, rt, j); } @@ -1265,6 +1267,7 @@ void Assembler::xor_(Register rd, Register rs, Register rt) { void Assembler::xori(Register rt, Register rs, int32_t j) { + ASSERT(is_uint16(j)); GenInstrImmediate(XORI, rs, rt, j); } @@ -1445,6 +1448,7 @@ void Assembler::swr(Register rd, const MemOperand& rs) { void Assembler::lui(Register rd, int32_t j) { + ASSERT(is_uint16(j)); GenInstrImmediate(LUI, zero_reg, rd, j); } diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc index d122e9a..259df21 100644 --- a/deps/v8/src/mips/builtins-mips.cc +++ b/deps/v8/src/mips/builtins-mips.cc @@ -116,7 +116,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm, Label* gc_required) { const int initial_capacity = JSArray::kPreallocatedArrayElements; STATIC_ASSERT(initial_capacity >= 0); - __ LoadGlobalInitialConstructedArrayMap(array_function, scratch2, scratch1); + __ LoadInitialArrayMap(array_function, scratch2, scratch1); // Allocate the JSArray object together with space for a fixed array with the // requested elements. @@ -212,8 +212,7 @@ static void AllocateJSArray(MacroAssembler* masm, bool fill_with_hole, Label* gc_required) { // Load the initial map from the array function. - __ LoadGlobalInitialConstructedArrayMap(array_function, scratch2, - elements_array_storage); + __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage); if (FLAG_debug_code) { // Assert that array size is not zero. __ Assert( @@ -924,22 +923,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // t4: JSObject __ bind(&allocated); __ push(t4); - - // Push the function and the allocated receiver from the stack. - // sp[0]: receiver (newly allocated object) - // sp[1]: constructor function - // sp[2]: number of arguments (smi-tagged) - __ lw(a1, MemOperand(sp, kPointerSize)); - __ MultiPushReversed(a1.bit() | t4.bit()); + __ push(t4); // Reload the number of arguments from the stack. - // a1: constructor function // sp[0]: receiver - // sp[1]: constructor function - // sp[2]: receiver - // sp[3]: constructor function - // sp[4]: number of arguments (smi-tagged) - __ lw(a3, MemOperand(sp, 4 * kPointerSize)); + // sp[1]: receiver + // sp[2]: constructor function + // sp[3]: number of arguments (smi-tagged) + __ lw(a1, MemOperand(sp, 2 * kPointerSize)); + __ lw(a3, MemOperand(sp, 3 * kPointerSize)); // Set up pointer to last argument. 
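// The is_uint16 assertions added to andi/ori/xori/lui above pair with the
// MacroAssembler::li fix further below: a 32-bit immediate is materialized as
// lui (upper half) plus ori (lower half), and each half must be a clean
// 16-bit operand. A sketch of the arithmetic, including why li now masks
// after shifting (shifting the signed imm32 right sign-extends a negative
// value, leaving bits above bit 15 set and tripping the new assert unless
// they are masked off):
#include <stdint.h>

uint32_t AssembleImm32(int32_t imm32) {
  uint32_t hi = (static_cast<uint32_t>(imm32) >> 16) & 0xFFFF;  // lui operand
  uint32_t lo = static_cast<uint32_t>(imm32) & 0xFFFF;          // ori operand
  return (hi << 16) | lo;  // Round-trips to the original bit pattern.
}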
__ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); @@ -953,10 +945,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // a2: address of last argument (caller sp) // a3: number of arguments (smi-tagged) // sp[0]: receiver - // sp[1]: constructor function - // sp[2]: receiver - // sp[3]: constructor function - // sp[4]: number of arguments (smi-tagged) + // sp[1]: receiver + // sp[2]: constructor function + // sp[3]: number of arguments (smi-tagged) Label loop, entry; __ jmp(&entry); __ bind(&loop); @@ -984,14 +975,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, NullCallWrapper(), CALL_AS_METHOD); } - // Pop the function from the stack. - // v0: result - // sp[0]: constructor function - // sp[2]: receiver - // sp[3]: constructor function - // sp[4]: number of arguments (smi-tagged) - __ Pop(); - // Restore context from the frame. __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc index 1a0e7c3..852b3c9 100644 --- a/deps/v8/src/mips/code-stubs-mips.cc +++ b/deps/v8/src/mips/code-stubs-mips.cc @@ -4140,7 +4140,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Invoke: Link this frame into the handler chain. There's only one // handler block in this code object, so its index is 0. __ bind(&invoke); - __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0); + __ PushTryHandler(StackHandler::JS_ENTRY, 0); // If an exception not caught by another handler occurs, this handler // returns control to the code after the bal(&invoke) above, which // restores all kCalleeSaved registers (including cp and fp) to their diff --git a/deps/v8/src/mips/ic-mips.cc b/deps/v8/src/mips/ic-mips.cc index 3489936..c3cdb4c 100644 --- a/deps/v8/src/mips/ic-mips.cc +++ b/deps/v8/src/mips/ic-mips.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2012 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -1198,14 +1198,16 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, Label slow, array, extra, check_if_double_array; Label fast_object_with_map_check, fast_object_without_map_check; Label fast_double_with_map_check, fast_double_without_map_check; + Label transition_smi_elements, finish_object_store, non_double_value; + Label transition_double_elements; // Register usage. Register value = a0; Register key = a1; Register receiver = a2; - Register elements = a3; // Elements array of the receiver. + Register receiver_map = a3; Register elements_map = t2; - Register receiver_map = t3; + Register elements = t3; // Elements array of the receiver. // t0 and t1 are used as general scratch registers. // Check that the key is a smi. @@ -1298,9 +1300,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, __ mov(v0, value); __ bind(&non_smi_value); - // Escape to slow case when writing non-smi into smi-only array. - __ CheckFastObjectElements(receiver_map, scratch_value, &slow); + // Escape to elements kind transition case. + __ CheckFastObjectElements(receiver_map, scratch_value, + &transition_smi_elements); // Fast elements array, store the value to the elements backing store. 
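// The transition_smi_elements / transition_double_elements paths bound in the
// KeyedStoreIC code that follows implement, in assembly, roughly this
// decision tree (the kinds and predicates below are stand-ins for V8's
// ElementsKind machinery):
enum StoreKind { SMI_ONLY, DOUBLE, OBJECT };

StoreKind KindAfterStore(StoreKind current, bool value_is_smi,
                         bool value_is_heap_number) {
  if (value_is_smi) return current;         // A smi fits every fast kind.
  if (current == SMI_ONLY) {
    // First non-smi store decides: heap numbers go to unboxed double
    // storage, anything else forces generic object storage.
    return value_is_heap_number ? DOUBLE : OBJECT;
  }
  if (current == DOUBLE && !value_is_heap_number) {
    return OBJECT;                          // Non-number into a double array.
  }
  return current;                           // Store needs no transition.
}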
+ __ bind(&finish_object_store); __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize); __ Addu(address, address, scratch_value); @@ -1326,13 +1330,57 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, key, receiver, elements, + a3, t0, t1, t2, - t3, - &slow); + &transition_double_elements); __ Ret(USE_DELAY_SLOT); __ mov(v0, value); + + __ bind(&transition_smi_elements); + // Transition the array appropriately depending on the value type. + __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset)); + __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); + __ Branch(&non_double_value, ne, t0, Operand(at)); + + // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS -> + // FAST_DOUBLE_ELEMENTS and complete the store. + __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, + FAST_DOUBLE_ELEMENTS, + receiver_map, + t0, + &slow); + ASSERT(receiver_map.is(a3)); // Transition code expects map in a3 + ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow); + __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); + __ jmp(&fast_double_without_map_check); + + __ bind(&non_double_value); + // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS + __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, + FAST_ELEMENTS, + receiver_map, + t0, + &slow); + ASSERT(receiver_map.is(a3)); // Transition code expects map in a3 + ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm); + __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); + __ jmp(&finish_object_store); + + __ bind(&transition_double_elements); + // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a + // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and + // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS + __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, + FAST_ELEMENTS, + receiver_map, + t0, + &slow); + ASSERT(receiver_map.is(a3)); // Transition code expects map in a3 + ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow); + __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); + __ jmp(&finish_object_store); } diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc index 91cddd8..aead65c 100644 --- a/deps/v8/src/mips/lithium-codegen-mips.cc +++ b/deps/v8/src/mips/lithium-codegen-mips.cc @@ -640,7 +640,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, void LCodeGen::PopulateDeoptimizationData(Handle code) { int length = deoptimizations_.length(); if (length == 0) return; - ASSERT(FLAG_deopt); Handle data = factory()->NewDeoptimizationInputData(length, TENURED); diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc index 678b8b1..f4e043a 100644 --- a/deps/v8/src/mips/macro-assembler-mips.cc +++ b/deps/v8/src/mips/macro-assembler-mips.cc @@ -771,18 +771,18 @@ void MacroAssembler::li(Register rd, Operand j, bool gen2instr) { } else if (!(j.imm32_ & kHiMask)) { ori(rd, zero_reg, j.imm32_); } else if (!(j.imm32_ & kImm16Mask)) { - lui(rd, (j.imm32_ & kHiMask) >> kLuiShift); + lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); } else { - lui(rd, (j.imm32_ & kHiMask) >> kLuiShift); + lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); ori(rd, rd, (j.imm32_ & kImm16Mask)); } } else if (MustUseReg(j.rmode_) || gen2instr) { if (MustUseReg(j.rmode_)) { RecordRelocInfo(j.rmode_, j.imm32_); } - // We need always the same number 
of instructions as we may need to patch + // We always need the same number of instructions as we may need to patch // this code to load another value which may need 2 instructions to load. - lui(rd, (j.imm32_ & kHiMask) >> kLuiShift); + lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); ori(rd, rd, (j.imm32_ & kImm16Mask)); } } @@ -2576,8 +2576,7 @@ void MacroAssembler::DebugBreak() { // --------------------------------------------------------------------------- // Exception handling. -void MacroAssembler::PushTryHandler(CodeLocation try_location, - HandlerType type, +void MacroAssembler::PushTryHandler(StackHandler::Kind kind, int handler_index) { // Adjust this code if not the case. STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); @@ -2589,30 +2588,23 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, // For the JSEntry handler, we must preserve a0-a3 and s0. // t1-t3 are available. We will build up the handler from the bottom by - // pushing on the stack. First compute the state. - unsigned state = StackHandler::OffsetField::encode(handler_index); - if (try_location == IN_JAVASCRIPT) { - state |= (type == TRY_CATCH_HANDLER) - ? StackHandler::KindField::encode(StackHandler::TRY_CATCH) - : StackHandler::KindField::encode(StackHandler::TRY_FINALLY); - } else { - ASSERT(try_location == IN_JS_ENTRY); - state |= StackHandler::KindField::encode(StackHandler::ENTRY); - } - + // pushing on the stack. // Set up the code object (t1) and the state (t2) for pushing. + unsigned state = + StackHandler::IndexField::encode(handler_index) | + StackHandler::KindField::encode(kind); li(t1, Operand(CodeObject())); li(t2, Operand(state)); // Push the frame pointer, context, state, and code object. - if (try_location == IN_JAVASCRIPT) { - MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit()); - } else { + if (kind == StackHandler::JS_ENTRY) { ASSERT_EQ(Smi::FromInt(0), 0); // The second zero_reg indicates no context. // The first zero_reg is the NULL frame pointer. // The operands are reversed to match the order of MultiPush/Pop. Push(zero_reg, zero_reg, t2, t1); + } else { + MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit()); } // Link the current handler as the next handler. @@ -2727,7 +2719,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, lw(sp, MemOperand(sp, StackHandlerConstants::kNextOffset)); bind(&check_kind); - STATIC_ASSERT(StackHandler::ENTRY == 0); + STATIC_ASSERT(StackHandler::JS_ENTRY == 0); lw(a2, MemOperand(sp, StackHandlerConstants::kStateOffset)); And(a2, a2, Operand(StackHandler::KindField::kMask)); Branch(&fetch_next, ne, a2, Operand(zero_reg)); @@ -4279,26 +4271,41 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -void MacroAssembler::LoadGlobalInitialConstructedArrayMap( +void MacroAssembler::LoadTransitionedArrayMapConditional( + ElementsKind expected_kind, + ElementsKind transitioned_kind, + Register map_in_out, + Register scratch, + Label* no_map_match) { + // Load the global or builtins object from the current context. + lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); + lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset)); + + // Check that the function's map is the same as the expected cached map. + int expected_index = + Context::GetContextMapIndexFromElementsKind(expected_kind); + lw(at, MemOperand(scratch, Context::SlotOffset(expected_index))); + Branch(no_map_match, ne, map_in_out, Operand(at)); + + // Use the transitioned cached map. 
+ int trans_index = + Context::GetContextMapIndexFromElementsKind(transitioned_kind); + lw(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index))); +} + + +void MacroAssembler::LoadInitialArrayMap( Register function_in, Register scratch, Register map_out) { ASSERT(!function_in.is(map_out)); Label done; lw(map_out, FieldMemOperand(function_in, JSFunction::kPrototypeOrInitialMapOffset)); if (!FLAG_smi_only_arrays) { - // Load the global or builtins object from the current context. - lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); - lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset)); - - // Check that the function's map is same as the cached map. - lw(at, MemOperand( - scratch, Context::SlotOffset(Context::SMI_JS_ARRAY_MAP_INDEX))); - Branch(&done, ne, map_out, Operand(at)); - - // Use the cached transitioned map. - lw(map_out, - MemOperand(scratch, - Context::SlotOffset(Context::OBJECT_JS_ARRAY_MAP_INDEX))); + LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, + FAST_ELEMENTS, + map_out, + scratch, + &done); } bind(&done); } diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h index 24dfceb..69b3f9d 100644 --- a/deps/v8/src/mips/macro-assembler-mips.h +++ b/deps/v8/src/mips/macro-assembler-mips.h @@ -772,10 +772,21 @@ class MacroAssembler: public Assembler { void LoadContext(Register dst, int context_chain_length); - // Load the initial map for new Arrays of a given type. - void LoadGlobalInitialConstructedArrayMap(Register function_in, - Register scratch, - Register map_out); + // Load into map_in_out the cached Array map for elements kind + // transitioned_kind from the global context, but only if map_in_out + // currently holds the cached Array map for expected_kind; otherwise + // branch to no_map_match. + void LoadTransitionedArrayMapConditional( + ElementsKind expected_kind, + ElementsKind transitioned_kind, + Register map_in_out, + Register scratch, + Label* no_map_match); + + // Load the initial map for new Arrays from a JSFunction. + void LoadInitialArrayMap(Register function_in, + Register scratch, + Register map_out); void LoadGlobalFunction(int index, Register function); @@ -854,9 +865,7 @@ class MacroAssembler: public Assembler { // Exception handling. // Push a new try handler and link into try handler chain. - void PushTryHandler(CodeLocation try_location, - HandlerType type, - int handler_index); + void PushTryHandler(StackHandler::Kind kind, int handler_index); // Unlink the stack handler on top of the stack from the try handler chain. // Must preserve the result register. diff --git a/deps/v8/src/mips/stub-cache-mips.cc b/deps/v8/src/mips/stub-cache-mips.cc index 0051edf..ae56306 100644 --- a/deps/v8/src/mips/stub-cache-mips.cc +++ b/deps/v8/src/mips/stub-cache-mips.cc @@ -1468,28 +1468,28 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( __ Ret(); } else { Label call_builtin; - Register elements = a3; - Register end_elements = t1; - // Get the elements array of the object. - __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); - - // Check that the elements are in fast mode and writable. - __ CheckMap(elements, - v0, - Heap::kFixedArrayMapRootIndex, - &call_builtin, - DONT_DO_SMI_CHECK); - if (argc == 1) { // Otherwise fall through to call the builtin. Label attempt_to_grow_elements; + Register elements = t2; + Register end_elements = t1; + // Get the elements array of the object.
+ __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); + + // Check that the elements are in fast mode and writable. + __ CheckMap(elements, + v0, + Heap::kFixedArrayMapRootIndex, + &call_builtin, + DONT_DO_SMI_CHECK); + // Get the array's length into v0 and calculate new length. __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); STATIC_ASSERT(kSmiTagSize == 1); STATIC_ASSERT(kSmiTag == 0); __ Addu(v0, v0, Operand(Smi::FromInt(argc))); - // Get the element's length. + // Get the elements' length. __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset)); // Check if we could survive without allocation. @@ -1503,7 +1503,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( // Save new length. __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); - // Push the element. + // Store the value. // We may need a register containing the address end_elements below, // so write back the value in end_elements. __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize); @@ -1519,13 +1519,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( __ bind(&with_write_barrier); - __ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset)); - __ CheckFastObjectElements(t2, t2, &call_builtin); + __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset)); + + if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { + Label fast_object, not_fast_object; + __ CheckFastObjectElements(a3, t3, &not_fast_object); + __ jmp(&fast_object); + // In case of fast smi-only, convert to fast object; otherwise bail out. + __ bind(&not_fast_object); + __ CheckFastSmiOnlyElements(a3, t3, &call_builtin); + // a2: receiver (set below for the transition generator) + // a3: map + __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, + FAST_ELEMENTS, + a3, + t3, + &call_builtin); + __ mov(a2, receiver); + ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm()); + __ bind(&fast_object); + } else { + __ CheckFastObjectElements(a3, a3, &call_builtin); + } // Save new length. __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset)); - // Push the element. + // Store the value. // We may need a register containing the address end_elements below, // so write back the value in end_elements. __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize); @@ -1573,23 +1593,23 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall( __ Addu(end_elements, elements, end_elements); __ Addu(end_elements, end_elements, Operand(kEndElementsOffset)); __ li(t3, Operand(new_space_allocation_top)); - __ lw(t2, MemOperand(t3)); - __ Branch(&call_builtin, ne, end_elements, Operand(t2)); + __ lw(a3, MemOperand(t3)); + __ Branch(&call_builtin, ne, end_elements, Operand(a3)); __ li(t5, Operand(new_space_allocation_limit)); __ lw(t5, MemOperand(t5)); - __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize)); - __ Branch(&call_builtin, hi, t2, Operand(t5)); + __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize)); + __ Branch(&call_builtin, hi, a3, Operand(t5)); // We fit and could grow elements. // Update new_space_allocation_top. - __ sw(t2, MemOperand(t3)); + __ sw(a3, MemOperand(t3)); // Push the argument. __ sw(a2, MemOperand(end_elements)); // Fill the rest with holes. - __ LoadRoot(t2, Heap::kTheHoleValueRootIndex); + __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); for (int i = 1; i < kAllocationDelta; i++) { - __ sw(t2, MemOperand(end_elements, i * kPointerSize)); + __ sw(a3, MemOperand(end_elements, i * kPointerSize)); } // Update elements' and array's sizes.
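The MIPS hunks above wire elements-kind transitions into the keyed store IC and the Array.push stub: a store of the "wrong" kind of value no longer bails out to the runtime but transitions the backing store in place. A minimal JavaScript sketch of the store patterns these stubs now handle (illustrative only; the generator names in the comments are the ones called above, and arrays are assumed to start as FAST_SMI_ONLY_ELEMENTS when FLAG_smi_only_arrays is on):

    var a = [1, 2, 3];   // starts with FAST_SMI_ONLY_ELEMENTS
    a[0] = 1.5;          // smi-only -> FAST_DOUBLE_ELEMENTS (GenerateSmiOnlyToDouble)
    a[1] = {};           // double -> FAST_ELEMENTS (GenerateDoubleToObject)

    var b = [1, 2, 3];
    b[0] = "x";          // smi-only -> FAST_ELEMENTS (GenerateSmiOnlyToObject)

    var c = [1, 2, 3];
    c.push("y");         // CompileArrayPushCall transitions smi-only -> FAST_ELEMENTS
                         // before storing, instead of falling back to the builtin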
diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h index 6eeba20..a5ea659 100644 --- a/deps/v8/src/objects-inl.h +++ b/deps/v8/src/objects-inl.h @@ -3530,6 +3530,8 @@ ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset) ACCESSORS(SharedFunctionInfo, this_property_assignments, Object, kThisPropertyAssignmentsOffset) +SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset) + BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, kHiddenPrototypeBit) BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit) @@ -3576,6 +3578,8 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints, SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count, kThisPropertyAssignmentsCountOffset) SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset) +SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset) +SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset) #else #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \ @@ -3626,6 +3630,9 @@ PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, this_property_assignments_count, kThisPropertyAssignmentsCountOffset) PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset) + +PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset) +PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset) #endif @@ -3708,6 +3715,9 @@ BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, kNameShouldPrintAsAnonymous) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction) BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous) +BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft, + kDontCrankshaft) +BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline) ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset) ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset) @@ -3777,16 +3787,6 @@ void SharedFunctionInfo::set_scope_info(ScopeInfo* value, } -Smi* SharedFunctionInfo::deopt_counter() { - return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset)); -} - - -void SharedFunctionInfo::set_deopt_counter(Smi* value) { - WRITE_FIELD(this, kDeoptCounterOffset, value); -} - - bool SharedFunctionInfo::is_compiled() { return code() != Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile); diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc index 5e472ba..aef0284 100644 --- a/deps/v8/src/objects.cc +++ b/deps/v8/src/objects.cc @@ -3773,12 +3773,15 @@ MaybeObject* JSObject::GetHiddenPropertiesDictionary(bool create_if_absent) { // code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors(); if ((descriptors->number_of_descriptors() > 0) && - (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) && - descriptors->IsProperty(0)) { - ASSERT(descriptors->GetType(0) == FIELD); - Object* hidden_store = - this->FastPropertyAt(descriptors->GetFieldIndex(0)); - return StringDictionary::cast(hidden_store); + (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) { + if (descriptors->GetType(0) == FIELD) { + Object* hidden_store = + this->FastPropertyAt(descriptors->GetFieldIndex(0)); + return StringDictionary::cast(hidden_store); + } else { + ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR || + descriptors->GetType(0) == MAP_TRANSITION); + } } } else { PropertyAttributes attributes; @@ -3819,11 +3822,14 @@ MaybeObject* JSObject::SetHiddenPropertiesDictionary( // code zero) it will always occupy the first entry if present. DescriptorArray* descriptors = this->map()->instance_descriptors(); if ((descriptors->number_of_descriptors() > 0) && - (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) && - descriptors->IsProperty(0)) { - ASSERT(descriptors->GetType(0) == FIELD); - this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary); - return this; + (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) { + if (descriptors->GetType(0) == FIELD) { + this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary); + return this; + } else { + ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR || + descriptors->GetType(0) == MAP_TRANSITION); + } } } MaybeObject* store_result = @@ -4247,11 +4253,14 @@ bool JSReceiver::IsSimpleEnum() { } -int Map::NumberOfDescribedProperties() { +int Map::NumberOfDescribedProperties(PropertyAttributes filter) { int result = 0; DescriptorArray* descs = instance_descriptors(); for (int i = 0; i < descs->number_of_descriptors(); i++) { - if (descs->IsProperty(i)) result++; + PropertyDetails details(descs->GetDetails(i)); + if (descs->IsProperty(i) && (details.attributes() & filter) == 0) { + result++; + } } return result; } @@ -5502,7 +5511,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey { for (int i = 0; i < maps_->length(); ++i) { bool match_found = false; for (int j = 0; j < other_maps.length(); ++j) { - if (maps_->at(i)->EquivalentTo(*other_maps.at(j))) { + if (*(maps_->at(i)) == *(other_maps.at(j))) { match_found = true; break; } @@ -5721,6 +5730,11 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage, } +static bool InsertionPointFound(String* key1, String* key2) { + return key1->Hash() > key2->Hash() || key1 == key2; +} + + MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor, TransitionFlag transition_flag) { // Transitions are only kept when inserting another transition. @@ -5793,28 +5807,24 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor, // Copy the descriptors, filtering out transitions and null descriptors, // and inserting or replacing a descriptor. 
- uint32_t descriptor_hash = descriptor->GetKey()->Hash(); - int from_index = 0; int to_index = 0; - - for (; from_index < number_of_descriptors(); from_index++) { - String* key = GetKey(from_index); - if (key->Hash() > descriptor_hash || key == descriptor->GetKey()) { - break; + int insertion_index = -1; + int from_index = 0; + while (from_index < number_of_descriptors()) { + if (insertion_index < 0 && + InsertionPointFound(GetKey(from_index), descriptor->GetKey())) { + insertion_index = to_index++; + if (replacing) from_index++; + } else { + if (!(IsNullDescriptor(from_index) || + (remove_transitions && IsTransitionOnly(from_index)))) { + new_descriptors->CopyFrom(to_index++, this, from_index, witness); + } + from_index++; } - if (IsNullDescriptor(from_index)) continue; - if (remove_transitions && IsTransitionOnly(from_index)) continue; - new_descriptors->CopyFrom(to_index++, this, from_index, witness); - } - - new_descriptors->Set(to_index++, descriptor, witness); - if (replacing) from_index++; - - for (; from_index < number_of_descriptors(); from_index++) { - if (IsNullDescriptor(from_index)) continue; - if (remove_transitions && IsTransitionOnly(from_index)) continue; - new_descriptors->CopyFrom(to_index++, this, from_index, witness); } + if (insertion_index < 0) insertion_index = to_index++; + new_descriptors->Set(insertion_index, descriptor, witness); ASSERT(to_index == new_descriptors->number_of_descriptors()); SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates()); @@ -5829,14 +5839,14 @@ MaybeObject* DescriptorArray::RemoveTransitions() { // not be allocated. // Compute the size of the map transition entries to be removed. - int num_removed = 0; + int new_number_of_descriptors = 0; for (int i = 0; i < number_of_descriptors(); i++) { - if (!IsProperty(i)) num_removed++; + if (IsProperty(i)) new_number_of_descriptors++; } // Allocate the new descriptor array. DescriptorArray* new_descriptors; - { MaybeObject* maybe_result = Allocate(number_of_descriptors() - num_removed); + { MaybeObject* maybe_result = Allocate(new_number_of_descriptors); if (!maybe_result->To(&new_descriptors)) { return maybe_result; } @@ -7606,13 +7616,10 @@ bool SharedFunctionInfo::HasSourceCode() { } -Object* SharedFunctionInfo::GetSourceCode() { - Isolate* isolate = GetIsolate(); - if (!HasSourceCode()) return isolate->heap()->undefined_value(); - HandleScope scope(isolate); - Object* source = Script::cast(script())->source(); - return *SubString(Handle<String>(String::cast(source), isolate), - start_position(), end_position()); +Handle<Object> SharedFunctionInfo::GetSourceCode() { + if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value(); + Handle<String> source(String::cast(Script::cast(script())->source())); + return SubString(source, start_position(), end_position()); } @@ -10355,24 +10362,9 @@ bool JSObject::HasRealNamedCallbackProperty(String* key) { int JSObject::NumberOfLocalProperties(PropertyAttributes filter) { - if (HasFastProperties()) { - DescriptorArray* descs = map()->instance_descriptors(); - int result = 0; - for (int i = 0; i < descs->number_of_descriptors(); i++) { - PropertyDetails details(descs->GetDetails(i)); - if (details.IsProperty() && (details.attributes() & filter) == 0) { - result++; - } - } - return result; - } else { - return property_dictionary()->NumberOfElementsFilterAttributes(filter); - } -} - - -int JSObject::NumberOfEnumProperties() { - return NumberOfLocalProperties(static_cast<PropertyAttributes>(DONT_ENUM)); + return HasFastProperties() ?
+ map()->NumberOfDescribedProperties(filter) : + property_dictionary()->NumberOfElementsFilterAttributes(filter); } @@ -10493,7 +10485,7 @@ void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) { // purpose of this function is to provide reflection information for the object // mirrors. void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) { - ASSERT(storage->length() >= (NumberOfLocalProperties(NONE) - index)); + ASSERT(storage->length() >= (NumberOfLocalProperties() - index)); if (HasFastProperties()) { DescriptorArray* descs = map()->instance_descriptors(); for (int i = 0; i < descs->number_of_descriptors(); i++) { diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h index 6edd6cc..bc18bf8 100644 --- a/deps/v8/src/objects.h +++ b/deps/v8/src/objects.h @@ -1638,7 +1638,7 @@ class JSObject: public JSReceiver { Handle<String> key, Handle<Object> value); // Returns a failure if a GC is required. - MaybeObject* SetHiddenProperty(String* key, Object* value); + MUST_USE_RESULT MaybeObject* SetHiddenProperty(String* key, Object* value); // Gets the value of a hidden property with the given key. Returns undefined // if the property doesn't exist (or if called on a detached proxy), // otherwise returns the value set for the key. @@ -1807,9 +1807,7 @@ class JSObject: public JSReceiver { // Returns the number of properties on this object filtering out properties // with the specified attributes (ignoring interceptors). - int NumberOfLocalProperties(PropertyAttributes filter); - // Returns the number of enumerable properties (ignoring interceptors). - int NumberOfEnumProperties(); + int NumberOfLocalProperties(PropertyAttributes filter = NONE); // Fill in details for properties into storage starting at the specified // index. void GetLocalPropertyNames(FixedArray* storage, int index); @@ -4638,8 +4636,9 @@ class Map: public HeapObject { // Returns the next free property index (only valid for FAST MODE). int NextFreePropertyIndex(); - // Returns the number of properties described in instance_descriptors. - int NumberOfDescribedProperties(); + // Returns the number of properties described in instance_descriptors, + // filtering out properties with the specified attributes. + int NumberOfDescribedProperties(PropertyAttributes filter = NONE); // Casting. static inline Map* cast(Object* obj); @@ -4697,12 +4696,6 @@ class Map: public HeapObject { // The "shared" flags of both this map and |other| are ignored. bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode); - // Returns true if this map and |other| describe equivalent objects. - // The "shared" flags of both this map and |other| are ignored. - bool EquivalentTo(Map* other) { - return EquivalentToForNormalization(other, KEEP_INOBJECT_PROPERTIES); - } - // Returns the contents of this map's descriptor array for the given string. // May return NULL. |safe_to_add_transition| is set to false and NULL // is returned if adding transitions is not allowed. @@ -5204,8 +5197,14 @@ class SharedFunctionInfo: public HeapObject { // A counter used to determine when to stress the deoptimizer with a // deopt.
- inline Smi* deopt_counter(); - inline void set_deopt_counter(Smi* counter); + inline int deopt_counter(); + inline void set_deopt_counter(int counter); + + inline int profiler_ticks(); + inline void set_profiler_ticks(int ticks); + + inline int ast_node_count(); + inline void set_ast_node_count(int count); // Add information on assignments of the form this.x = ...; void SetThisPropertyAssignmentsInfo( @@ -5279,6 +5278,12 @@ class SharedFunctionInfo: public HeapObject { // through the API, which does not change this flag). DECL_BOOLEAN_ACCESSORS(is_anonymous) + // Indicates that the function cannot be crankshafted. + DECL_BOOLEAN_ACCESSORS(dont_crankshaft) + + // Indicates that the function cannot be inlined. + DECL_BOOLEAN_ACCESSORS(dont_inline) + // Indicates whether or not the code in the shared function support // deoptimization. inline bool has_deoptimization_support(); @@ -5316,7 +5321,7 @@ class SharedFunctionInfo: public HeapObject { // [source code]: Source code for the function. bool HasSourceCode(); - Object* GetSourceCode(); + Handle<Object> GetSourceCode(); inline int opt_count(); inline void set_opt_count(int opt_count); @@ -5373,12 +5378,12 @@ class SharedFunctionInfo: public HeapObject { kInferredNameOffset + kPointerSize; static const int kThisPropertyAssignmentsOffset = kInitialMapOffset + kPointerSize; - static const int kDeoptCounterOffset = + static const int kProfilerTicksOffset = kThisPropertyAssignmentsOffset + kPointerSize; #if V8_HOST_ARCH_32_BIT // Smi fields. static const int kLengthOffset = - kDeoptCounterOffset + kPointerSize; + kProfilerTicksOffset + kPointerSize; static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize; static const int kExpectedNofPropertiesOffset = kFormalParameterCountOffset + kPointerSize; @@ -5396,8 +5401,11 @@ class SharedFunctionInfo: public HeapObject { kCompilerHintsOffset + kPointerSize; static const int kOptCountOffset = kThisPropertyAssignmentsCountOffset + kPointerSize; + static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize; + static const int kDeoptCounterOffset = + kAstNodeCountOffset + kPointerSize; // Total size. - static const int kSize = kOptCountOffset + kPointerSize; + static const int kSize = kDeoptCounterOffset + kPointerSize; #else // The only reason to use smi fields instead of int fields // is to allow iteration without maps decoding during @@ -5409,7 +5417,7 @@ // word is not set and thus this word cannot be treated as pointer // to HeapObject during old space traversal. static const int kLengthOffset = - kDeoptCounterOffset + kPointerSize; + kProfilerTicksOffset + kPointerSize; static const int kFormalParameterCountOffset = kLengthOffset + kIntSize; @@ -5433,8 +5441,11 @@ static const int kOptCountOffset = kThisPropertyAssignmentsCountOffset + kIntSize; + static const int kAstNodeCountOffset = kOptCountOffset + kIntSize; + static const int kDeoptCounterOffset = kAstNodeCountOffset + kIntSize; + // Total size.
- static const int kSize = kOptCountOffset + kIntSize; + static const int kSize = kDeoptCounterOffset + kIntSize; #endif @@ -5481,6 +5492,8 @@ class SharedFunctionInfo: public HeapObject { kBoundFunction, kIsAnonymous, kNameShouldPrintAsAnonymous, + kDontCrankshaft, + kDontInline, kCompilerHintsCount // Pseudo entry }; diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc index 5be1a6d..c02cad9 100644 --- a/deps/v8/src/parser.cc +++ b/deps/v8/src/parser.cc @@ -481,62 +481,6 @@ class Parser::BlockState BASE_EMBEDDED { }; -class Parser::FunctionState BASE_EMBEDDED { - public: - FunctionState(Parser* parser, Scope* scope, Isolate* isolate); - ~FunctionState(); - - int NextMaterializedLiteralIndex() { - return next_materialized_literal_index_++; - } - int materialized_literal_count() { - return next_materialized_literal_index_ - JSFunction::kLiteralsPrefixSize; - } - - int NextHandlerIndex() { return next_handler_index_++; } - int handler_count() { return next_handler_index_; } - - void SetThisPropertyAssignmentInfo( - bool only_simple_this_property_assignments, - Handle<FixedArray> this_property_assignments) { - only_simple_this_property_assignments_ = - only_simple_this_property_assignments; - this_property_assignments_ = this_property_assignments; - } - bool only_simple_this_property_assignments() { - return only_simple_this_property_assignments_; - } - Handle<FixedArray> this_property_assignments() { - return this_property_assignments_; - } - - void AddProperty() { expected_property_count_++; } - int expected_property_count() { return expected_property_count_; } - - private: - // Used to assign an index to each literal that needs materialization in - // the function. Includes regexp literals, and boilerplate for object and - // array literals. - int next_materialized_literal_index_; - - // Used to assign a per-function index to try and catch handlers. - int next_handler_index_; - - // Properties count estimation. - int expected_property_count_; - - // Keeps track of assignments to properties of this. Used for - // optimizing constructors. - bool only_simple_this_property_assignments_; - Handle<FixedArray> this_property_assignments_; - - Parser* parser_; - FunctionState* outer_function_state_; - Scope* outer_scope_; - unsigned saved_ast_node_id_; -}; - - Parser::FunctionState::FunctionState(Parser* parser, Scope* scope, Isolate* isolate) @@ -548,7 +492,8 @@ Parser::FunctionState::FunctionState(Parser* parser, parser_(parser), outer_function_state_(parser->current_function_state_), outer_scope_(parser->top_scope_), - saved_ast_node_id_(isolate->ast_node_id()) { + saved_ast_node_id_(isolate->ast_node_id()), + factory_(isolate) { parser->top_scope_ = scope; parser->current_function_state_ = this; isolate->set_ast_node_id(AstNode::kDeclarationsId + 1); @@ -602,12 +547,16 @@ Parser::Parser(Handle