+2012-02-09: Version 3.9.5
+
+ Removed unused command line flags.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-08: Version 3.9.4
+
+ Properly initialize element-transitioning array literals on ARM.
+ (issue 1930)
+
+ Bug fixes on all platforms.
+
+
+2012-02-07: Version 3.9.3
+
+ When rethrowing an exception, print the stack trace of its original
+ site instead of rethrow site (Chromium issue 60240).
+
+ Increased size of small stacks from 32k to 64k to avoid hitting limits
+ in Chromium (Chromium issue 112843).
+
+
2012-02-06: Version 3.9.2
Add timestamp to --trace-gc output. (issue 1932)
'CPPDEFINES': ['__C99FEATURES__'],
'CPPPATH' : [src_dir, '/usr/local/include'],
'LIBPATH' : ['/usr/local/lib'],
- 'CCFLAGS': ['-ansi', '-fno-omit-frame-pointer'],
+ 'CCFLAGS': ['-ansi'],
},
'os:netbsd': {
'CPPPATH' : [src_dir, '/usr/pkg/include'],
'V8_TARGET_ARCH_MIPS',
],
'conditions': [
+ [ 'target_arch=="mips"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': ['-EL'],
+ 'ldflags': ['-EL'],
+ 'conditions': [
+ [ 'v8_use_mips_abi_hardfloat=="true"', {
+ 'cflags': ['-mhard-float'],
+ 'ldflags': ['-mhard-float'],
+ }, {
+ 'cflags': ['-msoft-float'],
+ 'ldflags': ['-msoft-float'],
+ }],
+ ['mips_arch_variant=="mips32r2"', {
+ 'cflags': ['-mips32r2', '-Wa,-mips32r2'],
+ }, {
+ 'cflags': ['-mips32', '-Wa,-mips32'],
+ }],
+ ],
+ }],
+ ],
+ }],
[ 'v8_can_use_fpu_instructions=="true"', {
'defines': [
'CAN_USE_FPU_INSTRUCTIONS',
'__mips_soft_float=1'
],
}],
+ ['mips_arch_variant=="mips32r2"', {
+ 'defines': ['_MIPS_ARCH_MIPS32R2',],
+ }],
# The MIPS assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
['host_arch=="x64"', {
}], # OS=="mac"
['OS=="win"', {
'msvs_configuration_attributes': {
+ 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
'CharacterSet': '1',
},
'variables': {
'target_arch': 'ia32',
'v8_target_arch': 'mips',
+ 'mips_arch_variant': 'mips32r2',
},
}
kClosure = 5, // Function closure.
kRegExp = 6, // RegExp.
kHeapNumber = 7, // Number stored in the heap.
- kNative = 8 // Native object (not from V8 heap).
+ kNative = 8, // Native object (not from V8 heap).
+ kSynthetic = 9 // Synthetic object, usually used for grouping
+ // snapshot items together.
};
/** Returns node type (see HeapGraphNode::Type). */
void Testing::PrepareStressRun(int run) {
static const char* kLazyOptimizations =
- "--prepare-always-opt --nolimit-inlining "
- "--noalways-opt --noopt-eagerly";
- static const char* kEagerOptimizations = "--opt-eagerly";
+ "--prepare-always-opt --nolimit-inlining --noalways-opt";
static const char* kForcedOptimizations = "--always-opt";
// If deoptimization stressed turn on frequent deoptimization. If no value
if (run == GetStressRuns() - 1) {
SetFlagsFromString(kForcedOptimizations);
} else {
- SetFlagsFromString(kEagerOptimizations);
SetFlagsFromString(kLazyOptimizations);
}
#else
if (run == GetStressRuns() - 1) {
SetFlagsFromString(kForcedOptimizations);
- } else if (run == GetStressRuns() - 2) {
- SetFlagsFromString(kEagerOptimizations);
- } else {
+ } else if (run != GetStressRuns() - 2) {
SetFlagsFromString(kLazyOptimizations);
}
#endif
// r4: JSObject
__ bind(&allocated);
__ push(r4);
+ __ push(r4);
- // Push the function and the allocated receiver from the stack.
- // sp[0]: receiver (newly allocated object)
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ ldr(r1, MemOperand(sp, kPointerSize));
- __ push(r1); // Constructor function.
- __ push(r4); // Receiver.
-
- // Reload the number of arguments from the stack.
- // r1: constructor function
+ // Reload the number of arguments and the constructor from the stack.
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
+ __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
+ __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
// Set up pointer to last argument.
__ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
// Copy arguments and receiver to the expression stack.
// r0: number of arguments
- // r2: address of last argument (caller sp)
// r1: constructor function
+ // r2: address of last argument (caller sp)
// r3: number of arguments (smi-tagged)
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
Label loop, entry;
__ b(&entry);
__ bind(&loop);
NullCallWrapper(), CALL_AS_METHOD);
}
- // Pop the function from the stack.
- // sp[0]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ pop();
-
// Restore context from the frame.
// r0: result
// sp[0]: receiver
// handler block in this code object, so its index is 0.
__ bind(&invoke);
// Must preserve r0-r4, r5-r7 are available.
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the bl(&invoke) above, which
// restores all kCalleeSaved registers (including cp and fp) to their
// Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
__ bind(&double_elements);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10,
+ __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r2,
&slow_elements);
__ Ret();
}
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
}
#endif
+ // We can optionally optimize based on counters rather than statistical
+ // sampling.
+ if (info->ShouldSelfOptimize()) {
+ if (FLAG_trace_opt) {
+ PrintF("[adding self-optimization header to %s]\n",
+ *info->function()->debug_name()->ToCString());
+ }
+ MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+ Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+ JSGlobalPropertyCell* cell;
+ if (maybe_cell->To(&cell)) {
+ __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
+ __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
+ __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ Handle<Code> compile_stub(
+ isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+ __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq);
+ }
+ }
+
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
// receiver object). r5 is zero for method calls and non-zero for
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+ ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+ EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+ FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
(mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ ++global_count_;
break;
case Variable::PARAMETER:
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
#endif
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
// For the JSEntry handler, we must preserve r0-r4, r5-r7 are available.
// We will build up the handler from the bottom by pushing on the stack.
- // First compute the state.
- unsigned state = StackHandler::OffsetField::encode(handler_index);
- if (try_location == IN_JAVASCRIPT) {
- state |= (type == TRY_CATCH_HANDLER)
- ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
- : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
- } else {
- ASSERT(try_location == IN_JS_ENTRY);
- state |= StackHandler::KindField::encode(StackHandler::ENTRY);
- }
-
// Set up the code object (r5) and the state (r6) for pushing.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
mov(r5, Operand(CodeObject()));
mov(r6, Operand(state));
// Push the frame pointer, context, state, and code object.
- if (try_location == IN_JAVASCRIPT) {
- stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
- } else {
+ if (kind == StackHandler::JS_ENTRY) {
mov(r7, Operand(Smi::FromInt(0))); // Indicates no context.
mov(ip, Operand(0, RelocInfo::NONE)); // NULL frame pointer.
stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit());
+ } else {
+ stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
}
// Link the current handler as the next handler.
ldr(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
- STATIC_ASSERT(StackHandler::ENTRY == 0);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
ldr(r2, MemOperand(sp, StackHandlerConstants::kStateOffset));
tst(r2, Operand(StackHandler::KindField::kMask));
b(ne, &fetch_next);
// Exception handling
// Push a new try handler and link into try handler chain.
- void PushTryHandler(CodeLocation try_location,
- HandlerType type,
- int handler_index);
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
// Must preserve the result register.
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
- // the FastDoubleElements array elements, otherwise jump to fail.
+ // the FastDoubleElements array elements. Otherwise jump to fail, in which
+ // case scratch2, scratch3 and scratch4 are unmodified.
void StoreNumberToDoubleElements(Register value_reg,
Register key_reg,
Register receiver_reg,
__ Ret();
} else {
Label call_builtin;
- Register elements = r3;
- Register end_elements = r5;
- // Get the elements array of the object.
- __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ CheckMap(elements,
- r0,
- Heap::kFixedArrayMapRootIndex,
- &call_builtin,
- DONT_DO_SMI_CHECK);
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
+ Register elements = r6;
+ Register end_elements = r5;
+ // Get the elements array of the object.
+ __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ CheckMap(elements,
+ r0,
+ Heap::kFixedArrayMapRootIndex,
+ &call_builtin,
+ DONT_DO_SMI_CHECK);
+
+
// Get the array's length into r0 and calculate new length.
__ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ add(r0, r0, Operand(Smi::FromInt(argc)));
- // Get the element's length.
+ // Get the elements' length.
__ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
// Save new length.
__ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ add(end_elements, elements,
__ bind(&with_write_barrier);
- __ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ CheckFastObjectElements(r6, r6, &call_builtin);
+ __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(r3, r7, ¬_fast_object);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(¬_fast_object);
+ __ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
+ // r1: receiver
+ // r3: map
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ r3,
+ r7,
+ &call_builtin);
+ __ mov(r2, receiver);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(r3, r3, &call_builtin);
+ }
// Save new length.
__ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ add(end_elements, elements,
Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
__ add(end_elements, end_elements, Operand(kEndElementsOffset));
__ mov(r7, Operand(new_space_allocation_top));
- __ ldr(r6, MemOperand(r7));
- __ cmp(end_elements, r6);
+ __ ldr(r3, MemOperand(r7));
+ __ cmp(end_elements, r3);
__ b(ne, &call_builtin);
__ mov(r9, Operand(new_space_allocation_limit));
__ ldr(r9, MemOperand(r9));
- __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
- __ cmp(r6, r9);
+ __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
+ __ cmp(r3, r9);
__ b(hi, &call_builtin);
// We fit and could grow elements.
// Update new_space_allocation_top.
- __ str(r6, MemOperand(r7));
+ __ str(r3, MemOperand(r7));
// Push the argument.
__ str(r2, MemOperand(end_elements));
// Fill the rest with holes.
- __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
- __ str(r6, MemOperand(end_elements, i * kPointerSize));
+ __ str(r3, MemOperand(end_elements, i * kPointerSize));
}
// Update elements' and array's sizes.
assignment_id_(GetNextId(isolate)),
block_start_(false),
block_end_(false),
- is_monomorphic_(false) {
- ASSERT(Token::IsAssignmentOp(op));
- if (is_compound()) {
- binary_operation_ =
- new(isolate->zone()) BinaryOperation(isolate,
- binary_op(),
- target,
- value,
- pos + 1);
- compound_load_id_ = GetNextId(isolate);
- }
-}
+ is_monomorphic_(false) { }
Token::Value Assignment::binary_op() const {
ObjectLiteral::Property::Property(bool is_getter, FunctionLiteral* value) {
- Isolate* isolate = Isolate::Current();
emit_store_ = true;
- key_ = new(isolate->zone()) Literal(isolate, value->name());
value_ = value;
kind_ = is_getter ? GETTER : SETTER;
}
// Inlining support
bool Declaration::IsInlineable() const {
- return proxy()->var()->IsStackAllocated() && fun() == NULL;
-}
-
-
-bool TargetCollector::IsInlineable() const {
- UNREACHABLE();
- return false;
-}
-
-
-bool ForInStatement::IsInlineable() const {
- return false;
-}
-
-
-bool WithStatement::IsInlineable() const {
- return false;
-}
-
-
-bool SwitchStatement::IsInlineable() const {
- return false;
-}
-
-
-bool TryStatement::IsInlineable() const {
- return false;
-}
-
-
-bool TryCatchStatement::IsInlineable() const {
- return false;
-}
-
-
-bool TryFinallyStatement::IsInlineable() const {
- return false;
-}
-
-
-bool DebuggerStatement::IsInlineable() const {
- return false;
-}
-
-
-bool Throw::IsInlineable() const {
- return exception()->IsInlineable();
-}
-
-
-bool MaterializedLiteral::IsInlineable() const {
- // TODO(1322): Allow materialized literals.
- return false;
-}
-
-
-bool FunctionLiteral::IsInlineable() const {
- // TODO(1322): Allow materialized literals.
- return false;
-}
-
-
-bool ThisFunction::IsInlineable() const {
- return true;
-}
-
-
-bool SharedFunctionInfoLiteral::IsInlineable() const {
- return false;
-}
-
-
-bool ForStatement::IsInlineable() const {
- return (init() == NULL || init()->IsInlineable())
- && (cond() == NULL || cond()->IsInlineable())
- && (next() == NULL || next()->IsInlineable())
- && body()->IsInlineable();
-}
-
-
-bool WhileStatement::IsInlineable() const {
- return cond()->IsInlineable()
- && body()->IsInlineable();
-}
-
-
-bool DoWhileStatement::IsInlineable() const {
- return cond()->IsInlineable()
- && body()->IsInlineable();
-}
-
-
-bool ContinueStatement::IsInlineable() const {
- return true;
-}
-
-
-bool BreakStatement::IsInlineable() const {
- return true;
-}
-
-
-bool EmptyStatement::IsInlineable() const {
- return true;
-}
-
-
-bool Literal::IsInlineable() const {
- return true;
-}
-
-
-bool Block::IsInlineable() const {
- const int count = statements_.length();
- for (int i = 0; i < count; ++i) {
- if (!statements_[i]->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool ExpressionStatement::IsInlineable() const {
- return expression()->IsInlineable();
-}
-
-
-bool IfStatement::IsInlineable() const {
- return condition()->IsInlineable()
- && then_statement()->IsInlineable()
- && else_statement()->IsInlineable();
-}
-
-
-bool ReturnStatement::IsInlineable() const {
- return expression()->IsInlineable();
-}
-
-
-bool Conditional::IsInlineable() const {
- return condition()->IsInlineable() && then_expression()->IsInlineable() &&
- else_expression()->IsInlineable();
+ return proxy()->var()->IsStackAllocated();
}
-
-bool VariableProxy::IsInlineable() const {
- return var()->IsUnallocated()
- || var()->IsStackAllocated()
- || var()->IsContextSlot();
-}
-
-
-bool Assignment::IsInlineable() const {
- return target()->IsInlineable() && value()->IsInlineable();
-}
-
-
-bool Property::IsInlineable() const {
- return obj()->IsInlineable() && key()->IsInlineable();
-}
-
-
-bool Call::IsInlineable() const {
- if (!expression()->IsInlineable()) return false;
- const int count = arguments()->length();
- for (int i = 0; i < count; ++i) {
- if (!arguments()->at(i)->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool CallNew::IsInlineable() const {
- if (!expression()->IsInlineable()) return false;
- const int count = arguments()->length();
- for (int i = 0; i < count; ++i) {
- if (!arguments()->at(i)->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool CallRuntime::IsInlineable() const {
- // Don't try to inline JS runtime calls because we don't (currently) even
- // optimize them.
- if (is_jsruntime()) return false;
- // Don't inline the %_ArgumentsLength or %_Arguments because their
- // implementation will not work. There is no stack frame to get them
- // from.
- if (function()->intrinsic_type == Runtime::INLINE &&
- (name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
- name()->IsEqualTo(CStrVector("_Arguments")))) {
- return false;
- }
- const int count = arguments()->length();
- for (int i = 0; i < count; ++i) {
- if (!arguments()->at(i)->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool UnaryOperation::IsInlineable() const {
- return expression()->IsInlineable();
-}
-
-
-bool BinaryOperation::IsInlineable() const {
- return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CompareOperation::IsInlineable() const {
- return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CountOperation::IsInlineable() const {
- return expression()->IsInlineable();
+bool VariableDeclaration::IsInlineable() const {
+ return Declaration::IsInlineable() && fun() == NULL;
}
entry_id_(AstNode::GetNextId(isolate)) {
}
+
+#define INCREASE_NODE_COUNT(NodeType) \
+ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+ increase_node_count(); \
+ }
+
+INCREASE_NODE_COUNT(VariableDeclaration)
+INCREASE_NODE_COUNT(ModuleDeclaration)
+INCREASE_NODE_COUNT(ModuleLiteral)
+INCREASE_NODE_COUNT(ModuleVariable)
+INCREASE_NODE_COUNT(ModulePath)
+INCREASE_NODE_COUNT(ModuleUrl)
+INCREASE_NODE_COUNT(Block)
+INCREASE_NODE_COUNT(ExpressionStatement)
+INCREASE_NODE_COUNT(EmptyStatement)
+INCREASE_NODE_COUNT(IfStatement)
+INCREASE_NODE_COUNT(ContinueStatement)
+INCREASE_NODE_COUNT(BreakStatement)
+INCREASE_NODE_COUNT(ReturnStatement)
+INCREASE_NODE_COUNT(Conditional)
+INCREASE_NODE_COUNT(Literal)
+INCREASE_NODE_COUNT(Assignment)
+INCREASE_NODE_COUNT(Throw)
+INCREASE_NODE_COUNT(Property)
+INCREASE_NODE_COUNT(UnaryOperation)
+INCREASE_NODE_COUNT(CountOperation)
+INCREASE_NODE_COUNT(BinaryOperation)
+INCREASE_NODE_COUNT(CompareOperation)
+INCREASE_NODE_COUNT(ThisFunction)
+
+#undef INCREASE_NODE_COUNT
+
+
+void AstConstructionVisitor::VisitWithStatement(WithStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) {
+ increase_node_count();
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForStatement(ForStatement* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitTryFinallyStatement(
+ TryFinallyStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
+ increase_node_count();
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSharedFunctionInfoLiteral(
+ SharedFunctionInfoLiteral* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) {
+ increase_node_count();
+ // In theory, we'd have to add:
+ // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); }
+ // However, node->var() is usually not bound yet at VariableProxy creation
+ // time, and LOOKUP variables only result from constructs that cannot
+ // be inlined anyway.
+}
+
+
+void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
+ increase_node_count();
+ add_flag(kDontInline); // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitObjectLiteral(ObjectLiteral* node) {
+ increase_node_count();
+ add_flag(kDontInline); // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
+ increase_node_count();
+ add_flag(kDontInline); // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitCall(Call* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitCallNew(CallNew* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+ if (node->is_jsruntime()) {
+ // Don't try to inline JS runtime calls because we don't (currently) even
+ // optimize them.
+ add_flag(kDontInline);
+ } else if (node->function()->intrinsic_type == Runtime::INLINE &&
+ (node->name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
+ node->name()->IsEqualTo(CStrVector("_Arguments")))) {
+ // Don't inline the %_ArgumentsLength or %_Arguments because their
+ // implementation will not work. There is no stack frame to get them
+ // from.
+ add_flag(kDontInline);
+ }
+}
+
} } // namespace v8::internal
#include "small-pointer-list.h"
#include "smart-array-pointer.h"
#include "token.h"
+#include "utils.h"
#include "variables.h"
#include "zone-inl.h"
// Nodes of the abstract syntax tree. Only concrete classes are
// enumerated here.
+#define DECLARATION_NODE_LIST(V) \
+ V(VariableDeclaration) \
+ V(ModuleDeclaration) \
+
+#define MODULE_NODE_LIST(V) \
+ V(ModuleLiteral) \
+ V(ModuleVariable) \
+ V(ModulePath) \
+ V(ModuleUrl)
+
#define STATEMENT_NODE_LIST(V) \
V(Block) \
V(ExpressionStatement) \
V(ThisFunction)
#define AST_NODE_LIST(V) \
- V(Declaration) \
+ DECLARATION_NODE_LIST(V) \
+ MODULE_NODE_LIST(V) \
STATEMENT_NODE_LIST(V) \
EXPRESSION_NODE_LIST(V)
// Forward declarations
+class AstConstructionVisitor;
+template<class> class AstNodeFactory;
class AstVisitor;
+class Declaration;
+class Module;
class BreakableStatement;
class Expression;
class IterationStatement;
typedef ZoneList<Handle<Object> > ZoneObjectList;
+#define DECLARE_NODE_TYPE(type) \
+ virtual void Accept(AstVisitor* v); \
+ virtual AstNode::Type node_type() const { return AstNode::k##type; } \
+
+
+enum AstPropertiesFlag {
+ kDontInline,
+ kDontOptimize,
+ kDontSelfOptimize,
+ kDontSoftInline
+};
+
+
+class AstProperties BASE_EMBEDDED {
+ public:
+ class Flags : public EnumSet<AstPropertiesFlag, int> {};
+
+ AstProperties() : node_count_(0) { }
+
+ Flags* flags() { return &flags_; }
+ int node_count() { return node_count_; }
+ void add_node_count(int count) { node_count_ += count; }
+
+ private:
+ Flags flags_;
+ int node_count_;
+};
+
+
class AstNode: public ZoneObject {
public:
#define DECLARE_TYPE_ENUM(type) k##type,
// that emit code (function declarations).
static const int kDeclarationsId = 3;
- // Override ZoneObject's new to count allocated AST nodes.
void* operator new(size_t size, Zone* zone) {
- Isolate* isolate = zone->isolate();
- isolate->set_ast_node_count(isolate->ast_node_count() + 1);
return zone->New(static_cast<int>(size));
}
- AstNode() {}
+ AstNode() { }
virtual ~AstNode() { }
AST_NODE_LIST(DECLARE_NODE_FUNCTIONS)
#undef DECLARE_NODE_FUNCTIONS
+ virtual Declaration* AsDeclaration() { return NULL; }
virtual Statement* AsStatement() { return NULL; }
virtual Expression* AsExpression() { return NULL; }
virtual TargetCollector* AsTargetCollector() { return NULL; }
virtual IterationStatement* AsIterationStatement() { return NULL; }
virtual MaterializedLiteral* AsMaterializedLiteral() { return NULL; }
- // True if the node is simple enough for us to inline calls containing it.
- virtual bool IsInlineable() const = 0;
-
- static int Count() { return Isolate::Current()->ast_node_count(); }
static void ResetIds() { Isolate::Current()->set_ast_node_id(0); }
protected:
- static unsigned GetNextId(Isolate* isolate) {
+ static int GetNextId(Isolate* isolate) {
return ReserveIdRange(isolate, 1);
}
- static unsigned ReserveIdRange(Isolate* isolate, int n) {
- unsigned tmp = isolate->ast_node_id();
+ static int ReserveIdRange(Isolate* isolate, int n) {
+ int tmp = isolate->ast_node_id();
isolate->set_ast_node_id(tmp + n);
return tmp;
}
kTest
};
- explicit Expression(Isolate* isolate)
- : id_(GetNextId(isolate)),
- test_id_(GetNextId(isolate)) {}
-
virtual int position() const {
UNREACHABLE();
return 0;
unsigned id() const { return id_; }
unsigned test_id() const { return test_id_; }
+ protected:
+ explicit Expression(Isolate* isolate)
+ : id_(GetNextId(isolate)),
+ test_id_(GetNextId(isolate)) {}
+
private:
- unsigned id_;
- unsigned test_id_;
+ int id_;
+ int test_id_;
};
class Block: public BreakableStatement {
public:
- Block(Isolate* isolate,
- ZoneStringList* labels,
- int capacity,
- bool is_initializer_block)
- : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
- statements_(capacity),
- is_initializer_block_(is_initializer_block),
- block_scope_(NULL) {
- }
-
-
DECLARE_NODE_TYPE(Block)
- virtual bool IsInlineable() const;
-
void AddStatement(Statement* statement) { statements_.Add(statement); }
ZoneList<Statement*>* statements() { return &statements_; }
Scope* block_scope() const { return block_scope_; }
void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Block(Isolate* isolate,
+ ZoneStringList* labels,
+ int capacity,
+ bool is_initializer_block)
+ : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
+ statements_(capacity),
+ is_initializer_block_(is_initializer_block),
+ block_scope_(NULL) {
+ }
+
private:
ZoneList<Statement*> statements_;
bool is_initializer_block_;
class Declaration: public AstNode {
public:
+ VariableProxy* proxy() const { return proxy_; }
+ VariableMode mode() const { return mode_; }
+ Scope* scope() const { return scope_; }
+ virtual bool IsInlineable() const;
+
+ virtual Declaration* AsDeclaration() { return this; }
+ virtual VariableDeclaration* AsVariableDeclaration() { return NULL; }
+
+ protected:
Declaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* fun,
Scope* scope)
: proxy_(proxy),
mode_(mode),
- fun_(fun),
scope_(scope) {
ASSERT(mode == VAR ||
mode == CONST ||
mode == CONST_HARMONY ||
mode == LET);
- // At the moment there are no "const functions"'s in JavaScript...
- ASSERT(fun == NULL || mode == VAR || mode == LET);
}
- DECLARE_NODE_TYPE(Declaration)
+ private:
+ VariableProxy* proxy_;
+ VariableMode mode_;
+
+ // Nested scope from which the declaration originated.
+ Scope* scope_;
+};
+
+
+class VariableDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(VariableDeclaration)
+
+ virtual VariableDeclaration* AsVariableDeclaration() { return this; }
- VariableProxy* proxy() const { return proxy_; }
- VariableMode mode() const { return mode_; }
FunctionLiteral* fun() const { return fun_; } // may be NULL
virtual bool IsInlineable() const;
- Scope* scope() const { return scope_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ VariableDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* fun,
+ Scope* scope)
+ : Declaration(proxy, mode, scope),
+ fun_(fun) {
+ // At the moment there are no "const functions" in JavaScript...
+ ASSERT(fun == NULL || mode == VAR || mode == LET);
+ }
private:
- VariableProxy* proxy_;
- VariableMode mode_;
FunctionLiteral* fun_;
+};
- // Nested scope from which the declaration originated.
- Scope* scope_;
+
+class ModuleDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(ModuleDeclaration)
+
+ Module* module() const { return module_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ModuleDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope)
+ : Declaration(proxy, LET, scope),
+ module_(module) {
+ }
+
+ private:
+ Module* module_;
+};
+
+
+class Module: public AstNode {
+ // TODO(rossberg): stuff to come...
+ protected:
+ Module() {}
+};
+
+
+class ModuleLiteral: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModuleLiteral)
+
+ Block* body() const { return body_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ModuleLiteral(Block* body)
+ : body_(body) {
+ }
+
+ private:
+ Block* body_;
+};
+
+
+class ModuleVariable: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModuleVariable)
+
+ Variable* var() const { return var_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ModuleVariable(Variable* var)
+ : var_(var) {
+ }
+
+ private:
+ Variable* var_;
+};
+
+
+class ModulePath: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModulePath)
+
+ Module* module() const { return module_; }
+ Handle<String> name() const { return name_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ModulePath(Module* module, Handle<String> name)
+ : module_(module),
+ name_(name) {
+ }
+
+ private:
+ Module* module_;
+ Handle<String> name_;
+};
+
+
+class ModuleUrl: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModuleUrl)
+
+ Handle<String> url() const { return url_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ModuleUrl(Handle<String> url) : url_(url) {
+ }
+
+ private:
+ Handle<String> url_;
};
class DoWhileStatement: public IterationStatement {
public:
- DoWhileStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- cond_(NULL),
- condition_position_(-1),
- continue_id_(GetNextId(isolate)),
- back_edge_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(DoWhileStatement)
void Initialize(Expression* cond, Statement* body) {
virtual int StackCheckId() const { return back_edge_id_; }
int BackEdgeId() const { return back_edge_id_; }
- virtual bool IsInlineable() const;
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ DoWhileStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ cond_(NULL),
+ condition_position_(-1),
+ continue_id_(GetNextId(isolate)),
+ back_edge_id_(GetNextId(isolate)) {
+ }
private:
Expression* cond_;
class WhileStatement: public IterationStatement {
public:
- WhileStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- cond_(NULL),
- may_have_function_literal_(true),
- body_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(WhileStatement)
void Initialize(Expression* cond, Statement* body) {
void set_may_have_function_literal(bool value) {
may_have_function_literal_ = value;
}
- virtual bool IsInlineable() const;
// Bailout support.
virtual int ContinueId() const { return EntryId(); }
virtual int StackCheckId() const { return body_id_; }
int BodyId() const { return body_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ WhileStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ cond_(NULL),
+ may_have_function_literal_(true),
+ body_id_(GetNextId(isolate)) {
+ }
+
private:
Expression* cond_;
// True if there is a function literal subexpression in the condition.
class ForStatement: public IterationStatement {
public:
- ForStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- init_(NULL),
- cond_(NULL),
- next_(NULL),
- may_have_function_literal_(true),
- loop_variable_(NULL),
- continue_id_(GetNextId(isolate)),
- body_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(ForStatement)
void Initialize(Statement* init,
bool is_fast_smi_loop() { return loop_variable_ != NULL; }
Variable* loop_variable() { return loop_variable_; }
void set_loop_variable(Variable* var) { loop_variable_ = var; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ForStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ init_(NULL),
+ cond_(NULL),
+ next_(NULL),
+ may_have_function_literal_(true),
+ loop_variable_(NULL),
+ continue_id_(GetNextId(isolate)),
+ body_id_(GetNextId(isolate)) {
+ }
private:
Statement* init_;
class ForInStatement: public IterationStatement {
public:
- ForInStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- each_(NULL),
- enumerable_(NULL),
- assignment_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(ForInStatement)
void Initialize(Expression* each, Expression* enumerable, Statement* body) {
Expression* each() const { return each_; }
Expression* enumerable() const { return enumerable_; }
- virtual bool IsInlineable() const;
// Bailout support.
int AssignmentId() const { return assignment_id_; }
virtual int ContinueId() const { return EntryId(); }
virtual int StackCheckId() const { return EntryId(); }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ForInStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ each_(NULL),
+ enumerable_(NULL),
+ assignment_id_(GetNextId(isolate)) {
+ }
+
private:
Expression* each_;
Expression* enumerable_;
class ExpressionStatement: public Statement {
public:
- explicit ExpressionStatement(Expression* expression)
- : expression_(expression) { }
-
DECLARE_NODE_TYPE(ExpressionStatement)
- virtual bool IsInlineable() const;
-
void set_expression(Expression* e) { expression_ = e; }
Expression* expression() const { return expression_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ExpressionStatement(Expression* expression)
+ : expression_(expression) { }
+
private:
Expression* expression_;
};
class ContinueStatement: public Statement {
public:
- explicit ContinueStatement(IterationStatement* target)
- : target_(target) { }
-
DECLARE_NODE_TYPE(ContinueStatement)
IterationStatement* target() const { return target_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ContinueStatement(IterationStatement* target)
+ : target_(target) { }
private:
IterationStatement* target_;
class BreakStatement: public Statement {
public:
- explicit BreakStatement(BreakableStatement* target)
- : target_(target) { }
-
DECLARE_NODE_TYPE(BreakStatement)
BreakableStatement* target() const { return target_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit BreakStatement(BreakableStatement* target)
+ : target_(target) { }
private:
BreakableStatement* target_;
class ReturnStatement: public Statement {
public:
- explicit ReturnStatement(Expression* expression)
- : expression_(expression) { }
-
DECLARE_NODE_TYPE(ReturnStatement)
Expression* expression() const { return expression_; }
- virtual bool IsInlineable() const;
- private:
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ReturnStatement(Expression* expression)
+ : expression_(expression) { }
+
+ private:
Expression* expression_;
};
class WithStatement: public Statement {
public:
- WithStatement(Expression* expression, Statement* statement)
- : expression_(expression), statement_(statement) { }
-
DECLARE_NODE_TYPE(WithStatement)
Expression* expression() const { return expression_; }
Statement* statement() const { return statement_; }
- virtual bool IsInlineable() const;
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ WithStatement(Expression* expression, Statement* statement)
+ : expression_(expression),
+ statement_(statement) { }
private:
Expression* expression_;
class SwitchStatement: public BreakableStatement {
public:
- SwitchStatement(Isolate* isolate, ZoneStringList* labels)
- : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
- tag_(NULL),
- cases_(NULL) {
- }
-
-
DECLARE_NODE_TYPE(SwitchStatement)
void Initialize(Expression* tag, ZoneList<CaseClause*>* cases) {
Expression* tag() const { return tag_; }
ZoneList<CaseClause*>* cases() const { return cases_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ SwitchStatement(Isolate* isolate, ZoneStringList* labels)
+ : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
+ tag_(NULL),
+ cases_(NULL) { }
private:
Expression* tag_;
// given if-statement has a then- or an else-part containing code.
class IfStatement: public Statement {
public:
- IfStatement(Isolate* isolate,
- Expression* condition,
- Statement* then_statement,
- Statement* else_statement)
- : condition_(condition),
- then_statement_(then_statement),
- else_statement_(else_statement),
- if_id_(GetNextId(isolate)),
- then_id_(GetNextId(isolate)),
- else_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(IfStatement)
- virtual bool IsInlineable() const;
-
bool HasThenStatement() const { return !then_statement()->IsEmpty(); }
bool HasElseStatement() const { return !else_statement()->IsEmpty(); }
int ThenId() const { return then_id_; }
int ElseId() const { return else_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ IfStatement(Isolate* isolate,
+ Expression* condition,
+ Statement* then_statement,
+ Statement* else_statement)
+ : condition_(condition),
+ then_statement_(then_statement),
+ else_statement_(else_statement),
+ if_id_(GetNextId(isolate)),
+ then_id_(GetNextId(isolate)),
+ else_id_(GetNextId(isolate)) {
+ }
+
private:
Expression* condition_;
Statement* then_statement_;
// stack in the compiler; this should probably be reworked.
class TargetCollector: public AstNode {
public:
- TargetCollector(): targets_(0) { }
+ TargetCollector() : targets_(0) { }
// Adds a jump target to the collector. The collector stores a pointer not
// a copy of the target to make binding work, so make sure not to pass in
virtual TargetCollector* AsTargetCollector() { return this; }
ZoneList<Label*>* targets() { return &targets_; }
- virtual bool IsInlineable() const;
private:
ZoneList<Label*> targets_;
class TryStatement: public Statement {
public:
- explicit TryStatement(int index, Block* try_block)
- : index_(index),
- try_block_(try_block),
- escaping_targets_(NULL) {
- }
-
void set_escaping_targets(ZoneList<Label*>* targets) {
escaping_targets_ = targets;
}
int index() const { return index_; }
Block* try_block() const { return try_block_; }
ZoneList<Label*>* escaping_targets() const { return escaping_targets_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ TryStatement(int index, Block* try_block)
+ : index_(index),
+ try_block_(try_block),
+ escaping_targets_(NULL) { }
private:
// Unique (per-function) index of this handler. This is not an AST ID.
class TryCatchStatement: public TryStatement {
public:
+ DECLARE_NODE_TYPE(TryCatchStatement)
+
+ Scope* scope() { return scope_; }
+ Variable* variable() { return variable_; }
+ Block* catch_block() const { return catch_block_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
TryCatchStatement(int index,
Block* try_block,
Scope* scope,
catch_block_(catch_block) {
}
- DECLARE_NODE_TYPE(TryCatchStatement)
-
- Scope* scope() { return scope_; }
- Variable* variable() { return variable_; }
- Block* catch_block() const { return catch_block_; }
- virtual bool IsInlineable() const;
-
private:
Scope* scope_;
Variable* variable_;
class TryFinallyStatement: public TryStatement {
public:
- TryFinallyStatement(int index, Block* try_block, Block* finally_block)
- : TryStatement(index, try_block),
- finally_block_(finally_block) { }
-
DECLARE_NODE_TYPE(TryFinallyStatement)
Block* finally_block() const { return finally_block_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ TryFinallyStatement(int index, Block* try_block, Block* finally_block)
+ : TryStatement(index, try_block),
+ finally_block_(finally_block) { }
private:
Block* finally_block_;
class DebuggerStatement: public Statement {
public:
DECLARE_NODE_TYPE(DebuggerStatement)
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ DebuggerStatement() {}
};
public:
DECLARE_NODE_TYPE(EmptyStatement)
- virtual bool IsInlineable() const;
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ EmptyStatement() {}
};
class Literal: public Expression {
public:
- Literal(Isolate* isolate, Handle<Object> handle)
- : Expression(isolate), handle_(handle) { }
-
DECLARE_NODE_TYPE(Literal)
// Check if this literal is identical to the other literal.
}
Handle<Object> handle() const { return handle_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Literal(Isolate* isolate, Handle<Object> handle)
+ : Expression(isolate),
+ handle_(handle) { }
private:
Handle<Object> handle_;
// Base class for literals that needs space in the corresponding JSFunction.
class MaterializedLiteral: public Expression {
public:
- MaterializedLiteral(Isolate* isolate,
- int literal_index,
- bool is_simple,
- int depth)
- : Expression(isolate),
- literal_index_(literal_index),
- is_simple_(is_simple),
- depth_(depth) {}
-
virtual MaterializedLiteral* AsMaterializedLiteral() { return this; }
int literal_index() { return literal_index_; }
bool is_simple() const { return is_simple_; }
int depth() const { return depth_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ MaterializedLiteral(Isolate* isolate,
+ int literal_index,
+ bool is_simple,
+ int depth)
+ : Expression(isolate),
+ literal_index_(literal_index),
+ is_simple_(is_simple),
+ depth_(depth) {}
private:
int literal_index_;
};
Property(Literal* key, Expression* value);
- Property(bool is_getter, FunctionLiteral* value);
Literal* key() { return key_; }
Expression* value() { return value_; }
void set_emit_store(bool emit_store);
bool emit_store();
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Property(bool is_getter, FunctionLiteral* value);
+ void set_key(Literal* key) { key_ = key; }
+
private:
Literal* key_;
Expression* value_;
bool emit_store_;
};
- ObjectLiteral(Isolate* isolate,
- Handle<FixedArray> constant_properties,
- ZoneList<Property*>* properties,
- int literal_index,
- bool is_simple,
- bool fast_elements,
- int depth,
- bool has_function)
- : MaterializedLiteral(isolate, literal_index, is_simple, depth),
- constant_properties_(constant_properties),
- properties_(properties),
- fast_elements_(fast_elements),
- has_function_(has_function) {}
-
DECLARE_NODE_TYPE(ObjectLiteral)
Handle<FixedArray> constant_properties() const {
kHasFunction = 1 << 1
};
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ObjectLiteral(Isolate* isolate,
+ Handle<FixedArray> constant_properties,
+ ZoneList<Property*>* properties,
+ int literal_index,
+ bool is_simple,
+ bool fast_elements,
+ int depth,
+ bool has_function)
+ : MaterializedLiteral(isolate, literal_index, is_simple, depth),
+ constant_properties_(constant_properties),
+ properties_(properties),
+ fast_elements_(fast_elements),
+ has_function_(has_function) {}
+
private:
Handle<FixedArray> constant_properties_;
ZoneList<Property*>* properties_;
// Node for capturing a regexp literal.
class RegExpLiteral: public MaterializedLiteral {
public:
+ DECLARE_NODE_TYPE(RegExpLiteral)
+
+ Handle<String> pattern() const { return pattern_; }
+ Handle<String> flags() const { return flags_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
RegExpLiteral(Isolate* isolate,
Handle<String> pattern,
Handle<String> flags,
pattern_(pattern),
flags_(flags) {}
- DECLARE_NODE_TYPE(RegExpLiteral)
-
- Handle<String> pattern() const { return pattern_; }
- Handle<String> flags() const { return flags_; }
-
private:
Handle<String> pattern_;
Handle<String> flags_;
// for minimizing the work when constructing it at runtime.
class ArrayLiteral: public MaterializedLiteral {
public:
+ DECLARE_NODE_TYPE(ArrayLiteral)
+
+ Handle<FixedArray> constant_elements() const { return constant_elements_; }
+ ZoneList<Expression*>* values() const { return values_; }
+
+ // Return an AST id for an element that is used in simulate instructions.
+ int GetIdForElement(int i) { return first_element_id_ + i; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
ArrayLiteral(Isolate* isolate,
Handle<FixedArray> constant_elements,
ZoneList<Expression*>* values,
values_(values),
first_element_id_(ReserveIdRange(isolate, values->length())) {}
- DECLARE_NODE_TYPE(ArrayLiteral)
-
- Handle<FixedArray> constant_elements() const { return constant_elements_; }
- ZoneList<Expression*>* values() const { return values_; }
-
- // Return an AST id for an element that is used in simulate instructions.
- int GetIdForElement(int i) { return first_element_id_ + i; }
-
private:
Handle<FixedArray> constant_elements_;
ZoneList<Expression*>* values_;
class VariableProxy: public Expression {
public:
- VariableProxy(Isolate* isolate, Variable* var);
-
- VariableProxy(Isolate* isolate,
- Handle<String> name,
- bool is_this,
- int position = RelocInfo::kNoPosition);
-
DECLARE_NODE_TYPE(VariableProxy)
virtual bool IsValidLeftHandSide() {
return var_ == NULL ? true : var_->IsValidLeftHandSide();
}
- virtual bool IsInlineable() const;
-
bool IsVariable(Handle<String> n) {
return !is_this() && name().is_identical_to(n);
}
void BindTo(Variable* var);
protected:
+ template<class> friend class AstNodeFactory;
+
+ VariableProxy(Isolate* isolate, Variable* var);
+
+ VariableProxy(Isolate* isolate,
+ Handle<String> name,
+ bool is_this,
+ int position);
+
Handle<String> name_;
Variable* var_; // resolved variable, or NULL
bool is_this_;
class Property: public Expression {
public:
- Property(Isolate* isolate,
- Expression* obj,
- Expression* key,
- int pos)
- : Expression(isolate),
- obj_(obj),
- key_(key),
- pos_(pos),
- is_monomorphic_(false),
- is_array_length_(false),
- is_string_length_(false),
- is_string_access_(false),
- is_function_prototype_(false) { }
-
DECLARE_NODE_TYPE(Property)
virtual bool IsValidLeftHandSide() { return true; }
- virtual bool IsInlineable() const;
Expression* obj() const { return obj_; }
Expression* key() const { return key_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
bool IsArrayLength() { return is_array_length_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Property(Isolate* isolate,
+ Expression* obj,
+ Expression* key,
+ int pos)
+ : Expression(isolate),
+ obj_(obj),
+ key_(key),
+ pos_(pos),
+ is_monomorphic_(false),
+ is_array_length_(false),
+ is_string_length_(false),
+ is_string_access_(false),
+ is_function_prototype_(false) { }
+
private:
Expression* obj_;
Expression* key_;
class Call: public Expression {
public:
- Call(Isolate* isolate,
- Expression* expression,
- ZoneList<Expression*>* arguments,
- int pos)
- : Expression(isolate),
- expression_(expression),
- arguments_(arguments),
- pos_(pos),
- is_monomorphic_(false),
- check_type_(RECEIVER_MAP_CHECK),
- return_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(Call)
- virtual bool IsInlineable() const;
-
Expression* expression() const { return expression_; }
ZoneList<Expression*>* arguments() const { return arguments_; }
virtual int position() const { return pos_; }
bool return_is_recorded_;
#endif
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Call(Isolate* isolate,
+ Expression* expression,
+ ZoneList<Expression*>* arguments,
+ int pos)
+ : Expression(isolate),
+ expression_(expression),
+ arguments_(arguments),
+ pos_(pos),
+ is_monomorphic_(false),
+ check_type_(RECEIVER_MAP_CHECK),
+ return_id_(GetNextId(isolate)) { }
+
private:
Expression* expression_;
ZoneList<Expression*>* arguments_;
class CallNew: public Expression {
public:
+ DECLARE_NODE_TYPE(CallNew)
+
+ Expression* expression() const { return expression_; }
+ ZoneList<Expression*>* arguments() const { return arguments_; }
+ virtual int position() const { return pos_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
CallNew(Isolate* isolate,
Expression* expression,
ZoneList<Expression*>* arguments,
arguments_(arguments),
pos_(pos) { }
- DECLARE_NODE_TYPE(CallNew)
-
- virtual bool IsInlineable() const;
-
- Expression* expression() const { return expression_; }
- ZoneList<Expression*>* arguments() const { return arguments_; }
- virtual int position() const { return pos_; }
-
private:
Expression* expression_;
ZoneList<Expression*>* arguments_;
// implemented in JavaScript (see "v8natives.js").
class CallRuntime: public Expression {
public:
+ DECLARE_NODE_TYPE(CallRuntime)
+
+ Handle<String> name() const { return name_; }
+ const Runtime::Function* function() const { return function_; }
+ ZoneList<Expression*>* arguments() const { return arguments_; }
+ bool is_jsruntime() const { return function_ == NULL; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
CallRuntime(Isolate* isolate,
Handle<String> name,
const Runtime::Function* function,
function_(function),
arguments_(arguments) { }
- DECLARE_NODE_TYPE(CallRuntime)
-
- virtual bool IsInlineable() const;
-
- Handle<String> name() const { return name_; }
- const Runtime::Function* function() const { return function_; }
- ZoneList<Expression*>* arguments() const { return arguments_; }
- bool is_jsruntime() const { return function_ == NULL; }
-
private:
Handle<String> name_;
const Runtime::Function* function_;
class UnaryOperation: public Expression {
public:
+ DECLARE_NODE_TYPE(UnaryOperation)
+
+ virtual bool ResultOverwriteAllowed();
+
+ Token::Value op() const { return op_; }
+ Expression* expression() const { return expression_; }
+ virtual int position() const { return pos_; }
+
+ int MaterializeTrueId() { return materialize_true_id_; }
+ int MaterializeFalseId() { return materialize_false_id_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
UnaryOperation(Isolate* isolate,
Token::Value op,
Expression* expression,
}
}
- DECLARE_NODE_TYPE(UnaryOperation)
-
- virtual bool IsInlineable() const;
-
- virtual bool ResultOverwriteAllowed();
-
- Token::Value op() const { return op_; }
- Expression* expression() const { return expression_; }
- virtual int position() const { return pos_; }
-
- int MaterializeTrueId() { return materialize_true_id_; }
- int MaterializeFalseId() { return materialize_false_id_; }
-
private:
Token::Value op_;
Expression* expression_;
class BinaryOperation: public Expression {
public:
- BinaryOperation(Isolate* isolate,
- Token::Value op,
- Expression* left,
- Expression* right,
- int pos)
- : Expression(isolate), op_(op), left_(left), right_(right), pos_(pos) {
- ASSERT(Token::IsBinaryOp(op));
- right_id_ = (op == Token::AND || op == Token::OR)
- ? static_cast<int>(GetNextId(isolate))
- : AstNode::kNoNumber;
- }
-
DECLARE_NODE_TYPE(BinaryOperation)
- virtual bool IsInlineable() const;
-
virtual bool ResultOverwriteAllowed();
Token::Value op() const { return op_; }
// Bailout support.
int RightId() const { return right_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ BinaryOperation(Isolate* isolate,
+ Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos)
+ : Expression(isolate), op_(op), left_(left), right_(right), pos_(pos) {
+ ASSERT(Token::IsBinaryOp(op));
+ right_id_ = (op == Token::AND || op == Token::OR)
+ ? GetNextId(isolate)
+ : AstNode::kNoNumber;
+ }
+
private:
Token::Value op_;
Expression* left_;
class CountOperation: public Expression {
public:
- CountOperation(Isolate* isolate,
- Token::Value op,
- bool is_prefix,
- Expression* expr,
- int pos)
- : Expression(isolate),
- op_(op),
- is_prefix_(is_prefix),
- expression_(expr),
- pos_(pos),
- assignment_id_(GetNextId(isolate)),
- count_id_(GetNextId(isolate)) {}
-
DECLARE_NODE_TYPE(CountOperation)
bool is_prefix() const { return is_prefix_; }
virtual void MarkAsStatement() { is_prefix_ = true; }
- virtual bool IsInlineable() const;
-
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
int AssignmentId() const { return assignment_id_; }
int CountId() const { return count_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ CountOperation(Isolate* isolate,
+ Token::Value op,
+ bool is_prefix,
+ Expression* expr,
+ int pos)
+ : Expression(isolate),
+ op_(op),
+ is_prefix_(is_prefix),
+ expression_(expr),
+ pos_(pos),
+ assignment_id_(GetNextId(isolate)),
+ count_id_(GetNextId(isolate)) {}
+
private:
Token::Value op_;
bool is_prefix_;
class CompareOperation: public Expression {
public:
- CompareOperation(Isolate* isolate,
- Token::Value op,
- Expression* left,
- Expression* right,
- int pos)
- : Expression(isolate),
- op_(op),
- left_(left),
- right_(right),
- pos_(pos),
- compare_type_(NONE) {
- ASSERT(Token::IsCompareOp(op));
- }
-
DECLARE_NODE_TYPE(CompareOperation)
Token::Value op() const { return op_; }
Expression* right() const { return right_; }
virtual int position() const { return pos_; }
- virtual bool IsInlineable() const;
-
// Type feedback information.
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
bool IsSmiCompare() { return compare_type_ == SMI_ONLY; }
bool IsLiteralCompareUndefined(Expression** expr);
bool IsLiteralCompareNull(Expression** expr);
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ CompareOperation(Isolate* isolate,
+ Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos)
+ : Expression(isolate),
+ op_(op),
+ left_(left),
+ right_(right),
+ pos_(pos),
+ compare_type_(NONE) {
+ ASSERT(Token::IsCompareOp(op));
+ }
+
private:
Token::Value op_;
Expression* left_;
class Conditional: public Expression {
public:
+ DECLARE_NODE_TYPE(Conditional)
+
+ Expression* condition() const { return condition_; }
+ Expression* then_expression() const { return then_expression_; }
+ Expression* else_expression() const { return else_expression_; }
+
+ int then_expression_position() const { return then_expression_position_; }
+ int else_expression_position() const { return else_expression_position_; }
+
+ int ThenId() const { return then_id_; }
+ int ElseId() const { return else_id_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
Conditional(Isolate* isolate,
Expression* condition,
Expression* then_expression,
then_expression_position_(then_expression_position),
else_expression_position_(else_expression_position),
then_id_(GetNextId(isolate)),
- else_id_(GetNextId(isolate)) {
- }
-
- DECLARE_NODE_TYPE(Conditional)
-
- virtual bool IsInlineable() const;
-
- Expression* condition() const { return condition_; }
- Expression* then_expression() const { return then_expression_; }
- Expression* else_expression() const { return else_expression_; }
-
- int then_expression_position() const { return then_expression_position_; }
- int else_expression_position() const { return else_expression_position_; }
-
- int ThenId() const { return then_id_; }
- int ElseId() const { return else_id_; }
+ else_id_(GetNextId(isolate)) { }
private:
Expression* condition_;
class Assignment: public Expression {
public:
- Assignment(Isolate* isolate,
- Token::Value op,
- Expression* target,
- Expression* value,
- int pos);
-
DECLARE_NODE_TYPE(Assignment)
- virtual bool IsInlineable() const;
-
Assignment* AsSimpleAssignment() { return !is_compound() ? this : NULL; }
Token::Value binary_op() const;
int CompoundLoadId() const { return compound_load_id_; }
int AssignmentId() const { return assignment_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Assignment(Isolate* isolate,
+ Token::Value op,
+ Expression* target,
+ Expression* value,
+ int pos);
+
+ template<class Visitor>
+ void Init(Isolate* isolate, AstNodeFactory<Visitor>* factory) {
+ ASSERT(Token::IsAssignmentOp(op_));
+ if (is_compound()) {
+ binary_operation_ =
+ factory->NewBinaryOperation(binary_op(), target_, value_, pos_ + 1);
+ compound_load_id_ = GetNextId(isolate);
+ }
+ }
+
private:
Token::Value op_;
Expression* target_;
class Throw: public Expression {
public:
- Throw(Isolate* isolate, Expression* exception, int pos)
- : Expression(isolate), exception_(exception), pos_(pos) {}
-
DECLARE_NODE_TYPE(Throw)
Expression* exception() const { return exception_; }
virtual int position() const { return pos_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Throw(Isolate* isolate, Expression* exception, int pos)
+ : Expression(isolate), exception_(exception), pos_(pos) {}
private:
Expression* exception_;
DECLARATION
};
- FunctionLiteral(Isolate* isolate,
- Handle<String> name,
- Scope* scope,
- ZoneList<Statement*>* body,
- int materialized_literal_count,
- int expected_property_count,
- int handler_count,
- bool has_only_simple_this_property_assignments,
- Handle<FixedArray> this_property_assignments,
- int parameter_count,
- Type type,
- bool has_duplicate_parameters)
- : Expression(isolate),
- name_(name),
- scope_(scope),
- body_(body),
- this_property_assignments_(this_property_assignments),
- inferred_name_(isolate->factory()->empty_string()),
- materialized_literal_count_(materialized_literal_count),
- expected_property_count_(expected_property_count),
- handler_count_(handler_count),
- parameter_count_(parameter_count),
- function_token_position_(RelocInfo::kNoPosition) {
- bitfield_ =
- HasOnlySimpleThisPropertyAssignments::encode(
- has_only_simple_this_property_assignments) |
- IsExpression::encode(type != DECLARATION) |
- IsAnonymous::encode(type == ANONYMOUS_EXPRESSION) |
- Pretenure::encode(false) |
- HasDuplicateParameters::encode(has_duplicate_parameters);
- }
-
DECLARE_NODE_TYPE(FunctionLiteral)
Handle<String> name() const { return name_; }
bool pretenure() { return Pretenure::decode(bitfield_); }
void set_pretenure() { bitfield_ |= Pretenure::encode(true); }
- virtual bool IsInlineable() const;
bool has_duplicate_parameters() {
return HasDuplicateParameters::decode(bitfield_);
}
+ int ast_node_count() { return ast_properties_.node_count(); }
+ AstProperties::Flags* flags() { return ast_properties_.flags(); }
+ void set_ast_properties(AstProperties* ast_properties) {
+ ast_properties_ = *ast_properties;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ FunctionLiteral(Isolate* isolate,
+ Handle<String> name,
+ Scope* scope,
+ ZoneList<Statement*>* body,
+ int materialized_literal_count,
+ int expected_property_count,
+ int handler_count,
+ bool has_only_simple_this_property_assignments,
+ Handle<FixedArray> this_property_assignments,
+ int parameter_count,
+ Type type,
+ bool has_duplicate_parameters)
+ : Expression(isolate),
+ name_(name),
+ scope_(scope),
+ body_(body),
+ this_property_assignments_(this_property_assignments),
+ inferred_name_(isolate->factory()->empty_string()),
+ materialized_literal_count_(materialized_literal_count),
+ expected_property_count_(expected_property_count),
+ handler_count_(handler_count),
+ parameter_count_(parameter_count),
+ function_token_position_(RelocInfo::kNoPosition) {
+ bitfield_ =
+ HasOnlySimpleThisPropertyAssignments::encode(
+ has_only_simple_this_property_assignments) |
+ IsExpression::encode(type != DECLARATION) |
+ IsAnonymous::encode(type == ANONYMOUS_EXPRESSION) |
+ Pretenure::encode(false) |
+ HasDuplicateParameters::encode(has_duplicate_parameters);
+ }
+
private:
Handle<String> name_;
Scope* scope_;
ZoneList<Statement*>* body_;
Handle<FixedArray> this_property_assignments_;
Handle<String> inferred_name_;
+ AstProperties ast_properties_;
int materialized_literal_count_;
int expected_property_count_;
class SharedFunctionInfoLiteral: public Expression {
public:
- SharedFunctionInfoLiteral(
- Isolate* isolate,
- Handle<SharedFunctionInfo> shared_function_info)
- : Expression(isolate), shared_function_info_(shared_function_info) { }
-
DECLARE_NODE_TYPE(SharedFunctionInfoLiteral)
Handle<SharedFunctionInfo> shared_function_info() const {
return shared_function_info_;
}
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ SharedFunctionInfoLiteral(
+ Isolate* isolate,
+ Handle<SharedFunctionInfo> shared_function_info)
+ : Expression(isolate),
+ shared_function_info_(shared_function_info) { }
private:
Handle<SharedFunctionInfo> shared_function_info_;
class ThisFunction: public Expression {
public:
- explicit ThisFunction(Isolate* isolate) : Expression(isolate) {}
DECLARE_NODE_TYPE(ThisFunction)
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ThisFunction(Isolate* isolate): Expression(isolate) {}
};
};
+// ----------------------------------------------------------------------------
+// Construction time visitor.
+
+// Visitor applied to each AST node at construction time (via
+// AstNodeFactory<AstConstructionVisitor>) so that aggregate properties
+// of the tree — node count and AstPropertiesFlag bits — are accumulated
+// while the tree is being built.
+class AstConstructionVisitor BASE_EMBEDDED {
+ public:
+  AstConstructionVisitor() { }
+
+  AstProperties* ast_properties() { return &properties_; }
+
+ private:
+  template<class> friend class AstNodeFactory;
+
+  // Node visitors.
+#define DEF_VISIT(type)                         \
+  void Visit##type(type* node);
+  AST_NODE_LIST(DEF_VISIT)
+#undef DEF_VISIT
+
+  // Helpers used by the per-node Visit* methods defined out of line.
+  void increase_node_count() { properties_.add_node_count(1); }
+  void add_flag(AstPropertiesFlag flag) { properties_.flags()->Add(flag); }
+
+  AstProperties properties_;
+};
+
+
+// No-op construction-time visitor: every Visit* method is an empty
+// inline stub, for factory users that want no per-node processing.
+class AstNullVisitor BASE_EMBEDDED {
+ public:
+  // Node visitors.
+#define DEF_VISIT(type)                         \
+  void Visit##type(type* node) {}
+  AST_NODE_LIST(DEF_VISIT)
+#undef DEF_VISIT
+};
+
+
+
+// ----------------------------------------------------------------------------
+// AstNode factory
+
+template<class Visitor>
+class AstNodeFactory BASE_EMBEDDED {
+ public:
+ explicit AstNodeFactory(Isolate* isolate)
+ : isolate_(isolate),
+ zone_(isolate_->zone()) { }
+
+ Visitor* visitor() { return &visitor_; }
+
+#define VISIT_AND_RETURN(NodeType, node) \
+ visitor_.Visit##NodeType((node)); \
+ return node;
+
+ VariableDeclaration* NewVariableDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* fun,
+ Scope* scope) {
+ VariableDeclaration* decl =
+ new(zone_) VariableDeclaration(proxy, mode, fun, scope);
+ VISIT_AND_RETURN(VariableDeclaration, decl)
+ }
+
+ ModuleDeclaration* NewModuleDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope) {
+ ModuleDeclaration* decl =
+ new(zone_) ModuleDeclaration(proxy, module, scope);
+ VISIT_AND_RETURN(ModuleDeclaration, decl)
+ }
+
+ ModuleLiteral* NewModuleLiteral(Block* body) {
+ ModuleLiteral* module = new(zone_) ModuleLiteral(body);
+ VISIT_AND_RETURN(ModuleLiteral, module)
+ }
+
+  ModuleVariable* NewModuleVariable(Variable* var) {
+    ModuleVariable* module = new(zone_) ModuleVariable(var);
+    // Visit as ModuleVariable (was ModuleLiteral: copy-paste error that
+    // dispatched the wrong visitor method for this node type).
+    VISIT_AND_RETURN(ModuleVariable, module)
+  }
+
+  ModulePath* NewModulePath(Module* origin, Handle<String> name) {
+    ModulePath* module = new(zone_) ModulePath(origin, name);
+    // Visit as ModulePath (was ModuleLiteral: copy-paste error that
+    // dispatched the wrong visitor method for this node type).
+    VISIT_AND_RETURN(ModulePath, module)
+  }
+
+  ModuleUrl* NewModuleUrl(Handle<String> url) {
+    ModuleUrl* module = new(zone_) ModuleUrl(url);
+    // Visit as ModuleUrl (was ModuleLiteral: copy-paste error that
+    // dispatched the wrong visitor method for this node type).
+    VISIT_AND_RETURN(ModuleUrl, module)
+  }
+
+ Block* NewBlock(ZoneStringList* labels,
+ int capacity,
+ bool is_initializer_block) {
+ Block* block = new(zone_) Block(
+ isolate_, labels, capacity, is_initializer_block);
+ VISIT_AND_RETURN(Block, block)
+ }
+
+#define STATEMENT_WITH_LABELS(NodeType) \
+ NodeType* New##NodeType(ZoneStringList* labels) { \
+ NodeType* stmt = new(zone_) NodeType(isolate_, labels); \
+ VISIT_AND_RETURN(NodeType, stmt); \
+ }
+ STATEMENT_WITH_LABELS(DoWhileStatement)
+ STATEMENT_WITH_LABELS(WhileStatement)
+ STATEMENT_WITH_LABELS(ForStatement)
+ STATEMENT_WITH_LABELS(ForInStatement)
+ STATEMENT_WITH_LABELS(SwitchStatement)
+#undef STATEMENT_WITH_LABELS
+
+ ExpressionStatement* NewExpressionStatement(Expression* expression) {
+ ExpressionStatement* stmt = new(zone_) ExpressionStatement(expression);
+ VISIT_AND_RETURN(ExpressionStatement, stmt)
+ }
+
+ ContinueStatement* NewContinueStatement(IterationStatement* target) {
+ ContinueStatement* stmt = new(zone_) ContinueStatement(target);
+ VISIT_AND_RETURN(ContinueStatement, stmt)
+ }
+
+ BreakStatement* NewBreakStatement(BreakableStatement* target) {
+ BreakStatement* stmt = new(zone_) BreakStatement(target);
+ VISIT_AND_RETURN(BreakStatement, stmt)
+ }
+
+ ReturnStatement* NewReturnStatement(Expression* expression) {
+ ReturnStatement* stmt = new(zone_) ReturnStatement(expression);
+ VISIT_AND_RETURN(ReturnStatement, stmt)
+ }
+
+ WithStatement* NewWithStatement(Expression* expression,
+ Statement* statement) {
+ WithStatement* stmt = new(zone_) WithStatement(expression, statement);
+ VISIT_AND_RETURN(WithStatement, stmt)
+ }
+
+ IfStatement* NewIfStatement(Expression* condition,
+ Statement* then_statement,
+ Statement* else_statement) {
+ IfStatement* stmt = new(zone_) IfStatement(
+ isolate_, condition, then_statement, else_statement);
+ VISIT_AND_RETURN(IfStatement, stmt)
+ }
+
+ TryCatchStatement* NewTryCatchStatement(int index,
+ Block* try_block,
+ Scope* scope,
+ Variable* variable,
+ Block* catch_block) {
+ TryCatchStatement* stmt = new(zone_) TryCatchStatement(
+ index, try_block, scope, variable, catch_block);
+ VISIT_AND_RETURN(TryCatchStatement, stmt)
+ }
+
+ TryFinallyStatement* NewTryFinallyStatement(int index,
+ Block* try_block,
+ Block* finally_block) {
+ TryFinallyStatement* stmt =
+ new(zone_) TryFinallyStatement(index, try_block, finally_block);
+ VISIT_AND_RETURN(TryFinallyStatement, stmt)
+ }
+
+ DebuggerStatement* NewDebuggerStatement() {
+ DebuggerStatement* stmt = new(zone_) DebuggerStatement();
+ VISIT_AND_RETURN(DebuggerStatement, stmt)
+ }
+
+ EmptyStatement* NewEmptyStatement() {
+ return new(zone_) EmptyStatement();
+ }
+
+ Literal* NewLiteral(Handle<Object> handle) {
+ Literal* lit = new(zone_) Literal(isolate_, handle);
+ VISIT_AND_RETURN(Literal, lit)
+ }
+
+ Literal* NewNumberLiteral(double number) {
+ return NewLiteral(isolate_->factory()->NewNumber(number, TENURED));
+ }
+
+ ObjectLiteral* NewObjectLiteral(
+ Handle<FixedArray> constant_properties,
+ ZoneList<ObjectLiteral::Property*>* properties,
+ int literal_index,
+ bool is_simple,
+ bool fast_elements,
+ int depth,
+ bool has_function) {
+ ObjectLiteral* lit = new(zone_) ObjectLiteral(
+ isolate_, constant_properties, properties, literal_index,
+ is_simple, fast_elements, depth, has_function);
+ VISIT_AND_RETURN(ObjectLiteral, lit)
+ }
+
+ ObjectLiteral::Property* NewObjectLiteralProperty(bool is_getter,
+ FunctionLiteral* value) {
+ ObjectLiteral::Property* prop =
+ new(zone_) ObjectLiteral::Property(is_getter, value);
+ prop->set_key(NewLiteral(value->name()));
+ return prop; // Not an AST node, will not be visited.
+ }
+
+ RegExpLiteral* NewRegExpLiteral(Handle<String> pattern,
+ Handle<String> flags,
+ int literal_index) {
+ RegExpLiteral* lit =
+ new(zone_) RegExpLiteral(isolate_, pattern, flags, literal_index);
+ VISIT_AND_RETURN(RegExpLiteral, lit);
+ }
+
+ ArrayLiteral* NewArrayLiteral(Handle<FixedArray> constant_elements,
+ ZoneList<Expression*>* values,
+ int literal_index,
+ bool is_simple,
+ int depth) {
+ ArrayLiteral* lit = new(zone_) ArrayLiteral(
+ isolate_, constant_elements, values, literal_index, is_simple, depth);
+ VISIT_AND_RETURN(ArrayLiteral, lit)
+ }
+
+ VariableProxy* NewVariableProxy(Variable* var) {
+ VariableProxy* proxy = new(zone_) VariableProxy(isolate_, var);
+ VISIT_AND_RETURN(VariableProxy, proxy)
+ }
+
+ VariableProxy* NewVariableProxy(Handle<String> name,
+ bool is_this,
+ int position = RelocInfo::kNoPosition) {
+ VariableProxy* proxy =
+ new(zone_) VariableProxy(isolate_, name, is_this, position);
+ VISIT_AND_RETURN(VariableProxy, proxy)
+ }
+
+ Property* NewProperty(Expression* obj, Expression* key, int pos) {
+ Property* prop = new(zone_) Property(isolate_, obj, key, pos);
+ VISIT_AND_RETURN(Property, prop)
+ }
+
+ Call* NewCall(Expression* expression,
+ ZoneList<Expression*>* arguments,
+ int pos) {
+ Call* call = new(zone_) Call(isolate_, expression, arguments, pos);
+ VISIT_AND_RETURN(Call, call)
+ }
+
+ CallNew* NewCallNew(Expression* expression,
+ ZoneList<Expression*>* arguments,
+ int pos) {
+ CallNew* call = new(zone_) CallNew(isolate_, expression, arguments, pos);
+ VISIT_AND_RETURN(CallNew, call)
+ }
+
+ CallRuntime* NewCallRuntime(Handle<String> name,
+ const Runtime::Function* function,
+ ZoneList<Expression*>* arguments) {
+ CallRuntime* call =
+ new(zone_) CallRuntime(isolate_, name, function, arguments);
+ VISIT_AND_RETURN(CallRuntime, call)
+ }
+
+ UnaryOperation* NewUnaryOperation(Token::Value op,
+ Expression* expression,
+ int pos) {
+ UnaryOperation* node =
+ new(zone_) UnaryOperation(isolate_, op, expression, pos);
+ VISIT_AND_RETURN(UnaryOperation, node)
+ }
+
+ BinaryOperation* NewBinaryOperation(Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos) {
+ BinaryOperation* node =
+ new(zone_) BinaryOperation(isolate_, op, left, right, pos);
+ VISIT_AND_RETURN(BinaryOperation, node)
+ }
+
+ CountOperation* NewCountOperation(Token::Value op,
+ bool is_prefix,
+ Expression* expr,
+ int pos) {
+ CountOperation* node =
+ new(zone_) CountOperation(isolate_, op, is_prefix, expr, pos);
+ VISIT_AND_RETURN(CountOperation, node)
+ }
+
+ CompareOperation* NewCompareOperation(Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos) {
+ CompareOperation* node =
+ new(zone_) CompareOperation(isolate_, op, left, right, pos);
+ VISIT_AND_RETURN(CompareOperation, node)
+ }
+
+ Conditional* NewConditional(Expression* condition,
+ Expression* then_expression,
+ Expression* else_expression,
+ int then_expression_position,
+ int else_expression_position) {
+ Conditional* cond = new(zone_) Conditional(
+ isolate_, condition, then_expression, else_expression,
+ then_expression_position, else_expression_position);
+ VISIT_AND_RETURN(Conditional, cond)
+ }
+
+ Assignment* NewAssignment(Token::Value op,
+ Expression* target,
+ Expression* value,
+ int pos) {
+ Assignment* assign =
+ new(zone_) Assignment(isolate_, op, target, value, pos);
+ assign->Init(isolate_, this);
+ VISIT_AND_RETURN(Assignment, assign)
+ }
+
+ Throw* NewThrow(Expression* exception, int pos) {
+ Throw* t = new(zone_) Throw(isolate_, exception, pos);
+ VISIT_AND_RETURN(Throw, t)
+ }
+
+ FunctionLiteral* NewFunctionLiteral(
+ Handle<String> name,
+ Scope* scope,
+ ZoneList<Statement*>* body,
+ int materialized_literal_count,
+ int expected_property_count,
+ int handler_count,
+ bool has_only_simple_this_property_assignments,
+ Handle<FixedArray> this_property_assignments,
+ int parameter_count,
+ bool has_duplicate_parameters,
+ FunctionLiteral::Type type,
+ bool visit_with_visitor) {
+ FunctionLiteral* lit = new(zone_) FunctionLiteral(
+ isolate_, name, scope, body,
+ materialized_literal_count, expected_property_count, handler_count,
+ has_only_simple_this_property_assignments, this_property_assignments,
+ parameter_count, type, has_duplicate_parameters);
+ if (visit_with_visitor) {
+ visitor_.VisitFunctionLiteral(lit);
+ }
+ return lit;
+ }
+
+ SharedFunctionInfoLiteral* NewSharedFunctionInfoLiteral(
+ Handle<SharedFunctionInfo> shared_function_info) {
+ SharedFunctionInfoLiteral* lit =
+ new(zone_) SharedFunctionInfoLiteral(isolate_, shared_function_info);
+ VISIT_AND_RETURN(SharedFunctionInfoLiteral, lit)
+ }
+
+ ThisFunction* NewThisFunction() {
+ ThisFunction* fun = new(zone_) ThisFunction(isolate_);
+ VISIT_AND_RETURN(ThisFunction, fun)
+ }
+
+#undef VISIT_AND_RETURN
+
+ private:
+ Isolate* isolate_;
+ Zone* zone_;
+ Visitor visitor_;
+};
+
+
} } // namespace v8::internal
#endif // V8_AST_H_
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
- if (!JSArray::cast(arg)->HasFastElements()) {
+ if (!JSArray::cast(arg)->HasFastSmiOnlyElements()) {
elements_kind = FAST_ELEMENTS;
}
}
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
#ifdef DEBUG
bool print_source = false;
bool print_ast = false;
- bool print_json_ast = false;
const char* ftype;
if (Isolate::Current()->bootstrapper()->IsActive()) {
print_source = FLAG_print_builtin_source;
print_ast = FLAG_print_builtin_ast;
- print_json_ast = FLAG_print_builtin_json_ast;
ftype = "builtin";
} else {
print_source = FLAG_print_source;
print_ast = FLAG_print_ast;
- print_json_ast = FLAG_print_json_ast;
Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
if (print_source && !filter.is_empty()) {
print_source = info->function()->name()->IsEqualTo(filter);
if (print_ast && !filter.is_empty()) {
print_ast = info->function()->name()->IsEqualTo(filter);
}
- if (print_json_ast && !filter.is_empty()) {
- print_json_ast = info->function()->name()->IsEqualTo(filter);
- }
ftype = "user-defined";
}
PrintF("--- AST ---\n%s\n",
AstPrinter().PrintProgram(info->function()));
}
-
- if (print_json_ast) {
- JsonAstBuilder builder;
- PrintF("%s", builder.BuildProgram(info->function()));
- }
#endif // DEBUG
}
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
}
+// Primitive functions are unlikely to be picked up by the stack-walking
+// profiler, so they trigger their own optimization when they're called
+// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
+bool CompilationInfo::ShouldSelfOptimize() {
+  // Gated on the --self-optimization flag and Crankshaft being
+  // available; disabled while serializing (snapshot build), for
+  // functions explicitly flagged kDontSelfOptimize, and for functions
+  // whose optimization was previously disabled on their shared info.
+  return FLAG_self_optimization &&
+      FLAG_crankshaft &&
+      !Serializer::enabled() &&
+      !function()->flags()->Contains(kDontSelfOptimize) &&
+      (shared_info().is_null() || !shared_info()->optimization_disabled());
+}
+
+
void CompilationInfo::AbortOptimization() {
Handle<Code> code(shared_info()->code());
SetCode(code);
// Check the function has compiled code.
ASSERT(shared->is_compiled());
shared->set_code_age(0);
+ shared->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize));
+ shared->set_dont_inline(lit->flags()->Contains(kDontInline));
+ shared->set_ast_node_count(lit->ast_node_count());
if (info->AllowOptimize() && !shared->optimization_disabled()) {
// If we're asked to always optimize, we compile the optimized
function_info->set_language_mode(lit->language_mode());
function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
+ function_info->set_ast_node_count(lit->ast_node_count());
+ function_info->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize));
+ function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
}
return V8::UseCrankshaft() && !closure_.is_null();
}
+ // Determines whether or not to insert a self-optimization header.
+ bool ShouldSelfOptimize();
+
// Disable all optimization attempts of this info for the rest of the
// current compilation pipeline.
void AbortOptimization();
static const int kMaxInliningLevels = 3;
+ // Call count before primitive functions trigger their own optimization.
+ static const int kCallsUntilPrimitiveOpt = 200;
+
// All routines return a SharedFunctionInfo.
// If an error occurs an exception is raised and the return handle
// contains NULL.
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
static const int kEventsBufferSize = 256 * KB;
static const int kTickSamplesBufferChunkSize = 64 * KB;
static const int kTickSamplesBufferChunksCount = 16;
-static const int kProfilerStackSize = 32 * KB;
+static const int kProfilerStackSize = 64 * KB;
ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator)
options.use_preemption = true;
argv[i] = NULL;
#endif // V8_SHARED
- } else if (strcmp(argv[i], "--no-preemption") == 0) {
+ } else if (strcmp(argv[i], "--nopreemption") == 0) {
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
// Flags for experimental language features.
DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
DEFINE_bool(harmony_scoping, false, "enable harmony block scoping")
+DEFINE_bool(harmony_modules, false, "enable harmony modules")
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
DEFINE_bool(harmony_collections, false,
"enable harmony collections (sets, maps, and weak maps)")
DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
DEFINE_implication(harmony, harmony_scoping)
+DEFINE_implication(harmony, harmony_modules)
DEFINE_implication(harmony, harmony_proxies)
DEFINE_implication(harmony, harmony_collections)
DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing")
DEFINE_bool(use_inlining, true, "use function inlining")
DEFINE_bool(limit_inlining, true, "limit code size growth from inlining")
-DEFINE_bool(eliminate_empty_blocks, true, "eliminate empty blocks")
DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion")
DEFINE_bool(collect_megamorphic_maps_from_stub_cache,
true,
DEFINE_bool(optimize_closures, true, "optimize closures")
DEFINE_int(loop_weight, 1, "loop weight for representation inference")
+// Experimental profiler changes.
+DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
+DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
+DEFINE_bool(self_optimization, false,
+ "primitive functions trigger their own optimization")
+
+DEFINE_implication(experimental_profiler, watch_ic_patching)
+DEFINE_implication(experimental_profiler, self_optimization)
+
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_bool(debug_code, false,
"generate extra code (assertions) for debugging")
DEFINE_bool(code_comments, false, "emit comments in code disassembly")
-DEFINE_bool(peephole_optimization, true,
- "perform peephole optimizations in assembly code")
DEFINE_bool(enable_sse2, true,
"enable use of SSE2 instructions if available")
DEFINE_bool(enable_sse3, true,
DEFINE_bool(trace_opt, false, "trace lazy optimization")
DEFINE_bool(trace_opt_stats, false, "trace lazy optimization statistics")
DEFINE_bool(opt, true, "use adaptive optimizations")
-DEFINE_bool(opt_eagerly, false, "be more eager when adaptively optimizing")
DEFINE_bool(always_opt, false, "always try to optimize functions")
DEFINE_bool(prepare_always_opt, false, "prepare for turning on always opt")
-DEFINE_bool(deopt, true, "support deoptimization")
DEFINE_bool(trace_deopt, false, "trace deoptimization")
// compiler.cc
DEFINE_bool(always_compact, false, "Perform compaction on every full GC")
DEFINE_bool(lazy_sweeping, true,
"Use lazy sweeping for old pointer and data spaces")
-DEFINE_bool(cleanup_caches_in_maps_at_gc, true,
- "Flush code caches in maps during mark compact cycle.")
DEFINE_bool(never_compact, false,
"Never perform compaction on full GC - testing only")
-DEFINE_bool(compact_code_space, false, "Compact code space")
+DEFINE_bool(compact_code_space, true,
+ "Compact code space on full non-incremental collections")
DEFINE_bool(cleanup_code_caches_at_gc, true,
"Flush inline caches prior to mark compact collection and "
"flush code caches in maps during mark compact cycle.")
"Default seed for initializing random generator "
"(0, the default, means to use system random).")
-DEFINE_bool(canonicalize_object_literal_maps, true,
- "Canonicalize maps for object literals.")
-
-DEFINE_int(max_map_space_pages, MapSpace::kMaxMapPageIndex - 1,
- "Maximum number of pages in map space which still allows to encode "
- "forwarding pointers. That's actually a constant, but it's useful "
- "to control it with a flag for better testing.")
-
// objects.cc
DEFINE_bool(use_verbose_printer, true, "allows verbose printing")
"pretty print source code for builtins")
DEFINE_bool(print_ast, false, "print source AST")
DEFINE_bool(print_builtin_ast, false, "print source AST for builtins")
-DEFINE_bool(print_json_ast, false, "print source AST as JSON")
-DEFINE_bool(print_builtin_json_ast, false,
- "print source AST for builtins as JSON")
DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
// compiler.cc
// runtime.cc
DEFINE_bool(trace_lazy, false, "trace lazy compilation")
-// serialize.cc
-DEFINE_bool(debug_serialization, false,
- "write debug information into the snapshot.")
-
// spaces.cc
DEFINE_bool(collect_heap_spill_statistics, false,
"report heap spill statistics along with heap_stats "
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
}
-inline bool StackHandler::is_entry() const {
- return kind() == ENTRY;
+inline bool StackHandler::is_js_entry() const {
+ return kind() == JS_ENTRY;
}
-inline bool StackHandler::is_try_catch() const {
- return kind() == TRY_CATCH;
+inline bool StackHandler::is_catch() const {
+ return kind() == CATCH;
}
-inline bool StackHandler::is_try_finally() const {
- return kind() == TRY_FINALLY;
+inline bool StackHandler::is_finally() const {
+ return kind() == FINALLY;
}
StackHandlerIterator it(this, top_handler());
ASSERT(!it.done());
StackHandler* handler = it.handler();
- ASSERT(handler->is_entry());
+ ASSERT(handler->is_js_entry());
handler->Iterate(v, LookupCode());
#ifdef DEBUG
// Make sure that the entry frame does not contain more than one
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
class StackHandler BASE_EMBEDDED {
public:
enum Kind {
- ENTRY,
- TRY_CATCH,
- TRY_FINALLY
+ JS_ENTRY,
+ CATCH,
+ FINALLY,
+ LAST_KIND = FINALLY
};
static const int kKindWidth = 2;
- static const int kOffsetWidth = 32 - kKindWidth;
+ STATIC_ASSERT(LAST_KIND < (1 << kKindWidth));
+ static const int kIndexWidth = 32 - kKindWidth;
class KindField: public BitField<StackHandler::Kind, 0, kKindWidth> {};
- class OffsetField: public BitField<unsigned, kKindWidth, kOffsetWidth> {};
+ class IndexField: public BitField<unsigned, kKindWidth, kIndexWidth> {};
// Get the address of this stack handler.
inline Address address() const;
static inline StackHandler* FromAddress(Address address);
// Testers
- inline bool is_entry() const;
- inline bool is_try_catch() const;
- inline bool is_try_finally() const;
+ inline bool is_js_entry() const;
+ inline bool is_catch() const;
+ inline bool is_finally() const;
private:
// Accessors.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
}
-void BreakableStatementChecker::VisitDeclaration(Declaration* decl) {
+void BreakableStatementChecker::VisitVariableDeclaration(
+ VariableDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitModuleDeclaration(
+ ModuleDeclaration* decl) {
+}
+
+
+void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
+}
+
+void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
+}
+
+void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
+}
+
+void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}
code->set_stack_check_table_offset(table_offset);
CodeGenerator::PrintCode(code, info);
info->SetCode(code); // May be an empty handle.
+ if (!code.is_null()) {
+ isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size());
+ }
#ifdef ENABLE_GDB_JIT_INTERFACE
if (FLAG_gdbjit && !code.is_null()) {
GDBJITLineInfo* lineinfo =
void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) {
// There's no need to prepare this code for bailouts from already optimized
// code or code that can't be optimized.
- if (!FLAG_deopt || !info_->HasDeoptimizationSupport()) return;
+ if (!info_->HasDeoptimizationSupport()) return;
unsigned pc_and_state =
StateField::encode(state) | PcField::encode(masm_->pc_offset());
BailoutEntry entry = { id, pc_and_state };
void FullCodeGenerator::VisitDeclarations(
ZoneList<Declaration*>* declarations) {
- int length = declarations->length();
- int global_count = 0;
- for (int i = 0; i < length; i++) {
- Declaration* decl = declarations->at(i);
- EmitDeclaration(decl->proxy(), decl->mode(), decl->fun(), &global_count);
- }
+ int save_global_count = global_count_;
+ global_count_ = 0;
+
+ AstVisitor::VisitDeclarations(declarations);
// Batch declare global functions and variables.
- if (global_count > 0) {
+ if (global_count_ > 0) {
Handle<FixedArray> array =
- isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
+ isolate()->factory()->NewFixedArray(2 * global_count_, TENURED);
+ int length = declarations->length();
for (int j = 0, i = 0; i < length; i++) {
- Declaration* decl = declarations->at(i);
- Variable* var = decl->proxy()->var();
-
- if (var->IsUnallocated()) {
- array->set(j++, *(var->name()));
- if (decl->fun() == NULL) {
- if (var->binding_needs_init()) {
- // In case this binding needs initialization use the hole.
- array->set_the_hole(j++);
+ VariableDeclaration* decl = declarations->at(i)->AsVariableDeclaration();
+ if (decl != NULL) {
+ Variable* var = decl->proxy()->var();
+
+ if (var->IsUnallocated()) {
+ array->set(j++, *(var->name()));
+ if (decl->fun() == NULL) {
+ if (var->binding_needs_init()) {
+ // In case this binding needs initialization use the hole.
+ array->set_the_hole(j++);
+ } else {
+ array->set_undefined(j++);
+ }
} else {
- array->set_undefined(j++);
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(decl->fun(), script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) {
+ SetStackOverflow();
+ return;
+ }
+ array->set(j++, *function);
}
- } else {
- Handle<SharedFunctionInfo> function =
- Compiler::BuildFunctionInfo(decl->fun(), script());
- // Check for stack-overflow exception.
- if (function.is_null()) {
- SetStackOverflow();
- return;
- }
- array->set(j++, *function);
}
}
}
// declaration the global functions and variables.
DeclareGlobals(array);
}
+
+ global_count_ = save_global_count;
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModulePath(ModulePath* module) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleUrl(ModuleUrl* decl) {
+ // TODO(rossberg)
}
// Try block code. Sets up the exception handler chain.
__ bind(&try_entry);
- __ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER, stmt->index());
+ __ PushTryHandler(StackHandler::CATCH, stmt->index());
{ TryCatch try_body(this);
Visit(stmt->try_block());
}
// Set up try handler.
__ bind(&try_entry);
- __ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER, stmt->index());
+ __ PushTryHandler(StackHandler::FINALLY, stmt->index());
{ TryFinally try_body(this, &finally_entry);
Visit(stmt->try_block());
}
scope_(NULL),
nesting_stack_(NULL),
loop_depth_(0),
+ global_count_(0),
context_(NULL),
bailout_entries_(0),
stack_checks_(2), // There's always at least one.
// Platform-specific code for a variable, constant, or function
// declaration. Functions have an initial value.
+ // Increments global_count_ for unallocated variables.
void EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count);
+ FunctionLiteral* function);
// Platform-specific code for checking the stack limit at the back edge of
// a loop.
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
+ int global_count_;
const ExpressionContext* context_;
ZoneList<BailoutEntry> bailout_entries_;
ZoneList<BailoutEntry> stack_checks_;
isolate);
}
isolate->counters()->enum_cache_misses()->Increment();
- int num_enum = object->NumberOfEnumProperties();
+ int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
Handle<DescriptorArray> descs =
ASSERT(storage->length() == index);
return storage;
} else {
- int num_enum = object->NumberOfEnumProperties();
+ int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
object->property_dictionary()->CopyEnumKeysTo(*storage, *sort_array);
promotion_queue_.Destroy();
LiveObjectList::UpdateReferencesForScavengeGC();
- isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+ if (!FLAG_watch_ic_patching) {
+ isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+ }
incremental_marking()->UpdateMarkingDequeAfterScavenge();
ASSERT(new_space_front == new_space_.top());
share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER);
share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
- share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
+ share->set_deopt_counter(FLAG_deopt_every_n_times);
+ share->set_profiler_ticks(0);
+ share->set_ast_node_count(0);
// Set integer fields (smi or int, depending on the architecture).
share->set_length(0);
if (!code_space_->SetUp()) return false;
// Initialize map space.
- map_space_ = new MapSpace(this,
- max_old_generation_size_,
- FLAG_max_map_space_pages,
- MAP_SPACE);
+ map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
if (map_space_ == NULL) return false;
if (!map_space_->SetUp()) return false;
V(use_strict, "use strict") \
V(dot_symbol, ".") \
V(anonymous_function_symbol, "(anonymous function)") \
- V(compare_ic_symbol, ".compare_ic") \
+ V(compare_ic_symbol, ".compare_ic") \
V(infinity_symbol, "Infinity") \
- V(minus_infinity_symbol, "-Infinity")
+ V(minus_infinity_symbol, "-Infinity") \
+ V(hidden_stack_trace_symbol, "v8::hidden_stack_trace")
// Forward declarations.
class GCTracer;
void HCheckMap::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
stream->Add(" %p", *map());
+ if (mode() == REQUIRE_EXACT_MAP) {
+ stream->Add(" [EXACT]");
+ } else if (!has_element_transitions_) {
+ stream->Add(" [EXACT*]");
+ } else {
+ stream->Add(" [MATCH ELEMENTS]");
+ }
}
V(InobjectFields) \
V(BackingStoreFields) \
V(ElementsKind) \
+ V(ElementsPointer) \
V(ArrayElements) \
V(DoubleArrayElements) \
V(SpecializedArrayElements) \
return gvn_flags_.ContainsAnyOf(AllObservableSideEffectsFlagSet());
}
+ GVNFlagSet DependsOnFlags() const {
+ GVNFlagSet result = gvn_flags_;
+ result.Intersect(AllDependsOnFlagSet());
+ return result;
+ }
+
+ GVNFlagSet SideEffectFlags() const {
+ GVNFlagSet result = gvn_flags_;
+ result.Intersect(AllSideEffectsFlagSet());
+ return result;
+ }
+
GVNFlagSet ChangesFlags() const {
GVNFlagSet result = gvn_flags_;
result.Intersect(AllChangesFlagSet());
representation_ = r;
}
+ static GVNFlagSet AllDependsOnFlagSet() {
+ GVNFlagSet result;
+ // Create changes mask.
+#define ADD_FLAG(type) result.Add(kDependsOn##type);
+ GVN_FLAG_LIST(ADD_FLAG)
+#undef ADD_FLAG
+ return result;
+ }
+
static GVNFlagSet AllChangesFlagSet() {
GVNFlagSet result;
// Create changes mask.
static GVNFlagSet AllObservableSideEffectsFlagSet() {
GVNFlagSet result = AllChangesFlagSet();
result.Remove(kChangesElementsKind);
+ result.Remove(kChangesElementsPointer);
+ result.Remove(kChangesMaps);
return result;
}
explicit HLoadElements(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetGVNFlag(kDependsOnMaps);
- SetGVNFlag(kDependsOnElementsKind);
+ SetGVNFlag(kDependsOnElementsPointer);
}
virtual Representation RequiredInputRepresentation(int index) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
+ // If the map to check doesn't have the untransitioned elements, it must not
+ // be hoisted above TransitionElements instructions.
+ if (mode == REQUIRE_EXACT_MAP || !map->has_fast_smi_only_elements()) {
+ SetGVNFlag(kDependsOnElementsKind);
+ }
has_element_transitions_ =
map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL) != NULL ||
map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL) != NULL;
transitioned_map_(transitioned_map) {
SetOperandAt(0, object);
SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
SetGVNFlag(kChangesElementsKind);
+ if (original_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kDependsOnElementsPointer);
+ SetGVNFlag(kDependsOnDoubleArrayElements);
+ } else if (transitioned_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kDependsOnElementsPointer);
+ SetGVNFlag(kDependsOnArrayElements);
+ }
set_representation(Representation::Tagged());
}
deleted_phis_(4),
parent_loop_header_(NULL),
is_inline_return_target_(false),
- is_deoptimizing_(false) { }
+ is_deoptimizing_(false),
+ dominates_loop_successors_(false) { }
void HBasicBlock::AttachLoopInformation() {
}
+void HBasicBlock::AssignLoopSuccessorDominators() {
+ // Mark blocks that dominate all subsequent reachable blocks inside their
+ // loop. Exploit the fact that blocks are sorted in reverse post order. When
+ // the loop is visited in increasing block id order, if the number of
+ // non-loop-exiting successor edges at the dominator_candidate block doesn't
+ // exceed the number of previously encountered predecessor edges, there is no
+ // path from the loop header to any block with higher id that doesn't go
+ // through the dominator_candidate block. In this case, the
+ // dominator_candidate block is guaranteed to dominate all blocks reachable
+ // from it with higher ids.
+ HBasicBlock* last = loop_information()->GetLastBackEdge();
+ int outstanding_successors = 1; // one edge from the pre-header
+ // Header always dominates everything.
+ MarkAsLoopSuccessorDominator();
+ for (int j = block_id(); j <= last->block_id(); ++j) {
+ HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
+ for (HPredecessorIterator it(dominator_candidate); !it.Done();
+ it.Advance()) {
+ HBasicBlock* predecessor = it.Current();
+ // Don't count back edges.
+ if (predecessor->block_id() < dominator_candidate->block_id()) {
+ outstanding_successors--;
+ }
+ }
+
+ // If more successors than predecessors have been seen in the loop up to
+ // now, it's not possible to guarantee that the current block dominates
+ // all of the blocks with higher IDs. In this case, assume conservatively
+ // that those paths through the loop that don't go through the current block
+ // contain all of the loop's dependencies. Also be careful to record
+ // dominator information about the current loop that's being processed,
+ // and not nested loops, which will be processed when
+ // AssignLoopSuccessorDominators gets called on their header.
+ ASSERT(outstanding_successors >= 0);
+ HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
+ if (outstanding_successors == 0 &&
+ (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
+ dominator_candidate->MarkAsLoopSuccessorDominator();
+ }
+ HControlInstruction* end = dominator_candidate->end();
+ for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
+ HBasicBlock* successor = it.Current();
+ // Only count successors that remain inside the loop and don't loop back
+ // to a loop header.
+ if (successor->block_id() > dominator_candidate->block_id() &&
+ successor->block_id() <= last->block_id()) {
+ // Backwards edges must land on loop headers.
+ ASSERT(successor->block_id() > dominator_candidate->block_id() ||
+ successor->IsLoopHeader());
+ outstanding_successors++;
+ }
+ }
+ }
+}
+
+
int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
for (int i = 0; i < predecessors_.length(); ++i) {
if (predecessors_[i] == predecessor) return i;
MacroAssembler assembler(info->isolate(), NULL, 0);
LCodeGen generator(chunk, &assembler, info);
- if (FLAG_eliminate_empty_blocks) {
- chunk->MarkEmptyBlocks();
- }
+ chunk->MarkEmptyBlocks();
if (generator.GenerateCode()) {
if (FLAG_trace_codegen) {
void HGraph::AssignDominators() {
HPhase phase("Assign dominators", this);
for (int i = 0; i < blocks_.length(); ++i) {
- if (blocks_[i]->IsLoopHeader()) {
+ HBasicBlock* block = blocks_[i];
+ if (block->IsLoopHeader()) {
// Only the first predecessor of a loop header is from outside the loop.
// All others are back edges, and thus cannot dominate the loop header.
- blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
+ block->AssignCommonDominator(block->predecessors()->first());
+ block->AssignLoopSuccessorDominators();
} else {
for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
void LoopInvariantCodeMotion();
void ProcessLoopBlock(HBasicBlock* block,
HBasicBlock* before_loop,
- GVNFlagSet loop_kills);
+ GVNFlagSet loop_kills,
+ GVNFlagSet* accumulated_first_time_depends);
bool AllowCodeMotion();
bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
bool HGlobalValueNumberer::Analyze() {
+ removed_side_effects_ = false;
ComputeBlockSideEffects();
if (FLAG_loop_invariant_code_motion) {
LoopInvariantCodeMotion();
void HGlobalValueNumberer::ComputeBlockSideEffects() {
+ // The Analyze phase of GVN can be called multiple times. Clear loop side
+ // effects before computing them to erase the contents from previous Analyze
+ // passes.
+ for (int i = 0; i < loop_side_effects_.length(); ++i) {
+ loop_side_effects_[i].RemoveAll();
+ }
for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
// Compute side effects for the block.
HBasicBlock* block = graph_->blocks()->at(i);
block->block_id(),
side_effects.ToIntegral());
+ GVNFlagSet accumulated_first_time_depends;
HBasicBlock* last = block->loop_information()->GetLastBackEdge();
for (int j = block->block_id(); j <= last->block_id(); ++j) {
- ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects);
+ ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
+ &accumulated_first_time_depends);
}
}
}
}
-void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
- HBasicBlock* loop_header,
- GVNFlagSet loop_kills) {
+void HGlobalValueNumberer::ProcessLoopBlock(
+ HBasicBlock* block,
+ HBasicBlock* loop_header,
+ GVNFlagSet loop_kills,
+ GVNFlagSet* accumulated_first_time_depends) {
HBasicBlock* pre_header = loop_header->predecessors()->at(0);
GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
HInstruction* instr = block->first();
while (instr != NULL) {
HInstruction* next = instr->next();
- if (instr->CheckFlag(HValue::kUseGVN) &&
- !instr->gvn_flags().ContainsAnyOf(depends_flags)) {
- TraceGVN("Checking instruction %d (%s)\n",
+ bool hoisted = false;
+ if (instr->CheckFlag(HValue::kUseGVN)) {
+ TraceGVN("Checking instruction %d (%s) instruction GVN flags 0x%X, "
+ "loop kills 0x%X\n",
instr->id(),
- instr->Mnemonic());
- bool inputs_loop_invariant = true;
- for (int i = 0; i < instr->OperandCount(); ++i) {
- if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
- inputs_loop_invariant = false;
- }
+ instr->Mnemonic(),
+ instr->gvn_flags().ToIntegral(),
+ depends_flags.ToIntegral());
+ bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
+ if (!can_hoist && instr->IsTransitionElementsKind()) {
+ // It's only possible to hoist one-time side effects if there are no
+ // dependencies on their changes from the loop header to the current
+ // instruction.
+ GVNFlagSet converted_changes =
+ HValue::ConvertChangesToDependsFlags(instr->ChangesFlags());
+ TraceGVN("Checking dependencies on one-time instruction %d (%s) "
+ "converted changes 0x%X, accumulated depends 0x%X\n",
+ instr->id(),
+ instr->Mnemonic(),
+ converted_changes.ToIntegral(),
+ accumulated_first_time_depends->ToIntegral());
+ // It's possible to hoist one-time side effects out of the current loop
+ // only if they dominate all of the successor blocks in the same loop
+ // and there are no instructions with Changes/DependsOn flags that
+ // intervene between them and the beginning of the loop header.
+ bool in_nested_loop = block != loop_header &&
+ ((block->parent_loop_header() != loop_header) ||
+ block->IsLoopHeader());
+ can_hoist = !in_nested_loop &&
+ block->IsLoopSuccessorDominator() &&
+ !accumulated_first_time_depends->ContainsAnyOf(converted_changes);
}
- if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
- TraceGVN("Found loop invariant instruction %d\n", instr->id());
- // Move the instruction out of the loop.
- instr->Unlink();
- instr->InsertBefore(pre_header->end());
+ if (can_hoist) {
+ bool inputs_loop_invariant = true;
+ for (int i = 0; i < instr->OperandCount(); ++i) {
+ if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
+ inputs_loop_invariant = false;
+ }
+ }
+
+ if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
+ TraceGVN("Hoisting loop invariant instruction %d\n", instr->id());
+ // Move the instruction out of the loop.
+ instr->Unlink();
+ instr->InsertBefore(pre_header->end());
+ if (instr->HasSideEffects()) removed_side_effects_ = true;
+ hoisted = true;
+ }
}
}
+ if (!hoisted) {
+ // If an instruction is not hoisted, we have to account for its side
+ // effects when hoisting later HTransitionElementsKind instructions.
+ accumulated_first_time_depends->Add(instr->DependsOnFlags());
+ GVNFlagSet converted_changes =
+ HValue::ConvertChangesToDependsFlags(instr->SideEffectFlags());
+ accumulated_first_time_depends->Add(converted_changes);
+ }
instr = next;
}
}
// Handle implicit declaration of the function name in named function
// expressions before other declarations.
if (scope->is_function_scope() && scope->function() != NULL) {
- HandleDeclaration(scope->function(), CONST, NULL);
+ HandleVariableDeclaration(scope->function(), CONST, NULL);
}
VisitDeclarations(scope->declarations());
AddSimulate(AstNode::kDeclarationsId);
// could only be discovered by removing side-effect-generating instructions
// during the first pass.
if (FLAG_smi_only_arrays && removed_side_effects) {
- gvn.Analyze();
+ removed_side_effects = gvn.Analyze();
+ ASSERT(!removed_side_effects);
}
}
// Do a quick check on source code length to avoid parsing large
// inlining candidates.
- if ((FLAG_limit_inlining && target->shared()->SourceSize() > kMaxSourceSize)
- || target->shared()->SourceSize() > kUnlimitedMaxSourceSize) {
+ if ((FLAG_limit_inlining && target_shared->SourceSize() > kMaxSourceSize)
+ || target_shared->SourceSize() > kUnlimitedMaxSourceSize) {
TraceInline(target, caller, "target text too big");
return false;
}
TraceInline(target, caller, "target not inlineable");
return false;
}
+ if (target_shared->dont_inline() || target_shared->dont_crankshaft()) {
+ TraceInline(target, caller, "target contains unsupported syntax [early]");
+ return false;
+ }
+
+ int nodes_added = target_shared->ast_node_count();
+ if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
+ nodes_added > kUnlimitedMaxInlinedSize) {
+ TraceInline(target, caller, "target AST is too large [early]");
+ return false;
+ }
#if !defined(V8_TARGET_ARCH_IA32)
// Target must be able to use caller's context.
return false;
}
- int count_before = AstNode::Count();
-
// Parse and allocate variables.
CompilationInfo target_info(target);
if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
}
FunctionLiteral* function = target_info.function();
- // Count the number of AST nodes added by inlining this call.
- int nodes_added = AstNode::Count() - count_before;
+ // The following conditions must be checked again after re-parsing, because
+ // earlier the information might not have been complete due to lazy parsing.
+ nodes_added = function->ast_node_count();
if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
nodes_added > kUnlimitedMaxInlinedSize) {
- TraceInline(target, caller, "target AST is too large");
+ TraceInline(target, caller, "target AST is too large [late]");
+ return false;
+ }
+ AstProperties::Flags* flags(function->flags());
+ if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
+ TraceInline(target, caller, "target contains unsupported syntax [late]");
return false;
}
return false;
}
}
- // All statements in the body must be inlineable.
- for (int i = 0, count = function->body()->length(); i < count; ++i) {
- if (!function->body()->at(i)->IsInlineable()) {
- TraceInline(target, caller, "target contains unsupported syntax");
- return false;
- }
- }
// Generate the deoptimization data for the unoptimized version of
// the target function if we don't already have it.
}
-bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr,
- HValue* receiver,
- Handle<Map> receiver_map,
- CheckType check_type) {
+bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
+ if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
+ BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
+ switch (id) {
+ case kMathRound:
+ case kMathFloor:
+ case kMathAbs:
+ case kMathSqrt:
+ case kMathLog:
+ case kMathSin:
+ case kMathCos:
+ if (expr->arguments()->length() == 1) {
+ HValue* argument = Pop();
+ HValue* context = environment()->LookupContext();
+ Drop(1); // Receiver.
+ HUnaryMathOperation* op =
+ new(zone()) HUnaryMathOperation(context, argument, id);
+ op->set_position(expr->position());
+ if (drop_extra) Drop(1); // Optionally drop the function.
+ ast_context()->ReturnInstruction(op, expr->id());
+ return true;
+ }
+ break;
+ default:
+ // Not supported for inlining yet.
+ break;
+ }
+ return false;
+}
+
+
+bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
+ HValue* receiver,
+ Handle<Map> receiver_map,
+ CheckType check_type) {
ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
// Try to inline calls like Math.* as operations in the calling function.
if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
case kMathRandom:
if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
- Drop(1);
+ Drop(1); // Receiver.
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
AddInstruction(global_object);
Handle<Map> receiver_map = (types == NULL || types->is_empty())
? Handle<Map>::null()
: types->first();
- if (TryInlineBuiltinFunction(expr,
- receiver,
- receiver_map,
- expr->check_type())) {
+ if (TryInlineBuiltinMethodCall(expr,
+ receiver,
+ receiver_map,
+ expr->check_type())) {
+ if (FLAG_trace_inlining) {
+ PrintF("Inlining builtin ");
+ expr->target()->ShortPrint();
+ PrintF("\n");
+ }
return;
}
IsGlobalObject());
environment()->SetExpressionStackAt(receiver_index, global_receiver);
+ if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop.
+ if (FLAG_trace_inlining) {
+ PrintF("Inlining builtin ");
+ expr->target()->ShortPrint();
+ PrintF("\n");
+ }
+ return;
+ }
if (TryInline(expr)) return;
call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
argument_count));
PushAndAdd(receiver);
CHECK_ALIVE(VisitExpressions(expr->arguments()));
AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
+
+ if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function.
+ if (FLAG_trace_inlining) {
+ PrintF("Inlining builtin ");
+ expr->target()->ShortPrint();
+ PrintF("\n");
+ }
+ return;
+ }
+
if (TryInline(expr, true)) { // Drop function from environment.
return;
} else {
}
-void HGraphBuilder::VisitDeclaration(Declaration* decl) {
- HandleDeclaration(decl->proxy(), decl->mode(), decl->fun());
+void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
+ HandleVariableDeclaration(decl->proxy(), decl->mode(), decl->fun());
}
-void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function) {
+void HGraphBuilder::HandleVariableDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* function) {
Variable* var = proxy->var();
bool binding_needs_init =
(mode == CONST || mode == CONST_HARMONY || mode == LET);
}
+void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) {
+ // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
+ // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
+ // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModulePath(ModulePath* module) {
+ // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
+ // TODO(rossberg)
+}
+
+
// Generators for inline runtime functions.
// Support for types.
void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
}
PrintEmptyProperty("xhandlers");
- PrintEmptyProperty("flags");
+ const char* flags = current->IsLoopSuccessorDominator()
+ ? "dom-loop-succ"
+ : "";
+ PrintStringProperty("flags", flags);
if (current->dominator() != NULL) {
PrintBlockProperty("dominator", current->dominator()->block_id());
int PredecessorIndexOf(HBasicBlock* predecessor) const;
void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); }
void AssignCommonDominator(HBasicBlock* other);
+ void AssignLoopSuccessorDominators();
void FinishExitWithDeoptimization(HDeoptimize::UseEnvironment has_uses) {
FinishExit(CreateDeoptimize(has_uses));
bool IsDeoptimizing() const { return is_deoptimizing_; }
void MarkAsDeoptimizing() { is_deoptimizing_ = true; }
+ bool IsLoopSuccessorDominator() const {
+ return dominates_loop_successors_;
+ }
+ void MarkAsLoopSuccessorDominator() {
+ dominates_loop_successors_ = true;
+ }
+
inline Zone* zone();
#ifdef DEBUG
HBasicBlock* parent_loop_header_;
bool is_inline_return_target_;
bool is_deoptimizing_;
+ bool dominates_loop_successors_;
+};
+
+
+class HPredecessorIterator BASE_EMBEDDED {
+ public:
+ explicit HPredecessorIterator(HBasicBlock* block)
+ : predecessor_list_(block->predecessors()), current_(0) { }
+
+ bool Done() { return current_ >= predecessor_list_->length(); }
+ HBasicBlock* Current() { return predecessor_list_->at(current_); }
+ void Advance() { current_++; }
+
+ private:
+ const ZoneList<HBasicBlock*>* predecessor_list_;
+ int current_;
};
INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_DECLARATION)
#undef INLINE_FUNCTION_GENERATOR_DECLARATION
- void HandleDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function);
+ void HandleVariableDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* function);
void VisitDelete(UnaryOperation* expr);
void VisitVoid(UnaryOperation* expr);
bool TryCallApply(Call* expr);
bool TryInline(Call* expr, bool drop_extra = false);
- bool TryInlineBuiltinFunction(Call* expr,
+ bool TryInlineBuiltinMethodCall(Call* expr,
HValue* receiver,
Handle<Map> receiver_map,
CheckType check_type);
+ bool TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra);
// If --trace-inlining, print a line of the inlining trace. Inlining
// succeeded if the reason string is NULL and failed if there is a
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// Invoke: Link this frame into the handler chain. There's only one
// handler block in this code object, so its index is 0.
__ bind(&invoke);
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// Clear any pending exceptions.
__ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
}
#endif
+ // We can optionally optimize based on counters rather than statistical
+ // sampling.
+ if (info->ShouldSelfOptimize()) {
+ if (FLAG_trace_opt) {
+ PrintF("[adding self-optimization header to %s]\n",
+ *info->function()->debug_name()->ToCString());
+ }
+ MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+ Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+ JSGlobalPropertyCell* cell;
+ if (maybe_cell->To(&cell)) {
+ __ sub(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
+ Immediate(Smi::FromInt(1)));
+ Handle<Code> compile_stub(
+ isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+ STATIC_ASSERT(kSmiTag == 0);
+ __ j(zero, compile_stub);
+ }
+ }
+
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
// receiver object). ecx is zero for method calls and non-zero for
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+ ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+ EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+ FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
(mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ ++global_count_;
break;
case Variable::PARAMETER:
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(esi); // The context is the first argument.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
}
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
// We will build up the handler from the bottom by pushing on the stack.
- // First compute the state and push the frame pointer and context.
- unsigned state = StackHandler::OffsetField::encode(handler_index);
- if (try_location == IN_JAVASCRIPT) {
- push(ebp);
- push(esi);
- state |= (type == TRY_CATCH_HANDLER)
- ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
- : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
- } else {
- ASSERT(try_location == IN_JS_ENTRY);
+ // First push the frame pointer and context.
+ if (kind == StackHandler::JS_ENTRY) {
// The frame pointer does not point to a JS frame so we save NULL for
// ebp. We expect the code throwing an exception to check ebp before
// dereferencing it to restore the context.
push(Immediate(0)); // NULL frame pointer.
push(Immediate(Smi::FromInt(0))); // No context.
- state |= StackHandler::KindField::encode(StackHandler::ENTRY);
+ } else {
+ push(ebp);
+ push(esi);
}
-
// Push the state and the code object.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
push(Immediate(state));
Push(CodeObject());
mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
- STATIC_ASSERT(StackHandler::ENTRY == 0);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
test(Operand(esp, StackHandlerConstants::kStateOffset),
Immediate(StackHandler::KindField::kMask));
j(not_zero, &fetch_next);
// Exception handling
// Push a new try handler and link it into try handler chain.
- void PushTryHandler(CodeLocation try_location,
- HandlerType type,
- int handler_index);
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();
} else {
Label call_builtin;
- // Get the elements array of the object.
- __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
- Immediate(factory()->fixed_array_map()));
- __ j(not_equal, &call_builtin);
-
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
+ // Get the elements array of the object.
+ __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
+ Immediate(factory()->fixed_array_map()));
+ __ j(not_equal, &call_builtin);
+
// Get the array's length into eax and calculate new length.
__ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ add(eax, Immediate(Smi::FromInt(argc)));
- // Get the element's length into ecx.
- __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
+ // Get the elements' length into ecx.
+ __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
__ cmp(eax, ecx);
// Save new length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
- // Push the element.
- __ lea(edx, FieldOperand(ebx,
- eax, times_half_pointer_size,
- FixedArray::kHeaderSize - argc * kPointerSize));
- __ mov(Operand(edx, 0), ecx);
+ // Store the value.
+ __ mov(FieldOperand(edi,
+ eax,
+ times_half_pointer_size,
+ FixedArray::kHeaderSize - argc * kPointerSize),
+ ecx);
__ ret((argc + 1) * kPointerSize);
__ bind(&with_write_barrier);
- __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
- __ CheckFastObjectElements(edi, &call_builtin);
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(ebx, ¬_fast_object, Label::kNear);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(¬_fast_object);
+ __ CheckFastSmiOnlyElements(ebx, &call_builtin);
+ // edi: elements array
+ // edx: receiver
+ // ebx: map
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ ebx,
+ edi,
+ &call_builtin);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ // Restore edi.
+ __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(ebx, &call_builtin);
+ }
// Save new length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
- // Push the element.
- __ lea(edx, FieldOperand(ebx,
+ // Store the value.
+ __ lea(edx, FieldOperand(edi,
eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ mov(Operand(edx, 0), ecx);
- __ RecordWrite(ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ ret((argc + 1) * kPointerSize);
__ jmp(&call_builtin);
}
- __ mov(edi, Operand(esp, argc * kPointerSize));
+ __ mov(ebx, Operand(esp, argc * kPointerSize));
// Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check;
- __ JumpIfSmi(edi, &no_fast_elements_check);
+ __ JumpIfSmi(ebx, &no_fast_elements_check);
__ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
__ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
__ bind(&no_fast_elements_check);
__ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
// Check if it's the end of elements.
- __ lea(edx, FieldOperand(ebx,
+ __ lea(edx, FieldOperand(edi,
eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ cmp(edx, ecx);
__ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
// Push the argument...
- __ mov(Operand(edx, 0), edi);
+ __ mov(Operand(edx, 0), ebx);
// ... and fill the rest with holes.
for (int i = 1; i < kAllocationDelta; i++) {
__ mov(Operand(edx, i * kPointerSize),
// tell the incremental marker to rescan the object that we just grew. We
// don't need to worry about the holes because they are in old space and
// already marked black.
- __ RecordWrite(ebx, edx, edi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
+ __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
// Restore receiver to edx as finish sequence assumes it's here.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
// Increment element's and array's sizes.
- __ add(FieldOperand(ebx, FixedArray::kLengthOffset),
+ __ add(FieldOperand(edi, FixedArray::kLengthOffset),
Immediate(Smi::FromInt(kAllocationDelta)));
// NOTE: This only happen in new-space, where we don't
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
#define V8_IC_INL_H_
#include "ic.h"
+
+#include "compiler.h"
#include "debug.h"
#include "macro-assembler.h"
Assembler::set_target_address_at(address, target->instruction_start());
target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address,
target);
+ PostPatching();
}
}
+void IC::PostPatching() {
+ if (FLAG_watch_ic_patching) {
+ Isolate::Current()->runtime_profiler()->NotifyICChanged();
+ // We do not want to optimize until the ICs have settled down,
+ // so when they are patched, we postpone optimization for the
+ // current function and the functions above it on the stack that
+ // might want to inline this one.
+ StackFrameIterator it;
+ if (it.done()) return;
+ it.Advance();
+ static const int kStackFramesToMark = Compiler::kMaxInliningLevels - 1;
+ for (int i = 0; i < kStackFramesToMark; ++i) {
+ if (it.done()) return;
+ StackFrame* raw_frame = it.frame();
+ if (raw_frame->is_java_script()) {
+ JSFunction* function =
+ JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function());
+ function->shared()->set_profiler_ticks(0);
+ }
+ it.Advance();
+ }
+ }
+}
+
+
void IC::Clear(Address address) {
Code* target = GetTargetAtAddress(address);
// Access the target code for the given IC address.
static inline Code* GetTargetAtAddress(Address address);
static inline void SetTargetAtAddress(Address address, Code* target);
+ static void PostPatching();
private:
// Frame pointer for the frame that uses (calls) the IC.
}
is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
- heap_->mark_compact_collector()->StartCompaction();
+ heap_->mark_compact_collector()->StartCompaction(
+ MarkCompactCollector::INCREMENTAL_COMPACTION);
state_ = MARKING;
}
+void Isolate::CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object) {
+ if (capture_stack_trace_for_uncaught_exceptions_) {
+ // Capture stack trace for a detailed exception message.
+ Handle<String> key = factory()->hidden_stack_trace_symbol();
+ Handle<JSArray> stack_trace = CaptureCurrentStackTrace(
+ stack_trace_for_uncaught_exceptions_frame_limit_,
+ stack_trace_for_uncaught_exceptions_options_);
+ JSObject::SetHiddenProperty(error_object, key, stack_trace);
+ }
+}
+
+
Handle<JSArray> Isolate::CaptureCurrentStackTrace(
int frame_limit, StackTrace::StackTraceOptions options) {
// Ensure no negative values.
// Find the top-most try-catch handler.
StackHandler* handler =
StackHandler::FromAddress(Isolate::handler(thread_local_top()));
- while (handler != NULL && !handler->is_try_catch()) {
+ while (handler != NULL && !handler->is_catch()) {
handler = handler->next();
}
}
-void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
+bool Isolate::IsErrorObject(Handle<Object> obj) {
+ if (!obj->IsJSObject()) return false;
+
+ String* error_key = *(factory()->LookupAsciiSymbol("$Error"));
+ Object* error_constructor =
+ js_builtins_object()->GetPropertyNoExceptionThrown(error_key);
+
+ for (Object* prototype = *obj; !prototype->IsNull();
+ prototype = prototype->GetPrototype()) {
+ if (!prototype->IsJSObject()) return false;
+ if (JSObject::cast(prototype)->map()->constructor() == error_constructor) {
+ return true;
+ }
+ }
+ return false;
+}
+
+
+void Isolate::DoThrow(Object* exception, MessageLocation* location) {
ASSERT(!has_pending_exception());
HandleScope scope;
- Object* exception_object = Smi::FromInt(0);
- bool is_object = exception->ToObject(&exception_object);
- Handle<Object> exception_handle(exception_object);
+ Handle<Object> exception_handle(exception);
// Determine reporting and whether the exception is caught externally.
bool catchable_by_javascript = is_catchable_by_javascript(exception);
- // Only real objects can be caught by JS.
- ASSERT(!catchable_by_javascript || is_object);
bool can_be_caught_externally = false;
bool should_report_exception =
ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
bool report_exception = catchable_by_javascript && should_report_exception;
+ bool try_catch_needs_message =
+ can_be_caught_externally && try_catch_handler()->capture_message_;
+ bool bootstrapping = bootstrapper()->IsActive();
#ifdef ENABLE_DEBUGGER_SUPPORT
// Notify debugger of exception.
}
#endif
- // Generate the message.
- Handle<Object> message_obj;
- MessageLocation potential_computed_location;
- bool try_catch_needs_message =
- can_be_caught_externally &&
- try_catch_handler()->capture_message_;
+ // Generate the message if required.
if (report_exception || try_catch_needs_message) {
+ MessageLocation potential_computed_location;
if (location == NULL) {
- // If no location was specified we use a computed one instead
+ // If no location was specified we use a computed one instead.
ComputeLocation(&potential_computed_location);
location = &potential_computed_location;
}
- if (!bootstrapper()->IsActive()) {
- // It's not safe to try to make message objects or collect stack
- // traces while the bootstrapper is active since the infrastructure
- // may not have been properly initialized.
+ // It's not safe to try to make message objects or collect stack traces
+ // while the bootstrapper is active since the infrastructure may not have
+ // been properly initialized.
+ if (!bootstrapping) {
Handle<String> stack_trace;
if (FLAG_trace_exception) stack_trace = StackTraceString();
Handle<JSArray> stack_trace_object;
- if (report_exception && capture_stack_trace_for_uncaught_exceptions_) {
+ if (capture_stack_trace_for_uncaught_exceptions_) {
+ if (IsErrorObject(exception_handle)) {
+ // We fetch the stack trace that corresponds to this error object.
+ String* key = heap()->hidden_stack_trace_symbol();
+ Object* stack_property =
+ JSObject::cast(*exception_handle)->GetHiddenProperty(key);
+ // Property lookup may have failed. In this case it's probably not
+ // a valid Error object.
+ if (stack_property->IsJSArray()) {
+ stack_trace_object = Handle<JSArray>(JSArray::cast(stack_property));
+ }
+ }
+ if (stack_trace_object.is_null()) {
+ // Not an error object, we capture at throw site.
stack_trace_object = CaptureCurrentStackTrace(
stack_trace_for_uncaught_exceptions_frame_limit_,
stack_trace_for_uncaught_exceptions_options_);
+ }
}
- ASSERT(is_object); // Can't use the handle unless there's a real object.
- message_obj = MessageHandler::MakeMessageObject("uncaught_exception",
- location, HandleVector<Object>(&exception_handle, 1), stack_trace,
+ Handle<Object> message_obj = MessageHandler::MakeMessageObject(
+ "uncaught_exception",
+ location,
+ HandleVector<Object>(&exception_handle, 1),
+ stack_trace,
stack_trace_object);
+ thread_local_top()->pending_message_obj_ = *message_obj;
+ if (location != NULL) {
+ thread_local_top()->pending_message_script_ = *location->script();
+ thread_local_top()->pending_message_start_pos_ = location->start_pos();
+ thread_local_top()->pending_message_end_pos_ = location->end_pos();
+ }
} else if (location != NULL && !location->script().is_null()) {
// We are bootstrapping and caught an error where the location is set
// and we have a script for the location.
// Save the message for reporting if the the exception remains uncaught.
thread_local_top()->has_pending_message_ = report_exception;
- if (!message_obj.is_null()) {
- thread_local_top()->pending_message_obj_ = *message_obj;
- if (location != NULL) {
- thread_local_top()->pending_message_script_ = *location->script();
- thread_local_top()->pending_message_start_pos_ = location->start_pos();
- thread_local_top()->pending_message_end_pos_ = location->end_pos();
- }
- }
// Do not forget to clean catcher_ if currently thrown exception cannot
// be caught. If necessary, ReThrow will update the catcher.
thread_local_top()->catcher_ = can_be_caught_externally ?
try_catch_handler() : NULL;
- // NOTE: Notifying the debugger or generating the message
- // may have caused new exceptions. For now, we just ignore
- // that and set the pending exception to the original one.
- if (is_object) {
- set_pending_exception(*exception_handle);
- } else {
- // Failures are not on the heap so they neither need nor work with handles.
- ASSERT(exception_handle->IsFailure());
- set_pending_exception(exception);
- }
+ set_pending_exception(*exception_handle);
}
StackHandler* handler =
StackHandler::FromAddress(Isolate::handler(thread_local_top()));
while (handler != NULL && handler->address() < external_handler_address) {
- ASSERT(!handler->is_try_catch());
- if (handler->is_try_finally()) return false;
+ ASSERT(!handler->is_catch());
+ if (handler->is_finally()) return false;
handler = handler->next();
}
/* Serializer state. */ \
V(ExternalReferenceTable*, external_reference_table, NULL) \
/* AstNode state. */ \
- V(unsigned, ast_node_id, 0) \
+ V(int, ast_node_id, 0) \
V(unsigned, ast_node_count, 0) \
/* SafeStackFrameIterator activations count. */ \
V(int, safe_stack_iterator_counter, 0) \
int frame_limit,
StackTrace::StackTraceOptions options);
+ void CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object);
+
// Returns if the top context may access the given global object. If
// the result is false, the pending exception is guaranteed to be
// set.
// Promote a scheduled exception to pending. Asserts has_scheduled_exception.
Failure* PromoteScheduledException();
- void DoThrow(MaybeObject* exception, MessageLocation* location);
+ void DoThrow(Object* exception, MessageLocation* location);
// Checks if exception should be reported and finds out if it's
// caught externally.
bool ShouldReportException(bool* can_be_caught_externally,
void InitializeDebugger();
+ // Traverse prototype chain to find out whether the object is derived from
+ // the Error object.
+ bool IsErrorObject(Handle<Object> obj);
+
int stack_trace_nesting_level_;
StringStream* incomplete_message_;
// The preallocated memory thread singleton.
template<typename T, class P>
void List<T, P>::ResizeAddInternal(const T& element) {
ASSERT(length_ >= capacity_);
- // Grow the list capacity by 50%, but make sure to let it grow
+ // Grow the list capacity by 100%, but make sure to let it grow
// even when the capacity is zero (possible initial case).
- int new_capacity = 1 + capacity_ + (capacity_ >> 1);
+ int new_capacity = 1 + 2 * capacity_;
// Since the element reference could be an element of the list, copy
// it out of the old backing storage before resizing.
T temp = element;
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
};
-enum CodeLocation {
- IN_JAVASCRIPT,
- IN_JS_ENTRY,
- IN_C_ENTRY
-};
-
-
-enum HandlerType {
- TRY_CATCH_HANDLER,
- TRY_FINALLY_HANDLER,
- JS_ENTRY_HANDLER
-};
-
-
// Types of uncatchable exceptions.
enum UncatchableExceptionType {
OUT_OF_MEMORY,
}
-bool MarkCompactCollector::StartCompaction() {
+bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
if (!compacting_) {
ASSERT(evacuation_candidates_.length() == 0);
CollectEvacuationCandidates(heap()->old_pointer_space());
CollectEvacuationCandidates(heap()->old_data_space());
- if (FLAG_compact_code_space) {
+ if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) {
CollectEvacuationCandidates(heap()->code_space());
} else if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->code_space());
// Don't start compaction if we are in the middle of incremental
// marking cycle. We did not collect any slots.
if (!FLAG_never_compact && !was_marked_incrementally_) {
- StartCompaction();
+ StartCompaction(NON_INCREMENTAL_COMPACTION);
}
PagedSpaces spaces;
isolate_->heap()->mark_compact_collector()->
RecordCodeEntrySlot(slot, target);
+ RecordSharedFunctionInfoCodeSlot(shared);
+
candidate = next_candidate;
}
candidate->set_code(lazy_compile);
}
+ RecordSharedFunctionInfoCodeSlot(candidate);
+
candidate = next_candidate;
}
shared_function_info_candidates_head_ = NULL;
}
+ void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
+ Object** slot = HeapObject::RawField(shared,
+ SharedFunctionInfo::kCodeOffset);
+ isolate_->heap()->mark_compact_collector()->
+ RecordSlot(slot, slot, HeapObject::cast(*slot));
+ }
+
static JSFunction** GetNextCandidateField(JSFunction* candidate) {
return reinterpret_cast<JSFunction**>(
candidate->address() + JSFunction::kCodeEntryOffset);
re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii),
code,
heap);
+
+ // Saving a copy might create a pointer into a compaction candidate
+ // that was not observed by the marker. This might happen if the JSRegExp
+ // data was marked through the compilation cache before the marker
+ // reached the JSRegExp object.
+ FixedArray* data = FixedArray::cast(re->data());
+ Object** slot = data->data_start() + JSRegExp::saved_code_index(is_ascii);
+ heap->mark_compact_collector()->
+ RecordSlot(slot, slot, code);
+
// Set a number in the 0-255 range to guarantee no smi overflow.
re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii),
Smi::FromInt(heap->sweep_generation() & 0xff),
code_flusher_->ProcessCandidates();
}
- // Clean up dead objects from the runtime profiler.
- heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+ if (!FLAG_watch_ic_patching) {
+ // Clean up dead objects from the runtime profiler.
+ heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+ }
}
heap_->UpdateReferencesInExternalStringTable(
&UpdateReferenceInExternalStringTableEntry);
- // Update JSFunction pointers from the runtime profiler.
- heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
- &updating_visitor);
+ if (!FLAG_watch_ic_patching) {
+ // Update JSFunction pointers from the runtime profiler.
+ heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
+ &updating_visitor);
+ }
EvacuationWeakObjectRetainer evacuation_object_retainer;
heap()->ProcessWeakReferences(&evacuation_object_retainer);
// Performs a global garbage collection.
void CollectGarbage();
- bool StartCompaction();
+ enum CompactionMode {
+ INCREMENTAL_COMPACTION,
+ NON_INCREMENTAL_COMPACTION
+ };
+
+ bool StartCompaction(CompactionMode mode);
void AbortCompaction();
if (stackTraceLimit < 0 || stackTraceLimit > 10000) {
stackTraceLimit = 10000;
}
- var raw_stack = %CollectStackTrace(cons_opt
- ? cons_opt
- : captureStackTrace, stackTraceLimit);
+ var raw_stack = %CollectStackTrace(obj,
+ cons_opt ? cons_opt : captureStackTrace,
+ stackTraceLimit);
DefineOneShotAccessor(obj, 'stack', function (obj) {
return FormatRawStackTrace(obj, raw_stack);
});
void Assembler::andi(Register rt, Register rs, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(ANDI, rs, rt, j);
}
void Assembler::ori(Register rt, Register rs, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(ORI, rs, rt, j);
}
void Assembler::xori(Register rt, Register rs, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(XORI, rs, rt, j);
}
void Assembler::lui(Register rd, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(LUI, zero_reg, rd, j);
}
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadGlobalInitialConstructedArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
- __ LoadGlobalInitialConstructedArrayMap(array_function, scratch2,
- elements_array_storage);
+ __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ Assert(
// t4: JSObject
__ bind(&allocated);
__ push(t4);
-
- // Push the function and the allocated receiver from the stack.
- // sp[0]: receiver (newly allocated object)
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ lw(a1, MemOperand(sp, kPointerSize));
- __ MultiPushReversed(a1.bit() | t4.bit());
+ __ push(t4);
// Reload the number of arguments from the stack.
- // a1: constructor function
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ lw(a3, MemOperand(sp, 4 * kPointerSize));
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
+ __ lw(a1, MemOperand(sp, 2 * kPointerSize));
+ __ lw(a3, MemOperand(sp, 3 * kPointerSize));
// Set up pointer to last argument.
__ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
// a2: address of last argument (caller sp)
// a3: number of arguments (smi-tagged)
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
Label loop, entry;
__ jmp(&entry);
__ bind(&loop);
NullCallWrapper(), CALL_AS_METHOD);
}
- // Pop the function from the stack.
- // v0: result
- // sp[0]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ Pop();
-
// Restore context from the frame.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
// Invoke: Link this frame into the handler chain. There's only one
// handler block in this code object, so its index is 0.
__ bind(&invoke);
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the bal(&invoke) above, which
// restores all kCalleeSaved registers (including cp and fp) to their
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
Label slow, array, extra, check_if_double_array;
Label fast_object_with_map_check, fast_object_without_map_check;
Label fast_double_with_map_check, fast_double_without_map_check;
+ Label transition_smi_elements, finish_object_store, non_double_value;
+ Label transition_double_elements;
// Register usage.
Register value = a0;
Register key = a1;
Register receiver = a2;
- Register elements = a3; // Elements array of the receiver.
+ Register receiver_map = a3;
Register elements_map = t2;
- Register receiver_map = t3;
+ Register elements = t3; // Elements array of the receiver.
// t0 and t1 are used as general scratch registers.
// Check that the key is a smi.
__ mov(v0, value);
__ bind(&non_smi_value);
- // Escape to slow case when writing non-smi into smi-only array.
- __ CheckFastObjectElements(receiver_map, scratch_value, &slow);
+ // Escape to elements kind transition case.
+ __ CheckFastObjectElements(receiver_map, scratch_value,
+ &transition_smi_elements);
// Fast elements array, store the value to the elements backing store.
+ __ bind(&finish_object_store);
__ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
__ Addu(address, address, scratch_value);
key,
receiver,
elements,
+ a3,
t0,
t1,
t2,
- t3,
- &slow);
+ &transition_double_elements);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, value);
+
+ __ bind(&transition_smi_elements);
+ // Transition the array appropriately depending on the value type.
+ __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
+ __ Branch(&non_double_value, ne, t0, Operand(at));
+
+ // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ receiver_map,
+ t0,
+ &slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ t0,
+ &slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
+ // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ t0,
+ &slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
}
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
} else if (!(j.imm32_ & kHiMask)) {
ori(rd, zero_reg, j.imm32_);
} else if (!(j.imm32_ & kImm16Mask)) {
- lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+ lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
} else {
- lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+ lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
ori(rd, rd, (j.imm32_ & kImm16Mask));
}
} else if (MustUseReg(j.rmode_) || gen2instr) {
if (MustUseReg(j.rmode_)) {
RecordRelocInfo(j.rmode_, j.imm32_);
}
- // We need always the same number of instructions as we may need to patch
+ // We always need the same number of instructions as we may need to patch
// this code to load another value which may need 2 instructions to load.
- lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+ lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
ori(rd, rd, (j.imm32_ & kImm16Mask));
}
}
// ---------------------------------------------------------------------------
// Exception handling.
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
// For the JSEntry handler, we must preserve a0-a3 and s0.
// t1-t3 are available. We will build up the handler from the bottom by
- // pushing on the stack. First compute the state.
- unsigned state = StackHandler::OffsetField::encode(handler_index);
- if (try_location == IN_JAVASCRIPT) {
- state |= (type == TRY_CATCH_HANDLER)
- ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
- : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
- } else {
- ASSERT(try_location == IN_JS_ENTRY);
- state |= StackHandler::KindField::encode(StackHandler::ENTRY);
- }
-
+ // pushing on the stack.
// Set up the code object (t1) and the state (t2) for pushing.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
li(t1, Operand(CodeObject()));
li(t2, Operand(state));
// Push the frame pointer, context, state, and code object.
- if (try_location == IN_JAVASCRIPT) {
- MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
- } else {
+ if (kind == StackHandler::JS_ENTRY) {
ASSERT_EQ(Smi::FromInt(0), 0);
// The second zero_reg indicates no context.
// The first zero_reg is the NULL frame pointer.
// The operands are reversed to match the order of MultiPush/Pop.
Push(zero_reg, zero_reg, t2, t1);
+ } else {
+ MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
}
// Link the current handler as the next handler.
lw(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
- STATIC_ASSERT(StackHandler::ENTRY == 0);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
lw(a2, MemOperand(sp, StackHandlerConstants::kStateOffset));
And(a2, a2, Operand(StackHandler::KindField::kMask));
Branch(&fetch_next, ne, a2, Operand(zero_reg));
}
-void MacroAssembler::LoadGlobalInitialConstructedArrayMap(
+void MacroAssembler::LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match) {
+ // Load the global or builtins object from the current context.
+ lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+
+ // Check that the function's map is the same as the expected cached map.
+ int expected_index =
+ Context::GetContextMapIndexFromElementsKind(expected_kind);
+ lw(at, MemOperand(scratch, Context::SlotOffset(expected_index)));
+ Branch(no_map_match, ne, map_in_out, Operand(at));
+
+ // Use the transitioned cached map.
+ int trans_index =
+ Context::GetContextMapIndexFromElementsKind(transitioned_kind);
+ lw(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+}
+
+
+void MacroAssembler::LoadInitialArrayMap(
Register function_in, Register scratch, Register map_out) {
ASSERT(!function_in.is(map_out));
Label done;
lw(map_out, FieldMemOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- // Load the global or builtins object from the current context.
- lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
-
- // Check that the function's map is same as the cached map.
- lw(at, MemOperand(
- scratch, Context::SlotOffset(Context::SMI_JS_ARRAY_MAP_INDEX)));
- Branch(&done, ne, map_out, Operand(at));
-
- // Use the cached transitioned map.
- lw(map_out,
- MemOperand(scratch,
- Context::SlotOffset(Context::OBJECT_JS_ARRAY_MAP_INDEX)));
+ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ map_out,
+ scratch,
+ &done);
}
bind(&done);
}
void LoadContext(Register dst, int context_chain_length);
- // Load the initial map for new Arrays of a given type.
- void LoadGlobalInitialConstructedArrayMap(Register function_in,
- Register scratch,
- Register map_out);
+ // Conditionally load the cached Array transitioned map of type
+ // transitioned_kind from the global context if the map in register
+ // map_in_out is the cached Array map in the global context of
+ // expected_kind.
+ void LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match);
+
+ // Load the initial map for new Arrays from a JSFunction.
+ void LoadInitialArrayMap(Register function_in,
+ Register scratch,
+ Register map_out);
void LoadGlobalFunction(int index, Register function);
// Exception handling.
// Push a new try handler and link into try handler chain.
- void PushTryHandler(CodeLocation try_location,
- HandlerType type,
- int handler_index);
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
// Must preserve the result register.
__ Ret();
} else {
Label call_builtin;
- Register elements = a3;
- Register end_elements = t1;
- // Get the elements array of the object.
- __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ CheckMap(elements,
- v0,
- Heap::kFixedArrayMapRootIndex,
- &call_builtin,
- DONT_DO_SMI_CHECK);
-
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
+ Register elements = t2;
+ Register end_elements = t1;
+ // Get the elements array of the object.
+ __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ CheckMap(elements,
+ v0,
+ Heap::kFixedArrayMapRootIndex,
+ &call_builtin,
+ DONT_DO_SMI_CHECK);
+
// Get the array's length into v0 and calculate new length.
__ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ Addu(v0, v0, Operand(Smi::FromInt(argc)));
- // Get the element's length.
+ // Get the elements' length.
__ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
// Save new length.
__ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
__ bind(&with_write_barrier);
- __ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ CheckFastObjectElements(t2, t2, &call_builtin);
+ __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(a3, t3, ¬_fast_object);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(¬_fast_object);
+ __ CheckFastSmiOnlyElements(a3, t3, &call_builtin);
+ // receiver: the JSArray being pushed to
+ // a3: receiver map
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ a3,
+ t3,
+ &call_builtin);
+ __ mov(a2, receiver);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(a3, a3, &call_builtin);
+ }
// Save new length.
__ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(end_elements, elements, end_elements);
__ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
__ li(t3, Operand(new_space_allocation_top));
- __ lw(t2, MemOperand(t3));
- __ Branch(&call_builtin, ne, end_elements, Operand(t2));
+ __ lw(a3, MemOperand(t3));
+ __ Branch(&call_builtin, ne, end_elements, Operand(a3));
__ li(t5, Operand(new_space_allocation_limit));
__ lw(t5, MemOperand(t5));
- __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
- __ Branch(&call_builtin, hi, t2, Operand(t5));
+ __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
+ __ Branch(&call_builtin, hi, a3, Operand(t5));
// We fit and could grow elements.
// Update new_space_allocation_top.
- __ sw(t2, MemOperand(t3));
+ __ sw(a3, MemOperand(t3));
// Push the argument.
__ sw(a2, MemOperand(end_elements));
// Fill the rest with holes.
- __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
- __ sw(t2, MemOperand(end_elements, i * kPointerSize));
+ __ sw(a3, MemOperand(end_elements, i * kPointerSize));
}
// Update elements' and array's sizes.
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
+SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
+
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
+
+PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#endif
kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
+ kDontCrankshaft)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
}
-Smi* SharedFunctionInfo::deopt_counter() {
- return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
-}
-
-
-void SharedFunctionInfo::set_deopt_counter(Smi* value) {
- WRITE_FIELD(this, kDeoptCounterOffset, value);
-}
-
-
bool SharedFunctionInfo::is_compiled() {
return code() !=
Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
- (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
- descriptors->IsProperty(0)) {
- ASSERT(descriptors->GetType(0) == FIELD);
- Object* hidden_store =
- this->FastPropertyAt(descriptors->GetFieldIndex(0));
- return StringDictionary::cast(hidden_store);
+ (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
+ if (descriptors->GetType(0) == FIELD) {
+ Object* hidden_store =
+ this->FastPropertyAt(descriptors->GetFieldIndex(0));
+ return StringDictionary::cast(hidden_store);
+ } else {
+ ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
+ descriptors->GetType(0) == MAP_TRANSITION);
+ }
}
} else {
PropertyAttributes attributes;
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
- (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
- descriptors->IsProperty(0)) {
- ASSERT(descriptors->GetType(0) == FIELD);
- this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
- return this;
+ (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
+ if (descriptors->GetType(0) == FIELD) {
+ this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
+ return this;
+ } else {
+ ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
+ descriptors->GetType(0) == MAP_TRANSITION);
+ }
}
}
MaybeObject* store_result =
}
-int Map::NumberOfDescribedProperties() {
+int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
int result = 0;
DescriptorArray* descs = instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->IsProperty(i)) result++;
+ PropertyDetails details(descs->GetDetails(i));
+ if (descs->IsProperty(i) && (details.attributes() & filter) == 0) {
+ result++;
+ }
}
return result;
}
for (int i = 0; i < maps_->length(); ++i) {
bool match_found = false;
for (int j = 0; j < other_maps.length(); ++j) {
- if (maps_->at(i)->EquivalentTo(*other_maps.at(j))) {
+ if (*(maps_->at(i)) == *(other_maps.at(j))) {
match_found = true;
break;
}
}
+static bool InsertionPointFound(String* key1, String* key2) {
+ return key1->Hash() > key2->Hash() || key1 == key2;
+}
+
+
MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
TransitionFlag transition_flag) {
// Transitions are only kept when inserting another transition.
// Copy the descriptors, filtering out transitions and null descriptors,
// and inserting or replacing a descriptor.
- uint32_t descriptor_hash = descriptor->GetKey()->Hash();
- int from_index = 0;
int to_index = 0;
-
- for (; from_index < number_of_descriptors(); from_index++) {
- String* key = GetKey(from_index);
- if (key->Hash() > descriptor_hash || key == descriptor->GetKey()) {
- break;
+ int insertion_index = -1;
+ int from_index = 0;
+ while (from_index < number_of_descriptors()) {
+ if (insertion_index < 0 &&
+ InsertionPointFound(GetKey(from_index), descriptor->GetKey())) {
+ insertion_index = to_index++;
+ if (replacing) from_index++;
+ } else {
+ if (!(IsNullDescriptor(from_index) ||
+ (remove_transitions && IsTransitionOnly(from_index)))) {
+ new_descriptors->CopyFrom(to_index++, this, from_index, witness);
+ }
+ from_index++;
}
- if (IsNullDescriptor(from_index)) continue;
- if (remove_transitions && IsTransitionOnly(from_index)) continue;
- new_descriptors->CopyFrom(to_index++, this, from_index, witness);
- }
-
- new_descriptors->Set(to_index++, descriptor, witness);
- if (replacing) from_index++;
-
- for (; from_index < number_of_descriptors(); from_index++) {
- if (IsNullDescriptor(from_index)) continue;
- if (remove_transitions && IsTransitionOnly(from_index)) continue;
- new_descriptors->CopyFrom(to_index++, this, from_index, witness);
}
+ if (insertion_index < 0) insertion_index = to_index++;
+ new_descriptors->Set(insertion_index, descriptor, witness);
ASSERT(to_index == new_descriptors->number_of_descriptors());
SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates());
// not be allocated.
// Compute the size of the map transition entries to be removed.
- int num_removed = 0;
+ int new_number_of_descriptors = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
- if (!IsProperty(i)) num_removed++;
+ if (IsProperty(i)) new_number_of_descriptors++;
}
// Allocate the new descriptor array.
DescriptorArray* new_descriptors;
- { MaybeObject* maybe_result = Allocate(number_of_descriptors() - num_removed);
+ { MaybeObject* maybe_result = Allocate(new_number_of_descriptors);
if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
return maybe_result;
}
}
-Object* SharedFunctionInfo::GetSourceCode() {
- Isolate* isolate = GetIsolate();
- if (!HasSourceCode()) return isolate->heap()->undefined_value();
- HandleScope scope(isolate);
- Object* source = Script::cast(script())->source();
- return *SubString(Handle<String>(String::cast(source), isolate),
- start_position(), end_position());
+Handle<Object> SharedFunctionInfo::GetSourceCode() {
+ if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
+ Handle<String> source(String::cast(Script::cast(script())->source()));
+ return SubString(source, start_position(), end_position());
}
int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
- if (HasFastProperties()) {
- DescriptorArray* descs = map()->instance_descriptors();
- int result = 0;
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details(descs->GetDetails(i));
- if (details.IsProperty() && (details.attributes() & filter) == 0) {
- result++;
- }
- }
- return result;
- } else {
- return property_dictionary()->NumberOfElementsFilterAttributes(filter);
- }
-}
-
-
-int JSObject::NumberOfEnumProperties() {
- return NumberOfLocalProperties(static_cast<PropertyAttributes>(DONT_ENUM));
+ return HasFastProperties() ?
+ map()->NumberOfDescribedProperties(filter) :
+ property_dictionary()->NumberOfElementsFilterAttributes(filter);
}
// purpose of this function is to provide reflection information for the object
// mirrors.
void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) {
- ASSERT(storage->length() >= (NumberOfLocalProperties(NONE) - index));
+ ASSERT(storage->length() >= (NumberOfLocalProperties() - index));
if (HasFastProperties()) {
DescriptorArray* descs = map()->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
Handle<String> key,
Handle<Object> value);
// Returns a failure if a GC is required.
- MaybeObject* SetHiddenProperty(String* key, Object* value);
+ MUST_USE_RESULT MaybeObject* SetHiddenProperty(String* key, Object* value);
// Gets the value of a hidden property with the given key. Returns undefined
// if the property doesn't exist (or if called on a detached proxy),
// otherwise returns the value set for the key.
// Returns the number of properties on this object filtering out properties
// with the specified attributes (ignoring interceptors).
- int NumberOfLocalProperties(PropertyAttributes filter);
- // Returns the number of enumerable properties (ignoring interceptors).
- int NumberOfEnumProperties();
+ int NumberOfLocalProperties(PropertyAttributes filter = NONE);
// Fill in details for properties into storage starting at the specified
// index.
void GetLocalPropertyNames(FixedArray* storage, int index);
// Returns the next free property index (only valid for FAST MODE).
int NextFreePropertyIndex();
- // Returns the number of properties described in instance_descriptors.
- int NumberOfDescribedProperties();
+ // Returns the number of properties described in instance_descriptors
+ // filtering out properties with the specified attributes.
+ int NumberOfDescribedProperties(PropertyAttributes filter = NONE);
// Casting.
static inline Map* cast(Object* obj);
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);
- // Returns true if this map and |other| describe equivalent objects.
- // The "shared" flags of both this map and |other| are ignored.
- bool EquivalentTo(Map* other) {
- return EquivalentToForNormalization(other, KEEP_INOBJECT_PROPERTIES);
- }
-
// Returns the contents of this map's descriptor array for the given string.
// May return NULL. |safe_to_add_transition| is set to false and NULL
// is returned if adding transitions is not allowed.
// A counter used to determine when to stress the deoptimizer with a
// deopt.
- inline Smi* deopt_counter();
- inline void set_deopt_counter(Smi* counter);
+ inline int deopt_counter();
+ inline void set_deopt_counter(int counter);
+
+ inline int profiler_ticks();
+ inline void set_profiler_ticks(int ticks);
+
+ inline int ast_node_count();
+ inline void set_ast_node_count(int count);
// Add information on assignments of the form this.x = ...;
void SetThisPropertyAssignmentsInfo(
// through the API, which does not change this flag).
DECL_BOOLEAN_ACCESSORS(is_anonymous)
+ // Indicates that the function cannot be crankshafted.
+ DECL_BOOLEAN_ACCESSORS(dont_crankshaft)
+
+ // Indicates that the function cannot be inlined.
+ DECL_BOOLEAN_ACCESSORS(dont_inline)
+
// Indicates whether or not the code in the shared function support
// deoptimization.
inline bool has_deoptimization_support();
// [source code]: Source code for the function.
bool HasSourceCode();
- Object* GetSourceCode();
+ Handle<Object> GetSourceCode();
inline int opt_count();
inline void set_opt_count(int opt_count);
kInferredNameOffset + kPointerSize;
static const int kThisPropertyAssignmentsOffset =
kInitialMapOffset + kPointerSize;
- static const int kDeoptCounterOffset =
+ static const int kProfilerTicksOffset =
kThisPropertyAssignmentsOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
- kDeoptCounterOffset + kPointerSize;
+ kProfilerTicksOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
kCompilerHintsOffset + kPointerSize;
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kPointerSize;
+ static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize;
+ static const int kDeoptCounterOffset =
+ kAstNodeCountOffset + kPointerSize;
// Total size.
- static const int kSize = kOptCountOffset + kPointerSize;
+ static const int kSize = kDeoptCounterOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
// is to allow iteration without maps decoding during
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
- kDeoptCounterOffset + kPointerSize;
+ kProfilerTicksOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kIntSize;
+ static const int kAstNodeCountOffset = kOptCountOffset + kIntSize;
+ static const int kDeoptCounterOffset = kAstNodeCountOffset + kIntSize;
+
// Total size.
- static const int kSize = kOptCountOffset + kIntSize;
+ static const int kSize = kDeoptCounterOffset + kIntSize;
#endif
kBoundFunction,
kIsAnonymous,
kNameShouldPrintAsAnonymous,
+ kDontCrankshaft,
+ kDontInline,
kCompilerHintsCount // Pseudo entry
};
};
-class Parser::FunctionState BASE_EMBEDDED {
- public:
- FunctionState(Parser* parser, Scope* scope, Isolate* isolate);
- ~FunctionState();
-
- int NextMaterializedLiteralIndex() {
- return next_materialized_literal_index_++;
- }
- int materialized_literal_count() {
- return next_materialized_literal_index_ - JSFunction::kLiteralsPrefixSize;
- }
-
- int NextHandlerIndex() { return next_handler_index_++; }
- int handler_count() { return next_handler_index_; }
-
- void SetThisPropertyAssignmentInfo(
- bool only_simple_this_property_assignments,
- Handle<FixedArray> this_property_assignments) {
- only_simple_this_property_assignments_ =
- only_simple_this_property_assignments;
- this_property_assignments_ = this_property_assignments;
- }
- bool only_simple_this_property_assignments() {
- return only_simple_this_property_assignments_;
- }
- Handle<FixedArray> this_property_assignments() {
- return this_property_assignments_;
- }
-
- void AddProperty() { expected_property_count_++; }
- int expected_property_count() { return expected_property_count_; }
-
- private:
- // Used to assign an index to each literal that needs materialization in
- // the function. Includes regexp literals, and boilerplate for object and
- // array literals.
- int next_materialized_literal_index_;
-
- // Used to assign a per-function index to try and catch handlers.
- int next_handler_index_;
-
- // Properties count estimation.
- int expected_property_count_;
-
- // Keeps track of assignments to properties of this. Used for
- // optimizing constructors.
- bool only_simple_this_property_assignments_;
- Handle<FixedArray> this_property_assignments_;
-
- Parser* parser_;
- FunctionState* outer_function_state_;
- Scope* outer_scope_;
- unsigned saved_ast_node_id_;
-};
-
-
Parser::FunctionState::FunctionState(Parser* parser,
Scope* scope,
Isolate* isolate)
parser_(parser),
outer_function_state_(parser->current_function_state_),
outer_scope_(parser->top_scope_),
- saved_ast_node_id_(isolate->ast_node_id()) {
+ saved_ast_node_id_(isolate->ast_node_id()),
+ factory_(isolate) {
parser->top_scope_ = scope;
parser->current_function_state_ = this;
isolate->set_ast_node_id(AstNode::kDeclarationsId + 1);
fni_(NULL),
allow_natives_syntax_((parser_flags & kAllowNativesSyntax) != 0),
allow_lazy_((parser_flags & kAllowLazy) != 0),
+ allow_modules_((parser_flags & kAllowModules) != 0),
stack_overflow_(false),
parenthesized_function_(false) {
AstNode::ResetIds();
if ((parser_flags & kLanguageModeMask) == EXTENDED_MODE) {
scanner().SetHarmonyScoping(true);
}
+ if ((parser_flags & kAllowModules) != 0) {
+ scanner().SetHarmonyModules(true);
+ }
}
}
if (ok) {
- result = new(zone()) FunctionLiteral(
- isolate(),
+ result = factory()->NewFunctionLiteral(
no_name,
top_scope_,
body,
function_state.only_simple_this_property_assignments(),
function_state.this_property_assignments(),
0,
+ false, // Does not have duplicate parameters.
FunctionLiteral::ANONYMOUS_EXPRESSION,
- false); // Does not have duplicate parameters.
+ false); // Top-level literal doesn't count for the AST's properties.
+ result->set_ast_properties(factory()->visitor()->ast_properties());
} else if (stack_overflow_) {
isolate()->StackOverflow();
}
case Token::SEMICOLON:
Next();
- return EmptyStatement();
+ return factory()->NewEmptyStatement();
case Token::IF:
stmt = ParseIfStatement(labels, ok);
// one must take great care not to treat it as a
// fall-through. It is much easier just to wrap the entire
// try-statement in a statement block and put the labels there
- Block* result = new(zone()) Block(isolate(), labels, 1, false);
+ Block* result = factory()->NewBlock(labels, 1, false);
Target target(&this->target_stack_, result);
TryStatement* statement = ParseTryStatement(CHECK_OK);
if (statement) {
// a performance issue since it may lead to repeated
// Runtime::DeclareContextSlot() calls.
VariableProxy* proxy = declaration_scope->NewUnresolved(
- name, scanner().location().beg_pos);
+ factory(), name, scanner().location().beg_pos);
declaration_scope->AddDeclaration(
- new(zone()) Declaration(proxy, mode, fun, top_scope_));
+ factory()->NewVariableDeclaration(proxy, mode, fun, top_scope_));
if ((mode == CONST || mode == CONST_HARMONY) &&
declaration_scope->is_global_scope()) {
// introduced dynamically when we meet their declarations, whereas
// other functions are set up when entering the surrounding scope.
SharedFunctionInfoLiteral* lit =
- new(zone()) SharedFunctionInfoLiteral(isolate(), shared);
+ factory()->NewSharedFunctionInfoLiteral(shared);
VariableProxy* var = Declare(name, VAR, NULL, true, CHECK_OK);
- return new(zone()) ExpressionStatement(new(zone()) Assignment(
- isolate(), Token::INIT_VAR, var, lit, RelocInfo::kNoPosition));
+ return factory()->NewExpressionStatement(
+ factory()->NewAssignment(
+ Token::INIT_VAR, var, lit, RelocInfo::kNoPosition));
}
// initial value upon entering the corresponding scope.
VariableMode mode = is_extended_mode() ? LET : VAR;
Declare(name, mode, fun, true, CHECK_OK);
- return EmptyStatement();
+ return factory()->NewEmptyStatement();
}
// (ECMA-262, 3rd, 12.2)
//
// Construct block expecting 16 statements.
- Block* result = new(zone()) Block(isolate(), labels, 16, false);
+ Block* result = factory()->NewBlock(labels, 16, false);
Target target(&this->target_stack_, result);
Expect(Token::LBRACE, CHECK_OK);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
// '{' SourceElement* '}'
// Construct block expecting 16 statements.
- Block* body = new(zone()) Block(isolate(), labels, 16, false);
+ Block* body = factory()->NewBlock(labels, 16, false);
Scope* block_scope = NewScope(top_scope_, BLOCK_SCOPE);
// Parse the statements and collect escaping labels.
// If the variable declaration declares exactly one non-const
-// variable, then *var is set to that variable. In all other cases,
-// *var is untouched; in particular, it is the caller's responsibility
+// variable, then *out is set to that variable. In all other cases,
+// *out is untouched; in particular, it is the caller's responsibility
// to initialize it properly. This mechanism is used for the parsing
// of 'for-in' loops.
Block* Parser::ParseVariableDeclarations(
// is inside an initializer block, it is ignored.
//
// Create new block with one expected declaration.
- Block* block = new(zone()) Block(isolate(), NULL, 1, true);
+ Block* block = factory()->NewBlock(NULL, 1, true);
int nvars = 0; // the number of variables declared
Handle<String> name;
do {
// Compute the arguments for the runtime call.
ZoneList<Expression*>* arguments = new(zone()) ZoneList<Expression*>(3);
// We have at least 1 parameter.
- arguments->Add(NewLiteral(name));
+ arguments->Add(factory()->NewLiteral(name));
CallRuntime* initialize;
if (is_const) {
// and add it to the initialization statement block.
// Note that the function does different things depending on
// the number of arguments (1 or 2).
- initialize =
- new(zone()) CallRuntime(
- isolate(),
- isolate()->factory()->InitializeConstGlobal_symbol(),
- Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
- arguments);
+ initialize = factory()->NewCallRuntime(
+ isolate()->factory()->InitializeConstGlobal_symbol(),
+ Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
+ arguments);
} else {
// Add strict mode.
// We may want to pass singleton to avoid Literal allocations.
LanguageMode language_mode = initialization_scope->language_mode();
- arguments->Add(NewNumberLiteral(language_mode));
+ arguments->Add(factory()->NewNumberLiteral(language_mode));
// Be careful not to assign a value to the global variable if
// we're in a with. The initialization value should not
// and add it to the initialization statement block.
// Note that the function does different things depending on
// the number of arguments (2 or 3).
- initialize =
- new(zone()) CallRuntime(
- isolate(),
- isolate()->factory()->InitializeVarGlobal_symbol(),
- Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
- arguments);
+ initialize = factory()->NewCallRuntime(
+ isolate()->factory()->InitializeVarGlobal_symbol(),
+ Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
+ arguments);
}
- block->AddStatement(new(zone()) ExpressionStatement(initialize));
+ block->AddStatement(factory()->NewExpressionStatement(initialize));
} else if (needs_init) {
// Constant initializations always assign to the declared constant which
// is always at the function scope level. This is only relevant for
ASSERT(proxy->var() != NULL);
ASSERT(value != NULL);
Assignment* assignment =
- new(zone()) Assignment(isolate(), init_op, proxy, value, position);
- block->AddStatement(new(zone()) ExpressionStatement(assignment));
+ factory()->NewAssignment(init_op, proxy, value, position);
+ block->AddStatement(factory()->NewExpressionStatement(assignment));
value = NULL;
}
// 'var' initializations are simply assignments (with all the consequences
// if they are inside a 'with' statement - they may change a 'with' object
// property).
- VariableProxy* proxy = initialization_scope->NewUnresolved(name);
+ VariableProxy* proxy =
+ initialization_scope->NewUnresolved(factory(), name);
Assignment* assignment =
- new(zone()) Assignment(isolate(), init_op, proxy, value, position);
- block->AddStatement(new(zone()) ExpressionStatement(assignment));
+ factory()->NewAssignment(init_op, proxy, value, position);
+ block->AddStatement(factory()->NewExpressionStatement(assignment));
}
if (fni_ != NULL) fni_->Leave();
// Parsed expression statement.
ExpectSemicolon(CHECK_OK);
- return new(zone()) ExpressionStatement(expr);
+ return factory()->NewExpressionStatement(expr);
}
Next();
else_statement = ParseStatement(labels, CHECK_OK);
} else {
- else_statement = EmptyStatement();
+ else_statement = factory()->NewEmptyStatement();
}
- return new(zone()) IfStatement(
- isolate(), condition, then_statement, else_statement);
+ return factory()->NewIfStatement(condition, then_statement, else_statement);
}
return NULL;
}
ExpectSemicolon(CHECK_OK);
- return new(zone()) ContinueStatement(target);
+ return factory()->NewContinueStatement(target);
}
// Parse labeled break statements that target themselves into
// empty statements, e.g. 'l1: l2: l3: break l2;'
if (!label.is_null() && ContainsLabel(labels, label)) {
- return EmptyStatement();
+ ExpectSemicolon(CHECK_OK);
+ return factory()->NewEmptyStatement();
}
BreakableStatement* target = NULL;
target = LookupBreakTarget(label, CHECK_OK);
return NULL;
}
ExpectSemicolon(CHECK_OK);
- return new(zone()) BreakStatement(target);
+ return factory()->NewBreakStatement(target);
}
tok == Token::RBRACE ||
tok == Token::EOS) {
ExpectSemicolon(CHECK_OK);
- result = new(zone()) ReturnStatement(GetLiteralUndefined());
+ result = factory()->NewReturnStatement(GetLiteralUndefined());
} else {
Expression* expr = ParseExpression(true, CHECK_OK);
ExpectSemicolon(CHECK_OK);
- result = new(zone()) ReturnStatement(expr);
+ result = factory()->NewReturnStatement(expr);
}
// An ECMAScript program is considered syntactically incorrect if it
declaration_scope->is_eval_scope()) {
Handle<String> type = isolate()->factory()->illegal_return_symbol();
Expression* throw_error = NewThrowSyntaxError(type, Handle<Object>::null());
- return new(zone()) ExpressionStatement(throw_error);
+ return factory()->NewExpressionStatement(throw_error);
}
return result;
}
stmt = ParseStatement(labels, CHECK_OK);
with_scope->set_end_position(scanner().location().end_pos);
}
- return new(zone()) WithStatement(expr, stmt);
+ return factory()->NewWithStatement(expr, stmt);
}
// SwitchStatement ::
// 'switch' '(' Expression ')' '{' CaseClause* '}'
- SwitchStatement* statement = new(zone()) SwitchStatement(isolate(), labels);
+ SwitchStatement* statement = factory()->NewSwitchStatement(labels);
Target target(&this->target_stack_, statement);
Expect(Token::SWITCH, CHECK_OK);
Expression* exception = ParseExpression(true, CHECK_OK);
ExpectSemicolon(CHECK_OK);
- return new(zone()) ExpressionStatement(
- new(zone()) Throw(isolate(), exception, pos));
+ return factory()->NewExpressionStatement(factory()->NewThrow(exception, pos));
}
// If we have both, create an inner try/catch.
ASSERT(catch_scope != NULL && catch_variable != NULL);
int index = current_function_state_->NextHandlerIndex();
- TryCatchStatement* statement = new(zone()) TryCatchStatement(index,
- try_block,
- catch_scope,
- catch_variable,
- catch_block);
+ TryCatchStatement* statement = factory()->NewTryCatchStatement(
+ index, try_block, catch_scope, catch_variable, catch_block);
statement->set_escaping_targets(try_collector.targets());
- try_block = new(zone()) Block(isolate(), NULL, 1, false);
+ try_block = factory()->NewBlock(NULL, 1, false);
try_block->AddStatement(statement);
catch_block = NULL; // Clear to indicate it's been handled.
}
ASSERT(finally_block == NULL);
ASSERT(catch_scope != NULL && catch_variable != NULL);
int index = current_function_state_->NextHandlerIndex();
- result = new(zone()) TryCatchStatement(index,
- try_block,
- catch_scope,
- catch_variable,
- catch_block);
+ result = factory()->NewTryCatchStatement(
+ index, try_block, catch_scope, catch_variable, catch_block);
} else {
ASSERT(finally_block != NULL);
int index = current_function_state_->NextHandlerIndex();
- result = new(zone()) TryFinallyStatement(index,
- try_block,
- finally_block);
+ result = factory()->NewTryFinallyStatement(index, try_block, finally_block);
// Combine the jump targets of the try block and the possible catch block.
try_collector.targets()->AddAll(*catch_collector.targets());
}
// DoStatement ::
// 'do' Statement 'while' '(' Expression ')' ';'
- DoWhileStatement* loop = new(zone()) DoWhileStatement(isolate(), labels);
+ DoWhileStatement* loop = factory()->NewDoWhileStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::DO, CHECK_OK);
// WhileStatement ::
// 'while' '(' Expression ')' Statement
- WhileStatement* loop = new(zone()) WhileStatement(isolate(), labels);
+ WhileStatement* loop = factory()->NewWhileStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::WHILE, CHECK_OK);
ParseVariableDeclarations(kForStatement, NULL, &name, CHECK_OK);
if (peek() == Token::IN && !name.is_null()) {
- VariableProxy* each = top_scope_->NewUnresolved(name);
- ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
+ VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
+ ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
Statement* body = ParseStatement(NULL, CHECK_OK);
loop->Initialize(each, enumerable, body);
- Block* result = new(zone()) Block(isolate(), NULL, 2, false);
+ Block* result = factory()->NewBlock(NULL, 2, false);
result->AddStatement(variable_statement);
result->AddStatement(loop);
top_scope_ = saved_scope;
// TODO(keuchel): Move the temporary variable to the block scope, after
// implementing stack allocated block scoped variables.
Variable* temp = top_scope_->DeclarationScope()->NewTemporary(name);
- VariableProxy* temp_proxy = new(zone()) VariableProxy(isolate(), temp);
- VariableProxy* each = top_scope_->NewUnresolved(name);
- ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
+ VariableProxy* temp_proxy = factory()->NewVariableProxy(temp);
+ VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
+ ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
Expect(Token::RPAREN, CHECK_OK);
Statement* body = ParseStatement(NULL, CHECK_OK);
- Block* body_block = new(zone()) Block(isolate(), NULL, 3, false);
- Assignment* assignment = new(zone()) Assignment(isolate(),
- Token::ASSIGN,
- each,
- temp_proxy,
- RelocInfo::kNoPosition);
+ Block* body_block = factory()->NewBlock(NULL, 3, false);
+ Assignment* assignment = factory()->NewAssignment(
+ Token::ASSIGN, each, temp_proxy, RelocInfo::kNoPosition);
Statement* assignment_statement =
- new(zone()) ExpressionStatement(assignment);
+ factory()->NewExpressionStatement(assignment);
body_block->AddStatement(variable_statement);
body_block->AddStatement(assignment_statement);
body_block->AddStatement(body);
isolate()->factory()->invalid_lhs_in_for_in_symbol();
expression = NewThrowReferenceError(type);
}
- ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
+ ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
return loop;
} else {
- init = new(zone()) ExpressionStatement(expression);
+ init = factory()->NewExpressionStatement(expression);
}
}
}
// Standard 'for' loop
- ForStatement* loop = new(zone()) ForStatement(isolate(), labels);
+ ForStatement* loop = factory()->NewForStatement(labels);
Target target(&this->target_stack_, loop);
// Parsed initializer at this point.
Statement* next = NULL;
if (peek() != Token::RPAREN) {
Expression* exp = ParseExpression(true, CHECK_OK);
- next = new(zone()) ExpressionStatement(exp);
+ next = factory()->NewExpressionStatement(exp);
}
Expect(Token::RPAREN, CHECK_OK);
// for (; c; n) b
// }
ASSERT(init != NULL);
- Block* result = new(zone()) Block(isolate(), NULL, 2, false);
+ Block* result = factory()->NewBlock(NULL, 2, false);
result->AddStatement(init);
result->AddStatement(loop);
result->set_block_scope(for_scope);
Expect(Token::COMMA, CHECK_OK);
int position = scanner().location().beg_pos;
Expression* right = ParseAssignmentExpression(accept_IN, CHECK_OK);
- result = new(zone()) BinaryOperation(
- isolate(), Token::COMMA, result, right, position);
+ result =
+ factory()->NewBinaryOperation(Token::COMMA, result, right, position);
}
return result;
}
fni_->Leave();
}
- return new(zone()) Assignment(isolate(), op, expression, right, pos);
+ return factory()->NewAssignment(op, expression, right, pos);
}
Expect(Token::COLON, CHECK_OK);
int right_position = scanner().peek_location().beg_pos;
Expression* right = ParseAssignmentExpression(accept_IN, CHECK_OK);
- return new(zone()) Conditional(
- isolate(), expression, left, right, left_position, right_position);
+ return factory()->NewConditional(
+ expression, left, right, left_position, right_position);
}
switch (op) {
case Token::ADD:
- x = NewNumberLiteral(x_val + y_val);
+ x = factory()->NewNumberLiteral(x_val + y_val);
continue;
case Token::SUB:
- x = NewNumberLiteral(x_val - y_val);
+ x = factory()->NewNumberLiteral(x_val - y_val);
continue;
case Token::MUL:
- x = NewNumberLiteral(x_val * y_val);
+ x = factory()->NewNumberLiteral(x_val * y_val);
continue;
case Token::DIV:
- x = NewNumberLiteral(x_val / y_val);
+ x = factory()->NewNumberLiteral(x_val / y_val);
continue;
- case Token::BIT_OR:
- x = NewNumberLiteral(DoubleToInt32(x_val) | DoubleToInt32(y_val));
+ case Token::BIT_OR: {
+ int value = DoubleToInt32(x_val) | DoubleToInt32(y_val);
+ x = factory()->NewNumberLiteral(value);
continue;
- case Token::BIT_AND:
- x = NewNumberLiteral(DoubleToInt32(x_val) & DoubleToInt32(y_val));
+ }
+ case Token::BIT_AND: {
+ int value = DoubleToInt32(x_val) & DoubleToInt32(y_val);
+ x = factory()->NewNumberLiteral(value);
continue;
- case Token::BIT_XOR:
- x = NewNumberLiteral(DoubleToInt32(x_val) ^ DoubleToInt32(y_val));
+ }
+ case Token::BIT_XOR: {
+ int value = DoubleToInt32(x_val) ^ DoubleToInt32(y_val);
+ x = factory()->NewNumberLiteral(value);
continue;
+ }
case Token::SHL: {
int value = DoubleToInt32(x_val) << (DoubleToInt32(y_val) & 0x1f);
- x = NewNumberLiteral(value);
+ x = factory()->NewNumberLiteral(value);
continue;
}
case Token::SHR: {
uint32_t shift = DoubleToInt32(y_val) & 0x1f;
uint32_t value = DoubleToUint32(x_val) >> shift;
- x = NewNumberLiteral(value);
+ x = factory()->NewNumberLiteral(value);
continue;
}
case Token::SAR: {
uint32_t shift = DoubleToInt32(y_val) & 0x1f;
int value = ArithmeticShiftRight(DoubleToInt32(x_val), shift);
- x = NewNumberLiteral(value);
+ x = factory()->NewNumberLiteral(value);
continue;
}
default:
case Token::NE_STRICT: cmp = Token::EQ_STRICT; break;
default: break;
}
- x = new(zone()) CompareOperation(isolate(), cmp, x, y, position);
+ x = factory()->NewCompareOperation(cmp, x, y, position);
if (cmp != op) {
// The comparison was negated - add a NOT.
- x = new(zone()) UnaryOperation(isolate(), Token::NOT, x, position);
+ x = factory()->NewUnaryOperation(Token::NOT, x, position);
}
} else {
// We have a "normal" binary operation.
- x = new(zone()) BinaryOperation(isolate(), op, x, y, position);
+ x = factory()->NewBinaryOperation(op, x, y, position);
}
}
}
// Convert the literal to a boolean condition and negate it.
bool condition = literal->ToBoolean()->IsTrue();
Handle<Object> result(isolate()->heap()->ToBoolean(!condition));
- return NewLiteral(result);
+ return factory()->NewLiteral(result);
} else if (literal->IsNumber()) {
// Compute some expressions involving only number literals.
double value = literal->Number();
case Token::ADD:
return expression;
case Token::SUB:
- return NewNumberLiteral(-value);
+ return factory()->NewNumberLiteral(-value);
case Token::BIT_NOT:
- return NewNumberLiteral(~DoubleToInt32(value));
+ return factory()->NewNumberLiteral(~DoubleToInt32(value));
default:
break;
}
}
}
- return new(zone()) UnaryOperation(isolate(), op, expression, position);
+ return factory()->NewUnaryOperation(op, expression, position);
} else if (Token::IsCountOp(op)) {
op = Next();
MarkAsLValue(expression);
int position = scanner().location().beg_pos;
- return new(zone()) CountOperation(isolate(),
- op,
- true /* prefix */,
- expression,
- position);
+ return factory()->NewCountOperation(op,
+ true /* prefix */,
+ expression,
+ position);
} else {
return ParsePostfixExpression(ok);
Token::Value next = Next();
int position = scanner().location().beg_pos;
expression =
- new(zone()) CountOperation(isolate(),
- next,
- false /* postfix */,
- expression,
- position);
+ factory()->NewCountOperation(next,
+ false /* postfix */,
+ expression,
+ position);
}
return expression;
}
Consume(Token::LBRACK);
int pos = scanner().location().beg_pos;
Expression* index = ParseExpression(true, CHECK_OK);
- result = new(zone()) Property(isolate(), result, index, pos);
+ result = factory()->NewProperty(result, index, pos);
Expect(Token::RBRACK, CHECK_OK);
break;
}
callee->IsVariable(isolate()->factory()->eval_symbol())) {
top_scope_->DeclarationScope()->RecordEvalCall();
}
- result = NewCall(result, args, pos);
+ result = factory()->NewCall(result, args, pos);
break;
}
Consume(Token::PERIOD);
int pos = scanner().location().beg_pos;
Handle<String> name = ParseIdentifierName(CHECK_OK);
- result = new(zone()) Property(isolate(),
- result,
- NewLiteral(name),
- pos);
+ result =
+ factory()->NewProperty(result, factory()->NewLiteral(name), pos);
if (fni_ != NULL) fni_->PushLiteralName(name);
break;
}
if (!stack->is_empty()) {
int last = stack->pop();
- result = new(zone()) CallNew(isolate(),
- result,
- new(zone()) ZoneList<Expression*>(0),
- last);
+ result = factory()->NewCallNew(
+ result, new(zone()) ZoneList<Expression*>(0), last);
}
return result;
}
Consume(Token::LBRACK);
int pos = scanner().location().beg_pos;
Expression* index = ParseExpression(true, CHECK_OK);
- result = new(zone()) Property(isolate(), result, index, pos);
+ result = factory()->NewProperty(result, index, pos);
if (fni_ != NULL) {
if (index->IsPropertyName()) {
fni_->PushLiteralName(index->AsLiteral()->AsPropertyName());
Consume(Token::PERIOD);
int pos = scanner().location().beg_pos;
Handle<String> name = ParseIdentifierName(CHECK_OK);
- result = new(zone()) Property(isolate(),
- result,
- NewLiteral(name),
- pos);
+ result =
+ factory()->NewProperty(result, factory()->NewLiteral(name), pos);
if (fni_ != NULL) fni_->PushLiteralName(name);
break;
}
// Consume one of the new prefixes (already parsed).
ZoneList<Expression*>* args = ParseArguments(CHECK_OK);
int last = stack->pop();
- result = new(zone()) CallNew(isolate(), result, args, last);
+ result = factory()->NewCallNew(result, args, last);
break;
}
default:
Expect(Token::DEBUGGER, CHECK_OK);
ExpectSemicolon(CHECK_OK);
- return new(zone()) DebuggerStatement();
+ return factory()->NewDebuggerStatement();
}
switch (peek()) {
case Token::THIS: {
Consume(Token::THIS);
- result = new(zone()) VariableProxy(isolate(), top_scope_->receiver());
+ result = factory()->NewVariableProxy(top_scope_->receiver());
break;
}
case Token::NULL_LITERAL:
Consume(Token::NULL_LITERAL);
- result = new(zone()) Literal(
- isolate(), isolate()->factory()->null_value());
+ result = factory()->NewLiteral(isolate()->factory()->null_value());
break;
case Token::TRUE_LITERAL:
Consume(Token::TRUE_LITERAL);
- result = new(zone()) Literal(
- isolate(), isolate()->factory()->true_value());
+ result = factory()->NewLiteral(isolate()->factory()->true_value());
break;
case Token::FALSE_LITERAL:
Consume(Token::FALSE_LITERAL);
- result = new(zone()) Literal(
- isolate(), isolate()->factory()->false_value());
+ result = factory()->NewLiteral(isolate()->factory()->false_value());
break;
case Token::IDENTIFIER:
case Token::FUTURE_STRICT_RESERVED_WORD: {
Handle<String> name = ParseIdentifier(CHECK_OK);
if (fni_ != NULL) fni_->PushVariableName(name);
- result = top_scope_->NewUnresolved(name, scanner().location().beg_pos);
+ result = top_scope_->NewUnresolved(
+ factory(), name, scanner().location().beg_pos);
break;
}
double value = StringToDouble(isolate()->unicode_cache(),
scanner().literal_ascii_string(),
ALLOW_HEX | ALLOW_OCTALS);
- result = NewNumberLiteral(value);
+ result = factory()->NewNumberLiteral(value);
break;
}
case Token::STRING: {
Consume(Token::STRING);
Handle<String> symbol = GetSymbol(CHECK_OK);
- result = NewLiteral(symbol);
+ result = factory()->NewLiteral(symbol);
if (fni_ != NULL) fni_->PushLiteralName(symbol);
break;
}
literals->set(0, Smi::FromInt(elements_kind));
literals->set(1, *element_values);
- return new(zone()) ArrayLiteral(
- isolate(), literals, values, literal_index, is_simple, depth);
+ return factory()->NewArrayLiteral(
+ literals, values, literal_index, is_simple, depth);
}
CHECK_OK);
 // Allow any number of parameters for compatibility with JSC.
// Specification only allows zero parameters for get and one for set.
- ObjectLiteral::Property* property =
- new(zone()) ObjectLiteral::Property(is_getter, value);
- return property;
+ return factory()->NewObjectLiteralProperty(is_getter, value);
} else {
ReportUnexpectedToken(next);
*ok = false;
}
// Failed to parse as get/set property, so it's just a property
// called "get" or "set".
- key = NewLiteral(id);
+ key = factory()->NewLiteral(id);
break;
}
case Token::STRING: {
if (fni_ != NULL) fni_->PushLiteralName(string);
uint32_t index;
if (!string.is_null() && string->AsArrayIndex(&index)) {
- key = NewNumberLiteral(index);
+ key = factory()->NewNumberLiteral(index);
break;
}
- key = NewLiteral(string);
+ key = factory()->NewLiteral(string);
break;
}
case Token::NUMBER: {
double value = StringToDouble(isolate()->unicode_cache(),
scanner().literal_ascii_string(),
ALLOW_HEX | ALLOW_OCTALS);
- key = NewNumberLiteral(value);
+ key = factory()->NewNumberLiteral(value);
break;
}
default:
if (Token::IsKeyword(next)) {
Consume(next);
Handle<String> string = GetSymbol(CHECK_OK);
- key = NewLiteral(string);
+ key = factory()->NewLiteral(string);
} else {
// Unexpected token.
Token::Value next = Next();
&is_simple,
&fast_elements,
&depth);
- return new(zone()) ObjectLiteral(isolate(),
- constant_properties,
- properties,
- literal_index,
- is_simple,
- fast_elements,
- depth,
- has_function);
+ return factory()->NewObjectLiteral(constant_properties,
+ properties,
+ literal_index,
+ is_simple,
+ fast_elements,
+ depth,
+ has_function);
}
Handle<String> js_flags = NextLiteralString(TENURED);
Next();
- return new(zone()) RegExpLiteral(
- isolate(), js_pattern, js_flags, literal_index);
+ return factory()->NewRegExpLiteral(js_pattern, js_flags, literal_index);
}
class SingletonLogger : public ParserRecorder {
public:
SingletonLogger() : has_error_(false), start_(-1), end_(-1) { }
- ~SingletonLogger() { }
+ virtual ~SingletonLogger() { }
void Reset() { has_error_ = false; }
bool only_simple_this_property_assignments;
Handle<FixedArray> this_property_assignments;
bool has_duplicate_parameters = false;
+ AstProperties ast_properties;
// Parse function body.
{ FunctionState function_state(this, scope, isolate());
top_scope_->SetScopeName(function_name);
} else {
fvar_mode = CONST;
}
- fvar = top_scope_->DeclareFunctionVar(function_name, fvar_mode);
+ fvar =
+ top_scope_->DeclareFunctionVar(function_name, fvar_mode, factory());
}
// Determine whether the function will be lazily compiled.
if (!is_lazily_compiled) {
body = new(zone()) ZoneList<Statement*>(8);
if (fvar != NULL) {
- VariableProxy* fproxy = top_scope_->NewUnresolved(function_name);
+ VariableProxy* fproxy =
+ top_scope_->NewUnresolved(factory(), function_name);
fproxy->BindTo(fvar);
- body->Add(new(zone()) ExpressionStatement(
- new(zone()) Assignment(isolate(),
- fvar_init_op,
- fproxy,
- new(zone()) ThisFunction(isolate()),
- RelocInfo::kNoPosition)));
+ body->Add(factory()->NewExpressionStatement(
+ factory()->NewAssignment(fvar_init_op,
+ fproxy,
+ factory()->NewThisFunction(),
+ RelocInfo::kNoPosition)));
}
ParseSourceElements(body, Token::RBRACE, CHECK_OK);
scope->end_position(),
CHECK_OK);
}
+ ast_properties = *factory()->visitor()->ast_properties();
}
if (is_extended_mode()) {
}
FunctionLiteral* function_literal =
- new(zone()) FunctionLiteral(isolate(),
- function_name,
- scope,
- body,
- materialized_literal_count,
- expected_property_count,
- handler_count,
- only_simple_this_property_assignments,
- this_property_assignments,
- num_parameters,
- type,
- has_duplicate_parameters);
+ factory()->NewFunctionLiteral(function_name,
+ scope,
+ body,
+ materialized_literal_count,
+ expected_property_count,
+ handler_count,
+ only_simple_this_property_assignments,
+ this_property_assignments,
+ num_parameters,
+ has_duplicate_parameters,
+ type,
+ true);
function_literal->set_function_token_position(function_token_position);
+ function_literal->set_ast_properties(&ast_properties);
if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal);
return function_literal;
NULL,
stack_limit,
do_allow_lazy,
- allow_natives_syntax_);
+ allow_natives_syntax_,
+ allow_modules_);
}
preparser::PreParser::PreParseResult result =
reusable_preparser_->PreParseLazyFunction(top_scope_->language_mode(),
}
// We have a valid intrinsics call or a call to a builtin.
- return new(zone()) CallRuntime(isolate(), name, function, args);
+ return factory()->NewCallRuntime(name, function, args);
}
Literal* Parser::GetLiteralUndefined() {
- return NewLiteral(isolate()->factory()->undefined_value());
+ return factory()->NewLiteral(isolate()->factory()->undefined_value());
}
Literal* Parser::GetLiteralTheHole() {
- return NewLiteral(isolate()->factory()->the_hole_value());
-}
-
-
-Literal* Parser::GetLiteralNumber(double value) {
- return NewNumberLiteral(value);
+ return factory()->NewLiteral(isolate()->factory()->the_hole_value());
}
}
-Literal* Parser::NewNumberLiteral(double number) {
- return NewLiteral(isolate()->factory()->NewNumber(number, TENURED));
-}
-
-
Expression* Parser::NewThrowReferenceError(Handle<String> type) {
return NewThrowError(isolate()->factory()->MakeReferenceError_symbol(),
type, HandleVector<Object>(NULL, 0));
elements, FAST_ELEMENTS, TENURED);
ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2);
- args->Add(NewLiteral(type));
- args->Add(NewLiteral(array));
- CallRuntime* call_constructor = new(zone()) CallRuntime(isolate(),
- constructor,
- NULL,
- args);
- return new(zone()) Throw(isolate(),
- call_constructor,
- scanner().location().beg_pos);
+ args->Add(factory()->NewLiteral(type));
+ args->Add(factory()->NewLiteral(array));
+ CallRuntime* call_constructor =
+ factory()->NewCallRuntime(constructor, NULL, args);
+ return factory()->NewThrow(call_constructor, scanner().location().beg_pos);
}
// ----------------------------------------------------------------------------
// Harmony scoping is requested.
parsing_flags |= EXTENDED_MODE;
}
+ if (!info->is_native() && FLAG_harmony_modules) {
+ parsing_flags |= kAllowModules;
+ }
if (FLAG_allow_natives_syntax || info->is_native()) {
- // We requre %identifier(..) syntax.
+ // We require %identifier(..) syntax.
parsing_flags |= kAllowNativesSyntax;
}
if (info->is_lazy()) {
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
v8::Extension* extension,
ScriptDataImpl* pre_data);
virtual ~Parser() {
- if (reusable_preparser_ != NULL) {
- delete reusable_preparser_;
- }
+ delete reusable_preparser_;
+ reusable_preparser_ = NULL;
}
// Returns NULL if parsing failed.
};
class BlockState;
- class FunctionState;
+
+ class FunctionState BASE_EMBEDDED {
+ public:
+ FunctionState(Parser* parser,
+ Scope* scope,
+ Isolate* isolate);
+ ~FunctionState();
+
+ int NextMaterializedLiteralIndex() {
+ return next_materialized_literal_index_++;
+ }
+ int materialized_literal_count() {
+ return next_materialized_literal_index_ - JSFunction::kLiteralsPrefixSize;
+ }
+
+ int NextHandlerIndex() { return next_handler_index_++; }
+ int handler_count() { return next_handler_index_; }
+
+ void SetThisPropertyAssignmentInfo(
+ bool only_simple_this_property_assignments,
+ Handle<FixedArray> this_property_assignments) {
+ only_simple_this_property_assignments_ =
+ only_simple_this_property_assignments;
+ this_property_assignments_ = this_property_assignments;
+ }
+ bool only_simple_this_property_assignments() {
+ return only_simple_this_property_assignments_;
+ }
+ Handle<FixedArray> this_property_assignments() {
+ return this_property_assignments_;
+ }
+
+ void AddProperty() { expected_property_count_++; }
+ int expected_property_count() { return expected_property_count_; }
+
+ AstNodeFactory<AstConstructionVisitor>* factory() { return &factory_; }
+
+ private:
+ // Used to assign an index to each literal that needs materialization in
+ // the function. Includes regexp literals, and boilerplate for object and
+ // array literals.
+ int next_materialized_literal_index_;
+
+ // Used to assign a per-function index to try and catch handlers.
+ int next_handler_index_;
+
+ // Properties count estimation.
+ int expected_property_count_;
+
+ // Keeps track of assignments to properties of this. Used for
+ // optimizing constructors.
+ bool only_simple_this_property_assignments_;
+ Handle<FixedArray> this_property_assignments_;
+
+ Parser* parser_;
+ FunctionState* outer_function_state_;
+ Scope* outer_scope_;
+ int saved_ast_node_id_;
+ AstNodeFactory<AstConstructionVisitor> factory_;
+ };
+
+
+
FunctionLiteral* ParseLazy(CompilationInfo* info,
UC16CharacterStream* source,
// Get odd-ball literals.
Literal* GetLiteralUndefined();
Literal* GetLiteralTheHole();
- Literal* GetLiteralNumber(double value);
Handle<String> ParseIdentifier(bool* ok);
Handle<String> ParseIdentifierOrStrictReservedWord(
// Factory methods.
- Statement* EmptyStatement() {
- static v8::internal::EmptyStatement* empty =
- ::new v8::internal::EmptyStatement();
- return empty;
- }
-
Scope* NewScope(Scope* parent, ScopeType type);
Handle<String> LookupSymbol(int symbol_id);
Handle<String> LookupCachedSymbol(int symbol_id);
- Expression* NewCall(Expression* expression,
- ZoneList<Expression*>* arguments,
- int pos) {
- return new(zone()) Call(isolate(), expression, arguments, pos);
- }
-
- inline Literal* NewLiteral(Handle<Object> handle) {
- return new(zone()) Literal(isolate(), handle);
- }
-
- // Create a number literal.
- Literal* NewNumberLiteral(double value);
-
// Generate AST node that throw a ReferenceError with the given type.
Expression* NewThrowReferenceError(Handle<String> type);
preparser::PreParser::PreParseResult LazyParseFunctionLiteral(
SingletonLogger* logger);
+ AstNodeFactory<AstConstructionVisitor>* factory() {
+ return current_function_state_->factory();
+ }
+
Isolate* isolate_;
ZoneList<Handle<String> > symbol_cache_;
Mode mode_;
bool allow_natives_syntax_;
bool allow_lazy_;
+ bool allow_modules_;
bool stack_overflow_;
// If true, the next (and immediately following) function literal is
// preceded by a parenthesis.
FULL_INTERVAL
};
- static const int kSignalSenderStackSize = 32 * KB;
+ static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
FULL_INTERVAL
};
- static const int kSignalSenderStackSize = 32 * KB;
+ static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
class SamplerThread : public Thread {
public:
- static const int kSamplerThreadStackSize = 32 * KB;
+ static const int kSamplerThreadStackSize = 64 * KB;
explicit SamplerThread(int interval)
: Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
FULL_INTERVAL
};
- static const int kSignalSenderStackSize = 32 * KB;
+ static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
FULL_INTERVAL
};
- static const int kSignalSenderStackSize = 32 * KB;
+ static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
class SamplerThread : public Thread {
public:
- static const int kSamplerThreadStackSize = 32 * KB;
+ static const int kSamplerThreadStackSize = 64 * KB;
explicit SamplerThread(int interval)
: Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
i::ParserRecorder* log,
uintptr_t stack_limit,
bool allow_lazy,
- bool allow_natives_syntax)
+ bool allow_natives_syntax,
+ bool allow_modules)
: scanner_(scanner),
log_(log),
scope_(NULL),
strict_mode_violation_type_(NULL),
stack_overflow_(false),
allow_lazy_(allow_lazy),
+ allow_modules_(allow_modules),
allow_natives_syntax_(allow_natives_syntax),
parenthesized_function_(false),
harmony_scoping_(scanner->HarmonyScoping()) { }
uintptr_t stack_limit) {
bool allow_lazy = (flags & i::kAllowLazy) != 0;
bool allow_natives_syntax = (flags & i::kAllowNativesSyntax) != 0;
- return PreParser(scanner, log, stack_limit,
- allow_lazy, allow_natives_syntax).PreParse();
+ bool allow_modules = (flags & i::kAllowModules) != 0;
+ return PreParser(scanner, log, stack_limit, allow_lazy,
+ allow_natives_syntax, allow_modules).PreParse();
}
// Parses a single function literal, from the opening parentheses before
const char* strict_mode_violation_type_;
bool stack_overflow_;
bool allow_lazy_;
+ bool allow_modules_;
bool allow_natives_syntax_;
bool parenthesized_function_;
bool harmony_scoping_;
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
}
-void PrettyPrinter::VisitDeclaration(Declaration* node) {
+void PrettyPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
Print("var ");
PrintLiteral(node->proxy()->name(), false);
if (node->fun() != NULL) {
}
+void PrettyPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
+ Print("module ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(" = ");
+ Visit(node->module());
+ Print(";");
+}
+
+
+void PrettyPrinter::VisitModuleLiteral(ModuleLiteral* node) {
+ VisitBlock(node->body());
+}
+
+
+void PrettyPrinter::VisitModuleVariable(ModuleVariable* node) {
+ PrintLiteral(node->var()->name(), false);
+}
+
+
+void PrettyPrinter::VisitModulePath(ModulePath* node) {
+ Visit(node->module());
+ Print(".");
+ PrintLiteral(node->name(), false);
+}
+
+
+void PrettyPrinter::VisitModuleUrl(ModuleUrl* node) {
+ Print("at ");
+ PrintLiteral(node->url(), true);
+}
+
+
void PrettyPrinter::VisitExpressionStatement(ExpressionStatement* node) {
Visit(node->expression());
Print(";");
}
-void AstPrinter::VisitDeclaration(Declaration* node) {
+void AstPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
if (node->fun() == NULL) {
// var or const declarations
PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
}
+void AstPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
+ IndentedScope indent(this, "MODULE");
+ PrintLiteralIndented("NAME", node->proxy()->name(), true);
+ Visit(node->module());
+}
+
+
+void AstPrinter::VisitModuleLiteral(ModuleLiteral* node) {
+ VisitBlock(node->body());
+}
+
+
+void AstPrinter::VisitModuleVariable(ModuleVariable* node) {
+ PrintLiteralIndented("VARIABLE", node->var()->name(), false);
+}
+
+
+void AstPrinter::VisitModulePath(ModulePath* node) {
+ IndentedScope indent(this, "PATH");
+ PrintIndentedVisit("MODULE", node->module());
+ PrintLiteralIndented("NAME", node->name(), false);
+}
+
+
+void AstPrinter::VisitModuleUrl(ModuleUrl* node) {
+ PrintLiteralIndented("URL", node->url(), true);
+}
+
+
void AstPrinter::VisitExpressionStatement(ExpressionStatement* node) {
Visit(node->expression());
}
IndentedScope indent(this, "THIS-FUNCTION");
}
-
-TagScope::TagScope(JsonAstBuilder* builder, const char* name)
- : builder_(builder), next_(builder->tag()), has_body_(false) {
- if (next_ != NULL) {
- next_->use();
- builder->Print(",\n");
- }
- builder->set_tag(this);
- builder->PrintIndented("[");
- builder->Print("\"%s\"", name);
- builder->increase_indent(JsonAstBuilder::kTagIndentSize);
-}
-
-
-TagScope::~TagScope() {
- builder_->decrease_indent(JsonAstBuilder::kTagIndentSize);
- if (has_body_) {
- builder_->Print("\n");
- builder_->PrintIndented("]");
- } else {
- builder_->Print("]");
- }
- builder_->set_tag(next_);
-}
-
-
-AttributesScope::AttributesScope(JsonAstBuilder* builder)
- : builder_(builder), attribute_count_(0) {
- builder->set_attributes(this);
- builder->tag()->use();
- builder->Print(",\n");
- builder->PrintIndented("{");
- builder->increase_indent(JsonAstBuilder::kAttributesIndentSize);
-}
-
-
-AttributesScope::~AttributesScope() {
- builder_->decrease_indent(JsonAstBuilder::kAttributesIndentSize);
- if (attribute_count_ > 1) {
- builder_->Print("\n");
- builder_->PrintIndented("}");
- } else {
- builder_->Print("}");
- }
- builder_->set_attributes(NULL);
-}
-
-
-const char* JsonAstBuilder::BuildProgram(FunctionLiteral* program) {
- Init();
- Visit(program);
- Print("\n");
- return Output();
-}
-
-
-void JsonAstBuilder::AddAttributePrefix(const char* name) {
- if (attributes()->is_used()) {
- Print(",\n");
- PrintIndented("\"");
- } else {
- Print("\"");
- }
- Print("%s\":", name);
- attributes()->use();
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, Handle<String> value) {
- SmartArrayPointer<char> value_string = value->ToCString();
- AddAttributePrefix(name);
- Print("\"%s\"", *value_string);
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, const char* value) {
- AddAttributePrefix(name);
- Print("\"%s\"", value);
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, int value) {
- AddAttributePrefix(name);
- Print("%d", value);
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, bool value) {
- AddAttributePrefix(name);
- Print(value ? "true" : "false");
-}
-
-
-void JsonAstBuilder::VisitBlock(Block* stmt) {
- TagScope tag(this, "Block");
- VisitStatements(stmt->statements());
-}
-
-
-void JsonAstBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
- TagScope tag(this, "ExpressionStatement");
- Visit(stmt->expression());
-}
-
-
-void JsonAstBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
- TagScope tag(this, "EmptyStatement");
-}
-
-
-void JsonAstBuilder::VisitIfStatement(IfStatement* stmt) {
- TagScope tag(this, "IfStatement");
- Visit(stmt->condition());
- Visit(stmt->then_statement());
- Visit(stmt->else_statement());
-}
-
-
-void JsonAstBuilder::VisitContinueStatement(ContinueStatement* stmt) {
- TagScope tag(this, "ContinueStatement");
-}
-
-
-void JsonAstBuilder::VisitBreakStatement(BreakStatement* stmt) {
- TagScope tag(this, "BreakStatement");
-}
-
-
-void JsonAstBuilder::VisitReturnStatement(ReturnStatement* stmt) {
- TagScope tag(this, "ReturnStatement");
- Visit(stmt->expression());
-}
-
-
-void JsonAstBuilder::VisitWithStatement(WithStatement* stmt) {
- TagScope tag(this, "WithStatement");
- Visit(stmt->expression());
- Visit(stmt->statement());
-}
-
-
-void JsonAstBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
- TagScope tag(this, "SwitchStatement");
-}
-
-
-void JsonAstBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
- TagScope tag(this, "DoWhileStatement");
- Visit(stmt->body());
- Visit(stmt->cond());
-}
-
-
-void JsonAstBuilder::VisitWhileStatement(WhileStatement* stmt) {
- TagScope tag(this, "WhileStatement");
- Visit(stmt->cond());
- Visit(stmt->body());
-}
-
-
-void JsonAstBuilder::VisitForStatement(ForStatement* stmt) {
- TagScope tag(this, "ForStatement");
- if (stmt->init() != NULL) Visit(stmt->init());
- if (stmt->cond() != NULL) Visit(stmt->cond());
- Visit(stmt->body());
- if (stmt->next() != NULL) Visit(stmt->next());
-}
-
-
-void JsonAstBuilder::VisitForInStatement(ForInStatement* stmt) {
- TagScope tag(this, "ForInStatement");
- Visit(stmt->each());
- Visit(stmt->enumerable());
- Visit(stmt->body());
-}
-
-
-void JsonAstBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
- TagScope tag(this, "TryCatchStatement");
- { AttributesScope attributes(this);
- AddAttribute("variable", stmt->variable()->name());
- }
- Visit(stmt->try_block());
- Visit(stmt->catch_block());
-}
-
-
-void JsonAstBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
- TagScope tag(this, "TryFinallyStatement");
- Visit(stmt->try_block());
- Visit(stmt->finally_block());
-}
-
-
-void JsonAstBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
- TagScope tag(this, "DebuggerStatement");
-}
-
-
-void JsonAstBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
- TagScope tag(this, "FunctionLiteral");
- {
- AttributesScope attributes(this);
- AddAttribute("name", expr->name());
- }
- VisitDeclarations(expr->scope()->declarations());
- VisitStatements(expr->body());
-}
-
-
-void JsonAstBuilder::VisitSharedFunctionInfoLiteral(
- SharedFunctionInfoLiteral* expr) {
- TagScope tag(this, "SharedFunctionInfoLiteral");
-}
-
-
-void JsonAstBuilder::VisitConditional(Conditional* expr) {
- TagScope tag(this, "Conditional");
-}
-
-
-void JsonAstBuilder::VisitVariableProxy(VariableProxy* expr) {
- TagScope tag(this, "Variable");
- {
- AttributesScope attributes(this);
- Variable* var = expr->var();
- AddAttribute("name", var->name());
- switch (var->location()) {
- case Variable::UNALLOCATED:
- AddAttribute("location", "UNALLOCATED");
- break;
- case Variable::PARAMETER:
- AddAttribute("location", "PARAMETER");
- AddAttribute("index", var->index());
- break;
- case Variable::LOCAL:
- AddAttribute("location", "LOCAL");
- AddAttribute("index", var->index());
- break;
- case Variable::CONTEXT:
- AddAttribute("location", "CONTEXT");
- AddAttribute("index", var->index());
- break;
- case Variable::LOOKUP:
- AddAttribute("location", "LOOKUP");
- break;
- }
- }
-}
-
-
-void JsonAstBuilder::VisitLiteral(Literal* expr) {
- TagScope tag(this, "Literal");
- {
- AttributesScope attributes(this);
- Handle<Object> handle = expr->handle();
- if (handle->IsString()) {
- AddAttribute("handle", Handle<String>(String::cast(*handle)));
- } else if (handle->IsSmi()) {
- AddAttribute("handle", Smi::cast(*handle)->value());
- }
- }
-}
-
-
-void JsonAstBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
- TagScope tag(this, "RegExpLiteral");
-}
-
-
-void JsonAstBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
- TagScope tag(this, "ObjectLiteral");
-}
-
-
-void JsonAstBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
- TagScope tag(this, "ArrayLiteral");
-}
-
-
-void JsonAstBuilder::VisitAssignment(Assignment* expr) {
- TagScope tag(this, "Assignment");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->target());
- Visit(expr->value());
-}
-
-
-void JsonAstBuilder::VisitThrow(Throw* expr) {
- TagScope tag(this, "Throw");
- Visit(expr->exception());
-}
-
-
-void JsonAstBuilder::VisitProperty(Property* expr) {
- TagScope tag(this, "Property");
- Visit(expr->obj());
- Visit(expr->key());
-}
-
-
-void JsonAstBuilder::VisitCall(Call* expr) {
- TagScope tag(this, "Call");
- Visit(expr->expression());
- VisitExpressions(expr->arguments());
-}
-
-
-void JsonAstBuilder::VisitCallNew(CallNew* expr) {
- TagScope tag(this, "CallNew");
- Visit(expr->expression());
- VisitExpressions(expr->arguments());
-}
-
-
-void JsonAstBuilder::VisitCallRuntime(CallRuntime* expr) {
- TagScope tag(this, "CallRuntime");
- {
- AttributesScope attributes(this);
- AddAttribute("name", expr->name());
- }
- VisitExpressions(expr->arguments());
-}
-
-
-void JsonAstBuilder::VisitUnaryOperation(UnaryOperation* expr) {
- TagScope tag(this, "UnaryOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->expression());
-}
-
-
-void JsonAstBuilder::VisitCountOperation(CountOperation* expr) {
- TagScope tag(this, "CountOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("is_prefix", expr->is_prefix());
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->expression());
-}
-
-
-void JsonAstBuilder::VisitBinaryOperation(BinaryOperation* expr) {
- TagScope tag(this, "BinaryOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->left());
- Visit(expr->right());
-}
-
-
-void JsonAstBuilder::VisitCompareOperation(CompareOperation* expr) {
- TagScope tag(this, "CompareOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->left());
- Visit(expr->right());
-}
-
-
-void JsonAstBuilder::VisitThisFunction(ThisFunction* expr) {
- TagScope tag(this, "ThisFunction");
-}
-
-
-void JsonAstBuilder::VisitDeclaration(Declaration* decl) {
- TagScope tag(this, "Declaration");
- {
- AttributesScope attributes(this);
- AddAttribute("mode", Variable::Mode2String(decl->mode()));
- }
- Visit(decl->proxy());
- if (decl->fun() != NULL) Visit(decl->fun());
-}
-
-
#endif // DEBUG
} } // namespace v8::internal
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
int indent_;
};
-
-// Forward declaration of helper classes.
-class TagScope;
-class AttributesScope;
-
-// Build a C string containing a JSON representation of a function's
-// AST. The representation is based on JsonML (www.jsonml.org).
-class JsonAstBuilder: public PrettyPrinter {
- public:
- JsonAstBuilder()
- : indent_(0), top_tag_scope_(NULL), attributes_scope_(NULL) {
- }
- virtual ~JsonAstBuilder() {}
-
- // Controls the indentation of subsequent lines of a tag body after
- // the first line.
- static const int kTagIndentSize = 2;
-
- // Controls the indentation of subsequent lines of an attributes
- // blocks's body after the first line.
- static const int kAttributesIndentSize = 1;
-
- // Construct a JSON representation of a function literal.
- const char* BuildProgram(FunctionLiteral* program);
-
- // Print text indented by the current indentation level.
- void PrintIndented(const char* text) { Print("%*s%s", indent_, "", text); }
-
- // Change the indentation level.
- void increase_indent(int amount) { indent_ += amount; }
- void decrease_indent(int amount) { indent_ -= amount; }
-
- // The builder maintains a stack of opened AST node constructors.
- // Each node constructor corresponds to a JsonML tag.
- TagScope* tag() { return top_tag_scope_; }
- void set_tag(TagScope* scope) { top_tag_scope_ = scope; }
-
- // The builder maintains a pointer to the currently opened attributes
- // of current AST node or NULL if the attributes are not opened.
- AttributesScope* attributes() { return attributes_scope_; }
- void set_attributes(AttributesScope* scope) { attributes_scope_ = scope; }
-
- // Add an attribute to the currently opened attributes.
- void AddAttribute(const char* name, Handle<String> value);
- void AddAttribute(const char* name, const char* value);
- void AddAttribute(const char* name, int value);
- void AddAttribute(const char* name, bool value);
-
- // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
- AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
- private:
- int indent_;
- TagScope* top_tag_scope_;
- AttributesScope* attributes_scope_;
-
- // Utility function used by AddAttribute implementations.
- void AddAttributePrefix(const char* name);
-};
-
-
-// The JSON AST builder keeps a stack of open element tags (AST node
-// constructors from the current iteration point to the root of the
-// AST). TagScope is a helper class to manage the opening and closing
-// of tags, the indentation of their bodies, and comma separating their
-// contents.
-class TagScope BASE_EMBEDDED {
- public:
- TagScope(JsonAstBuilder* builder, const char* name);
- ~TagScope();
-
- void use() { has_body_ = true; }
-
- private:
- JsonAstBuilder* builder_;
- TagScope* next_;
- bool has_body_;
-};
-
-
-// AttributesScope is a helper class to manage the opening and closing
-// of attribute blocks, the indentation of their bodies, and comma
-// separating their contents. JsonAstBuilder::AddAttribute adds an
-// attribute to the currently open AttributesScope. They cannot be
-// nested so the builder keeps an optional single scope rather than a
-// stack.
-class AttributesScope BASE_EMBEDDED {
- public:
- explicit AttributesScope(JsonAstBuilder* builder);
- ~AttributesScope();
-
- bool is_used() { return attribute_count_ > 0; }
- void use() { ++attribute_count_; }
-
- private:
- JsonAstBuilder* builder_;
- int attribute_count_;
-};
-
#endif // DEBUG
} } // namespace v8::internal
case kRegExp: return "/regexp/";
case kHeapNumber: return "/number/";
case kNative: return "/native/";
+ case kSynthetic: return "/synthetic/";
default: return "???";
}
}
NativeObjectsExplorer* explorer_;
};
+
+class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
+ public:
+ BasicHeapEntriesAllocator(
+ HeapSnapshot* snapshot,
+ HeapEntry::Type entries_type)
+ : snapshot_(snapshot),
+ collection_(snapshot_->collection()),
+ entries_type_(entries_type) {
+ }
+ virtual HeapEntry* AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count);
+ private:
+ HeapSnapshot* snapshot_;
+ HeapSnapshotsCollection* collection_;
+ HeapEntry::Type entries_type_;
+};
+
+
+HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count) {
+ v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
+ intptr_t elements = info->GetElementCount();
+ intptr_t size = info->GetSizeInBytes();
+ return snapshot_->AddEntry(
+ entries_type_,
+ elements != -1 ?
+ collection_->names()->GetFormatted(
+ "%s / %" V8_PTR_PREFIX "d entries",
+ info->GetLabel(),
+ info->GetElementCount()) :
+ collection_->names()->GetCopy(info->GetLabel()),
+ HeapObjectsMap::GenerateId(info),
+ size != -1 ? static_cast<int>(size) : 0,
+ children_count,
+ retainers_count);
+}
+
+
NativeObjectsExplorer::NativeObjectsExplorer(
HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
: snapshot_(snapshot),
objects_by_info_(RetainedInfosMatch),
native_groups_(StringsMatch),
filler_(NULL) {
+ synthetic_entries_allocator_ =
+ new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
+ native_entries_allocator_ =
+ new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
info->Dispose();
}
-}
-
-
-HeapEntry* NativeObjectsExplorer::AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count) {
- v8::RetainedObjectInfo* info =
- reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
- intptr_t elements = info->GetElementCount();
- intptr_t size = info->GetSizeInBytes();
- return snapshot_->AddEntry(
- HeapEntry::kNative,
- elements != -1 ?
- collection_->names()->GetFormatted(
- "%s / %" V8_PTR_PREFIX "d entries",
- info->GetLabel(),
- info->GetElementCount()) :
- collection_->names()->GetCopy(info->GetLabel()),
- HeapObjectsMap::GenerateId(info),
- size != -1 ? static_cast<int>(size) : 0,
- children_count,
- retainers_count);
+ delete synthetic_entries_allocator_;
+ delete native_entries_allocator_;
}
for (int i = 0; i < groups->length(); ++i) {
ImplicitRefGroup* group = groups->at(i);
HeapObject* parent = *group->parent_;
- HeapEntry* parent_entry = filler_->FindOrAddEntry(parent, this);
+ HeapEntry* parent_entry =
+ filler_->FindOrAddEntry(parent, native_entries_allocator_);
ASSERT(parent_entry != NULL);
Object*** children = group->children_;
for (size_t j = 0; j < group->length_; ++j) {
Object* child = *children[j];
- HeapEntry* child_entry = filler_->FindOrAddEntry(child, this);
+ HeapEntry* child_entry =
+ filler_->FindOrAddEntry(child, native_entries_allocator_);
filler_->SetNamedReference(
HeapGraphEdge::kInternal,
parent, parent_entry,
void NativeObjectsExplorer::SetNativeRootReference(
v8::RetainedObjectInfo* info) {
- HeapEntry* child_entry = filler_->FindOrAddEntry(info, this);
+ HeapEntry* child_entry =
+ filler_->FindOrAddEntry(info, native_entries_allocator_);
ASSERT(child_entry != NULL);
NativeGroupRetainedObjectInfo* group_info =
FindOrAddGroupInfo(info->GetGroupLabel());
- HeapEntry* group_entry = filler_->FindOrAddEntry(group_info, this);
+ HeapEntry* group_entry =
+ filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
filler_->SetNamedAutoIndexReference(
HeapGraphEdge::kInternal,
group_info, group_entry,
HeapObject* wrapper, v8::RetainedObjectInfo* info) {
HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
ASSERT(wrapper_entry != NULL);
- HeapEntry* info_entry = filler_->FindOrAddEntry(info, this);
+ HeapEntry* info_entry =
+ filler_->FindOrAddEntry(info, native_entries_allocator_);
ASSERT(info_entry != NULL);
filler_->SetNamedReference(HeapGraphEdge::kInternal,
wrapper, wrapper_entry,
entry = native_groups_.Next(entry)) {
NativeGroupRetainedObjectInfo* group_info =
static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
- HeapEntry* group_entry = filler_->FindOrAddEntry(group_info, this);
+ HeapEntry* group_entry =
+ filler_->FindOrAddEntry(group_info, native_entries_allocator_);
ASSERT(group_entry != NULL);
filler_->SetIndexedAutoIndexReference(
HeapGraphEdge::kElement,
"," JSON_S("closure")
"," JSON_S("regexp")
"," JSON_S("number")
- "," JSON_S("native"))
+ "," JSON_S("native")
+ "," JSON_S("synthetic"))
"," JSON_S("string")
"," JSON_S("number")
"," JSON_S("number")
kClosure = v8::HeapGraphNode::kClosure,
kRegExp = v8::HeapGraphNode::kRegExp,
kHeapNumber = v8::HeapGraphNode::kHeapNumber,
- kNative = v8::HeapGraphNode::kNative
+ kNative = v8::HeapGraphNode::kNative,
+ kSynthetic = v8::HeapGraphNode::kSynthetic
};
HeapEntry() { }
DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
};
+
class NativeGroupRetainedObjectInfo;
+
// An implementation of retained native objects extractor.
-class NativeObjectsExplorer : public HeapEntriesAllocator {
+class NativeObjectsExplorer {
public:
NativeObjectsExplorer(HeapSnapshot* snapshot,
SnapshottingProgressReportingInterface* progress);
virtual ~NativeObjectsExplorer();
- virtual HeapEntry* AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count);
void AddRootEntries(SnapshotFillerInterface* filler);
int EstimateObjectsCount();
bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
// RetainedObjectInfo* -> List<HeapObject*>*
HashMap objects_by_info_;
HashMap native_groups_;
+ HeapEntriesAllocator* synthetic_entries_allocator_;
+ HeapEntriesAllocator* native_entries_allocator_;
// Used during references extraction.
SnapshotFillerInterface* filler_;
PropertyType type() { return TypeField::decode(value_); }
- bool IsProperty() {
- return IsRealProperty(type());
- }
-
PropertyAttributes attributes() { return AttributesField::decode(value_); }
int index() { return StorageField::decode(value_); }
// Is the result is a property excluding transitions and the null
// descriptor?
bool IsProperty() {
- return IsFound() && GetPropertyDetails().IsProperty();
+ return IsFound() && IsRealProperty(GetPropertyDetails().type());
}
bool IsCacheable() { return cacheable_; }
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
: result_(result),
result_assigned_(false),
is_set_(false),
- in_try_(false) {
- }
+ in_try_(false),
+ factory_(isolate()) { }
+
+ virtual ~Processor() { }
void Process(ZoneList<Statement*>* statements);
bool result_assigned() const { return result_assigned_; }
+ AstNodeFactory<AstNullVisitor>* factory() {
+ return &factory_;
+ }
+
private:
Variable* result_;
bool is_set_;
bool in_try_;
+ AstNodeFactory<AstNullVisitor> factory_;
+
Expression* SetResult(Expression* value) {
result_assigned_ = true;
- Zone* zone = isolate()->zone();
- VariableProxy* result_proxy = new(zone) VariableProxy(isolate(), result_);
- return new(zone) Assignment(isolate(),
- Token::ASSIGN,
- result_proxy,
- value,
- RelocInfo::kNoPosition);
+ VariableProxy* result_proxy = factory()->NewVariableProxy(result_);
+ return factory()->NewAssignment(
+ Token::ASSIGN, result_proxy, value, RelocInfo::kNoPosition);
}
// Node visitors.
// Do nothing:
-void Processor::VisitDeclaration(Declaration* node) {}
+void Processor::VisitVariableDeclaration(VariableDeclaration* node) {}
+void Processor::VisitModuleDeclaration(ModuleDeclaration* node) {}
+void Processor::VisitModuleLiteral(ModuleLiteral* node) {}
+void Processor::VisitModuleVariable(ModuleVariable* node) {}
+void Processor::VisitModulePath(ModulePath* node) {}
+void Processor::VisitModuleUrl(ModuleUrl* node) {}
void Processor::VisitEmptyStatement(EmptyStatement* node) {}
void Processor::VisitReturnStatement(ReturnStatement* node) {}
void Processor::VisitDebuggerStatement(DebuggerStatement* node) {}
if (processor.result_assigned()) {
ASSERT(function->end_position() != RelocInfo::kNoPosition);
- Isolate* isolate = info->isolate();
- Zone* zone = isolate->zone();
// Set the position of the assignment statement one character past the
// source code, such that it definitely is not in the source code range
// of an immediate inner scope. For example in
// the end position of the function generated for executing the eval code
// coincides with the end of the with scope which is the position of '1'.
int position = function->end_position();
- VariableProxy* result_proxy = new(zone) VariableProxy(
- isolate, result->name(), false, position);
+ VariableProxy* result_proxy = processor.factory()->NewVariableProxy(
+ result->name(), false, position);
result_proxy->BindTo(result);
- Statement* result_statement = new(zone) ReturnStatement(result_proxy);
+ Statement* result_statement =
+ processor.factory()->NewReturnStatement(result_proxy);
result_statement->set_statement_pos(position);
body->Add(result_statement);
}
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// Optimization sampler constants.
static const int kSamplerFrameCount = 2;
+
+// Constants for statistical profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
static const int kSamplerTicksBetweenThresholdAdjustment = 32;
static const int kSizeLimit = 1500;
+// Constants for counter based profiler.
+
+// Number of times a function has to be seen on the stack before it is
+// optimized.
+static const int kProfilerTicksBeforeOptimization = 2;
+
+// Maximum size in bytes of generated code for a function to be optimized
+// the very first time it is seen on the stack.
+static const int kMaxSizeEarlyOpt = 500;
+
Atomic32 RuntimeProfiler::state_ = 0;
// TODO(isolates): Create the semaphore lazily and clean it up when no
}
-void RuntimeProfiler::Optimize(JSFunction* function) {
+void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
ASSERT(function->IsOptimizable());
if (FLAG_trace_opt) {
PrintF("[marking ");
function->PrintName();
PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
- PrintF(" for recompilation");
+ PrintF(" for recompilation, reason: %s", reason);
PrintF("]\n");
}
JavaScriptFrame* frame = it.frame();
JSFunction* function = JSFunction::cast(frame->function());
- // Adjust threshold each time we have processed
- // a certain number of ticks.
- if (sampler_ticks_until_threshold_adjustment_ > 0) {
- sampler_ticks_until_threshold_adjustment_--;
- if (sampler_ticks_until_threshold_adjustment_ <= 0) {
- // If the threshold is not already at the minimum
- // modify and reset the ticks until next adjustment.
- if (sampler_threshold_ > kSamplerThresholdMin) {
- sampler_threshold_ -= kSamplerThresholdDelta;
- sampler_ticks_until_threshold_adjustment_ =
- kSamplerTicksBetweenThresholdAdjustment;
+ if (!FLAG_watch_ic_patching) {
+ // Adjust threshold each time we have processed
+ // a certain number of ticks.
+ if (sampler_ticks_until_threshold_adjustment_ > 0) {
+ sampler_ticks_until_threshold_adjustment_--;
+ if (sampler_ticks_until_threshold_adjustment_ <= 0) {
+ // If the threshold is not already at the minimum
+ // modify and reset the ticks until next adjustment.
+ if (sampler_threshold_ > kSamplerThresholdMin) {
+ sampler_threshold_ -= kSamplerThresholdDelta;
+ sampler_ticks_until_threshold_adjustment_ =
+ kSamplerTicksBetweenThresholdAdjustment;
+ }
}
}
}
// Do not record non-optimizable functions.
if (!function->IsOptimizable()) continue;
- samples[sample_count++] = function;
- int function_size = function->shared()->SourceSize();
- int threshold_size_factor = (function_size > kSizeLimit)
- ? sampler_threshold_size_factor_
- : 1;
+ if (FLAG_watch_ic_patching) {
+ int ticks = function->shared()->profiler_ticks();
+
+ if (ticks >= kProfilerTicksBeforeOptimization) {
+ // If this particular function hasn't had any ICs patched for enough
+ // ticks, optimize it now.
+ Optimize(function, "hot and stable");
+ } else if (!any_ic_changed_ &&
+ function->shared()->code()->instruction_size() < kMaxSizeEarlyOpt) {
+ // If no IC was patched since the last tick and this function is very
+ // small, optimistically optimize it now.
+ Optimize(function, "small function");
+ } else if (!code_generated_ &&
+ !any_ic_changed_ &&
+ total_code_generated_ > 0 &&
+ total_code_generated_ < 2000) {
+ // If no code was generated and no IC was patched since the last tick,
+ // but a little code has already been generated since last Reset(),
+ // then type info might already be stable and we can optimize now.
+ Optimize(function, "stable on startup");
+ } else {
+ function->shared()->set_profiler_ticks(ticks + 1);
+ }
+ } else { // !FLAG_watch_ic_patching
+ samples[sample_count++] = function;
+
+ int function_size = function->shared()->SourceSize();
+ int threshold_size_factor = (function_size > kSizeLimit)
+ ? sampler_threshold_size_factor_
+ : 1;
- int threshold = sampler_threshold_ * threshold_size_factor;
+ int threshold = sampler_threshold_ * threshold_size_factor;
- if (LookupSample(function) >= threshold) {
- Optimize(function);
+ if (LookupSample(function) >= threshold) {
+ Optimize(function, "sampler window lookup");
+ }
}
}
-
- // Add the collected functions as samples. It's important not to do
- // this as part of collecting them because this will interfere with
- // the sample lookup in case of recursive functions.
- for (int i = 0; i < sample_count; i++) {
- AddSample(samples[i], kSamplerFrameWeight[i]);
+ if (FLAG_watch_ic_patching) {
+ any_ic_changed_ = false;
+ code_generated_ = false;
+ } else { // !FLAG_watch_ic_patching
+ // Add the collected functions as samples. It's important not to do
+ // this as part of collecting them because this will interfere with
+ // the sample lookup in case of recursive functions.
+ for (int i = 0; i < sample_count; i++) {
+ AddSample(samples[i], kSamplerFrameWeight[i]);
+ }
}
}
void RuntimeProfiler::SetUp() {
ASSERT(has_been_globally_set_up_);
- ClearSampleBuffer();
+ if (!FLAG_watch_ic_patching) {
+ ClearSampleBuffer();
+ }
// If the ticker hasn't already started, make sure to do so to get
// the ticks for the runtime profiler.
if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
void RuntimeProfiler::Reset() {
- sampler_threshold_ = kSamplerThresholdInit;
- sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
- sampler_ticks_until_threshold_adjustment_ =
- kSamplerTicksBetweenThresholdAdjustment;
+ if (FLAG_watch_ic_patching) {
+ total_code_generated_ = 0;
+ } else { // !FLAG_watch_ic_patching
+ sampler_threshold_ = kSamplerThresholdInit;
+ sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
+ sampler_ticks_until_threshold_adjustment_ =
+ kSamplerTicksBetweenThresholdAdjustment;
+ }
}
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
Object** SamplerWindowAddress();
int SamplerWindowSize();
+ void NotifyICChanged() { any_ic_changed_ = true; }
+
+ void NotifyCodeGenerated(int generated_code_size) {
+ if (FLAG_watch_ic_patching) {
+ code_generated_ = true;
+ total_code_generated_ += generated_code_size;
+ }
+ }
+
// Rate limiting support.
// VM thread interface.
static void HandleWakeUp(Isolate* isolate);
- void Optimize(JSFunction* function);
+ void Optimize(JSFunction* function, const char* reason);
void AttemptOnStackReplacement(JSFunction* function);
int sampler_window_position_;
int sampler_window_weight_[kSamplerWindowSize];
+ bool any_ic_changed_;
+ bool code_generated_;
+ int total_code_generated_;
+
// Possible state values:
// -1 => the profiler thread is waiting on the semaphore
// 0 or positive => the number of isolates running JavaScript code.
}
} else {
{ MaybeObject* maybe_result =
- heap->AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
+ heap->AllocateFixedArray(copy->NumberOfLocalProperties());
if (!maybe_result->ToObject(&result)) return maybe_result;
}
FixedArray* names = FixedArray::cast(result);
Isolate* isolate = context->GetIsolate();
int properties_length = constant_properties->length();
int number_of_properties = properties_length / 2;
- if (FLAG_canonicalize_object_literal_maps) {
- // Check that there are only symbols and array indices among keys.
- int number_of_symbol_keys = 0;
- for (int p = 0; p != properties_length; p += 2) {
- Object* key = constant_properties->get(p);
- uint32_t element_index = 0;
- if (key->IsSymbol()) {
- number_of_symbol_keys++;
- } else if (key->ToArrayIndex(&element_index)) {
- // An index key does not require space in the property backing store.
- number_of_properties--;
- } else {
- // Bail out as a non-symbol non-index key makes caching impossible.
- // ASSERT to make sure that the if condition after the loop is false.
- ASSERT(number_of_symbol_keys != number_of_properties);
- break;
- }
+ // Check that there are only symbols and array indices among keys.
+ int number_of_symbol_keys = 0;
+ for (int p = 0; p != properties_length; p += 2) {
+ Object* key = constant_properties->get(p);
+ uint32_t element_index = 0;
+ if (key->IsSymbol()) {
+ number_of_symbol_keys++;
+ } else if (key->ToArrayIndex(&element_index)) {
+ // An index key does not require space in the property backing store.
+ number_of_properties--;
+ } else {
+ // Bail out as a non-symbol non-index key makes caching impossible.
+ // ASSERT to make sure that the if condition after the loop is false.
+ ASSERT(number_of_symbol_keys != number_of_properties);
+ break;
}
- // If we only have symbols and array indices among keys then we can
- // use the map cache in the global context.
- const int kMaxKeys = 10;
- if ((number_of_symbol_keys == number_of_properties) &&
- (number_of_symbol_keys < kMaxKeys)) {
- // Create the fixed array with the key.
- Handle<FixedArray> keys =
- isolate->factory()->NewFixedArray(number_of_symbol_keys);
- if (number_of_symbol_keys > 0) {
- int index = 0;
- for (int p = 0; p < properties_length; p += 2) {
- Object* key = constant_properties->get(p);
- if (key->IsSymbol()) {
- keys->set(index++, key);
- }
+ }
+ // If we only have symbols and array indices among keys then we can
+ // use the map cache in the global context.
+ const int kMaxKeys = 10;
+ if ((number_of_symbol_keys == number_of_properties) &&
+ (number_of_symbol_keys < kMaxKeys)) {
+ // Create the fixed array with the key.
+ Handle<FixedArray> keys =
+ isolate->factory()->NewFixedArray(number_of_symbol_keys);
+ if (number_of_symbol_keys > 0) {
+ int index = 0;
+ for (int p = 0; p < properties_length; p += 2) {
+ Object* key = constant_properties->get(p);
+ if (key->IsSymbol()) {
+ keys->set(index++, key);
}
- ASSERT(index == number_of_symbol_keys);
}
- *is_result_from_cache = true;
- return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
+ ASSERT(index == number_of_symbol_keys);
}
+ *is_result_from_cache = true;
+ return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
}
*is_result_from_cache = false;
return isolate->factory()->CopyMap(
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetSourceCode) {
- NoHandleAllocation ha;
+ HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
- return f->shared()->GetSourceCode();
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
+ Handle<SharedFunctionInfo> shared(f->shared());
+ return *shared->GetSourceCode();
}
return *isolate->factory()->NewJSArray(0);
}
int n;
- n = jsproto->NumberOfLocalProperties(static_cast<PropertyAttributes>(NONE));
+ n = jsproto->NumberOfLocalProperties();
local_property_count[i] = n;
total_property_count += n;
if (i < length - 1) {
ASSERT(args.length() == 1);
Handle<JSFunction> function = args.at<JSFunction>(0);
+ function->shared()->set_profiler_ticks(0);
+
// If the function is not compiled ignore the lazy
// recompilation. This can happen if the debugger is activated and
// the function is returned to the not compiled state.
// element segments each containing a receiver, function, code and
// native code offset.
RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) {
- ASSERT_EQ(args.length(), 2);
- Handle<Object> caller = args.at<Object>(0);
- CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[1]);
+ ASSERT_EQ(args.length(), 3);
+ CONVERT_ARG_CHECKED(JSObject, error_object, 0);
+ Handle<Object> caller = args.at<Object>(1);
+ CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[2]);
HandleScope scope(isolate);
Factory* factory = isolate->factory();
iter.Advance();
}
Handle<JSArray> result = factory->NewJSArrayWithElements(elements);
+ // Capture and attach a more detailed stack trace if necessary.
+ isolate->CaptureAndSetCurrentStackTraceFor(error_object);
result->set_length(Smi::FromInt(cursor));
return *result;
}
F(FunctionIsAPIFunction, 1, 1) \
F(FunctionIsBuiltin, 1, 1) \
F(GetScript, 1, 1) \
- F(CollectStackTrace, 2, 1) \
+ F(CollectStackTrace, 3, 1) \
F(GetV8Version, 0, 1) \
\
F(ClassOf, 1, 1) \
Scanner::Scanner(UnicodeCache* unicode_cache)
: unicode_cache_(unicode_cache),
octal_pos_(Location::invalid()),
- harmony_scoping_(false) { }
+ harmony_scoping_(false),
+ harmony_modules_(false) { }
void Scanner::Initialize(UC16CharacterStream* source) {
KEYWORD_GROUP('e') \
KEYWORD("else", Token::ELSE) \
KEYWORD("enum", Token::FUTURE_RESERVED_WORD) \
- KEYWORD("export", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("export", harmony_modules \
+ ? Token::EXPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("extends", Token::FUTURE_RESERVED_WORD) \
KEYWORD_GROUP('f') \
KEYWORD("false", Token::FALSE_LITERAL) \
KEYWORD_GROUP('i') \
KEYWORD("if", Token::IF) \
KEYWORD("implements", Token::FUTURE_STRICT_RESERVED_WORD) \
- KEYWORD("import", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("import", harmony_modules \
+ ? Token::IMPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("in", Token::IN) \
KEYWORD("instanceof", Token::INSTANCEOF) \
KEYWORD("interface", Token::FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('l') \
KEYWORD("let", harmony_scoping \
? Token::LET : Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD_GROUP('m') \
+ KEYWORD("module", harmony_modules \
+ ? Token::MODULE : Token::IDENTIFIER) \
KEYWORD_GROUP('n') \
KEYWORD("new", Token::NEW) \
KEYWORD("null", Token::NULL_LITERAL) \
static Token::Value KeywordOrIdentifierToken(const char* input,
int input_length,
- bool harmony_scoping) {
+ bool harmony_scoping,
+ bool harmony_modules) {
ASSERT(input_length >= 1);
const int kMinLength = 2;
const int kMaxLength = 10;
Vector<const char> chars = next_.literal_chars->ascii_literal();
return KeywordOrIdentifierToken(chars.start(),
chars.length(),
- harmony_scoping_);
+ harmony_scoping_,
+ harmony_modules_);
}
return Token::IDENTIFIER;
// STRICT_MODE,
// EXTENDED_MODE,
kLanguageModeMask = 0x03,
- kAllowLazy = 4,
- kAllowNativesSyntax = 8
+ kAllowLazy = 0x04,
+ kAllowNativesSyntax = 0x08,
+ kAllowModules = 0x10
};
STATIC_ASSERT((kLanguageModeMask & CLASSIC_MODE) == CLASSIC_MODE);
bool HarmonyScoping() const {
return harmony_scoping_;
}
- void SetHarmonyScoping(bool block_scoping) {
- harmony_scoping_ = block_scoping;
+ void SetHarmonyScoping(bool scoping) {
+ harmony_scoping_ = scoping;
+ }
+ bool HarmonyModules() const {
+ return harmony_modules_;
+ }
+ void SetHarmonyModules(bool modules) {
+ harmony_modules_ = modules;
}
// Whether there is a multi-line comment that contains a
// line-terminator after the current token, and before the next.
bool has_multiline_comment_before_next_;
- // Whether we scan 'let' as a keyword for harmony block scoped
- // let bindings.
+ // Whether we scan 'let' as a keyword for harmony block-scoped let bindings.
bool harmony_scoping_;
+ // Whether we scan 'module', 'import', 'export' as keywords.
+ bool harmony_modules_;
};
} } // namespace v8::internal
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
top = top->outer_scope();
}
- // Allocated the variables.
- top->AllocateVariables(info->global_scope());
+ // Allocate the variables.
+ {
+ AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate());
+ top->AllocateVariables(info->global_scope(), &ast_node_factory);
+ }
#ifdef DEBUG
if (info->isolate()->bootstrapper()->IsActive()
}
-Variable* Scope::LookupFunctionVar(Handle<String> name) {
+Variable* Scope::LookupFunctionVar(Handle<String> name,
+ AstNodeFactory<AstNullVisitor>* factory) {
if (function_ != NULL && function_->name().is_identical_to(name)) {
return function_->var();
} else if (!scope_info_.is_null()) {
VariableMode mode;
int index = scope_info_->FunctionContextSlotIndex(*name, &mode);
if (index < 0) return NULL;
- Variable* var = DeclareFunctionVar(name, mode);
+ Variable* var = DeclareFunctionVar(name, mode, factory);
var->AllocateTo(Variable::CONTEXT, index);
return var;
} else {
}
-Variable* Scope::DeclareFunctionVar(Handle<String> name, VariableMode mode) {
- ASSERT(is_function_scope() && function_ == NULL);
- Variable* function_var = new Variable(
- this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
- function_ = new(isolate_->zone()) VariableProxy(isolate_, function_var);
- return function_var;
-}
-
-
void Scope::DeclareParameter(Handle<String> name, VariableMode mode) {
ASSERT(!already_resolved());
ASSERT(is_function_scope());
}
-VariableProxy* Scope::NewUnresolved(Handle<String> name, int position) {
- // Note that we must not share the unresolved variables with
- // the same name because they may be removed selectively via
- // RemoveUnresolved().
- ASSERT(!already_resolved());
- VariableProxy* proxy = new(isolate_->zone()) VariableProxy(
- isolate_, name, false, position);
- unresolved_.Add(proxy);
- return proxy;
-}
-
-
void Scope::RemoveUnresolved(VariableProxy* var) {
// Most likely (always?) any variable we want to remove
// was just added before, so we search backwards.
}
-void Scope::AllocateVariables(Scope* global_scope) {
+void Scope::AllocateVariables(Scope* global_scope,
+ AstNodeFactory<AstNullVisitor>* factory) {
// 1) Propagate scope information.
bool outer_scope_calls_non_strict_eval = false;
if (outer_scope_ != NULL) {
PropagateScopeInfo(outer_scope_calls_non_strict_eval);
// 2) Resolve variables.
- ResolveVariablesRecursively(global_scope);
+ ResolveVariablesRecursively(global_scope, factory);
// 3) Allocate variables.
AllocateVariablesRecursively();
Variable* Scope::LookupRecursive(Handle<String> name,
- BindingKind* binding_kind) {
+ BindingKind* binding_kind,
+ AstNodeFactory<AstNullVisitor>* factory) {
ASSERT(binding_kind != NULL);
// Try to find the variable in this scope.
Variable* var = LocalLookup(name);
// if any. We can do this for all scopes, since the function variable is
// only present - if at all - for function scopes.
*binding_kind = UNBOUND;
- var = LookupFunctionVar(name);
+ var = LookupFunctionVar(name, factory);
if (var != NULL) {
*binding_kind = BOUND;
} else if (outer_scope_ != NULL) {
- var = outer_scope_->LookupRecursive(name, binding_kind);
+ var = outer_scope_->LookupRecursive(name, binding_kind, factory);
if (*binding_kind == BOUND && (is_function_scope() || is_with_scope())) {
var->ForceContextAllocation();
}
void Scope::ResolveVariable(Scope* global_scope,
- VariableProxy* proxy) {
+ VariableProxy* proxy,
+ AstNodeFactory<AstNullVisitor>* factory) {
ASSERT(global_scope == NULL || global_scope->is_global_scope());
// If the proxy is already resolved there's nothing to do
// Otherwise, try to resolve the variable.
BindingKind binding_kind;
- Variable* var = LookupRecursive(proxy->name(), &binding_kind);
+ Variable* var = LookupRecursive(proxy->name(), &binding_kind, factory);
switch (binding_kind) {
case BOUND:
// We found a variable binding.
}
-void Scope::ResolveVariablesRecursively(Scope* global_scope) {
+void Scope::ResolveVariablesRecursively(
+ Scope* global_scope,
+ AstNodeFactory<AstNullVisitor>* factory) {
ASSERT(global_scope == NULL || global_scope->is_global_scope());
// Resolve unresolved variables for this scope.
for (int i = 0; i < unresolved_.length(); i++) {
- ResolveVariable(global_scope, unresolved_[i]);
+ ResolveVariable(global_scope, unresolved_[i], factory);
}
// Resolve unresolved variables for inner scopes.
for (int i = 0; i < inner_scopes_.length(); i++) {
- inner_scopes_[i]->ResolveVariablesRecursively(global_scope);
+ inner_scopes_[i]->ResolveVariablesRecursively(global_scope, factory);
}
}
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// between this scope and the outer scope. (ECMA-262, 3rd., requires that
// the name of named function literal is kept in an intermediate scope
// in between this scope and the next outer scope.)
- Variable* LookupFunctionVar(Handle<String> name);
+ Variable* LookupFunctionVar(Handle<String> name,
+ AstNodeFactory<AstNullVisitor>* factory);
// Lookup a variable in this scope or outer scopes.
// Returns the variable or NULL if not found.
// Declare the function variable for a function literal. This variable
// is in an intermediate scope between this function scope and the
// outer scope. Only possible for function scopes; at most one variable.
- Variable* DeclareFunctionVar(Handle<String> name, VariableMode mode);
+ template<class Visitor>
+ Variable* DeclareFunctionVar(Handle<String> name,
+ VariableMode mode,
+ AstNodeFactory<Visitor>* factory) {
+ ASSERT(is_function_scope() && function_ == NULL);
+ Variable* function_var = new Variable(
+ this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
+ function_ = factory->NewVariableProxy(function_var);
+ return function_var;
+ }
// Declare a parameter in this scope. When there are duplicated
// parameters the rightmost one 'wins'. However, the implementation
Variable* DeclareGlobal(Handle<String> name);
// Create a new unresolved variable.
- VariableProxy* NewUnresolved(Handle<String> name,
- int position = RelocInfo::kNoPosition);
+ template<class Visitor>
+ VariableProxy* NewUnresolved(AstNodeFactory<Visitor>* factory,
+ Handle<String> name,
+ int position = RelocInfo::kNoPosition) {
+ // Note that we must not share the unresolved variables with
+ // the same name because they may be removed selectively via
+ // RemoveUnresolved().
+ ASSERT(!already_resolved());
+ VariableProxy* proxy = factory->NewVariableProxy(name, false, position);
+ unresolved_.Add(proxy);
+ return proxy;
+ }
// Remove an unresolved variable. During parsing, an unresolved variable
// may have been added optimistically, but then only the variable name
// In the case of code compiled and run using 'eval', the context
// parameter is the context in which eval was called. In all other
// cases the context parameter is an empty handle.
- void AllocateVariables(Scope* global_scope);
+ void AllocateVariables(Scope* global_scope,
+ AstNodeFactory<AstNullVisitor>* factory);
// Current number of var or const locals.
int num_var_or_const() { return num_var_or_const_; }
// scope. If the code is executed because of a call to 'eval', the context
// parameter should be set to the calling context of 'eval'.
Variable* LookupRecursive(Handle<String> name,
- BindingKind* binding_kind);
+ BindingKind* binding_kind,
+ AstNodeFactory<AstNullVisitor>* factory);
void ResolveVariable(Scope* global_scope,
- VariableProxy* proxy);
- void ResolveVariablesRecursively(Scope* global_scope);
+ VariableProxy* proxy,
+ AstNodeFactory<AstNullVisitor>* factory);
+ void ResolveVariablesRecursively(Scope* global_scope,
+ AstNodeFactory<AstNullVisitor>* factory);
// Scope analysis.
bool PropagateScopeInfo(bool outer_scope_calls_non_strict_eval);
class MapSpace : public FixedSpace {
public:
// Creates a map space object with a maximum capacity.
- MapSpace(Heap* heap,
- intptr_t max_capacity,
- int max_map_space_pages,
- AllocationSpace id)
+ MapSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id)
: FixedSpace(heap, max_capacity, id, Map::kSize, "map"),
- max_map_space_pages_(max_map_space_pages) {
+ max_map_space_pages_(kMaxMapPageIndex - 1) {
}
// Given an index, returns the page address.
T(FUTURE_RESERVED_WORD, NULL, 0) \
T(FUTURE_STRICT_RESERVED_WORD, NULL, 0) \
K(CONST, "const", 0) \
+ K(EXPORT, "export", 0) \
+ K(IMPORT, "import", 0) \
K(LET, "let", 0) \
+ K(MODULE, "module", 0) \
\
/* Illegal token - not able to scan. */ \
T(ILLEGAL, "ILLEGAL", 0) \
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
RuntimeProfiler::GlobalSetup();
- // Peephole optimization might interfere with deoptimization.
- FLAG_peephole_optimization = !use_crankshaft_;
-
ElementsAccessor::InitializeOncePerProcess();
if (FLAG_stress_compaction) {
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 9
-#define BUILD_NUMBER 2
+#define BUILD_NUMBER 5
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// Invoke: Link this frame into the handler chain. There's only one
// handler block in this code object, so its index is 0.
__ bind(&invoke);
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// Clear any pending exceptions.
__ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
}
#endif
+ // We can optionally optimize based on counters rather than statistical
+ // sampling.
+ if (info->ShouldSelfOptimize()) {
+ if (FLAG_trace_opt) {
+ PrintF("[adding self-optimization header to %s]\n",
+ *info->function()->debug_name()->ToCString());
+ }
+ MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+ Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+ JSGlobalPropertyCell* cell;
+ if (maybe_cell->To(&cell)) {
+ __ movq(rax, Handle<JSGlobalPropertyCell>(cell),
+ RelocInfo::EMBEDDED_OBJECT);
+ __ SmiAddConstant(FieldOperand(rax, JSGlobalPropertyCell::kValueOffset),
+ Smi::FromInt(-1));
+ Handle<Code> compile_stub(
+ isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+ __ j(zero, compile_stub, RelocInfo::CODE_TARGET);
+ }
+ }
+
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
// receiver object). rcx is zero for method calls and non-zero for
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+ ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+ EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+ FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
(mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ ++global_count_;
break;
case Variable::PARAMETER:
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(rsi); // The context is the first argument.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
DeoptimizeIf(no_condition, instr->environment());
}
} else if (right->IsStackSlot()) {
- __ or_(kScratchRegister, ToOperand(right));
+ __ orl(kScratchRegister, ToOperand(right));
DeoptimizeIf(sign, instr->environment());
} else {
// Test the non-zero operand for negative sign.
- __ or_(kScratchRegister, ToRegister(right));
+ __ orl(kScratchRegister, ToRegister(right));
DeoptimizeIf(sign, instr->environment());
}
__ bind(&done);
}
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
// We will build up the handler from the bottom by pushing on the stack.
- // First compute the state and push the frame pointer and context.
- unsigned state = StackHandler::OffsetField::encode(handler_index);
- if (try_location == IN_JAVASCRIPT) {
- push(rbp);
- push(rsi);
- state |= (type == TRY_CATCH_HANDLER)
- ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
- : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
- } else {
- ASSERT(try_location == IN_JS_ENTRY);
+ // First push the frame pointer and context.
+ if (kind == StackHandler::JS_ENTRY) {
// The frame pointer does not point to a JS frame so we save NULL for
// rbp. We expect the code throwing an exception to check rbp before
// dereferencing it to restore the context.
push(Immediate(0)); // NULL frame pointer.
Push(Smi::FromInt(0)); // No context.
- state |= StackHandler::KindField::encode(StackHandler::ENTRY);
+ } else {
+ push(rbp);
+ push(rsi);
}
// Push the state and the code object.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
push(Immediate(state));
Push(CodeObject());
movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
- STATIC_ASSERT(StackHandler::ENTRY == 0);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
testl(Operand(rsp, StackHandlerConstants::kStateOffset),
Immediate(StackHandler::KindField::kMask));
j(not_zero, &fetch_next);
// Exception handling
// Push a new try handler and link it into try handler chain.
- void PushTryHandler(CodeLocation try_location,
- HandlerType type,
- int handler_index);
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();
} else {
Label call_builtin;
- // Get the elements array of the object.
- __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
- factory()->fixed_array_map());
- __ j(not_equal, &call_builtin);
-
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
+ // Get the elements array of the object.
+ __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
+ factory()->fixed_array_map());
+ __ j(not_equal, &call_builtin);
+
// Get the array's length into rax and calculate new length.
__ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
__ addl(rax, Immediate(argc));
- // Get the element's length into rcx.
- __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
+ // Get the elements' length into rcx.
+ __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
__ cmpl(rax, rcx);
// Save new length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
- // Push the element.
- __ lea(rdx, FieldOperand(rbx,
- rax, times_pointer_size,
- FixedArray::kHeaderSize - argc * kPointerSize));
- __ movq(Operand(rdx, 0), rcx);
+ // Store the value.
+ __ movq(FieldOperand(rdi,
+ rax,
+ times_pointer_size,
+ FixedArray::kHeaderSize - argc * kPointerSize),
+ rcx);
__ Integer32ToSmi(rax, rax); // Return new length as smi.
__ ret((argc + 1) * kPointerSize);
__ bind(&with_write_barrier);
- __ movq(rdi, FieldOperand(rdx, HeapObject::kMapOffset));
- __ CheckFastObjectElements(rdi, &call_builtin);
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(rbx, ¬_fast_object, Label::kNear);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(¬_fast_object);
+ __ CheckFastSmiOnlyElements(rbx, &call_builtin);
+ // rdx: receiver
+ // rbx: map
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ rbx,
+ r10,
+ &call_builtin);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(rbx, &call_builtin);
+ }
+
+ __ CheckFastObjectElements(rbx, &call_builtin);
// Save new length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
- // Push the element.
- __ lea(rdx, FieldOperand(rbx,
+ // Store the value.
+ __ lea(rdx, FieldOperand(rdi,
rax, times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ movq(Operand(rdx, 0), rcx);
- __ RecordWrite(rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ Integer32ToSmi(rax, rax); // Return new length as smi.
__ jmp(&call_builtin);
}
- __ movq(rdi, Operand(rsp, argc * kPointerSize));
+ __ movq(rbx, Operand(rsp, argc * kPointerSize));
// Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check;
- __ JumpIfSmi(rdi, &no_fast_elements_check);
+ __ JumpIfSmi(rbx, &no_fast_elements_check);
__ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
__ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
__ bind(&no_fast_elements_check);
__ Load(rcx, new_space_allocation_top);
// Check if it's the end of elements.
- __ lea(rdx, FieldOperand(rbx,
+ __ lea(rdx, FieldOperand(rdi,
rax, times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ cmpq(rdx, rcx);
__ Store(new_space_allocation_top, rcx);
// Push the argument...
- __ movq(Operand(rdx, 0), rdi);
+ __ movq(Operand(rdx, 0), rbx);
// ... and fill the rest with holes.
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
// tell the incremental marker to rescan the object that we just grew. We
// don't need to worry about the holes because they are in old space and
// already marked black.
- __ RecordWrite(rbx, rdx, rdi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
+ __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
// Restore receiver to rdx as finish sequence assumes it's here.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
// Increment element's and array's sizes.
- __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
+ __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
Smi::FromInt(kAllocationDelta));
// Make new length a smi before returning it.
}
+static void RethrowStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ // Use the frame where JavaScript is called from.
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ int frame_count = stack_trace->GetFrameCount();
+ CHECK_EQ(3, frame_count);
+ int line_number[] = {1, 2, 5};
+ for (int i = 0; i < frame_count; i++) {
+ CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
+ }
+}
+
+
+// Test that we only return the stack trace at the site where the exception
+// is first thrown (not where it is rethrown).
+TEST(RethrowStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ // We make sure that
+ // - the stack trace of the ReferenceError in g() is reported.
+ // - the stack trace is not overwritten when e1 is rethrown by t().
+ // - the stack trace of e2 does not overwrite that of e1.
+ const char* source =
+ "function g() { error; } \n"
+ "function f() { g(); } \n"
+ "function t(e) { throw e; } \n"
+ "try { \n"
+ " f(); \n"
+ "} catch (e1) { \n"
+ " try { \n"
+ " error; \n"
+ " } catch (e2) { \n"
+ " t(e1); \n"
+ " } \n"
+ "} \n";
+ v8::V8::AddMessageListener(RethrowStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowStackTraceHandler);
+}
+
+
+static void RethrowPrimitiveStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ int frame_count = stack_trace->GetFrameCount();
+ CHECK_EQ(2, frame_count);
+ int line_number[] = {3, 7};
+ for (int i = 0; i < frame_count; i++) {
+ CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
+ }
+}
+
+
+// Test that we do not recognize identity for primitive exceptions.
+TEST(RethrowPrimitiveStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ // We do not capture the stack trace for non-Error objects at creation time.
+ // Instead, the stack trace is captured at the last throw site.
+ const char* source =
+ "function g() { throw 404; } \n"
+ "function f() { g(); } \n"
+ "function t(e) { throw e; } \n"
+ "try { \n"
+ " f(); \n"
+ "} catch (e1) { \n"
+ " t(e1) \n"
+ "} \n";
+ v8::V8::AddMessageListener(RethrowPrimitiveStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowPrimitiveStackTraceHandler);
+}
+
+
+static void RethrowExistingStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ // Use the frame where JavaScript is called from.
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ CHECK_EQ(1, stack_trace->GetFrameCount());
+ CHECK_EQ(1, stack_trace->GetFrame(0)->GetLineNumber());
+}
+
+
+// Test that the stack trace is captured when the error object is created and
+// not where it is thrown.
+TEST(RethrowExistingStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ const char* source =
+ "var e = new Error(); \n"
+ "throw e; \n";
+ v8::V8::AddMessageListener(RethrowExistingStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowExistingStackTraceHandler);
+}
+
+
+static void RethrowBogusErrorStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ // Use the frame where JavaScript is called from.
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ CHECK_EQ(1, stack_trace->GetFrameCount());
+ CHECK_EQ(2, stack_trace->GetFrame(0)->GetLineNumber());
+}
+
+
+// Test that the stack trace is captured where the bogus Error object is thrown.
+TEST(RethrowBogusErrorStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ const char* source =
+ "var e = {__proto__: new Error()} \n"
+ "throw e; \n";
+ v8::V8::AddMessageListener(RethrowBogusErrorStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowBogusErrorStackTraceHandler);
+}
+
+
v8::Handle<Value> AnalyzeStackOfEvalWithSourceURL(const v8::Arguments& args) {
v8::HandleScope scope;
v8::Handle<v8::StackTrace> stackTrace =
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
CHECK_EQ(0, list->length());
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- AstNode* node = new(ZONE) EmptyStatement();
+ AstNodeFactory<AstNullVisitor> factory(Isolate::Current());
+ AstNode* node = factory.NewEmptyStatement();
list->Add(node);
CHECK_EQ(1, list->length());
CHECK_EQ(node, list->at(0));
}
const v8::HeapGraphNode* native_group_aaa = GetNode(
- snapshot->GetRoot(), v8::HeapGraphNode::kNative, "aaa-group");
+ snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "aaa-group");
CHECK_NE(NULL, native_group_aaa);
CHECK_EQ(1, native_group_aaa->GetChildrenCount());
const v8::HeapGraphNode* aaa = GetNode(
CHECK_EQ(2, aaa->GetChildrenCount());
const v8::HeapGraphNode* native_group_ccc = GetNode(
- snapshot->GetRoot(), v8::HeapGraphNode::kNative, "ccc-group");
+ snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "ccc-group");
const v8::HeapGraphNode* ccc = GetNode(
native_group_ccc, v8::HeapGraphNode::kNative, "ccc");
CHECK_NE(NULL, ccc);
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
intptr_t booted_memory = MemoryInUse();
if (sizeof(initial_memory) == 8) {
if (v8::internal::Snapshot::IsEnabled()) {
- CHECK_LE(booted_memory - initial_memory, 6654 * 1024); // 6444.
+ CHECK_LE(booted_memory - initial_memory, 6686 * 1024); // 6476.
} else {
- CHECK_LE(booted_memory - initial_memory, 6777 * 1024); // 6596.
+ CHECK_LE(booted_memory - initial_memory, 6809 * 1024); // 6628.
}
} else {
if (v8::internal::Snapshot::IsEnabled()) {
- CHECK_LE(booted_memory - initial_memory, 6500 * 1024); // 6356.
+ CHECK_LE(booted_memory - initial_memory, 6532 * 1024); // 6388.
} else {
- CHECK_LE(booted_memory - initial_memory, 6654 * 1024); // 6424
+ CHECK_LE(booted_memory - initial_memory, 6686 * 1024); // 6456
}
}
}
{
i::Utf8ToUC16CharacterStream stream(keyword, length);
i::Scanner scanner(&unicode_cache);
- // The scanner should parse 'let' as Token::LET for this test.
+ // The scanner should parse Harmony keywords for this test.
scanner.SetHarmonyScoping(true);
+ scanner.SetHarmonyModules(true);
scanner.Initialize(&stream);
CHECK_EQ(key_token.token, scanner.Next());
CHECK_EQ(i::Token::EOS, scanner.Next());
Array.prototype.toString = oldToString;
}
-var a = new Array(123123123);
-assertEquals(123123122, String(a).length);
-assertEquals(123123122, a.join(",").length);
-assertEquals(246246244, a.join("oo").length);
+var a = new Array(123123);
+assertEquals(123122, String(a).length);
+assertEquals(123122, a.join(",").length);
+assertEquals(246244, a.join("oo").length);
a = new Array(Math.pow(2,32) - 1); // Max length.
assertEquals("", a.join(""));
for (var i = 0; i < a.length; i++) a[i] = undefined;
a[5] = "ab";
a[90000] = "cd";
-assertEquals("abcd", a.join("")); // Must not throw.
\ No newline at end of file
+assertEquals("abcd", a.join("")); // Must not throw.
--- /dev/null
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --max-new-space-size=256 --allow-natives-syntax
+
+// Test inlining of Math.floor when assigned to a global.
+var flo = Math.floor;
+var test_id = 0;
+
+function testFloor(expect, input) {
+ var test = new Function('n',
+ '"' + (test_id++) + '";return flo(n)');
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ %OptimizeFunctionOnNextCall(test);
+ assertEquals(expect, test(input));
+}
+
+function zero() {
+ var x = 0.5;
+ return (function() { return x - 0.5; })();
+}
+
+function test() {
+ testFloor(0, 0);
+ testFloor(0, zero());
+ testFloor(-0, -0);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+ testFloor(NaN, NaN);
+
+ // Ensure that a negative zero coming from Math.floor is properly handled
+ // by other operations.
+ function ifloor(x) {
+ return 1 / Math.floor(x);
+ }
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ %OptimizeFunctionOnNextCall(ifloor);
+ assertEquals(-Infinity, ifloor(-0));
+
+ testFloor(0, 0.1);
+ testFloor(0, 0.49999999999999994);
+ testFloor(0, 0.5);
+ testFloor(0, 0.7);
+ testFloor(-1, -0.1);
+ testFloor(-1, -0.49999999999999994);
+ testFloor(-1, -0.5);
+ testFloor(-1, -0.7);
+ testFloor(1, 1);
+ testFloor(1, 1.1);
+ testFloor(1, 1.5);
+ testFloor(1, 1.7);
+ testFloor(-1, -1);
+ testFloor(-2, -1.1);
+ testFloor(-2, -1.5);
+ testFloor(-2, -1.7);
+
+ testFloor(0, Number.MIN_VALUE);
+ testFloor(-1, -Number.MIN_VALUE);
+ testFloor(Number.MAX_VALUE, Number.MAX_VALUE);
+ testFloor(-Number.MAX_VALUE, -Number.MAX_VALUE);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+
+ // 2^30 is a smi boundary.
+ var two_30 = 1 << 30;
+
+ testFloor(two_30, two_30);
+ testFloor(two_30, two_30 + 0.1);
+ testFloor(two_30, two_30 + 0.5);
+ testFloor(two_30, two_30 + 0.7);
+
+ testFloor(two_30 - 1, two_30 - 1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.5);
+ testFloor(two_30 - 1, two_30 - 1 + 0.7);
+
+ testFloor(-two_30, -two_30);
+ testFloor(-two_30, -two_30 + 0.1);
+ testFloor(-two_30, -two_30 + 0.5);
+ testFloor(-two_30, -two_30 + 0.7);
+
+ testFloor(-two_30 + 1, -two_30 + 1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.5);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.7);
+
+ // 2^52 is a precision boundary.
+ var two_52 = (1 << 30) * (1 << 22);
+
+ testFloor(two_52, two_52);
+ testFloor(two_52, two_52 + 0.1);
+ assertEquals(two_52, two_52 + 0.5);
+ testFloor(two_52, two_52 + 0.5);
+ assertEquals(two_52 + 1, two_52 + 0.7);
+ testFloor(two_52 + 1, two_52 + 0.7);
+
+ testFloor(two_52 - 1, two_52 - 1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.5);
+ testFloor(two_52 - 1, two_52 - 1 + 0.7);
+
+ testFloor(-two_52, -two_52);
+ testFloor(-two_52, -two_52 + 0.1);
+ testFloor(-two_52, -two_52 + 0.5);
+ testFloor(-two_52, -two_52 + 0.7);
+
+ testFloor(-two_52 + 1, -two_52 + 1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.5);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.7);
+}
+
+
+// Test in a loop to cover the custom IC and GC-related issues.
+for (var i = 0; i < 50; i++) {
+ test();
+}
+
+
+// Regression test for a bug where a negative zero coming from Math.floor
+// was not properly handled by other operations.
+function floorsum(i, n) {
+ var ret = Math.floor(n);
+ while (--i > 0) {
+ ret += Math.floor(n);
+ }
+ return ret;
+}
+assertEquals(-0, floorsum(1, -0));
+%OptimizeFunctionOnNextCall(floorsum);
+// The optimized function will deopt. Run it with enough iterations to try
+// to optimize via OSR (triggering the bug).
+assertEquals(-0, floorsum(100000, -0));
--- /dev/null
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --max-new-space-size=256 --allow-natives-syntax
+
+// Test inlining of Math.floor when assigned to a local.
+var test_id = 0;
+
+function testFloor(expect, input) {
+ var test = new Function('n',
+ '"' + (test_id++) +
+ '";var f = Math.floor; return f(n)');
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ %OptimizeFunctionOnNextCall(test);
+ assertEquals(expect, test(input));
+}
+
+function zero() {
+ var x = 0.5;
+ return (function() { return x - 0.5; })();
+}
+
+function test() {
+ testFloor(0, 0);
+ testFloor(0, zero());
+ testFloor(-0, -0);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+ testFloor(NaN, NaN);
+
+ // Ensure that a negative zero coming from Math.floor is properly handled
+ // by other operations.
+ function ifloor(x) {
+ return 1 / Math.floor(x);
+ }
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ %OptimizeFunctionOnNextCall(ifloor);
+ assertEquals(-Infinity, ifloor(-0));
+
+ testFloor(0, 0.1);
+ testFloor(0, 0.49999999999999994);
+ testFloor(0, 0.5);
+ testFloor(0, 0.7);
+ testFloor(-1, -0.1);
+ testFloor(-1, -0.49999999999999994);
+ testFloor(-1, -0.5);
+ testFloor(-1, -0.7);
+ testFloor(1, 1);
+ testFloor(1, 1.1);
+ testFloor(1, 1.5);
+ testFloor(1, 1.7);
+ testFloor(-1, -1);
+ testFloor(-2, -1.1);
+ testFloor(-2, -1.5);
+ testFloor(-2, -1.7);
+
+ testFloor(0, Number.MIN_VALUE);
+ testFloor(-1, -Number.MIN_VALUE);
+ testFloor(Number.MAX_VALUE, Number.MAX_VALUE);
+ testFloor(-Number.MAX_VALUE, -Number.MAX_VALUE);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+
+ // 2^30 is a smi boundary.
+ var two_30 = 1 << 30;
+
+ testFloor(two_30, two_30);
+ testFloor(two_30, two_30 + 0.1);
+ testFloor(two_30, two_30 + 0.5);
+ testFloor(two_30, two_30 + 0.7);
+
+ testFloor(two_30 - 1, two_30 - 1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.5);
+ testFloor(two_30 - 1, two_30 - 1 + 0.7);
+
+ testFloor(-two_30, -two_30);
+ testFloor(-two_30, -two_30 + 0.1);
+ testFloor(-two_30, -two_30 + 0.5);
+ testFloor(-two_30, -two_30 + 0.7);
+
+ testFloor(-two_30 + 1, -two_30 + 1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.5);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.7);
+
+ // 2^52 is a precision boundary.
+ var two_52 = (1 << 30) * (1 << 22);
+
+ testFloor(two_52, two_52);
+ testFloor(two_52, two_52 + 0.1);
+ assertEquals(two_52, two_52 + 0.5);
+ testFloor(two_52, two_52 + 0.5);
+ assertEquals(two_52 + 1, two_52 + 0.7);
+ testFloor(two_52 + 1, two_52 + 0.7);
+
+ testFloor(two_52 - 1, two_52 - 1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.5);
+ testFloor(two_52 - 1, two_52 - 1 + 0.7);
+
+ testFloor(-two_52, -two_52);
+ testFloor(-two_52, -two_52 + 0.1);
+ testFloor(-two_52, -two_52 + 0.5);
+ testFloor(-two_52, -two_52 + 0.7);
+
+ testFloor(-two_52 + 1, -two_52 + 1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.5);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.7);
+}
+
+
+// Test in a loop to cover the custom IC and GC-related issues.
+for (var i = 0; i < 50; i++) {
+ test();
+}
+
+
+// Regression test for a bug where a negative zero coming from Math.floor
+// was not properly handled by other operations.
+function floorsum(i, n) {
+ var ret = Math.floor(n);
+ while (--i > 0) {
+ ret += Math.floor(n);
+ }
+ return ret;
+}
+assertEquals(-0, floorsum(1, -0));
+%OptimizeFunctionOnNextCall(floorsum);
+// The optimized function will deopt. Run it with enough iterations to try
+// to optimize via OSR (triggering the bug).
+assertEquals(-0, floorsum(100000, -0));
} catch (e) {
}
os.mkdirp(TEST_DIR);
- os.chdir(TEST_DIR);
try {
// Check the chdir worked.
os.system('ls', [TEST_DIR]);
// Simple create dir.
- os.mkdirp("dir");
+ os.mkdirp(TEST_DIR + "/dir");
// Create dir in dir.
- os.mkdirp("dir/foo");
+ os.mkdirp(TEST_DIR + "/dir/foo");
// Check that they are there.
- os.system('ls', ['dir/foo']);
+ os.system('ls', [TEST_DIR + '/dir/foo']);
// Check that we can detect when something is not there.
- assertThrows("os.system('ls', ['dir/bar']);", "dir not there");
+ assertThrows("os.system('ls', [TEST_DIR + '/dir/bar']);", "dir not there");
// Check that mkdirp makes intermediate directories.
- os.mkdirp("dir2/foo");
- os.system("ls", ["dir2/foo"]);
+ os.mkdirp(TEST_DIR + "/dir2/foo");
+ os.system("ls", [TEST_DIR + "/dir2/foo"]);
// Check that mkdirp doesn't mind if the dir is already there.
- os.mkdirp("dir2/foo");
- os.mkdirp("dir2/foo/");
+ os.mkdirp(TEST_DIR + "/dir2/foo");
+ os.mkdirp(TEST_DIR + "/dir2/foo/");
// Check that mkdirp can cope with trailing /
- os.mkdirp("dir3/");
- os.system("ls", ["dir3"]);
+ os.mkdirp(TEST_DIR + "/dir3/");
+ os.system("ls", [TEST_DIR + "/dir3"]);
// Check that we get an error if the name is taken by a file.
- os.system("sh", ["-c", "echo foo > file1"]);
- os.system("ls", ["file1"]);
- assertThrows("os.mkdirp('file1');", "mkdir over file1");
- assertThrows("os.mkdirp('file1/foo');", "mkdir over file2");
- assertThrows("os.mkdirp('file1/');", "mkdir over file3");
- assertThrows("os.mkdirp('file1/foo/');", "mkdir over file4");
+ os.system("sh", ["-c", "echo foo > " + TEST_DIR + "/file1"]);
+ os.system("ls", [TEST_DIR + "/file1"]);
+ assertThrows("os.mkdirp(TEST_DIR + '/file1');", "mkdir over file1");
+ assertThrows("os.mkdirp(TEST_DIR + '/file1/foo');", "mkdir over file2");
+ assertThrows("os.mkdirp(TEST_DIR + '/file1/');", "mkdir over file3");
+ assertThrows("os.mkdirp(TEST_DIR + '/file1/foo/');", "mkdir over file4");
// Create a dir we cannot read.
- os.mkdirp("dir4", 0);
+ os.mkdirp(TEST_DIR + "/dir4", 0);
// This test fails if you are root since root can read any dir.
- assertThrows("os.chdir('dir4');", "chdir dir4 I");
- os.rmdir("dir4");
- assertThrows("os.chdir('dir4');", "chdir dir4 II");
- // Set umask.
+ assertThrows("os.chdir(TEST_DIR + '/dir4');", "chdir dir4 I");
+ os.rmdir(TEST_DIR + "/dir4");
+ assertThrows("os.chdir(TEST_DIR + '/dir4');", "chdir dir4 II");
+
+ // Set umask. This changes the umask for the whole process and is
+ // the reason why the test cannot be run multi-threaded.
var old_umask = os.umask(0777);
// Create a dir we cannot read.
- os.mkdirp("dir5");
+ os.mkdirp(TEST_DIR + "/dir5");
// This test fails if you are root since root can read any dir.
- assertThrows("os.chdir('dir5');", "cd dir5 I");
- os.rmdir("dir5");
- assertThrows("os.chdir('dir5');", "chdir dir5 II");
+ assertThrows("os.chdir(TEST_DIR + '/dir5');", "cd dir5 I");
+ os.rmdir(TEST_DIR + "/dir5");
+ assertThrows("os.chdir(TEST_DIR + '/dir5');", "chdir dir5 II");
os.umask(old_umask);
- os.mkdirp("hest/fisk/../fisk/ged");
- os.system("ls", ["hest/fisk/ged"]);
+ os.mkdirp(TEST_DIR + "/hest/fisk/../fisk/ged");
+ os.system("ls", [TEST_DIR + "/hest/fisk/ged"]);
os.setenv("FOO", "bar");
var environment = os.system("printenv");
--- /dev/null
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays
+
+// Ensure that ElementsKind transitions in various situations are hoisted (or
+// not hoisted) correctly, don't change the semantics of programs and don't trigger
+// deopt through hoisting in important situations.
+
+support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6));
+
+if (support_smi_only_arrays) {
+ print("Tests include smi-only arrays.");
+} else {
+ print("Tests do NOT include smi-only arrays.");
+}
+
+if (support_smi_only_arrays) {
+ // Make sure that a simple elements array transitions inside a loop before
+ // stores to an array gets hoisted in a way that doesn't generate a deopt in
+  // simple cases.
+ function testDoubleConversion4(a) {
+ var object = new Object();
+ a[0] = 0;
+ var count = 3;
+ do {
+ a[0] = object;
+ } while (--count > 0);
+ }
+
+ testDoubleConversion4(new Array(5));
+ %OptimizeFunctionOnNextCall(testDoubleConversion4);
+ testDoubleConversion4(new Array(5));
+ testDoubleConversion4(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testDoubleConversion4));
+
+ // Make sure that non-element related map checks that are not preceded by
+ // transitions in a loop still get hoisted in a way that doesn't generate a
+ // deopt in simple cases.
+ function testExactMapHoisting(a) {
+ var object = new Object();
+ a.foo = 0;
+ a[0] = 0;
+ a[1] = 1;
+ var count = 3;
+ do {
+ a.foo = object; // This map check should be hoistable
+ a[1] = object;
+ result = a.foo == object && a[1] == object;
+ } while (--count > 0);
+ }
+
+ testExactMapHoisting(new Array(5));
+ %OptimizeFunctionOnNextCall(testExactMapHoisting);
+ testExactMapHoisting(new Array(5));
+ testExactMapHoisting(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting));
+
+ // Make sure that non-element related map checks do NOT get hoisted if they
+ // depend on an elements transition before them and it's not possible to hoist
+ // that transition.
+ function testExactMapHoisting2(a) {
+ var object = new Object();
+ a.foo = 0;
+ a[0] = 0;
+ a[1] = 1;
+ var count = 3;
+ do {
+ if (a.bar === undefined) {
+ a[1] = 2.5;
+ }
+ a.foo = object; // This map check should NOT be hoistable because it
+ // includes a check for the FAST_ELEMENTS map as well as
+ // the FAST_DOUBLE_ELEMENTS map, which depends on the
+ // double transition above in the if, which cannot be
+ // hoisted.
+ } while (--count > 0);
+ }
+
+ testExactMapHoisting2(new Array(5));
+ %OptimizeFunctionOnNextCall(testExactMapHoisting2);
+ testExactMapHoisting2(new Array(5));
+ testExactMapHoisting2(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting2));
+
+ // Make sure that non-element related map checks do get hoisted if they use
+ // the transitioned map for the check and all transitions that they depend
+  // upon can be hoisted, too.
+ function testExactMapHoisting3(a) {
+ var object = new Object();
+ a.foo = 0;
+ a[0] = 0;
+ a[1] = 1;
+ var count = 3;
+ do {
+ a[1] = 2.5;
+ a.foo = object; // This map check should be hoistable because all elements
+ // transitions in the loop can also be hoisted.
+ } while (--count > 0);
+ }
+
+ var add_transition = new Array(5);
+ add_transition.foo = 0;
+ add_transition[0] = new Object(); // For FAST_ELEMENT transition to be created
+ testExactMapHoisting3(new Array(5));
+ %OptimizeFunctionOnNextCall(testExactMapHoisting3);
+ testExactMapHoisting3(new Array(5));
+ testExactMapHoisting3(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting3));
+
+ function testDominatingTransitionHoisting1(a) {
+ var object = new Object();
+ a[0] = 0;
+ var count = 3;
+ do {
+ if (a.baz != true) {
+ a[1] = 2.5;
+ }
+ a[0] = object;
+ } while (--count > 3);
+ }
+
+ testDominatingTransitionHoisting1(new Array(5));
+ %OptimizeFunctionOnNextCall(testDominatingTransitionHoisting1);
+ testDominatingTransitionHoisting1(new Array(5));
+ testDominatingTransitionHoisting1(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testDominatingTransitionHoisting1));
+
+ function testHoistingWithSideEffect(a) {
+ var object = new Object();
+ a[0] = 0;
+ var count = 3;
+ do {
+ assertTrue(true);
+ a[0] = object;
+ } while (--count > 3);
+ }
+
+ testHoistingWithSideEffect(new Array(5));
+ %OptimizeFunctionOnNextCall(testHoistingWithSideEffect);
+ testHoistingWithSideEffect(new Array(5));
+ testHoistingWithSideEffect(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testHoistingWithSideEffect));
+}
##############################################################################
-# Issue 1845: http://code.google.com/p/v8/issues/detail?id=1845
-harmony/proxies-example-membrane: SKIP
-
# NewGC: BUG(1719) slow to collect arrays over several contexts.
regress/regress-524: SKIP
debug-liveedit-patch-positions-replace: SKIP
##############################################################################
+[ $isolates ]
+
+# This test sets the umask on a per-process basis and hence cannot be
+# used in multi-threaded runs.
+d8-os: SKIP
+
+##############################################################################
[ $arch == arm ]
# Slow tests which times out in debug mode.
--- /dev/null
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// For http://code.google.com/p/v8/issues/detail?id=1924
+
+a: break a;
+a: b: break a;
+a: b: break b;
+assertThrows("a: break a a", SyntaxError)
+assertThrows("a: break a 1", SyntaxError)
+assertThrows("a: break a ''", SyntaxError)
+assertThrows("a: break a var b", SyntaxError)
+assertThrows("a: break a {}", SyntaxError)
+
+a: if (0) break a;
+b: if (0) {break b;} else {}
+c: if (0) break c; else {}
+d: if (0) break d; else break d;
--- /dev/null
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// This tests that concatenating a fast smi-only array and a fast object array
+// results in a fast object array.
+
+var fast_array = ['a', 'b'];
+var array = fast_array.concat(fast_array);
+
+assertTrue(%HasFastElements(fast_array));
+assertTrue(%HasFastElements(array));
\ No newline at end of file
// Deep cons tree.
var nested_1 = "";
-for (var i = 0; i < 1000000; i++) nested_1 += "y";
+for (var i = 0; i < 100000; i++) nested_1 += "y";
var nested_1_result = prefix1024 + nested_1 + "aa";
nested_1 = prefix1024 + nested_1 + "z";
test_replace(nested_1, nested_1_result, "z", "aa");
var nested_2 = "\u2244";
-for (var i = 0; i < 1000000; i++) nested_2 += "y";
+for (var i = 0; i < 100000; i++) nested_2 += "y";
var nested_2_result = prefix1024 + nested_2 + "aa";
nested_2 = prefix1024 + nested_2 + "\u2012";
test_replace(nested_2, nested_2_result, "\u2012", "aa");
--- /dev/null
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Inspired by and based on:
+# http://src.chromium.org/viewvc/chrome/trunk/src/tools/bash-completion
+
+# Flag completion rule for bash.
+# To load in your shell, "source path/to/this/file".
+
+v8_source=$(readlink -f $(dirname $BASH_SOURCE)/..)
+
+_v8_flag() {
+ local cur defines targets
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ defines=$(cat src/flag-definitions.h \
+ | grep "^DEFINE" \
+ | grep -v "DEFINE_implication" \
+ | sed -e 's/_/-/g')
+ targets=$(echo "$defines" \
+ | sed -ne 's/^DEFINE-[^(]*(\([^,]*\).*/--\1/p'; \
+ echo "$defines" \
+ | sed -ne 's/^DEFINE-bool(\([^,]*\).*/--no\1/p'; \
+ cat src/d8.cc \
+ | grep "strcmp(argv\[i\]" \
+ | sed -ne 's/^[^"]*"--\([^"]*\)".*/--\1/p')
+ COMPREPLY=($(compgen -W "$targets" -- "$cur"))
+ return 0
+}
+
+complete -F _v8_flag -f d8
def GetTimeout(self, testcase, mode):
result = self.timeout * TIMEOUT_SCALEFACTOR[mode]
if '--stress-opt' in self.GetVmFlags(testcase, mode):
- return result * 2
+ return result * 4
else:
return result