no_const_pool_before_ = 0;
last_const_pool_end_ = 0;
last_bound_pos_ = 0;
- ast_id_for_reloc_info_ = kNoASTId;
}
}
}
ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
- if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
- ASSERT(ast_id_for_reloc_info_ != kNoASTId);
- RelocInfo reloc_info_with_ast_id(pc_, rmode, ast_id_for_reloc_info_);
- ast_id_for_reloc_info_ = kNoASTId;
- reloc_info_writer.Write(&reloc_info_with_ast_id);
- } else {
- reloc_info_writer.Write(&rinfo);
- }
+ reloc_info_writer.Write(&rinfo);
}
}
// Mark address of a debug break slot.
void RecordDebugBreakSlot();
- // Record the AST id of the CallIC being compiled, so that it can be placed
- // in the relocation information.
- void RecordAstId(unsigned ast_id) { ast_id_for_reloc_info_ = ast_id; }
-
// Record a comment relocation entry that can be used by a disassembler.
// Use --code-comments to enable.
void RecordComment(const char* msg);
void CheckConstPool(bool force_emit, bool require_jump);
protected:
- // Relocation for a type-recording IC has the AST id added to it. This
- // member variable is a way to pass the information from the call site to
- // the relocation info.
- unsigned ast_id_for_reloc_info_;
-
bool emit_debug_code() const { return emit_debug_code_; }
int buffer_space() const { return reloc_info_writer.pos() - pc_; }
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Value in r0 is ignored (declarations are statements).
}
}
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- EmitCallIC(ic, &patch_site, clause->label()->id());
+ EmitCallIC(ic, &patch_site);
__ cmp(r0, Operand(0));
__ b(ne, &next_test);
__ Drop(1); // Switch value is no longer needed.
}
-void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow) {
- Register current = cp;
- Register next = r1;
- Register temp = r2;
-
- Scope* s = scope();
- while (s != NULL) {
- if (s->num_heap_slots() > 0) {
- if (s->calls_eval()) {
- // Check that extension is NULL.
- __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
- __ tst(temp, temp);
- __ b(ne, slow);
- }
- // Load next context in chain.
- __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
- // Walk the rest of the chain without clobbering cp.
- current = next;
- }
- // If no outer scope calls eval, we do not need to check more
- // context extensions.
- if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
- s = s->outer_scope();
- }
-
- if (s->is_eval_scope()) {
- Label loop, fast;
- if (!current.is(next)) {
- __ Move(next, current);
- }
- __ bind(&loop);
- // Terminate at global context.
- __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
- __ cmp(temp, ip);
- __ b(eq, &fast);
- // Check that extension is NULL.
- __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
- __ tst(temp, temp);
- __ b(ne, slow);
- // Load next context in chain.
- __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
- __ b(&loop);
- __ bind(&fast);
- }
-
- __ ldr(r0, GlobalObjectOperand());
- __ mov(r2, Operand(slot->var()->name()));
- RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
- ? RelocInfo::CODE_TARGET
- : RelocInfo::CODE_TARGET_CONTEXT;
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- EmitCallIC(ic, mode, AstNode::kNoNumber);
-}
-
-
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
Slot* slot,
Label* slow) {
__ mov(r0, Operand(key_literal->handle()));
Handle<Code> ic =
isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
__ jmp(done);
}
}
}
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+ Slot* slot,
+ TypeofState typeof_state,
+ Label* slow) {
+ Register current = cp;
+ Register next = r1;
+ Register temp = r2;
+
+ Scope* s = scope();
+ while (s != NULL) {
+ if (s->num_heap_slots() > 0) {
+ if (s->calls_eval()) {
+ // Check that extension is NULL.
+ __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+ __ tst(temp, temp);
+ __ b(ne, slow);
+ }
+ // Load next context in chain.
+ __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
+ __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+ // Walk the rest of the chain without clobbering cp.
+ current = next;
+ }
+ // If no outer scope calls eval, we do not need to check more
+ // context extensions.
+ if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+ s = s->outer_scope();
+ }
+
+ if (s->is_eval_scope()) {
+ Label loop, fast;
+ if (!current.is(next)) {
+ __ Move(next, current);
+ }
+ __ bind(&loop);
+ // Terminate at global context.
+ __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+ __ cmp(temp, ip);
+ __ b(eq, &fast);
+ // Check that extension is NULL.
+ __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
+ __ tst(temp, temp);
+ __ b(ne, slow);
+ // Load next context in chain.
+ __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
+ __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+ __ b(&loop);
+ __ bind(&fast);
+ }
+
+ __ ldr(r0, GlobalObjectOperand());
+ __ mov(r2, Operand(slot->var()->name()));
+ RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+ ? RelocInfo::CODE_TARGET
+ : RelocInfo::CODE_TARGET_CONTEXT;
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ EmitCallIC(ic, mode);
+}
+
+
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
// Four cases: non-this global variables, lookup slots, all other
// types of slots, and parameters that rewrite to explicit property
__ ldr(r0, GlobalObjectOperand());
__ mov(r2, Operand(var->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(r0);
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
// Call keyed load IC. It has arguments key and receiver in r0 and r1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
context()->Plug(r0);
}
}
VisitForAccumulatorValue(value);
__ mov(r2, Operand(key->handle()));
__ ldr(r1, MemOperand(sp));
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, key->id());
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
SetSourcePosition(expr->position() + 1);
AccumulatorValueContext context(this);
if (ShouldInlineSmiCase(op)) {
- EmitInlineSmiBinaryOp(expr->binary_operation(),
+ EmitInlineSmiBinaryOp(expr,
op,
mode,
expr->target(),
expr->value());
} else {
- EmitBinaryOp(expr->binary_operation(), op, mode);
+ EmitBinaryOp(op, mode);
}
// Deoptimization point in case the binary operation may have side effects.
__ mov(r2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name r0 and r2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- if (prop->is_synthetic()) {
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
- } else {
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id());
- }
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
SetSourcePosition(prop->position());
// Call keyed load IC. It has arguments key and receiver in r0 and r1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- if (prop->is_synthetic()) {
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
- } else {
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id());
- }
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
-void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Expression* left_expr,
__ bind(&stub_call);
TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site, expr->id());
+ EmitCallIC(stub.GetCode(), &patch_site);
__ jmp(&done);
__ bind(&smi_case);
}
-void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
- Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
__ pop(r1);
TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), NULL, expr->id());
+ EmitCallIC(stub.GetCode(), NULL);
context()->Plug(r0);
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
case KEYED_PROPERTY: {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
- unsigned ast_id =
- (mode == RelocInfo::CODE_TARGET_WITH_ID) ? expr->id() : kNoASTId;
- EmitCallIC(ic, mode, ast_id);
+ EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- EmitCallIC(ic, mode, expr->id());
+ EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
- EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET_WITH_ID);
+ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
// For a synthetic property use keyed load IC followed by function call,
- // for a regular property use keyed EmitCallIC.
+ // for a regular property use keyed CallIC.
if (prop->is_synthetic()) {
// Do not visit the object and key subexpressions (they are shared
// by all occurrences of the same rewritten parameter).
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
__ ldr(r1, GlobalObjectOperand());
__ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
__ Push(r0, r1); // Function, receiver.
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
- EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET_WITH_ID);
+ EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
}
}
} else {
__ mov(r2, Operand(expr->name()));
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
SetSourcePosition(expr->position());
TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
// Store the value returned in r0.
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailout(expr, TOS_REG);
context()->Plug(r0);
} else if (proxy != NULL &&
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- EmitCallIC(ic, &patch_site, expr->id());
+ EmitCallIC(ic, &patch_site);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ cmp(r0, Operand(0));
Split(cond, if_true, if_false, fall_through);
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
- RelocInfo::Mode mode,
- unsigned ast_id) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
- mode == RelocInfo::CODE_TARGET_CONTEXT ||
- mode == RelocInfo::CODE_TARGET_WITH_ID);
+ mode == RelocInfo::CODE_TARGET_CONTEXT);
Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
default:
break;
}
- if (mode == RelocInfo::CODE_TARGET_WITH_ID) {
- ASSERT(ast_id != kNoASTId);
- __ CallWithAstId(ic, mode, ast_id);
- } else {
- ASSERT(ast_id == kNoASTId);
- __ Call(ic, mode);
- }
+ __ Call(ic, mode);
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
- JumpPatchSite* patch_site,
- unsigned ast_id) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
default:
break;
}
-
- if (ast_id != kNoASTId) {
- __ CallWithAstId(ic, RelocInfo::CODE_TARGET_WITH_ID, ast_id);
- } else {
- __ Call(ic, RelocInfo::CODE_TARGET);
- }
+ __ Call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
}
-void MacroAssembler::Call(intptr_t target,
- RelocInfo::Mode rmode,
- Condition cond) {
+void MacroAssembler::Call(
+ intptr_t target, RelocInfo::Mode rmode, Condition cond) {
// Block constant pool for the call instruction sequence.
BlockConstPoolScope block_const_pool(this);
#ifdef DEBUG
}
-void MacroAssembler::CallWithAstId(Handle<Code> code,
- RelocInfo::Mode rmode,
- unsigned ast_id,
- Condition cond) {
-#ifdef DEBUG
- int pre_position = pc_offset();
-#endif
-
- ASSERT(rmode == RelocInfo::CODE_TARGET_WITH_ID);
- ASSERT(ast_id != kNoASTId);
- ASSERT(ast_id_for_reloc_info_ == kNoASTId);
- ast_id_for_reloc_info_ = ast_id;
- // 'code' is always generated ARM code, never THUMB code
- Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
-
-#ifdef DEBUG
- int post_position = pc_offset();
- CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position);
-#endif
-}
-
-
-void MacroAssembler::Call(Handle<Code> code,
- RelocInfo::Mode rmode,
- Condition cond) {
+void MacroAssembler::Call(
+ Handle<Code> code, RelocInfo::Mode rmode, Condition cond) {
#ifdef DEBUG
int pre_position = pc_offset();
#endif
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
int CallSize(byte* target, RelocInfo::Mode rmode, Condition cond = al);
void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
int CallSize(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
- void Call(Handle<Code> code,
- RelocInfo::Mode rmode,
- Condition cond = al);
- void CallWithAstId(Handle<Code> code,
- RelocInfo::Mode rmode,
- unsigned ast_id,
- Condition cond = al);
+ void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
void Ret(Condition cond = al);
// Emit code to discard a non-negative number of pointer-sized elements
void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
int CallSize(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
- void Call(intptr_t target,
- RelocInfo::Mode rmode,
- Condition cond = al);
+ void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2006-2009 the V8 project authors. All rights reserved.
#include "v8.h"
// -----------------------------------------------------------------------------
// Implementation of RelocInfoWriter and RelocIterator
//
-// Relocation information is written backwards in memory, from high addresses
-// towards low addresses, byte by byte. Therefore, in the encodings listed
-// below, the first byte listed it at the highest address, and successive
-// bytes in the record are at progressively lower addresses.
-//
// Encoding
//
// The most common modes are given single-byte encodings. Also, it is
// easy to identify the type of reloc info and skip unwanted modes in
// an iteration.
//
-// The encoding relies on the fact that there are fewer than 14
-// different non-compactly encoded relocation modes.
+// The encoding relies on the fact that there are fewer than 14
+// different relocation modes.
//
-// The first byte of a relocation record has a tag in its low 2 bits:
-// Here are the record schemes, depending on the low tag and optional higher
-// tags.
+// embedded_object: [6 bits pc delta] 00
//
-// Low tag:
-// 00: embedded_object: [6-bit pc delta] 00
+// code_target: [6 bits pc delta] 01
//
-// 01: code_target: [6-bit pc delta] 01
+// position: [6 bits pc delta] 10,
+// [7 bits signed data delta] 0
//
-// 10: short_data_record: [6-bit pc delta] 10 followed by
-// [6-bit data delta] [2-bit data type tag]
+// statement_position: [6 bits pc delta] 10,
+// [7 bits signed data delta] 1
//
-// 11: long_record [2-bit high tag][4 bit middle_tag] 11
-// followed by variable data depending on type.
+// any nondata mode: 00 [4 bits rmode] 11, // rmode: 0..13 only
+// 00 [6 bits pc delta]
//
-// 2-bit data type tags, used in short_data_record and data_jump long_record:
-// code_target_with_id: 00
-// position: 01
-// statement_position: 10
-// comment: 11 (not used in short_data_record)
+// pc-jump: 00 1111 11,
+// 00 [6 bits pc delta]
//
-// Long record format:
-// 4-bit middle_tag:
-// 0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2
-// (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM,
-// and is between 0000 and 1100)
-// The format is:
-// 00 [4 bit middle_tag] 11 followed by
-// 00 [6 bit pc delta]
+// pc-jump: 01 1111 11,
+// (variable length) 7 - 26 bit pc delta, written in chunks of 7
+// bits, the lowest 7 bits written first.
//
-// 1101: not used (would allow one more relocation mode to be added)
-// 1110: long_data_record
-// The format is: [2-bit data_type_tag] 1110 11
-// signed intptr_t, lowest byte written first
-// (except data_type code_target_with_id, which
-// is followed by a signed int, not intptr_t.)
+// data-jump + pos: 00 1110 11,
+// signed intptr_t, lowest byte written first
+//
+// data-jump + st.pos: 01 1110 11,
+// signed intptr_t, lowest byte written first
+//
+// data-jump + comm.: 10 1110 11,
+// signed intptr_t, lowest byte written first
//
-// 1111: long_pc_jump
-// The format is:
-// pc-jump: 00 1111 11,
-// 00 [6 bits pc delta]
-// or
-// pc-jump (variable length):
-// 01 1111 11,
-// [7 bits data] 0
-// ...
-// [7 bits data] 1
-// (Bits 6..31 of pc delta, with leading zeroes
-// dropped, and last non-zero chunk tagged with 1.)
-
-
const int kMaxRelocModes = 14;
const int kTagBits = 2;
const int kTagMask = (1 << kTagBits) - 1;
const int kExtraTagBits = 4;
-const int kLocatableTypeTagBits = 2;
-const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;
+const int kPositionTypeTagBits = 1;
+const int kSmallDataBits = kBitsPerByte - kPositionTypeTagBits;
const int kEmbeddedObjectTag = 0;
const int kCodeTargetTag = 1;
-const int kLocatableTag = 2;
+const int kPositionTag = 2;
const int kDefaultTag = 3;
-const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;
+const int kPCJumpTag = (1 << kExtraTagBits) - 1;
const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
const int kLastChunkTag = 1;
-const int kDataJumpExtraTag = kPCJumpExtraTag - 1;
+const int kDataJumpTag = kPCJumpTag - 1;
-const int kCodeWithIdTag = 0;
-const int kNonstatementPositionTag = 1;
-const int kStatementPositionTag = 2;
-const int kCommentTag = 3;
+const int kNonstatementPositionTag = 0;
+const int kStatementPositionTag = 1;
+const int kCommentTag = 2;
uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
// Otherwise write a variable length PC jump for the bits that do
// not fit in the kSmallPCDeltaBits bits.
if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
- WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
+ WriteExtraTag(kPCJumpTag, kVariableLengthPCJumpTopTag);
uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
ASSERT(pc_jump > 0);
// Write kChunkBits size chunks of the pc_jump.
void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
- *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
+ *--pos_ = static_cast<byte>(data_delta << kPositionTypeTagBits | tag);
}
}
-void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
- WriteExtraTag(kDataJumpExtraTag, top_tag);
- for (int i = 0; i < kIntSize; i++) {
- *--pos_ = static_cast<byte>(data_delta);
- // Signed right shift is arithmetic shift. Tested in test-utils.cc.
- data_delta = data_delta >> kBitsPerByte;
- }
-}
-
void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
- WriteExtraTag(kDataJumpExtraTag, top_tag);
+ WriteExtraTag(kDataJumpTag, top_tag);
for (int i = 0; i < kIntptrSize; i++) {
*--pos_ = static_cast<byte>(data_delta);
- // Signed right shift is arithmetic shift. Tested in test-utils.cc.
+ // Signed right shift is arithmetic shift. Tested in test-utils.cc.
data_delta = data_delta >> kBitsPerByte;
}
}
byte* begin_pos = pos_;
#endif
ASSERT(rinfo->pc() - last_pc_ >= 0);
- ASSERT(RelocInfo::NUMBER_OF_MODES - RelocInfo::LAST_COMPACT_ENUM <=
- kMaxRelocModes);
+ ASSERT(RelocInfo::NUMBER_OF_MODES <= kMaxRelocModes);
// Use unsigned delta-encoding for pc.
uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
RelocInfo::Mode rmode = rinfo->rmode();
} else if (rmode == RelocInfo::CODE_TARGET) {
WriteTaggedPC(pc_delta, kCodeTargetTag);
ASSERT(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
- } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
- // Use signed delta-encoding for id.
- ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
- int id_delta = static_cast<int>(rinfo->data()) - last_id_;
- // Check if delta is small enough to fit in a tagged byte.
- if (is_intn(id_delta, kSmallDataBits)) {
- WriteTaggedPC(pc_delta, kLocatableTag);
- WriteTaggedData(id_delta, kCodeWithIdTag);
- } else {
- // Otherwise, use costly encoding.
- WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
- WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
- }
- last_id_ = static_cast<int>(rinfo->data());
} else if (RelocInfo::IsPosition(rmode)) {
- // Use signed delta-encoding for position.
- ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
- int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
- int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
- : kStatementPositionTag;
- // Check if delta is small enough to fit in a tagged byte.
- if (is_intn(pos_delta, kSmallDataBits)) {
- WriteTaggedPC(pc_delta, kLocatableTag);
- WriteTaggedData(pos_delta, pos_type_tag);
+ // Use signed delta-encoding for data.
+ intptr_t data_delta = rinfo->data() - last_data_;
+ int pos_type_tag = rmode == RelocInfo::POSITION ? kNonstatementPositionTag
+ : kStatementPositionTag;
+ // Check if data is small enough to fit in a tagged byte.
+ // We cannot use is_intn because data_delta is not an int32_t.
+ if (data_delta >= -(1 << (kSmallDataBits-1)) &&
+ data_delta < 1 << (kSmallDataBits-1)) {
+ WriteTaggedPC(pc_delta, kPositionTag);
+ WriteTaggedData(data_delta, pos_type_tag);
+ last_data_ = rinfo->data();
} else {
// Otherwise, use costly encoding.
- WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
- WriteExtraTaggedIntData(pos_delta, pos_type_tag);
+ WriteExtraTaggedPC(pc_delta, kPCJumpTag);
+ WriteExtraTaggedData(data_delta, pos_type_tag);
+ last_data_ = rinfo->data();
}
- last_position_ = static_cast<int>(rinfo->data());
} else if (RelocInfo::IsComment(rmode)) {
// Comments are normally not generated, so we use the costly encoding.
- WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
- WriteExtraTaggedData(rinfo->data(), kCommentTag);
+ WriteExtraTaggedPC(pc_delta, kPCJumpTag);
+ WriteExtraTaggedData(rinfo->data() - last_data_, kCommentTag);
+ last_data_ = rinfo->data();
ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
} else {
- ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM);
- int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
// For all other modes we simply use the mode as the extra tag.
// None of these modes need a data component.
- ASSERT(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
- WriteExtraTaggedPC(pc_delta, saved_mode);
+ ASSERT(rmode < kPCJumpTag && rmode < kDataJumpTag);
+ WriteExtraTaggedPC(pc_delta, rmode);
}
last_pc_ = rinfo->pc();
#ifdef DEBUG
}
-void RelocIterator::AdvanceReadId() {
- int x = 0;
- for (int i = 0; i < kIntSize; i++) {
- x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
- }
- last_id_ += x;
- rinfo_.data_ = last_id_;
-}
-
-
-void RelocIterator::AdvanceReadPosition() {
- int x = 0;
- for (int i = 0; i < kIntSize; i++) {
- x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
- }
- last_position_ += x;
- rinfo_.data_ = last_position_;
-}
-
-
void RelocIterator::AdvanceReadData() {
intptr_t x = 0;
for (int i = 0; i < kIntptrSize; i++) {
x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
}
- rinfo_.data_ = x;
+ rinfo_.data_ += x;
}
}
-inline int RelocIterator::GetLocatableTypeTag() {
- return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
-}
-
-
-inline void RelocIterator::ReadTaggedId() {
- int8_t signed_b = *pos_;
- // Signed right shift is arithmetic shift. Tested in test-utils.cc.
- last_id_ += signed_b >> kLocatableTypeTagBits;
- rinfo_.data_ = last_id_;
+inline int RelocIterator::GetPositionTypeTag() {
+ return *pos_ & ((1 << kPositionTypeTagBits) - 1);
}
-inline void RelocIterator::ReadTaggedPosition() {
+inline void RelocIterator::ReadTaggedData() {
int8_t signed_b = *pos_;
// Signed right shift is arithmetic shift. Tested in test-utils.cc.
- last_position_ += signed_b >> kLocatableTypeTagBits;
- rinfo_.data_ = last_position_;
+ rinfo_.data_ += signed_b >> kPositionTypeTagBits;
}
-static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
- ASSERT(tag == kNonstatementPositionTag ||
- tag == kStatementPositionTag);
- return (tag == kNonstatementPositionTag) ?
- RelocInfo::POSITION :
- RelocInfo::STATEMENT_POSITION;
+inline RelocInfo::Mode RelocIterator::DebugInfoModeFromTag(int tag) {
+ if (tag == kStatementPositionTag) {
+ return RelocInfo::STATEMENT_POSITION;
+ } else if (tag == kNonstatementPositionTag) {
+ return RelocInfo::POSITION;
+ } else {
+ ASSERT(tag == kCommentTag);
+ return RelocInfo::COMMENT;
+ }
}
} else if (tag == kCodeTargetTag) {
ReadTaggedPC();
if (SetMode(RelocInfo::CODE_TARGET)) return;
- } else if (tag == kLocatableTag) {
+ } else if (tag == kPositionTag) {
ReadTaggedPC();
Advance();
- int locatable_tag = GetLocatableTypeTag();
- if (locatable_tag == kCodeWithIdTag) {
- if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
- ReadTaggedId();
- return;
- }
- } else {
- // Compact encoding is never used for comments,
- // so it must be a position.
- ASSERT(locatable_tag == kNonstatementPositionTag ||
- locatable_tag == kStatementPositionTag);
- if (mode_mask_ & RelocInfo::kPositionMask) {
- ReadTaggedPosition();
- if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
- }
+ // Check if we want source positions.
+ if (mode_mask_ & RelocInfo::kPositionMask) {
+ ReadTaggedData();
+ if (SetMode(DebugInfoModeFromTag(GetPositionTypeTag()))) return;
}
} else {
ASSERT(tag == kDefaultTag);
int extra_tag = GetExtraTag();
- if (extra_tag == kPCJumpExtraTag) {
+ if (extra_tag == kPCJumpTag) {
int top_tag = GetTopTag();
if (top_tag == kVariableLengthPCJumpTopTag) {
AdvanceReadVariableLengthPCJump();
} else {
AdvanceReadPC();
}
- } else if (extra_tag == kDataJumpExtraTag) {
- int locatable_tag = GetTopTag();
- if (locatable_tag == kCodeWithIdTag) {
- if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
- AdvanceReadId();
- return;
- }
- Advance(kIntSize);
- } else if (locatable_tag != kCommentTag) {
- ASSERT(locatable_tag == kNonstatementPositionTag ||
- locatable_tag == kStatementPositionTag);
- if (mode_mask_ & RelocInfo::kPositionMask) {
- AdvanceReadPosition();
- if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
- } else {
- Advance(kIntSize);
- }
+ } else if (extra_tag == kDataJumpTag) {
+ // Check if we want debug modes (the only ones with data).
+ if (mode_mask_ & RelocInfo::kDebugMask) {
+ int top_tag = GetTopTag();
+ AdvanceReadData();
+ if (SetMode(DebugInfoModeFromTag(top_tag))) return;
} else {
- ASSERT(locatable_tag == kCommentTag);
- if (SetMode(RelocInfo::COMMENT)) {
- AdvanceReadData();
- return;
- }
+ // Otherwise, just skip over the data.
Advance(kIntptrSize);
}
} else {
AdvanceReadPC();
- int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
- if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
+ if (SetMode(static_cast<RelocInfo::Mode>(extra_tag))) return;
}
}
}
end_ = code->relocation_start();
done_ = false;
mode_mask_ = mode_mask;
- last_id_ = 0;
- last_position_ = 0;
if (mode_mask_ == 0) pos_ = end_;
next();
}
end_ = pos_ - desc.reloc_size;
done_ = false;
mode_mask_ = mode_mask;
- last_id_ = 0;
- last_position_ = 0;
if (mode_mask_ == 0) pos_ = end_;
next();
}
return "debug break";
case RelocInfo::CODE_TARGET:
return "code target";
- case RelocInfo::CODE_TARGET_WITH_ID:
- return "code target with id";
case RelocInfo::GLOBAL_PROPERTY_CELL:
return "global property cell";
case RelocInfo::RUNTIME_ENTRY:
Code* code = Code::GetCodeFromTargetAddress(target_address());
PrintF(out, " (%s) (%p)", Code::Kind2String(code->kind()),
target_address());
- if (rmode_ == CODE_TARGET_WITH_ID) {
- PrintF(" (id=%d)", static_cast<int>(data_));
- }
} else if (IsPosition(rmode_)) {
PrintF(out, " (%" V8_PTR_PREFIX "d)", data());
} else if (rmode_ == RelocInfo::RUNTIME_ENTRY &&
#endif
case CONSTRUCT_CALL:
case CODE_TARGET_CONTEXT:
- case CODE_TARGET_WITH_ID:
case CODE_TARGET: {
// convert inline target address to code object
Address addr = target_address();
namespace v8 {
namespace internal {
-const unsigned kNoASTId = -1;
+
// -----------------------------------------------------------------------------
// Platform independent assembler base class.
enum Mode {
// Please note the order is important (see IsCodeTarget, IsGCRelocMode).
- CODE_TARGET, // Code target which is not any of the above.
- CODE_TARGET_WITH_ID,
CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor.
CODE_TARGET_CONTEXT, // Code target used for contextual loads and stores.
DEBUG_BREAK, // Code target for the debugger statement.
+ CODE_TARGET, // Code target which is not any of the above.
EMBEDDED_OBJECT,
GLOBAL_PROPERTY_CELL,
// add more as needed
// Pseudo-types
- NUMBER_OF_MODES, // There are at most 14 modes with noncompact encoding.
+ NUMBER_OF_MODES, // must be no greater than 14 - see RelocInfoWriter
NONE, // never recorded
- LAST_CODE_ENUM = DEBUG_BREAK,
- LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL,
- // Modes <= LAST_COMPACT_ENUM are guaranteed to have compact encoding.
- LAST_COMPACT_ENUM = CODE_TARGET_WITH_ID
+ LAST_CODE_ENUM = CODE_TARGET,
+ LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL
};
static const int kCodeTargetMask = (1 << (LAST_CODE_ENUM + 1)) - 1;
static const int kPositionMask = 1 << POSITION | 1 << STATEMENT_POSITION;
- static const int kDataMask =
- (1 << CODE_TARGET_WITH_ID) | kPositionMask | (1 << COMMENT);
+ static const int kDebugMask = kPositionMask | 1 << COMMENT;
static const int kApplyMask; // Modes affected by apply. Depends on arch.
private:
// lower addresses.
class RelocInfoWriter BASE_EMBEDDED {
public:
- RelocInfoWriter() : pos_(NULL),
- last_pc_(NULL),
- last_id_(0),
- last_position_(0) {}
- RelocInfoWriter(byte* pos, byte* pc) : pos_(pos),
- last_pc_(pc),
- last_id_(0),
- last_position_(0) {}
+ RelocInfoWriter() : pos_(NULL), last_pc_(NULL), last_data_(0) {}
+ RelocInfoWriter(byte* pos, byte* pc) : pos_(pos), last_pc_(pc),
+ last_data_(0) {}
byte* pos() const { return pos_; }
byte* last_pc() const { return last_pc_; }
inline uint32_t WriteVariableLengthPCJump(uint32_t pc_delta);
inline void WriteTaggedPC(uint32_t pc_delta, int tag);
inline void WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag);
- inline void WriteExtraTaggedIntData(int data_delta, int top_tag);
inline void WriteExtraTaggedData(intptr_t data_delta, int top_tag);
inline void WriteTaggedData(intptr_t data_delta, int tag);
inline void WriteExtraTag(int extra_tag, int top_tag);
byte* pos_;
byte* last_pc_;
- int last_id_;
- int last_position_;
+ intptr_t last_data_;
DISALLOW_COPY_AND_ASSIGN(RelocInfoWriter);
};
int GetTopTag();
void ReadTaggedPC();
void AdvanceReadPC();
- void AdvanceReadId();
- void AdvanceReadPosition();
void AdvanceReadData();
void AdvanceReadVariableLengthPCJump();
- int GetLocatableTypeTag();
- void ReadTaggedId();
- void ReadTaggedPosition();
+ int GetPositionTypeTag();
+ void ReadTaggedData();
+
+ static RelocInfo::Mode DebugInfoModeFromTag(int tag);
// If the given mode is wanted, set it in rinfo_ and return true.
// Else return false. Used for efficiently skipping unwanted modes.
RelocInfo rinfo_;
bool done_;
int mode_mask_;
- int last_id_;
- int last_position_;
DISALLOW_COPY_AND_ASSIGN(RelocIterator);
};
} else {
out.AddFormatted(" %s", Code::Kind2String(kind));
}
- if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
- out.AddFormatted(" (id = %d)", static_cast<int>(relocinfo.data()));
- }
} else if (rmode == RelocInfo::RUNTIME_ENTRY &&
Isolate::Current()->deoptimizer_data() != NULL) {
// A runtime entry reloinfo might be a deoptimization bailout.
if (ShouldInlineSmiCase(op)) {
EmitInlineSmiBinaryOp(expr, op, mode, left, right);
} else {
- EmitBinaryOp(expr, op, mode);
+ EmitBinaryOp(op, mode);
}
break;
}
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// Apply the compound assignment operator. Expects the left operand on top
// of the stack and the right one in the accumulator.
- void EmitBinaryOp(BinaryOperation* expr,
- Token::Value op,
+ void EmitBinaryOp(Token::Value op,
OverwriteMode mode);
// Helper functions for generating inlined smi code for certain
// binary operations.
- void EmitInlineSmiBinaryOp(BinaryOperation* expr,
+ void EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Expression* left,
static Register context_register();
// Helper for calling an IC stub.
- void EmitCallIC(Handle<Code> ic,
- RelocInfo::Mode mode,
- unsigned ast_id);
+ void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode);
// Calling an IC stub with a patch site. Passing NULL for patch_site
// or non NULL patch_site which is not activated indicates no inlined smi code
// and emits a nop after the IC call.
- void EmitCallIC(Handle<Code> ic,
- JumpPatchSite* patch_site,
- unsigned ast_id);
+ void EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site);
// Set fields in the stack frame. Offsets are the frame pointer relative
// offsets defined in, e.g., StandardFrameConstants.
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2006-2008 the V8 project authors. All rights reserved.
// A light-weight IA32 Assembler.
}
-void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, unsigned id) {
- if (rmode == RelocInfo::CODE_TARGET && id != kNoASTId) {
- RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, static_cast<intptr_t>(id));
- } else if (rmode != RelocInfo::NONE) {
- RecordRelocInfo(rmode);
- }
+void Assembler::emit(uint32_t x, RelocInfo::Mode rmode) {
+ if (rmode != RelocInfo::NONE) RecordRelocInfo(rmode);
emit(x);
}
}
-void Assembler::call(Handle<Code> code,
- RelocInfo::Mode rmode,
- unsigned ast_id) {
+void Assembler::call(Handle<Code> code, RelocInfo::Mode rmode) {
positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
last_pc_ = pc_;
ASSERT(RelocInfo::IsCodeTarget(rmode));
EMIT(0xE8);
- emit(reinterpret_cast<intptr_t>(code.location()), rmode, ast_id);
+ emit(reinterpret_cast<intptr_t>(code.location()), rmode);
}
void call(Label* L);
void call(byte* entry, RelocInfo::Mode rmode);
void call(const Operand& adr);
- void call(Handle<Code> code,
- RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ void call(Handle<Code> code, RelocInfo::Mode rmode);
// Jumps
void jmp(Label* L); // unconditional jump to L
void GrowBuffer();
inline void emit(uint32_t x);
inline void emit(Handle<Object> handle);
- inline void emit(uint32_t x,
- RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ inline void emit(uint32_t x, RelocInfo::Mode rmode);
inline void emit(const Immediate& x);
inline void emit_w(const Immediate& x);
#define __ ACCESS_MASM(masm_)
-static unsigned GetPropertyId(Property* property) {
- if (property->is_synthetic()) return AstNode::kNoNumber;
- return property->id();
-}
-
class JumpPatchSite BASE_EMBEDDED {
public:
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
}
}
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- EmitCallIC(ic, &patch_site, clause->label()->id());
+ EmitCallIC(ic, &patch_site);
__ test(eax, Operand(eax));
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
- EmitCallIC(ic, mode, AstNode::kNoNumber);
+ EmitCallIC(ic, mode);
}
__ mov(eax, Immediate(key_literal->handle()));
Handle<Code> ic =
isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
__ jmp(done);
}
}
__ mov(eax, GlobalObjectOperand());
__ mov(ecx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(eax);
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
// Do a keyed property load.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Drop key and object left on the stack by IC.
context()->Plug(eax);
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
SetSourcePosition(expr->position() + 1);
AccumulatorValueContext context(this);
if (ShouldInlineSmiCase(op)) {
- EmitInlineSmiBinaryOp(expr->binary_operation(),
+ EmitInlineSmiBinaryOp(expr,
op,
mode,
expr->target(),
expr->value());
} else {
- EmitBinaryOp(expr->binary_operation(), op, mode);
+ EmitBinaryOp(op, mode);
}
// Deoptimization point in case the binary operation may have side effects.
Literal* key = prop->key()->AsLiteral();
__ mov(ecx, Immediate(key->handle()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
-void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Expression* left,
__ bind(&stub_call);
__ mov(eax, ecx);
TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site, expr->id());
+ EmitCallIC(stub.GetCode(), &patch_site);
__ jmp(&done);
// Smi case.
}
-void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
- Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
__ pop(edx);
TypeRecordingBinaryOpStub stub(op, mode);
- // NULL signals no inlined smi code.
- EmitCallIC(stub.GetCode(), NULL, expr->id());
+ EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
context()->Plug(eax);
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
case KEYED_PROPERTY: {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
arg_count, in_loop);
- EmitCallIC(ic, mode, expr->id());
+ EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
arg_count, in_loop);
__ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
- EmitCallIC(ic, mode, expr->id());
+ EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Push result (function).
__ push(eax);
// Push Global receiver.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
arg_count, in_loop);
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
} else {
__ mov(edx, eax);
__ mov(eax, Immediate(Smi::FromInt(1)));
TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
- EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
// Store the value returned in eax.
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
// Result is on the stack
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailout(expr, TOS_REG);
context()->Plug(eax);
} else if (proxy != NULL &&
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- EmitCallIC(ic, &patch_site, expr->id());
+ EmitCallIC(ic, &patch_site);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
- RelocInfo::Mode mode,
- unsigned ast_id) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
switch (ic->kind()) {
default:
break;
}
- __ call(ic, mode, ast_id);
+ __ call(ic, mode);
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
- JumpPatchSite* patch_site,
- unsigned ast_id) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
default:
break;
}
- __ call(ic, RelocInfo::CODE_TARGET, ast_id);
+ __ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
}
-void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
+void MacroAssembler::CallStub(CodeStub* stub) {
ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
- call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
+ call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
// Runtime calls
// Call a code stub. Generate the code if necessary.
- void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);
+ void CallStub(CodeStub* stub);
// Call a code stub and return the code object called. Try to generate
// the code if necessary. Do not perform a GC but instead return a retry
}
-Handle<Object> TypeFeedbackOracle::GetInfo(unsigned ast_id) {
- int entry = dictionary_->FindEntry(ast_id);
+Handle<Object> TypeFeedbackOracle::GetInfo(int pos) {
+ int entry = dictionary_->FindEntry(pos);
return entry != NumberDictionary::kNotFound
? Handle<Object>(dictionary_->ValueAt(entry))
: Isolate::Current()->factory()->undefined_value();
bool TypeFeedbackOracle::LoadIsMonomorphic(Property* expr) {
- Handle<Object> map_or_code(GetInfo(expr->id()));
+ Handle<Object> map_or_code(GetInfo(expr->position()));
if (map_or_code->IsMap()) return true;
if (map_or_code->IsCode()) {
Handle<Code> code(Code::cast(*map_or_code));
bool TypeFeedbackOracle::StoreIsMonomorphic(Expression* expr) {
- Handle<Object> map_or_code(GetInfo(expr->id()));
+ Handle<Object> map_or_code(GetInfo(expr->position()));
if (map_or_code->IsMap()) return true;
if (map_or_code->IsCode()) {
Handle<Code> code(Code::cast(*map_or_code));
bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
- Handle<Object> value = GetInfo(expr->id());
+ Handle<Object> value = GetInfo(expr->position());
return value->IsMap() || value->IsSmi();
}
Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
ASSERT(LoadIsMonomorphic(expr));
Handle<Object> map_or_code(
- Handle<HeapObject>::cast(GetInfo(expr->id())));
+ Handle<HeapObject>::cast(GetInfo(expr->position())));
if (map_or_code->IsCode()) {
Handle<Code> code(Code::cast(*map_or_code));
return Handle<Map>(code->FindFirstMap());
Handle<Map> TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) {
ASSERT(StoreIsMonomorphic(expr));
Handle<HeapObject> map_or_code(
- Handle<HeapObject>::cast(GetInfo(expr->id())));
+ Handle<HeapObject>::cast(GetInfo(expr->position())));
if (map_or_code->IsCode()) {
Handle<Code> code(Code::cast(*map_or_code));
return Handle<Map>(code->FindFirstMap());
ZoneMapList* TypeFeedbackOracle::LoadReceiverTypes(Property* expr,
Handle<String> name) {
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
- return CollectReceiverTypes(expr->id(), name, flags);
+ return CollectReceiverTypes(expr->position(), name, flags);
}
ZoneMapList* TypeFeedbackOracle::StoreReceiverTypes(Assignment* expr,
Handle<String> name) {
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL);
- return CollectReceiverTypes(expr->id(), name, flags);
+ return CollectReceiverTypes(expr->position(), name, flags);
}
OWN_MAP,
NOT_IN_LOOP,
arity);
- return CollectReceiverTypes(expr->id(), name, flags);
+ return CollectReceiverTypes(expr->position(), name, flags);
}
CheckType TypeFeedbackOracle::GetCallCheckType(Call* expr) {
- Handle<Object> value = GetInfo(expr->id());
+ Handle<Object> value = GetInfo(expr->position());
if (!value->IsSmi()) return RECEIVER_MAP_CHECK;
CheckType check = static_cast<CheckType>(Smi::cast(*value)->value());
ASSERT(check != RECEIVER_MAP_CHECK);
ExternalArrayType TypeFeedbackOracle::GetKeyedLoadExternalArrayType(
Property* expr) {
- Handle<Object> stub = GetInfo(expr->id());
+ Handle<Object> stub = GetInfo(expr->position());
ASSERT(stub->IsCode());
return Code::cast(*stub)->external_array_type();
}
ExternalArrayType TypeFeedbackOracle::GetKeyedStoreExternalArrayType(
Expression* expr) {
- Handle<Object> stub = GetInfo(expr->id());
+ Handle<Object> stub = GetInfo(expr->position());
ASSERT(stub->IsCode());
return Code::cast(*stub)->external_array_type();
}
bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
- return *GetInfo(expr->id()) ==
+ return *GetInfo(expr->position()) ==
Isolate::Current()->builtins()->builtin(id);
}
TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr) {
- Handle<Object> object = GetInfo(expr->id());
+ Handle<Object> object = GetInfo(expr->position());
TypeInfo unknown = TypeInfo::Unknown();
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr) {
- Handle<Object> object = GetInfo(expr->id());
+ Handle<Object> object = GetInfo(expr->position());
TypeInfo unknown = TypeInfo::Unknown();
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
- Handle<Object> object = GetInfo(clause->label()->id());
+ Handle<Object> object = GetInfo(clause->position());
TypeInfo unknown = TypeInfo::Unknown();
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
}
-ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
+ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(int position,
Handle<String> name,
Code::Flags flags) {
Isolate* isolate = Isolate::Current();
- Handle<Object> object = GetInfo(ast_id);
+ Handle<Object> object = GetInfo(position);
if (object->IsUndefined() || object->IsSmi()) return NULL;
if (*object == isolate->builtins()->builtin(Builtins::kStoreIC_GlobalProxy)) {
}
-void TypeFeedbackOracle::SetInfo(unsigned ast_id, Object* target) {
- ASSERT(dictionary_->FindEntry(ast_id) == NumberDictionary::kNotFound);
- MaybeObject* maybe_result = dictionary_->AtNumberPut(ast_id, target);
+void TypeFeedbackOracle::SetInfo(int position, Object* target) {
+ MaybeObject* maybe_result = dictionary_->AtNumberPut(position, target);
USE(maybe_result);
#ifdef DEBUG
Object* result;
const int kInitialCapacity = 16;
List<int> code_positions(kInitialCapacity);
- List<unsigned> ast_ids(kInitialCapacity);
- CollectIds(*code, &code_positions, &ast_ids);
+ List<int> source_positions(kInitialCapacity);
+ CollectPositions(*code, &code_positions, &source_positions);
ASSERT(dictionary_.is_null()); // Only initialize once.
dictionary_ = isolate->factory()->NewNumberDictionary(
code_positions.length());
- const int length = code_positions.length();
- ASSERT(ast_ids.length() == length);
+ int length = code_positions.length();
+ ASSERT(source_positions.length() == length);
for (int i = 0; i < length; i++) {
AssertNoAllocation no_allocation;
RelocInfo info(code->instruction_start() + code_positions[i],
RelocInfo::CODE_TARGET, 0);
Code* target = Code::GetCodeFromTargetAddress(info.target_address());
- unsigned id = ast_ids[i];
+ int position = source_positions[i];
InlineCacheState state = target->ic_state();
Code::Kind kind = target->kind();
if (kind == Code::TYPE_RECORDING_BINARY_OP_IC ||
kind == Code::COMPARE_IC) {
- SetInfo(id, target);
+ // TODO(kasperl): Avoid having multiple ICs with the same
+ // position by making sure that we have position information
+ // recorded for all binary ICs.
+ int entry = dictionary_->FindEntry(position);
+ if (entry == NumberDictionary::kNotFound) {
+ SetInfo(position, target);
+ }
} else if (state == MONOMORPHIC) {
if (kind == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC ||
kind == Code::KEYED_EXTERNAL_ARRAY_STORE_IC) {
- SetInfo(id, target);
- } else if (kind != Code::CALL_IC ||
- target->check_type() == RECEIVER_MAP_CHECK) {
+ SetInfo(position, target);
+ } else if (target->kind() != Code::CALL_IC ||
+ target->check_type() == RECEIVER_MAP_CHECK) {
Map* map = target->FindFirstMap();
if (map == NULL) {
- SetInfo(id, target);
+ SetInfo(position, target);
} else {
- SetInfo(id, map);
+ SetInfo(position, map);
}
} else {
ASSERT(target->kind() == Code::CALL_IC);
CheckType check = target->check_type();
ASSERT(check != RECEIVER_MAP_CHECK);
- SetInfo(id, Smi::FromInt(check));
+ SetInfo(position, Smi::FromInt(check));
}
} else if (state == MEGAMORPHIC) {
- SetInfo(id, target);
+ SetInfo(position, target);
}
}
// Allocate handle in the parent scope.
}
-void TypeFeedbackOracle::CollectIds(Code* code,
- List<int>* code_positions,
- List<unsigned>* ast_ids) {
+void TypeFeedbackOracle::CollectPositions(Code* code,
+ List<int>* code_positions,
+ List<int>* source_positions) {
AssertNoAllocation no_allocation;
- int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
+ int position = 0;
+ // Because the ICs we use for global variables access in the full
+ // code generator do not have any meaningful positions, we avoid
+ // collecting those by filtering out contextual code targets.
+ int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
+ RelocInfo::kPositionMask;
for (RelocIterator it(code, mask); !it.done(); it.next()) {
RelocInfo* info = it.rinfo();
- ASSERT(RelocInfo::IsCodeTarget(info->rmode()));
- Code* target = Code::GetCodeFromTargetAddress(info->target_address());
- if (target->is_inline_cache_stub()) {
- InlineCacheState state = target->ic_state();
- Code::Kind kind = target->kind();
- if (kind == Code::TYPE_RECORDING_BINARY_OP_IC) {
- if (target->type_recording_binary_op_type() ==
- TRBinaryOpIC::GENERIC) {
- continue;
+ RelocInfo::Mode mode = info->rmode();
+ if (RelocInfo::IsCodeTarget(mode)) {
+ Code* target = Code::GetCodeFromTargetAddress(info->target_address());
+ if (target->is_inline_cache_stub()) {
+ InlineCacheState state = target->ic_state();
+ Code::Kind kind = target->kind();
+ if (kind == Code::TYPE_RECORDING_BINARY_OP_IC) {
+ if (target->type_recording_binary_op_type() ==
+ TRBinaryOpIC::GENERIC) {
+ continue;
+ }
+ } else if (kind == Code::COMPARE_IC) {
+ if (target->compare_state() == CompareIC::GENERIC) continue;
+ } else {
+ if (state != MONOMORPHIC && state != MEGAMORPHIC) continue;
}
- } else if (kind == Code::COMPARE_IC) {
- if (target->compare_state() == CompareIC::GENERIC) continue;
- } else {
- if (state != MONOMORPHIC && state != MEGAMORPHIC) continue;
+ code_positions->Add(
+ static_cast<int>(info->pc() - code->instruction_start()));
+ source_positions->Add(position);
}
- code_positions->Add(
- static_cast<int>(info->pc() - code->instruction_start()));
- ast_ids->Add(static_cast<unsigned>(info->data()));
+ } else {
+ ASSERT(RelocInfo::IsPosition(mode));
+ position = static_cast<int>(info->data());
}
}
}
namespace internal {
// Unknown
-// | \____________
-// | |
-// Primitive Non-primitive
-// | \_______ |
-// | | |
-// Number String |
-// / \ | |
-// Double Integer32 | /
-// | | / /
-// | Smi / /
-// | | / __/
-// Uninitialized.
+// | |
+// | \--------------|
+// Primitive Non-primitive
+// | \--------| |
+// Number String |
+// / | | |
+// Double Integer32 | /
+// | | / /
+// | Smi / /
+// | | / /
+// | | / /
+// Uninitialized.--/
class TypeInfo {
public:
TypeInfo SwitchType(CaseClause* clause);
private:
- ZoneMapList* CollectReceiverTypes(unsigned ast_id,
+ ZoneMapList* CollectReceiverTypes(int position,
Handle<String> name,
Code::Flags flags);
- void SetInfo(unsigned ast_id, Object* target);
+ void SetInfo(int position, Object* target);
void PopulateMap(Handle<Code> code);
- void CollectIds(Code* code,
- List<int>* code_positions,
- List<unsigned>* ast_ids);
+ void CollectPositions(Code* code,
+ List<int>* code_positions,
+ List<int>* source_positions);
// Returns an element from the backing store. Returns undefined if
// there is no information.
- Handle<Object> GetInfo(unsigned ast_id);
+ Handle<Object> GetInfo(int pos);
Handle<Context> global_context_;
Handle<NumberDictionary> dictionary_;
}
-void Assembler::emit_code_target(Handle<Code> target,
- RelocInfo::Mode rmode,
- unsigned ast_id) {
+void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
ASSERT(RelocInfo::IsCodeTarget(rmode));
- if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
- RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id);
- } else {
- RecordRelocInfo(rmode);
- }
+ RecordRelocInfo(rmode);
int current = code_targets_.length();
if (current > 0 && code_targets_.last().is_identical_to(target)) {
// Optimization if we keep jumping to the same code target.
}
-void Assembler::call(Handle<Code> target,
- RelocInfo::Mode rmode,
- unsigned ast_id) {
+void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
// 1110 1000 #32-bit disp.
emit(0xE8);
- emit_code_target(target, rmode, ast_id);
+ emit_code_target(target, rmode);
}
// Calls
// Call near relative 32-bit displacement, relative to next instruction.
void call(Label* L);
- void call(Handle<Code> target,
- RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ void call(Handle<Code> target, RelocInfo::Mode rmode);
// Calls directly to the given address using a relative offset.
// Should only ever be used in Code objects for calls within the
inline void emitl(uint32_t x);
inline void emitq(uint64_t x, RelocInfo::Mode rmode);
inline void emitw(uint16_t x);
- inline void emit_code_target(Handle<Code> target,
- RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ inline void emit_code_target(Handle<Code> target, RelocInfo::Mode rmode);
void emit(Immediate x) { emitl(x.value_); }
// Emits a REX prefix that encodes a 64-bit operand size and
#define __ ACCESS_MASM(masm_)
-static unsigned GetPropertyId(Property* property) {
- if (property->is_synthetic()) return AstNode::kNoNumber;
- return property->id();
-}
-
-
class JumpPatchSite BASE_EMBEDDED {
public:
explicit JumpPatchSite(MacroAssembler* masm)
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
}
}
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- EmitCallIC(ic, &patch_site, clause->label()->id());
+ EmitCallIC(ic, &patch_site);
__ testq(rax, rax);
__ j(not_equal, &next_test);
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
- EmitCallIC(ic, mode, AstNode::kNoNumber);
+ EmitCallIC(ic, mode);
}
__ Move(rax, key_literal->handle());
Handle<Code> ic =
isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
__ jmp(done);
}
}
__ Move(rcx, var->name());
__ movq(rax, GlobalObjectOperand());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(rax);
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
// Do a keyed property load.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
context()->Plug(rax);
}
}
__ movq(rdx, Operand(rsp, 0));
if (property->emit_store()) {
Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(key->id(), NO_REGISTERS);
}
break;
SetSourcePosition(expr->position() + 1);
AccumulatorValueContext context(this);
if (ShouldInlineSmiCase(op)) {
- EmitInlineSmiBinaryOp(expr->binary_operation(),
+ EmitInlineSmiBinaryOp(expr,
op,
mode,
expr->target(),
expr->value());
} else {
- EmitBinaryOp(expr->binary_operation(), op, mode);
+ EmitBinaryOp(op, mode);
}
// Deoptimization point in case the binary operation may have side effects.
PrepareForBailout(expr->binary_operation(), TOS_REG);
Literal* key = prop->key()->AsLiteral();
__ Move(rcx, key->handle());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
// full-codegen-x64.cc: emit a keyed property load. Records the source
// position (used for type feedback) and calls the uninitialized
// KeyedLoadIC. This hunk drops the GetPropertyId(prop) argument from
// EmitCallIC as part of reverting the AST-id-in-reloc-info feature.
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
-void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Expression* left,
__ bind(&stub_call);
__ movq(rax, rcx);
TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site, expr->id());
+ EmitCallIC(stub.GetCode(), &patch_site);
__ jmp(&done);
__ bind(&smi_case);
}
// full-codegen-x64.cc: emit a generic (non-inlined-smi) binary operation.
// Pops the left operand into rdx (right operand is already in rax) and
// calls the TypeRecordingBinaryOp stub; result is plugged into rax.
// This hunk removes the BinaryOperation* parameter and the expr->id()
// argument — the interface change must be mirrored at every EmitBinaryOp
// call site and in the FullCodeGenerator declaration.
-void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
- Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
__ pop(rdx);
TypeRecordingBinaryOpStub stub(op, mode);
- // NULL signals no inlined smi code.
- EmitCallIC(stub.GetCode(), NULL, expr->id());
+ EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
context()->Plug(rax);
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
case KEYED_PROPERTY: {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
- EmitCallIC(ic, mode, expr->id());
+ EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
- EmitCallIC(ic, mode, expr->id());
+ EmitCallIC(ic, mode);
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Push result (function).
__ push(rax);
// Push Global receiver.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
} else {
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
}
- EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
// Store the value returned in rax.
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
- EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailout(expr, TOS_REG);
context()->Plug(rax);
} else if (proxy != NULL &&
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- EmitCallIC(ic, &patch_site, expr->id());
+ EmitCallIC(ic, &patch_site);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ testq(rax, rax);
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
- RelocInfo::Mode mode,
- unsigned ast_id) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
Counters* counters = isolate()->counters();
default:
break;
}
- __ call(ic, mode, ast_id);
+ __ call(ic, mode);
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
- JumpPatchSite* patch_site,
- unsigned ast_id) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
default:
break;
}
- __ call(ic, RelocInfo::CODE_TARGET, ast_id);
+ __ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
}
// macro-assembler-x64.cc: call a code stub with a CODE_TARGET relocation.
// This hunk drops the ast_id parameter that was previously threaded
// through Call() into the relocation info; the matching header
// declaration (CallStub(CodeStub*, unsigned = kNoASTId)) must be
// updated in the same revert — see the hunk touching the header below.
-void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
+void MacroAssembler::CallStub(CodeStub* stub) {
ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
- Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
}
-void MacroAssembler::Call(Handle<Code> code_object,
- RelocInfo::Mode rmode,
- unsigned ast_id) {
+void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
#ifdef DEBUG
int end_position = pc_offset() + CallSize(code_object);
#endif
ASSERT(RelocInfo::IsCodeTarget(rmode));
- call(code_object, rmode, ast_id);
+ call(code_object, rmode);
#ifdef DEBUG
CHECK_EQ(end_position, pc_offset());
#endif
void Call(Address destination, RelocInfo::Mode rmode);
void Call(ExternalReference ext);
- void Call(Handle<Code> code_object,
- RelocInfo::Mode rmode,
- unsigned ast_id = kNoASTId);
+ void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
// The size of the code generated for different call instructions.
int CallSize(Address destination, RelocInfo::Mode rmode) {
// Runtime calls
// Call a code stub.
- void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);
+ void CallStub(CodeStub* stub);
// Call a code stub and return the code object called. Try to generate
// the code if necessary. Do not perform a GC but instead return a retry
CHECK(Debug::HasDebugInfo(shared));
TestBreakLocationIterator it1(Debug::GetDebugInfo(shared));
it1.FindBreakLocationFromPosition(position);
- v8::internal::RelocInfo::Mode actual_mode = it1.it()->rinfo()->rmode();
- if (actual_mode == v8::internal::RelocInfo::CODE_TARGET_WITH_ID) {
- actual_mode = v8::internal::RelocInfo::CODE_TARGET;
- }
- CHECK_EQ(mode, actual_mode);
+ CHECK_EQ(mode, it1.it()->rinfo()->rmode());
if (mode != v8::internal::RelocInfo::JS_RETURN) {
CHECK_EQ(debug_break,
Code::GetCodeFromTargetAddress(it1.it()->rinfo()->target_address()));
CHECK(debug->EnsureDebugInfo(shared));
TestBreakLocationIterator it2(Debug::GetDebugInfo(shared));
it2.FindBreakLocationFromPosition(position);
- actual_mode = it2.it()->rinfo()->rmode();
- if (actual_mode == v8::internal::RelocInfo::CODE_TARGET_WITH_ID) {
- actual_mode = v8::internal::RelocInfo::CODE_TARGET;
- }
- CHECK_EQ(mode, actual_mode);
+ CHECK_EQ(mode, it2.it()->rinfo()->rmode());
if (mode == v8::internal::RelocInfo::JS_RETURN) {
CHECK(!Debug::IsDebugBreakAtReturn(it2.it()->rinfo()));
}