__ CompareObjectType(r7, r0, r0, CODE_TYPE);
__ b(ne, &runtime);
- // r3: encoding of subject string (1 if ascii, 0 if two_byte);
+ // r3: encoding of subject string (1 if ASCII, 0 if two_byte);
// r7: code
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
__ mov(r1, Operand(r1, ASR, kSmiTagSize));
// r1: previous index
- // r3: encoding of subject string (1 if ascii, 0 if two_byte);
+ // r3: encoding of subject string (1 if ASCII, 0 if two_byte);
// r7: code
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
__ b(ne, &slow_case_);
__ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
- // At this point code register contains smi tagged ascii char code.
+ // At this point code register contains smi tagged ASCII char code.
STATIC_ASSERT(kSmiTag == 0);
__ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
Register symbol_table = c2;
__ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);
- // Load undefined value
Register undefined = scratch4;
__ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
// mask: capacity mask
// first_symbol_table_element: address of the first element of
// the symbol table
+ // undefined: the undefined object
// scratch: -
// Perform a number of probes in the symbol table.
kPointerSizeLog2));
// If entry is undefined no string with this hash can be found.
- __ cmp(candidate, undefined);
+ Label is_string;
+ __ CompareObjectType(candidate, scratch, scratch, ODDBALL_TYPE);
+ __ b(ne, &is_string);
+
+ __ cmp(undefined, candidate);
__ b(eq, not_found);
+ // Must be null (deleted entry).
+ if (FLAG_debug_code) {
+ __ LoadRoot(ip, Heap::kNullValueRootIndex);
+ __ cmp(ip, candidate);
+ __ Assert(eq, "oddball in symbol table is not undefined or null");
+ }
+ __ jmp(&next_probe[i]);
+
+ __ bind(&is_string);
+
+ // Check that the candidate is a non-external ASCII string. The instance
+ // type is still in the scratch register from the CompareObjectType
+ // operation.
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]);
// If length is not 2 the string is not a candidate.
__ ldr(scratch, FieldMemOperand(candidate, String::kLengthOffset));
__ cmp(scratch, Operand(Smi::FromInt(2)));
__ b(ne, &next_probe[i]);
- // Check that the candidate is a non-external ascii string.
- __ ldr(scratch, FieldMemOperand(candidate, HeapObject::kMapOffset));
- __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
- __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch,
- &next_probe[i]);
-
// Check if the two characters match.
// Assumes that word load is little endian.
__ ldrh(scratch, FieldMemOperand(candidate, SeqAsciiString::kHeaderSize));
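The probe loop above now distinguishes two sentinels: undefined terminates the search (no string with this hash exists), while null marks a deleted entry that must be skipped. A minimal plain-C++ sketch of that probe discipline, assuming a simple linear probe and invented names rather than V8's actual SymbolTable layout:

#include <cstdint>
#include <vector>

// kEmpty stands in for the undefined value, kDeleted for null.
enum class Slot : uint8_t { kEmpty, kDeleted, kOccupied };

struct Entry {
  Slot state = Slot::kEmpty;
  uint16_t chars = 0;  // two ASCII chars, char 1 in byte 0 and char 2 in byte 1
};

// Returns the table index holding the two-character string, or -1 if absent.
int Probe(const std::vector<Entry>& table, uint32_t hash, uint16_t chars) {
  const uint32_t mask = static_cast<uint32_t>(table.size()) - 1;  // capacity is a power of two
  for (uint32_t i = 0; i < table.size(); i++) {
    const uint32_t index = (hash + i) & mask;
    const Entry& e = table[index];
    if (e.state == Slot::kEmpty) return -1;    // undefined: not found, stop probing
    if (e.state == Slot::kDeleted) continue;   // null: deleted entry, keep probing
    if (e.chars == chars) return static_cast<int>(index);
  }
  return -1;
}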
// r3: from index (untagged smi)
// r5: string.
// r7 (a.k.a. from): from offset (smi)
- // Check for flat ascii string.
+ // Check for flat ASCII string.
Label non_ascii_flat;
__ tst(r1, Operand(kStringEncodingMask));
STATIC_ASSERT(kTwoByteStringTag == 0);
__ bind(&not_same);
- // Check that both objects are sequential ascii strings.
+ // Check that both objects are sequential ASCII strings.
__ JumpIfNotBothSequentialAsciiStrings(r1, r0, r2, r3, &runtime);
- // Compare flat ascii strings natively. Remove arguments from stack first.
+ // Compare flat ASCII strings natively. Remove arguments from stack first.
__ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5);
// Adding two lengths can't overflow.
STATIC_ASSERT(String::kMaxLength < String::kMaxLength * 2);
__ add(r6, r2, Operand(r3));
- // Use the runtime system when adding two one character strings, as it
- // contains optimizations for this specific case using the symbol table.
+ // Use the symbol table when adding two one-character strings, as
+ // returning a symbol here helps later optimizations.
__ cmp(r6, Operand(2));
__ b(ne, &longer_than_two);
- // Check that both strings are non-external ascii strings.
+ // Check that both strings are non-external ASCII strings.
if (flags_ != NO_STRING_ADD_FLAGS) {
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
__ b(hs, &string_add_runtime);
// If result is not supposed to be flat, allocate a cons string object.
- // If both strings are ascii the result is an ascii cons string.
+ // If both strings are ASCII the result is an ASCII cons string.
if (flags_ != NO_STRING_ADD_FLAGS) {
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
- // to contain only ascii characters.
+ // to contain only ASCII characters.
// r4: first instance type.
// r5: second instance type.
__ tst(r4, Operand(kAsciiDataHintMask));
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
- __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
- deferred->Branch(lt);
+ __ CompareObjectType(object, tmp1, tmp2, JS_ARRAY_TYPE);
+ deferred->Branch(ne);
__ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
__ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
deferred->Branch(ne);
scratch1, scratch2);
-
// Load the value, key and receiver from the stack.
bool value_is_harmless = frame_->KnownSmiAt(0);
if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
__ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
deferred->Branch(ne);
- // Check that the key is within bounds. Both the key and the length of
- // the JSArray are smis. Use unsigned comparison to handle negative keys.
- __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
- __ cmp(scratch1, key);
- deferred->Branch(ls); // Unsigned less equal.
-
// Get the elements array from the receiver.
__ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
if (!value_is_harmless && wb_info != LIKELY_SMI) {
}
// Check that the elements array is not a dictionary.
__ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
+
// The following instructions are part of the inlined store keyed
// property code, which can be patched. Therefore the exact number of
// instructions generated needs to be fixed, so the constant pool is blocked
__ cmp(scratch2, scratch3);
deferred->Branch(ne);
+ // Check that the key is within bounds. Both the key and the length of
+ // the JSArray are smis (because the fixed array check above ensures the
+ // elements are in fast case). Use unsigned comparison to handle negative
+ // keys.
+ __ ldr(scratch3, FieldMemOperand(receiver, JSArray::kLengthOffset));
+ __ cmp(scratch3, key);
+ deferred->Branch(ls); // Unsigned less equal.
+
// Store the value.
__ add(scratch1, scratch1,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
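The bounds check above was moved below the elements map comparison because the length-is-a-smi assumption is only valid once the elements are known to be a plain FixedArray in fast case. A rough C++ analogue of the resulting check order, with illustrative field names that do not reflect the real object layout:

#include <cstdint>

struct FastStoreView {
  bool elements_have_fixed_array_map;  // stands in for the map comparison
  uint32_t length;                     // JSArray length, trusted only in fast case
};

// Bail out to the deferred slow path unless the map check passes and the key
// is in bounds; the unsigned comparison rejects negative keys as well.
bool CanStoreInline(const FastStoreView& array, uint32_t key) {
  if (!array.elements_have_fixed_array_map) return false;  // map check first
  return key < array.length;                               // then the bounds check
}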
static int GetInlinedKeyedLoadInstructionsAfterPatch() {
return FLAG_debug_code ? 32 : 13;
}
- static const int kInlinedKeyedStoreInstructionsAfterPatch = 5;
+ static const int kInlinedKeyedStoreInstructionsAfterPatch = 8;
static int GetInlinedNamedStoreInstructionsAfterPatch() {
ASSERT(inlined_write_barrier_size_ != -1);
return inlined_write_barrier_size_ + 4;
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
- __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_OBJECT_TYPE);
- __ b(lt, &slow_case);
+ __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ b(ne, &slow_case);
// Map is now in scratch1.
__ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
// Handle exceptionally long strings in the runtime system.
__ j(overflow, &string_add_runtime);
- // Use the runtime system when adding two one character strings, as it
- // contains optimizations for this specific case using the symbol table.
+ // Use the symbol table when adding two one-character strings, as
+ // returning a symbol here helps later optimizations.
__ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
__ j(not_equal, &longer_than_two);
// If entry is undefined no string with this hash can be found.
__ cmp(candidate, Factory::undefined_value());
__ j(equal, not_found);
+ __ cmp(candidate, Factory::null_value());
+ __ j(equal, &next_probe[i]);
// If length is not 2 the string is not a candidate.
__ cmp(FieldOperand(candidate, String::kLengthOffset),
__ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
deferred->Branch(not_equal);
- // Check that the key is within bounds. Both the key and the length of
- // the JSArray are smis. Use unsigned comparison to handle negative keys.
- __ cmp(key.reg(),
- FieldOperand(receiver.reg(), JSArray::kLengthOffset));
- deferred->Branch(above_equal);
-
// Get the elements array from the receiver and check that it is not a
// dictionary.
__ mov(tmp.reg(),
Immediate(Factory::fixed_array_map()));
deferred->Branch(not_equal);
+ // Check that the key is within bounds. Both the key and the length of
+ // the JSArray are smis (because the fixed array check above ensures the
+ // elements are in fast case). Use unsigned comparison to handle negative
+ // keys.
+ __ cmp(key.reg(),
+ FieldOperand(receiver.reg(), JSArray::kLengthOffset));
+ deferred->Branch(above_equal);
+
// Store the value.
__ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
__ IncrementCounter(&Counters::keyed_store_inline, 1);
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
- __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
- __ j(below, &slow_case);
+ __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
+ __ j(not_equal, &slow_case);
__ test_b(FieldOperand(temp, Map::kBitFieldOffset),
KeyedLoadIC::kSlowCaseBitFieldMask);
__ j(not_zero, &slow_case);
// Move sign extended immediate to memory location.
void movq(const Operand& dst, Immediate value);
- // New x64 instructions to load a 64-bit immediate into a register.
+ // Instructions to load a 64-bit immediate into a register.
// All 64-bit immediates must have a relocation mode.
void movq(Register dst, void* ptr, RelocInfo::Mode rmode);
void movq(Register dst, int64_t value, RelocInfo::Mode rmode);
void repmovsl();
void repmovsq();
- // New x64 instruction to load from an immediate 64-bit pointer into RAX.
+ // Instruction to load from an immediate 64-bit pointer into RAX.
void load_rax(void* ptr, RelocInfo::Mode rmode);
void load_rax(ExternalReference ext);
Label slow;
__ JumpIfNotSmi(rdx, &slow);
- // Check if the calling frame is an arguments adaptor frame.
+ // Check if the calling frame is an arguments adaptor frame. We look at the
+ // context offset, and if the frame is not a regular one, then we find a
+ // Smi instead of the context. We can't use SmiCompare here, because that
+ // only works for comparing two smis.
Label adaptor;
__ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adaptor);
// Check index against formal parameters count limit passed in
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adaptor_frame);
// Get the length from the frame.
// Look at the length of the result of adding the two strings.
STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
__ SmiAdd(rbx, rbx, rcx);
- // Use the runtime system when adding two one character strings, as it
- // contains optimizations for this specific case using the symbol table.
+ // Use the symbol table when adding two one-character strings, as
+ // returning a symbol here helps later optimizations.
__ SmiCompare(rbx, Smi::FromInt(2));
__ j(not_equal, &longer_than_two);
FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
__ decl(mask);
- Register undefined = scratch4;
- __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
+ Register map = scratch4;
// Registers
// chars: two character string, char 1 in byte 0 and char 2 in byte 1.
// hash: hash of two character string (32-bit int)
// symbol_table: symbol table
// mask: capacity mask (32-bit int)
- // undefined: undefined value
+ // map: -
// scratch: -
// Perform a number of probes in the symbol table.
}
__ andl(scratch, mask);
- // Load the entry from the symble table.
+ // Load the entry from the symbol table.
Register candidate = scratch; // Scratch register contains candidate.
STATIC_ASSERT(SymbolTable::kEntrySize == 1);
__ movq(candidate,
SymbolTable::kElementsStartOffset));
// If entry is undefined no string with this hash can be found.
- __ cmpq(candidate, undefined);
+ NearLabel is_string;
+ __ CmpObjectType(candidate, ODDBALL_TYPE, map);
+ __ j(not_equal, &is_string);
+
+ __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
__ j(equal, not_found);
+ // Must be null (deleted entry).
+ __ jmp(&next_probe[i]);
+
+ __ bind(&is_string);
// If length is not 2 the string is not a candidate.
__ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
Register temp = kScratchRegister;
// Check that the candidate is a non-external ascii string.
- __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
- __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
+ __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(
temp, temp, &next_probe[i]);
__ AbortIfNotNumber(value.reg());
}
// Smi => false iff zero.
- __ SmiCompare(value.reg(), Smi::FromInt(0));
+ __ Cmp(value.reg(), Smi::FromInt(0));
if (value.is_smi()) {
value.Unuse();
dest->Split(not_zero);
dest->false_target()->Branch(equal);
// Smi => false iff zero.
- __ SmiCompare(value.reg(), Smi::FromInt(0));
+ __ Cmp(value.reg(), Smi::FromInt(0));
dest->false_target()->Branch(equal);
Condition is_smi = masm_->CheckSmi(value.reg());
dest->true_target()->Branch(is_smi);
true, overwrite_mode);
} else {
// Set the flags based on the operation, type and loop nesting level.
- // Bit operations always assume they likely operate on Smis. Still only
+ // Bit operations always assume they likely operate on smis. Still only
// generate the inline Smi check code if this operation is part of a loop.
// For all other operations only inline the Smi check code for likely smis
// if the operation is part of a loop.
if (cc == equal) {
Label comparison_done;
__ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
- Smi::FromInt(1));
+ Smi::FromInt(1));
__ j(not_equal, &comparison_done);
uint8_t char_value =
static_cast<uint8_t>(String::cast(*right_val)->Get(0));
// CompareStub and the inline code both support all values of cc.
}
// Implement comparison against a constant Smi, inlining the case
- // where both sides are Smis.
+ // where both sides are smis.
left_side->ToRegister();
Register left_reg = left_side->reg();
Smi* constant_smi = Smi::cast(*right_side->handle());
__ AbortIfNotSmi(left_reg);
}
// Test smi equality and comparison by signed int comparison.
- // Both sides are smis, so we can use an Immediate.
__ SmiCompare(left_reg, constant_smi);
left_side->Unuse();
right_side->Unuse();
JumpTarget is_smi;
if (cc == equal) {
// We can do the equality comparison before the smi check.
- __ SmiCompare(left_reg, constant_smi);
+ __ Cmp(left_reg, constant_smi);
dest->true_target()->Branch(equal);
Condition left_is_smi = masm_->CheckSmi(left_reg);
dest->false_target()->Branch(left_is_smi);
// adaptor frame below it.
Label invoke, adapted;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adapted);
// No arguments adaptor frame. Copy fixed number of arguments.
__ movq(rbx, rax);
// If the property has been removed while iterating, we just skip it.
- __ SmiCompare(rbx, Smi::FromInt(0));
+ __ Cmp(rbx, Smi::FromInt(0));
node->continue_target()->Branch(equal);
end_del_check.Bind();
// Skip the arguments adaptor frame if it exists.
Label check_frame_marker;
- __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
- __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
- Smi::FromInt(StackFrame::CONSTRUCT));
+ __ Cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
+ Smi::FromInt(StackFrame::CONSTRUCT));
fp.Unuse();
destination()->Split(equal);
}
// Check if the calling frame is an arguments adaptor frame.
__ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &exit);
// Arguments adaptor case: Read the arguments length from the
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
- __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
- deferred->Branch(below);
+ __ CmpObjectType(object.reg(), JS_ARRAY_TYPE, tmp1.reg());
+ deferred->Branch(not_equal);
__ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
deferred->Branch(not_zero);
Label done;
__ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
- // Possible optimization: do a check that both values are Smis
+ // Possible optimization: do a check that both values are smis
// (or them and test against Smi mask.)
__ movq(tmp2.reg(), tmp1.reg());
__ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
deferred->Branch(not_equal);
- // Check that the key is within bounds. Both the key and the length of
- // the JSArray are smis. Use unsigned comparison to handle negative keys.
- __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
- key.reg());
- deferred->Branch(below_equal);
-
// Get the elements array from the receiver and check that it is not a
// dictionary.
__ movq(tmp.reg(),
kScratchRegister);
deferred->Branch(not_equal);
+ // Check that the key is within bounds. Both the key and the length of
+ // the JSArray are smis (because the fixed array check above ensures the
+ // elements are in fast case). Use unsigned comparison to handle negative
+ // keys.
+ __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
+ key.reg());
+ deferred->Branch(below_equal);
+
// Store the value.
SmiIndex index =
masm()->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
__ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
__ j(equal, if_false);
STATIC_ASSERT(kSmiTag == 0);
- __ SmiCompare(result_register(), Smi::FromInt(0));
+ __ Cmp(result_register(), Smi::FromInt(0));
__ j(equal, if_false);
Condition is_smi = masm_->CheckSmi(result_register());
__ j(is_smi, if_true);
__ push(rcx); // Enumerable.
__ push(rbx); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
- __ SmiCompare(rax, Smi::FromInt(0));
+ __ Cmp(rax, Smi::FromInt(0));
__ j(equal, loop_statement.continue_target());
__ movq(rbx, rax);
// Skip the arguments adaptor frame if it exists.
Label check_frame_marker;
- __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
- __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
- Smi::FromInt(StackFrame::CONSTRUCT));
+ __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
+ Smi::FromInt(StackFrame::CONSTRUCT));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
// Check if the calling frame is an arguments adaptor frame.
__ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &exit);
// Arguments adaptor case: Read the arguments length from the
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
- __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
- __ j(below, &slow_case);
+ __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
+ __ j(not_equal, &slow_case);
__ testb(FieldOperand(temp, Map::kBitFieldOffset),
Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
__ j(not_zero, &slow_case);
__ j(equal, true_label);
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ j(equal, false_label);
- __ SmiCompare(reg, Smi::FromInt(0));
+ __ Cmp(reg, Smi::FromInt(0));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
// Check for arguments adapter frame.
NearLabel done, adapted;
__ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ SmiCompare(Operand(result, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adapted);
// No arguments adaptor frame.
// Skip the arguments adaptor frame if it exists.
NearLabel check_frame_marker;
- __ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
- __ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset),
- Smi::FromInt(StackFrame::CONSTRUCT));
+ __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
+ Smi::FromInt(StackFrame::CONSTRUCT));
}
ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
// First, check if a write barrier is even needed. The tests below
- // catch stores of Smis and stores into young gen.
+ // catch stores of smis and stores into the young generation.
Label done;
JumpIfSmi(value, &done);
ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
// First, check if a write barrier is even needed. The tests below
- // catch stores of Smis and stores into young gen.
+ // catch stores of smis and stores into the young generation.
Label done;
JumpIfSmi(value, &done);
}
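The reworded comments above describe the same filter in both RecordWrite variants: skip the barrier when the stored value is a smi (it is not a heap pointer), and skip stores into the young generation (the InNewSpace test used elsewhere in this patch). A hedged sketch of that filter in plain C++, with invented predicates standing in for those checks:

// Illustrative only; the real decisions are made by the assembler checks above.
bool WriteBarrierNeeded(bool value_is_smi, bool object_in_new_space) {
  if (value_is_smi) return false;         // smis are immediates, not pointers
  if (object_in_new_space) return false;  // young-generation objects are scanned anyway
  return true;                            // may create an old-to-new pointer worth recording
}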
-void MacroAssembler::SmiCompare(Register dst, Register src) {
- cmpq(dst, src);
+void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
+ if (FLAG_debug_code) {
+ AbortIfNotSmi(smi1);
+ AbortIfNotSmi(smi2);
+ }
+ cmpq(smi1, smi2);
}
void MacroAssembler::SmiCompare(Register dst, Smi* src) {
+ if (FLAG_debug_code) {
+ AbortIfNotSmi(dst);
+ }
+ Cmp(dst, src);
+}
+
+
+void MacroAssembler::Cmp(Register dst, Smi* src) {
ASSERT(!dst.is(kScratchRegister));
if (src->value() == 0) {
testq(dst, dst);
void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
+ if (FLAG_debug_code) {
+ AbortIfNotSmi(dst);
+ AbortIfNotSmi(src);
+ }
cmpq(dst, src);
}
void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
+ if (FLAG_debug_code) {
+ AbortIfNotSmi(dst);
+ AbortIfNotSmi(src);
+ }
cmpq(dst, src);
}
void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
+ if (FLAG_debug_code) {
+ AbortIfNotSmi(dst);
+ }
cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}
+void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
+ // The Operand cannot use the smi register.
+ Register smi_reg = GetSmiConstant(src);
+ ASSERT(!dst.AddressUsesRegister(smi_reg));
+ cmpq(dst, smi_reg);
+}
+
+
void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
if (source->IsSmi()) {
- SmiCompare(dst, Smi::cast(*source));
+ Cmp(dst, Smi::cast(*source));
} else {
Move(kScratchRegister, source);
cmpq(dst, kScratchRegister);
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
if (source->IsSmi()) {
- SmiCompare(dst, Smi::cast(*source));
+ Cmp(dst, Smi::cast(*source));
} else {
ASSERT(source->IsHeapObject());
movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
void MacroAssembler::AbortIfNotSmi(Register object) {
- NearLabel ok;
+ Condition is_smi = CheckSmi(object);
+ Assert(is_smi, "Operand is not a smi");
+}
+
+
+void MacroAssembler::AbortIfNotSmi(const Operand& object) {
Condition is_smi = CheckSmi(object);
Assert(is_smi, "Operand is not a smi");
}
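The rewritten SmiCompare helpers assert smi-ness when FLAG_debug_code is set and otherwise still compile down to a plain cmpq, which is sound because the tagging is order-preserving: two values tagged the same way compare exactly like their untagged payloads. A standalone sketch of that property, assuming the x64 layout implied by the kSmiShift / kBitsPerByte operand above (payload in the upper 32 bits, zero tag below); treat the exact representation as an assumption, not a guarantee:

#include <cassert>
#include <cstdint>

// Tag a 32-bit payload by moving it into the upper half of a 64-bit word.
// The shift is done in unsigned arithmetic so negative payloads stay well defined.
inline int64_t TagSmi(int32_t value) {
  return static_cast<int64_t>(static_cast<uint64_t>(static_cast<int64_t>(value)) << 32);
}

// Simplified smi test: all tag bits below the payload are zero.
inline bool IsSmiWord(int64_t word) { return (word & 0xFFFFFFFF) == 0; }

int main() {
  const int64_t a = TagSmi(-1);
  const int64_t b = TagSmi(1024);
  assert(IsSmiWord(a) && IsSmiWord(b));
  assert(a == TagSmi(-1) && a != b);  // equality survives tagging
  assert((a < b) == (-1 < 1024));     // so does signed ordering, hence cmpq suffices
  return 0;
}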
int power);
- // Simple comparison of smis.
- void SmiCompare(Register dst, Register src);
+ // Simple comparison of smis. Both sides must be known smis to use these;
+ // otherwise use Cmp.
+ void SmiCompare(Register smi1, Register smi2);
void SmiCompare(Register dst, Smi* src);
void SmiCompare(Register dst, const Operand& src);
void SmiCompare(const Operand& dst, Register src);
void Move(const Operand& dst, Handle<Object> source);
void Cmp(Register dst, Handle<Object> source);
void Cmp(const Operand& dst, Handle<Object> source);
+ void Cmp(Register dst, Smi* src);
+ void Cmp(const Operand& dst, Smi* src);
void Push(Handle<Object> source);
// Emit code to discard a non-negative number of pointer-sized elements
// Abort execution if argument is not a smi. Used in debug code.
void AbortIfNotSmi(Register object);
+ void AbortIfNotSmi(const Operand& object);
// Abort execution if argument is a string. Used in debug code.
void AbortIfNotString(Register object);
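Taken together with the macro-assembler changes, the new declarations split the helpers by contract: SmiCompare is for operands statically known to be smis (and now asserts that under FLAG_debug_code), while the Cmp overloads accept a value that may turn out not to be a smi. Usage as it already appears at call sites elsewhere in this patch:

// Both operands are known smis (a smi length sum against a constant):
__ SmiCompare(rbx, Smi::FromInt(2));

// The context slot holds either a context pointer or a frame-marker smi,
// so the non-asserting Cmp overload is used instead:
__ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
       Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));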
__ j(less_equal, exit);
}
} else {
- __ SmiCompare(rcx, rcx);
+ __ cmpq(rcx, rcx);
__ movl(rax, Immediate(id + 11));
__ j(not_equal, exit);
__ incq(rax);
// Test that we can compare smis for equality (and more).
TEST(SmiCompare) {
+ v8::V8::Initialize();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer =
- static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+ static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
&actual_size,
true));
CHECK(buffer);
__ movl(rcx, Immediate(0));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(0)));
- __ SmiCompare(rcx, rdx);
+ __ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(2)); // Test number.
__ movl(rcx, Immediate(1024));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(1024)));
- __ SmiCompare(rcx, rdx);
+ __ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(3)); // Test number.
__ movl(rcx, Immediate(-1));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(-1)));
- __ SmiCompare(rcx, rdx);
+ __ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(4)); // Test number.
__ movl(rcx, Immediate(Smi::kMaxValue));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMaxValue)));
- __ SmiCompare(rcx, rdx);
+ __ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(5)); // Test number.
__ movl(rcx, Immediate(Smi::kMinValue));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMinValue)));
- __ SmiCompare(rcx, rdx);
+ __ cmpq(rcx, rdx);
__ j(not_equal, &exit);
// Different target register.
__ movl(rcx, Immediate(0));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(0)));
- __ SmiCompare(r8, rdx);
+ __ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(7)); // Test number.
__ movl(rcx, Immediate(1024));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(1024)));
- __ SmiCompare(r8, rdx);
+ __ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(8)); // Test number.
__ movl(rcx, Immediate(-1));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(-1)));
- __ SmiCompare(r8, rdx);
+ __ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(9)); // Test number.
__ movl(rcx, Immediate(Smi::kMaxValue));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMaxValue)));
- __ SmiCompare(r8, rdx);
+ __ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(10)); // Test number.
__ movl(rcx, Immediate(Smi::kMinValue));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMinValue)));
- __ SmiCompare(r8, rdx);
+ __ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rcx, x, RelocInfo::NONE);
__ movq(r11, rcx);
__ Integer64PlusConstantToSmi(rdx, rcx, y);
- __ SmiCompare(rdx, r8);
+ __ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ Integer64PlusConstantToSmi(rcx, rcx, y);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
}
__ SmiNeg(r9, rcx, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiNeg(rcx, rcx, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
} else {
Label smi_ok, smi_ok2;
__ jmp(exit);
__ bind(&smi_ok);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ jmp(exit);
__ bind(&smi_ok2);
__ incq(rax);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
}
}
__ movl(rax, Immediate(id)); // Test number.
__ SmiAdd(r9, rcx, rdx, exit);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAdd(rcx, rcx, rdx, exit); \
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ movl(rcx, Immediate(first));
__ incq(rax);
__ SmiAddConstant(r9, rcx, Smi::FromInt(second));
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ SmiAddConstant(rcx, rcx, Smi::FromInt(second));
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ movl(rcx, Immediate(first));
__ incq(rax);
__ SmiAddConstant(r9, rcx, Smi::FromInt(second), exit);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAddConstant(rcx, rcx, Smi::FromInt(second), exit);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
}
__ movl(rax, Immediate(id)); // Test 0.
__ SmiSub(r9, rcx, rdx, exit);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax); // Test 1.
__ SmiSub(rcx, rcx, rdx, exit);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ Move(rcx, Smi::FromInt(first));
__ incq(rax); // Test 2.
__ SmiSubConstant(r9, rcx, Smi::FromInt(second));
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax); // Test 3.
__ SmiSubConstant(rcx, rcx, Smi::FromInt(second));
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ Move(rcx, Smi::FromInt(first));
__ incq(rax); // Test 4.
__ SmiSubConstant(r9, rcx, Smi::FromInt(second), exit);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax); // Test 5.
__ SmiSubConstant(rcx, rcx, Smi::FromInt(second), exit);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
}
__ Move(r8, Smi::FromIntptr(result));
__ SmiMul(r9, rcx, rdx, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiMul(rcx, rcx, rdx, exit);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
} else {
__ movl(rax, Immediate(id + 8));
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiMul(rcx, rcx, rdx, &overflow_ok2);
__ bind(&overflow_ok2);
// 31-bit version doesn't preserve rcx on failure.
// __ incq(rax);
- // __ SmiCompare(r11, rcx);
+ // __ cmpq(r11, rcx);
// __ j(not_equal, exit);
}
}
__ SmiDiv(r9, rcx, r14, exit);
// Might have destroyed rcx and r14.
__ incq(r15);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(r15);
__ movq(rcx, r11);
__ Move(r14, Smi::FromInt(y));
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
__ SmiDiv(rcx, rcx, r14, exit);
__ incq(r15);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
} else {
// Division fails.
__ bind(&fail_ok);
__ incq(r15);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
__ bind(&fail_ok2);
__ incq(r15);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
}
__ SmiMod(r9, rcx, r14, exit);
__ incq(r15);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(r15);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
__ SmiMod(rcx, rcx, r14, exit);
__ incq(r15);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
} else {
// Modulo fails.
__ bind(&fail_ok);
__ incq(r15);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
__ bind(&fail_ok2);
__ incq(r15);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
}
}
ASSERT(index.reg.is(rcx) || index.reg.is(rdx));
__ shl(index.reg, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(x) << i);
- __ SmiCompare(index.reg, r8);
+ __ cmpq(index.reg, r8);
__ j(not_equal, exit);
__ incq(rax);
__ Move(rcx, Smi::FromInt(x));
ASSERT(index.reg.is(rcx));
__ shl(rcx, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(x) << i);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
ASSERT(index.reg.is(rcx) || index.reg.is(rdx));
__ shl(index.reg, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(-x) << i);
- __ SmiCompare(index.reg, r8);
+ __ cmpq(index.reg, r8);
__ j(not_equal, exit);
__ incq(rax);
__ Move(rcx, Smi::FromInt(x));
ASSERT(index.reg.is(rcx));
__ shl(rcx, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(-x) << i);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
}
__ SelectNonSmi(r9, rcx, rdx, exit);
__ incq(rax);
- __ SmiCompare(r9, rdx);
+ __ cmpq(r9, rdx);
__ j(not_equal, exit);
__ incq(rax);
__ SelectNonSmi(r9, rcx, rdx, exit);
__ incq(rax);
- __ SmiCompare(r9, rcx);
+ __ cmpq(r9, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ Move(rdx, Smi::FromInt(y));
__ Move(r8, Smi::FromInt(result));
__ SmiAnd(r9, rcx, rdx);
- __ SmiCompare(r8, r9);
+ __ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAnd(rcx, rcx, rdx);
- __ SmiCompare(r8, rcx);
+ __ cmpq(r8, rcx);
__ j(not_equal, exit);
__ movq(rcx, r11);
__ incq(rax);
__ SmiAndConstant(r9, rcx, Smi::FromInt(y));
- __ SmiCompare(r8, r9);
+ __ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAndConstant(rcx, rcx, Smi::FromInt(y));
- __ SmiCompare(r8, rcx);
+ __ cmpq(r8, rcx);
__ j(not_equal, exit);
}
__ Move(rdx, Smi::FromInt(y));
__ Move(r8, Smi::FromInt(result));
__ SmiOr(r9, rcx, rdx);
- __ SmiCompare(r8, r9);
+ __ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiOr(rcx, rcx, rdx);
- __ SmiCompare(r8, rcx);
+ __ cmpq(r8, rcx);
__ j(not_equal, exit);
__ movq(rcx, r11);
__ incq(rax);
__ SmiOrConstant(r9, rcx, Smi::FromInt(y));
- __ SmiCompare(r8, r9);
+ __ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiOrConstant(rcx, rcx, Smi::FromInt(y));
- __ SmiCompare(r8, rcx);
+ __ cmpq(r8, rcx);
__ j(not_equal, exit);
}
__ Move(rdx, Smi::FromInt(y));
__ Move(r8, Smi::FromInt(result));
__ SmiXor(r9, rcx, rdx);
- __ SmiCompare(r8, r9);
+ __ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiXor(rcx, rcx, rdx);
- __ SmiCompare(r8, rcx);
+ __ cmpq(r8, rcx);
__ j(not_equal, exit);
__ movq(rcx, r11);
__ incq(rax);
__ SmiXorConstant(r9, rcx, Smi::FromInt(y));
- __ SmiCompare(r8, r9);
+ __ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiXorConstant(rcx, rcx, Smi::FromInt(y));
- __ SmiCompare(r8, rcx);
+ __ cmpq(r8, rcx);
__ j(not_equal, exit);
}
__ movq(r11, rcx);
__ SmiNot(r9, rcx);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx);
+ __ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiNot(rcx, rcx);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
}
__ SmiShiftLeftConstant(r9, rcx, shift);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiShiftLeftConstant(rcx, rcx, shift);
__ incq(rax);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiShiftLeft(r9, rdx, rcx);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiShiftLeft(r9, rdx, r11);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiShiftLeft(rdx, rdx, r11);
__ incq(rax);
- __ SmiCompare(rdx, r8);
+ __ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiShiftLogicalRightConstant(r9, rcx, shift, exit);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiShiftLogicalRight(r9, rdx, rcx, exit);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiShiftLogicalRight(r9, rdx, r11, exit);
__ incq(rax);
- __ SmiCompare(r9, r8);
+ __ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ bind(&fail_ok);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(rax);
__ bind(&fail_ok3);
__ incq(rax);
- __ SmiCompare(rcx, r11);
+ __ cmpq(rcx, r11);
__ j(not_equal, exit);
__ addq(rax, Immediate(3));
__ Move(rcx, Smi::FromInt(x));
__ SmiShiftArithmeticRightConstant(rcx, rcx, shift);
- __ SmiCompare(rcx, r8);
+ __ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
__ Move(r11, Smi::FromInt(shift));
__ SmiShiftArithmeticRight(rdx, rdx, r11);
- __ SmiCompare(rdx, r8);
+ __ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
__ Move(rcx, Smi::FromInt(x));
__ movq(r11, rcx);
__ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rcx, power);
- __ SmiCompare(rdx, r8);
+ __ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
- __ SmiCompare(r11, rcx); // rcx unchanged.
+ __ cmpq(r11, rcx); // rcx unchanged.
__ j(not_equal, exit);
__ incq(rax);
__ PositiveSmiTimesPowerOfTwoToInteger64(rcx, rcx, power);
- __ SmiCompare(rdx, r8);
+ __ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
}
TEST(PositiveSmiTimesPowerOfTwoToInteger64) {
+ v8::V8::Initialize();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer =
- static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
+ static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
&actual_size,
true));
CHECK(buffer);