if (FLAG_debug_code) {
// Initial maps for the builtin InternalArray functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
- __ And(t0, a2, Operand(kSmiTagMask));
+ __ SmiTst(a2, t0);
__ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
if (FLAG_debug_code) {
// Initial maps for the builtin Array functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
- __ And(t0, a2, Operand(kSmiTagMask));
+ __ SmiTst(a2, t0);
__ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
// Check that the RegExp has been compiled (data contains a fixed array).
__ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
- __ And(t0, regexp_data, Operand(kSmiTagMask));
+ __ SmiTst(regexp_data, t0);
__ Check(nz,
kUnexpectedTypeForRegExpDataFixedArrayExpected,
t0,
// Initial map for the builtin Array function should be a map.
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// This check will detect both a NULL and a Smi.
- __ And(at, a3, Operand(kSmiTagMask));
+ __ SmiTst(a3, at);
__ Assert(ne, kUnexpectedInitialMapForArrayFunction,
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
// Initial map for the builtin Array function should be a map.
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// This check will detect both a NULL and a Smi.
- __ And(at, a3, Operand(kSmiTagMask));
+ __ SmiTst(a3, at);
__ Assert(ne, kUnexpectedInitialMapForArrayFunction,
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
&if_true, &if_false, &fall_through);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- __ And(t0, v0, Operand(kSmiTagMask));
+ __ SmiTst(v0, t0);
Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
&if_true, &if_false, &fall_through);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
+ __ NonNegativeSmiTst(v0, at);
Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
__ Pop(index, value);
if (FLAG_debug_code) {
- __ And(at, value, Operand(kSmiTagMask));
+ __ SmiTst(value, at);
__ ThrowIf(ne, kNonSmiValue, at, Operand(zero_reg));
- __ And(at, index, Operand(kSmiTagMask));
+ __ SmiTst(index, at);
__ ThrowIf(ne, kNonSmiIndex, at, Operand(zero_reg));
__ SmiUntag(index, index);
static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
__ Pop(index, value);
if (FLAG_debug_code) {
- __ And(at, value, Operand(kSmiTagMask));
+ __ SmiTst(value, at);
__ ThrowIf(ne, kNonSmiValue, at, Operand(zero_reg));
- __ And(at, index, Operand(kSmiTagMask));
+ __ SmiTst(index, at);
__ ThrowIf(ne, kNonSmiIndex, at, Operand(zero_reg));
__ SmiUntag(index, index);
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
ASSERT(!scratch.is(scratch0()));
ASSERT(!scratch.is(object));
- __ And(at, object, Operand(kSmiTagMask));
+ __ SmiTst(object, at);
DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
__ GetObjectType(object, scratch, scratch);
DeoptimizeIf(ne, instr->environment(), scratch, Operand(JS_DATE_TYPE));
__ JumpIfSmi(reg, instr->TrueLabel(chunk_));
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
- __ And(at, reg, Operand(kSmiTagMask));
+ __ SmiTst(reg, at);
DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
}
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
- __ And(scratch, result, Operand(kSmiTagMask));
+ __ SmiTst(result, scratch);
DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
} else {
__ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
__ Branch(&global_object, eq, receiver, Operand(scratch));
// Deoptimize if the receiver is not a JS object.
- __ And(scratch, receiver, Operand(kSmiTagMask));
+ __ SmiTst(receiver, scratch);
DeoptimizeIf(eq, instr->environment(), scratch, Operand(zero_reg));
__ GetObjectType(receiver, scratch, scratch);
if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
Register value = ToRegister(instr->value());
if (!instr->hydrogen()->value()->type().IsHeapObject()) {
- __ And(scratch, value, Operand(kSmiTagMask));
+ __ SmiTst(value, scratch);
DeoptimizeIf(eq, instr->environment(), scratch, Operand(zero_reg));
}
} else if (FLAG_track_double_fields && representation.IsDouble()) {
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
LOperand* input = instr->value();
- __ And(at, ToRegister(input), Operand(kSmiTagMask));
+ __ SmiTst(ToRegister(input), at);
DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
}
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
if (!instr->hydrogen()->value()->IsHeapObject()) {
LOperand* input = instr->value();
- __ And(at, ToRegister(input), Operand(kSmiTagMask));
+ __ SmiTst(ToRegister(input), at);
DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
}
}
instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
__ StoreToSafepointRegisterSlot(v0, scratch0());
}
- __ And(at, scratch0(), Operand(kSmiTagMask));
+ __ SmiTst(scratch0(), at);
DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
}
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
- And(t0, object, Operand(kSmiTagMask));
+ SmiTst(object, t0);
Check(ne, kOperandIsASmiAndNotAString, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
- And(t0, object, Operand(kSmiTagMask));
+ SmiTst(object, t0);
Check(ne, kOperandIsASmiAndNotAName, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
Register scratch,
uint32_t encoding_mask) {
Label is_object;
- And(at, string, Operand(kSmiTagMask));
+ SmiTst(string, at);
ThrowIf(eq, kNonObject, at, Operand(zero_reg));
lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
// string length without using a temp register, it is restored at the end of
// this function.
Label index_tag_ok, index_tag_bad;
- // On ARM TrySmiTag is used here.
- AdduAndCheckForOverflow(index, index, index, scratch);
- BranchOnOverflow(&index_tag_bad, scratch);
+ TrySmiTag(index, scratch, &index_tag_bad);
Branch(&index_tag_ok);
bind(&index_tag_bad);
Throw(kIndexIsTooLarge);
Addu(dst, src, src);
}
+ // Try to convert int32 to smi. If the value is too large, preserve
+ // the original value and jump to not_a_smi. Destroys scratch and
+ // the at register.
+ void TrySmiTag(Register reg, Register scratch, Label* not_a_smi) {
+ TrySmiTag(reg, reg, scratch, not_a_smi);
+ }
+ void TrySmiTag(Register dst,
+ Register src,
+ Register scratch,
+ Label* not_a_smi) {
+ SmiTagCheckOverflow(at, src, scratch);
+ BranchOnOverflow(not_a_smi, scratch);
+ mov(dst, at);
+ }
+
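For reference (not part of the patch): TrySmiTag succeeds only when the int32 value fits in the 31-bit smi payload, since tagging doubles the value (see the Addu-based SmiTag above) and SmiTagCheckOverflow/BranchOnOverflow catch the int32 overflow. A minimal host-side C++ sketch of the same range check, assuming the 32-bit smi layout (kSmiTag == 0, kSmiTagSize == 1); TrySmiTagModel is a hypothetical name used only for illustration:

#include <cstdint>
#include <optional>

// Hypothetical model of the check, not V8 code: doubling an int32 overflows
// exactly when the value lies outside [-2^30, 2^30 - 1].
std::optional<int32_t> TrySmiTagModel(int32_t value) {
  if (value < -(1 << 30) || value > (1 << 30) - 1) {
    return std::nullopt;  // corresponds to taking the not_a_smi branch
  }
  return value * 2;  // same result as value << kSmiTagSize with kSmiTag == 0
}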
void SmiUntag(Register reg) {
sra(reg, reg, kSmiTagSize);
}
sra(dst, src, kSmiTagSize);
}
+ // Test if the register contains a smi.
+ inline void SmiTst(Register value, Register scratch) {
+ And(scratch, value, Operand(kSmiTagMask));
+ }
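+ // Test if the register contains a non-negative smi.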
+ inline void NonNegativeSmiTst(Register value, Register scratch) {
+ And(scratch, value, Operand(kSmiTagMask | kSmiSignMask));
+ }
+
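For reference (not part of the patch): both helpers compile to a single AND. Because kSmiTag == 0, a value is a smi exactly when its low tag bit is clear, and NonNegativeSmiTst additionally masks the sign bit so the result is zero only for a non-negative smi. A standalone C++ sketch of the two mask tests, assuming kSmiTagMask == 1 and kSmiSignMask == 0x80000000 on 32-bit; the IsSmiModel/IsNonNegativeSmiModel names are illustrative only:

#include <cstdint>

// Hypothetical models of the masks the macros emit; the generated code leaves
// the AND result in scratch and the caller branches on zero vs. non-zero.
bool IsSmiModel(uint32_t raw) {
  return (raw & 0x1u) == 0;  // SmiTst: tag bit must be clear
}

bool IsNonNegativeSmiModel(uint32_t raw) {
  return (raw & (0x1u | 0x80000000u)) == 0;  // NonNegativeSmiTst: tag and sign bits clear
}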
// Untag the source value into destination and jump if source is a smi.
// Source and destination can be the same register.
void UntagAndJumpIfSmi(Register dst, Register src, Label* smi_case);
// If the argument is a smi, just return.
STATIC_ASSERT(kSmiTag == 0);
- __ And(t0, v0, Operand(kSmiTagMask));
+ __ SmiTst(v0, t0);
__ DropAndRet(argc + 1, eq, t0, Operand(zero_reg));
__ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);