/*
 * Copyright (C) 2008 Apple Inc.
 * Copyright (C) 2009, 2010 University of Szeged
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
28 #ifndef MacroAssemblerARM_h
29 #define MacroAssemblerARM_h
31 #if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)
33 #include "ARMAssembler.h"
34 #include "AbstractMacroAssembler.h"
38 class MacroAssemblerARM : public AbstractMacroAssembler<ARMAssembler> {
39 static const int DoubleConditionMask = 0x0f;
40 static const int DoubleConditionBitSpecial = 0x10;
41 COMPILE_ASSERT(!(DoubleConditionBitSpecial & DoubleConditionMask), DoubleConditionBitSpecial_should_not_interfere_with_ARMAssembler_Condition_codes);
43 typedef ARMRegisters::FPRegisterID FPRegisterID;
45 enum RelationalCondition {
46 Equal = ARMAssembler::EQ,
47 NotEqual = ARMAssembler::NE,
48 Above = ARMAssembler::HI,
49 AboveOrEqual = ARMAssembler::CS,
50 Below = ARMAssembler::CC,
51 BelowOrEqual = ARMAssembler::LS,
52 GreaterThan = ARMAssembler::GT,
53 GreaterThanOrEqual = ARMAssembler::GE,
54 LessThan = ARMAssembler::LT,
55 LessThanOrEqual = ARMAssembler::LE
58 enum ResultCondition {
59 Overflow = ARMAssembler::VS,
60 Signed = ARMAssembler::MI,
61 Zero = ARMAssembler::EQ,
62 NonZero = ARMAssembler::NE
65 enum DoubleCondition {
66 // These conditions will only evaluate to true if the comparison is ordered - i.e. neither operand is NaN.
67 DoubleEqual = ARMAssembler::EQ,
68 DoubleNotEqual = ARMAssembler::NE | DoubleConditionBitSpecial,
69 DoubleGreaterThan = ARMAssembler::GT,
70 DoubleGreaterThanOrEqual = ARMAssembler::GE,
71 DoubleLessThan = ARMAssembler::CC,
72 DoubleLessThanOrEqual = ARMAssembler::LS,
73 // If either operand is NaN, these conditions always evaluate to true.
74 DoubleEqualOrUnordered = ARMAssembler::EQ | DoubleConditionBitSpecial,
75 DoubleNotEqualOrUnordered = ARMAssembler::NE,
76 DoubleGreaterThanOrUnordered = ARMAssembler::HI,
77 DoubleGreaterThanOrEqualOrUnordered = ARMAssembler::CS,
78 DoubleLessThanOrUnordered = ARMAssembler::LT,
79 DoubleLessThanOrEqualOrUnordered = ARMAssembler::LE,
82 static const RegisterID stackPointerRegister = ARMRegisters::sp;
83 static const RegisterID linkRegister = ARMRegisters::lr;
85 static const Scale ScalePtr = TimesFour;
87 void add32(RegisterID src, RegisterID dest)
89 m_assembler.adds_r(dest, dest, src);
92 void add32(RegisterID op1, RegisterID op2, RegisterID dest)
94 m_assembler.adds_r(dest, op1, op2);
97 void add32(TrustedImm32 imm, Address address)
99 load32(address, ARMRegisters::S1);
100 add32(imm, ARMRegisters::S1);
101 store32(ARMRegisters::S1, address);
104 void add32(TrustedImm32 imm, RegisterID dest)
106 m_assembler.adds_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
109 void add32(AbsoluteAddress src, RegisterID dest)
111 move(TrustedImmPtr(src.m_ptr), ARMRegisters::S1);
112 m_assembler.dtr_u(ARMAssembler::LoadUint32, ARMRegisters::S1, ARMRegisters::S1, 0);
113 add32(ARMRegisters::S1, dest);
116 void add32(Address src, RegisterID dest)
118 load32(src, ARMRegisters::S1);
119 add32(ARMRegisters::S1, dest);
122 void add32(RegisterID src, TrustedImm32 imm, RegisterID dest)
124 m_assembler.adds_r(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
127 void and32(RegisterID src, RegisterID dest)
129 m_assembler.ands_r(dest, dest, src);
132 void and32(RegisterID op1, RegisterID op2, RegisterID dest)
134 m_assembler.ands_r(dest, op1, op2);
137 void and32(TrustedImm32 imm, RegisterID dest)
139 ARMWord w = m_assembler.getImm(imm.m_value, ARMRegisters::S0, true);
140 if (w & ARMAssembler::OP2_INV_IMM)
141 m_assembler.bics_r(dest, dest, w & ~ARMAssembler::OP2_INV_IMM);
143 m_assembler.ands_r(dest, dest, w);
146 void and32(TrustedImm32 imm, RegisterID src, RegisterID dest)
148 ARMWord w = m_assembler.getImm(imm.m_value, ARMRegisters::S0, true);
149 if (w & ARMAssembler::OP2_INV_IMM)
150 m_assembler.bics_r(dest, src, w & ~ARMAssembler::OP2_INV_IMM);
152 m_assembler.ands_r(dest, src, w);
155 void lshift32(RegisterID shiftAmount, RegisterID dest)
157 lshift32(dest, shiftAmount, dest);
160 void lshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
162 ARMWord w = ARMAssembler::getOp2Byte(0x1f);
163 m_assembler.and_r(ARMRegisters::S0, shiftAmount, w);
165 m_assembler.movs_r(dest, m_assembler.lsl_r(src, ARMRegisters::S0));
168 void lshift32(TrustedImm32 imm, RegisterID dest)
170 m_assembler.movs_r(dest, m_assembler.lsl(dest, imm.m_value & 0x1f));
173 void lshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
175 m_assembler.movs_r(dest, m_assembler.lsl(src, imm.m_value & 0x1f));
178 void mul32(RegisterID op1, RegisterID op2, RegisterID dest)
182 move(op2, ARMRegisters::S0);
183 op2 = ARMRegisters::S0;
185 // Swap the operands.
186 RegisterID tmp = op1;
191 m_assembler.muls_r(dest, op1, op2);
194 void mul32(RegisterID src, RegisterID dest)
196 mul32(src, dest, dest);
199 void mul32(TrustedImm32 imm, RegisterID src, RegisterID dest)
201 move(imm, ARMRegisters::S0);
202 m_assembler.muls_r(dest, src, ARMRegisters::S0);
205 void neg32(RegisterID srcDest)
207 m_assembler.rsbs_r(srcDest, srcDest, ARMAssembler::getOp2Byte(0));
210 void or32(RegisterID src, RegisterID dest)
212 m_assembler.orrs_r(dest, dest, src);
215 void or32(TrustedImm32 imm, RegisterID dest)
217 m_assembler.orrs_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
220 void or32(TrustedImm32 imm, RegisterID src, RegisterID dest)
222 m_assembler.orrs_r(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
225 void or32(RegisterID op1, RegisterID op2, RegisterID dest)
227 m_assembler.orrs_r(dest, op1, op2);
230 void rshift32(RegisterID shiftAmount, RegisterID dest)
232 rshift32(dest, shiftAmount, dest);
235 void rshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
237 ARMWord w = ARMAssembler::getOp2Byte(0x1f);
238 m_assembler.and_r(ARMRegisters::S0, shiftAmount, w);
240 m_assembler.movs_r(dest, m_assembler.asr_r(src, ARMRegisters::S0));
243 void rshift32(TrustedImm32 imm, RegisterID dest)
245 rshift32(dest, imm, dest);
248 void rshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
250 m_assembler.movs_r(dest, m_assembler.asr(src, imm.m_value & 0x1f));
253 void urshift32(RegisterID shiftAmount, RegisterID dest)
255 urshift32(dest, shiftAmount, dest);
258 void urshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
260 ARMWord w = ARMAssembler::getOp2Byte(0x1f);
261 m_assembler.and_r(ARMRegisters::S0, shiftAmount, w);
263 m_assembler.movs_r(dest, m_assembler.lsr_r(src, ARMRegisters::S0));
266 void urshift32(TrustedImm32 imm, RegisterID dest)
268 m_assembler.movs_r(dest, m_assembler.lsr(dest, imm.m_value & 0x1f));
271 void urshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
273 m_assembler.movs_r(dest, m_assembler.lsr(src, imm.m_value & 0x1f));
276 void sub32(RegisterID src, RegisterID dest)
278 m_assembler.subs_r(dest, dest, src);
281 void sub32(TrustedImm32 imm, RegisterID dest)
283 m_assembler.subs_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
286 void sub32(TrustedImm32 imm, Address address)
288 load32(address, ARMRegisters::S1);
289 sub32(imm, ARMRegisters::S1);
290 store32(ARMRegisters::S1, address);
293 void sub32(Address src, RegisterID dest)
295 load32(src, ARMRegisters::S1);
296 sub32(ARMRegisters::S1, dest);
299 void sub32(RegisterID src, TrustedImm32 imm, RegisterID dest)
301 m_assembler.subs_r(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
304 void xor32(RegisterID src, RegisterID dest)
306 m_assembler.eors_r(dest, dest, src);
309 void xor32(RegisterID op1, RegisterID op2, RegisterID dest)
311 m_assembler.eors_r(dest, op1, op2);
314 void xor32(TrustedImm32 imm, RegisterID dest)
316 if (imm.m_value == -1)
317 m_assembler.mvns_r(dest, dest);
319 m_assembler.eors_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
322 void xor32(TrustedImm32 imm, RegisterID src, RegisterID dest)
324 if (imm.m_value == -1)
325 m_assembler.mvns_r(dest, src);
327 m_assembler.eors_r(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
330 void countLeadingZeros32(RegisterID src, RegisterID dest)
332 #if WTF_ARM_ARCH_AT_LEAST(5)
333 m_assembler.clz_r(dest, src);
337 ASSERT_NOT_REACHED();
341 void load8(ImplicitAddress address, RegisterID dest)
343 m_assembler.dataTransfer32(ARMAssembler::LoadUint8, dest, address.base, address.offset);
346 void load8(BaseIndex address, RegisterID dest)
348 m_assembler.baseIndexTransfer32(ARMAssembler::LoadUint8, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
351 void load8Signed(BaseIndex address, RegisterID dest)
353 m_assembler.baseIndexTransfer16(ARMAssembler::LoadInt8, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
356 void load16(ImplicitAddress address, RegisterID dest)
358 m_assembler.dataTransfer16(ARMAssembler::LoadUint16, dest, address.base, address.offset);
361 void load16(BaseIndex address, RegisterID dest)
363 m_assembler.baseIndexTransfer16(ARMAssembler::LoadUint16, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
366 void load16Signed(BaseIndex address, RegisterID dest)
368 m_assembler.baseIndexTransfer16(ARMAssembler::LoadInt16, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
371 void load32(ImplicitAddress address, RegisterID dest)
373 m_assembler.dataTransfer32(ARMAssembler::LoadUint32, dest, address.base, address.offset);
376 void load32(BaseIndex address, RegisterID dest)
378 m_assembler.baseIndexTransfer32(ARMAssembler::LoadUint32, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
381 #if CPU(ARMV5_OR_LOWER)
382 void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest);
384 void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
386 load32(address, dest);
390 void load16Unaligned(BaseIndex address, RegisterID dest)
392 load16(address, dest);
395 ConvertibleLoadLabel convertibleLoadPtr(Address address, RegisterID dest)
397 ConvertibleLoadLabel result(this);
398 ASSERT(address.offset >= 0 && address.offset <= 255);
399 m_assembler.dtr_u(ARMAssembler::LoadUint32, dest, address.base, address.offset);
403 DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
405 DataLabel32 dataLabel(this);
406 m_assembler.ldr_un_imm(ARMRegisters::S0, 0);
407 m_assembler.dtr_ur(ARMAssembler::LoadUint32, dest, address.base, ARMRegisters::S0);
411 static bool isCompactPtrAlignedAddressOffset(ptrdiff_t value)
413 return value >= -4095 && value <= 4095;
416 DataLabelCompact load32WithCompactAddressOffsetPatch(Address address, RegisterID dest)
418 DataLabelCompact dataLabel(this);
419 ASSERT(isCompactPtrAlignedAddressOffset(address.offset));
420 if (address.offset >= 0)
421 m_assembler.dtr_u(ARMAssembler::LoadUint32, dest, address.base, address.offset);
423 m_assembler.dtr_d(ARMAssembler::LoadUint32, dest, address.base, address.offset);
427 DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
429 DataLabel32 dataLabel(this);
430 m_assembler.ldr_un_imm(ARMRegisters::S0, 0);
431 m_assembler.dtr_ur(ARMAssembler::StoreUint32, src, address.base, ARMRegisters::S0);
435 void store8(RegisterID src, BaseIndex address)
437 m_assembler.baseIndexTransfer32(ARMAssembler::StoreUint8, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
440 void store16(RegisterID src, BaseIndex address)
442 m_assembler.baseIndexTransfer16(ARMAssembler::StoreUint16, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
445 void store32(RegisterID src, ImplicitAddress address)
447 m_assembler.dataTransfer32(ARMAssembler::StoreUint32, src, address.base, address.offset);
450 void store32(RegisterID src, BaseIndex address)
452 m_assembler.baseIndexTransfer32(ARMAssembler::StoreUint32, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
455 void store32(TrustedImm32 imm, ImplicitAddress address)
457 move(imm, ARMRegisters::S1);
458 store32(ARMRegisters::S1, address);
461 void store32(TrustedImm32 imm, BaseIndex address)
463 move(imm, ARMRegisters::S1);
464 m_assembler.baseIndexTransfer32(ARMAssembler::StoreUint32, ARMRegisters::S1, address.base, address.index, static_cast<int>(address.scale), address.offset);
467 void store32(RegisterID src, void* address)
469 m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
470 m_assembler.dtr_u(ARMAssembler::StoreUint32, src, ARMRegisters::S0, 0);
473 void store32(TrustedImm32 imm, void* address)
475 m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
476 m_assembler.moveImm(imm.m_value, ARMRegisters::S1);
477 m_assembler.dtr_u(ARMAssembler::StoreUint32, ARMRegisters::S1, ARMRegisters::S0, 0);
480 void pop(RegisterID dest)
482 m_assembler.pop_r(dest);
485 void push(RegisterID src)
487 m_assembler.push_r(src);
490 void push(Address address)
492 load32(address, ARMRegisters::S1);
493 push(ARMRegisters::S1);
496 void push(TrustedImm32 imm)
498 move(imm, ARMRegisters::S0);
499 push(ARMRegisters::S0);
502 void move(TrustedImm32 imm, RegisterID dest)
504 m_assembler.moveImm(imm.m_value, dest);
507 void move(RegisterID src, RegisterID dest)
510 m_assembler.mov_r(dest, src);
513 void move(TrustedImmPtr imm, RegisterID dest)
515 move(TrustedImm32(imm), dest);
518 void swap(RegisterID reg1, RegisterID reg2)
520 m_assembler.mov_r(ARMRegisters::S0, reg1);
521 m_assembler.mov_r(reg1, reg2);
522 m_assembler.mov_r(reg2, ARMRegisters::S0);
525 void signExtend32ToPtr(RegisterID src, RegisterID dest)
531 void zeroExtend32ToPtr(RegisterID src, RegisterID dest)
537 Jump branch8(RelationalCondition cond, Address left, TrustedImm32 right)
539 load8(left, ARMRegisters::S1);
540 return branch32(cond, ARMRegisters::S1, right);
543 Jump branch8(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
545 ASSERT(!(right.m_value & 0xFFFFFF00));
546 load8(left, ARMRegisters::S1);
547 return branch32(cond, ARMRegisters::S1, right);
550 Jump branch32(RelationalCondition cond, RegisterID left, RegisterID right, int useConstantPool = 0)
552 m_assembler.cmp_r(left, right);
553 return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
556 Jump branch32(RelationalCondition cond, RegisterID left, TrustedImm32 right, int useConstantPool = 0)
559 // In order to remove warning
560 ARMWord tmp = ((unsigned)right.m_value == 0x80000000) ? ARMAssembler::INVALID_IMM : m_assembler.getOp2(-right.m_value);
562 ARMWord tmp = (right.m_value == 0x80000000) ? ARMAssembler::INVALID_IMM : m_assembler.getOp2(-right.m_value);
564 if (tmp != ARMAssembler::INVALID_IMM)
565 m_assembler.cmn_r(left, tmp);
567 m_assembler.cmp_r(left, m_assembler.getImm(right.m_value, ARMRegisters::S0));
568 return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
571 Jump branch32(RelationalCondition cond, RegisterID left, Address right)
573 load32(right, ARMRegisters::S1);
574 return branch32(cond, left, ARMRegisters::S1);
577 Jump branch32(RelationalCondition cond, Address left, RegisterID right)
579 load32(left, ARMRegisters::S1);
580 return branch32(cond, ARMRegisters::S1, right);
583 Jump branch32(RelationalCondition cond, Address left, TrustedImm32 right)
585 load32(left, ARMRegisters::S1);
586 return branch32(cond, ARMRegisters::S1, right);
589 Jump branch32(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
591 load32(left, ARMRegisters::S1);
592 return branch32(cond, ARMRegisters::S1, right);
595 Jump branch32WithUnalignedHalfWords(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
597 load32WithUnalignedHalfWords(left, ARMRegisters::S1);
598 return branch32(cond, ARMRegisters::S1, right);
601 Jump branchTest8(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
603 load8(address, ARMRegisters::S1);
604 return branchTest32(cond, ARMRegisters::S1, mask);
607 Jump branchTest8(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
609 move(TrustedImmPtr(address.m_ptr), ARMRegisters::S1);
610 load8(Address(ARMRegisters::S1), ARMRegisters::S1);
611 return branchTest32(cond, ARMRegisters::S1, mask);
614 Jump branchTest32(ResultCondition cond, RegisterID reg, RegisterID mask)
616 ASSERT((cond == Zero) || (cond == NonZero));
617 m_assembler.tst_r(reg, mask);
618 return Jump(m_assembler.jmp(ARMCondition(cond)));
621 Jump branchTest32(ResultCondition cond, RegisterID reg, TrustedImm32 mask = TrustedImm32(-1))
623 ASSERT((cond == Zero) || (cond == NonZero));
624 ARMWord w = m_assembler.getImm(mask.m_value, ARMRegisters::S0, true);
625 if (w & ARMAssembler::OP2_INV_IMM)
626 m_assembler.bics_r(ARMRegisters::S0, reg, w & ~ARMAssembler::OP2_INV_IMM);
628 m_assembler.tst_r(reg, w);
629 return Jump(m_assembler.jmp(ARMCondition(cond)));
632 Jump branchTest32(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
634 load32(address, ARMRegisters::S1);
635 return branchTest32(cond, ARMRegisters::S1, mask);
638 Jump branchTest32(ResultCondition cond, BaseIndex address, TrustedImm32 mask = TrustedImm32(-1))
640 load32(address, ARMRegisters::S1);
641 return branchTest32(cond, ARMRegisters::S1, mask);
646 return Jump(m_assembler.jmp());
649 void jump(RegisterID target)
651 m_assembler.bx(target);
654 void jump(Address address)
656 load32(address, ARMRegisters::pc);
659 void jump(AbsoluteAddress address)
661 move(TrustedImmPtr(address.m_ptr), ARMRegisters::S0);
662 load32(Address(ARMRegisters::S0, 0), ARMRegisters::pc);
665 Jump branchAdd32(ResultCondition cond, RegisterID src, RegisterID dest)
667 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
669 return Jump(m_assembler.jmp(ARMCondition(cond)));
672 Jump branchAdd32(ResultCondition cond, RegisterID op1, RegisterID op2, RegisterID dest)
674 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
675 add32(op1, op2, dest);
676 return Jump(m_assembler.jmp(ARMCondition(cond)));
679 Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
681 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
683 return Jump(m_assembler.jmp(ARMCondition(cond)));
686 Jump branchAdd32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
688 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
689 add32(src, imm, dest);
690 return Jump(m_assembler.jmp(ARMCondition(cond)));
693 Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, AbsoluteAddress dest)
695 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
697 return Jump(m_assembler.jmp(ARMCondition(cond)));
700 void mull32(RegisterID op1, RegisterID op2, RegisterID dest)
704 move(op2, ARMRegisters::S0);
705 op2 = ARMRegisters::S0;
707 // Swap the operands.
708 RegisterID tmp = op1;
713 m_assembler.mull_r(ARMRegisters::S1, dest, op1, op2);
714 m_assembler.cmp_r(ARMRegisters::S1, m_assembler.asr(dest, 31));
717 Jump branchMul32(ResultCondition cond, RegisterID src1, RegisterID src2, RegisterID dest)
719 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
720 if (cond == Overflow) {
721 mull32(src1, src2, dest);
725 mul32(src1, src2, dest);
726 return Jump(m_assembler.jmp(ARMCondition(cond)));
729 Jump branchMul32(ResultCondition cond, RegisterID src, RegisterID dest)
731 return branchMul32(cond, src, dest, dest);
734 Jump branchMul32(ResultCondition cond, TrustedImm32 imm, RegisterID src, RegisterID dest)
736 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
737 if (cond == Overflow) {
738 move(imm, ARMRegisters::S0);
739 mull32(ARMRegisters::S0, src, dest);
743 mul32(imm, src, dest);
744 return Jump(m_assembler.jmp(ARMCondition(cond)));
747 Jump branchSub32(ResultCondition cond, RegisterID src, RegisterID dest)
749 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
751 return Jump(m_assembler.jmp(ARMCondition(cond)));
754 Jump branchSub32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
756 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
758 return Jump(m_assembler.jmp(ARMCondition(cond)));
761 Jump branchSub32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
763 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
764 sub32(src, imm, dest);
765 return Jump(m_assembler.jmp(ARMCondition(cond)));
768 Jump branchSub32(ResultCondition cond, RegisterID op1, RegisterID op2, RegisterID dest)
770 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
771 m_assembler.subs_r(dest, op1, op2);
772 return Jump(m_assembler.jmp(ARMCondition(cond)));
775 Jump branchNeg32(ResultCondition cond, RegisterID srcDest)
777 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
779 return Jump(m_assembler.jmp(ARMCondition(cond)));
782 Jump branchOr32(ResultCondition cond, RegisterID src, RegisterID dest)
784 ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
786 return Jump(m_assembler.jmp(ARMCondition(cond)));
796 m_assembler.loadBranchTarget(ARMRegisters::S1, ARMAssembler::AL, true);
797 return Call(m_assembler.blx(ARMRegisters::S1), Call::LinkableNear);
800 Call call(RegisterID target)
802 return Call(m_assembler.blx(target), Call::None);
805 void call(Address address)
807 call32(address.base, address.offset);
812 m_assembler.bx(linkRegister);
815 void compare32(RelationalCondition cond, RegisterID left, RegisterID right, RegisterID dest)
817 m_assembler.cmp_r(left, right);
818 m_assembler.mov_r(dest, ARMAssembler::getOp2Byte(0));
819 m_assembler.mov_r(dest, ARMAssembler::getOp2Byte(1), ARMCondition(cond));
822 void compare32(RelationalCondition cond, RegisterID left, TrustedImm32 right, RegisterID dest)
824 m_assembler.cmp_r(left, m_assembler.getImm(right.m_value, ARMRegisters::S0));
825 m_assembler.mov_r(dest, ARMAssembler::getOp2Byte(0));
826 m_assembler.mov_r(dest, ARMAssembler::getOp2Byte(1), ARMCondition(cond));
829 void compare8(RelationalCondition cond, Address left, TrustedImm32 right, RegisterID dest)
831 load8(left, ARMRegisters::S1);
832 compare32(cond, ARMRegisters::S1, right, dest);
835 void test32(ResultCondition cond, RegisterID reg, TrustedImm32 mask, RegisterID dest)
837 if (mask.m_value == -1)
838 m_assembler.cmp_r(0, reg);
840 m_assembler.tst_r(reg, m_assembler.getImm(mask.m_value, ARMRegisters::S0));
841 m_assembler.mov_r(dest, ARMAssembler::getOp2Byte(0));
842 m_assembler.mov_r(dest, ARMAssembler::getOp2Byte(1), ARMCondition(cond));
845 void test32(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
847 load32(address, ARMRegisters::S1);
848 test32(cond, ARMRegisters::S1, mask, dest);
851 void test8(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
853 load8(address, ARMRegisters::S1);
854 test32(cond, ARMRegisters::S1, mask, dest);
857 void add32(TrustedImm32 imm, RegisterID src, RegisterID dest)
859 m_assembler.add_r(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
862 void add32(TrustedImm32 imm, AbsoluteAddress address)
864 m_assembler.ldr_un_imm(ARMRegisters::S1, reinterpret_cast<ARMWord>(address.m_ptr));
865 m_assembler.dtr_u(ARMAssembler::LoadUint32, ARMRegisters::S1, ARMRegisters::S1, 0);
866 add32(imm, ARMRegisters::S1);
867 m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address.m_ptr));
868 m_assembler.dtr_u(ARMAssembler::StoreUint32, ARMRegisters::S1, ARMRegisters::S0, 0);
871 void sub32(TrustedImm32 imm, AbsoluteAddress address)
873 m_assembler.ldr_un_imm(ARMRegisters::S1, reinterpret_cast<ARMWord>(address.m_ptr));
874 m_assembler.dtr_u(ARMAssembler::LoadUint32, ARMRegisters::S1, ARMRegisters::S1, 0);
875 sub32(imm, ARMRegisters::S1);
876 m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address.m_ptr));
877 m_assembler.dtr_u(ARMAssembler::StoreUint32, ARMRegisters::S1, ARMRegisters::S0, 0);
880 void load32(const void* address, RegisterID dest)
882 m_assembler.ldr_un_imm(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
883 m_assembler.dtr_u(ARMAssembler::LoadUint32, dest, ARMRegisters::S0, 0);
886 Jump branch32(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
888 load32(left.m_ptr, ARMRegisters::S1);
889 return branch32(cond, ARMRegisters::S1, right);
892 Jump branch32(RelationalCondition cond, AbsoluteAddress left, TrustedImm32 right)
894 load32(left.m_ptr, ARMRegisters::S1);
895 return branch32(cond, ARMRegisters::S1, right);
898 void relativeTableJump(RegisterID index, int scale)
900 ASSERT(scale >= 0 && scale <= 31);
901 m_assembler.add_r(ARMRegisters::pc, ARMRegisters::pc, m_assembler.lsl(index, scale));
903 // NOP the default prefetching
904 m_assembler.mov_r(ARMRegisters::r0, ARMRegisters::r0);
909 ensureSpace(2 * sizeof(ARMWord), sizeof(ARMWord));
910 m_assembler.loadBranchTarget(ARMRegisters::S1, ARMAssembler::AL, true);
911 return Call(m_assembler.blx(ARMRegisters::S1), Call::Linkable);
914 Call tailRecursiveCall()
916 return Call::fromTailJump(jump());
919 Call makeTailRecursiveCall(Jump oldJump)
921 return Call::fromTailJump(oldJump);
924 DataLabelPtr moveWithPatch(TrustedImmPtr initialValue, RegisterID dest)
926 DataLabelPtr dataLabel(this);
927 m_assembler.ldr_un_imm(dest, reinterpret_cast<ARMWord>(initialValue.m_value));
931 Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
933 dataLabel = moveWithPatch(initialRightValue, ARMRegisters::S1);
934 Jump jump = branch32(cond, left, ARMRegisters::S1, true);
938 Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
940 load32(left, ARMRegisters::S1);
941 dataLabel = moveWithPatch(initialRightValue, ARMRegisters::S0);
942 Jump jump = branch32(cond, ARMRegisters::S0, ARMRegisters::S1, true);
946 DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
948 DataLabelPtr dataLabel = moveWithPatch(initialValue, ARMRegisters::S1);
949 store32(ARMRegisters::S1, address);
953 DataLabelPtr storePtrWithPatch(ImplicitAddress address)
955 return storePtrWithPatch(TrustedImmPtr(0), address);
958 // Floating point operators
959 static bool supportsFloatingPoint()
961 return s_isVFPPresent;
964 static bool supportsFloatingPointTruncate()
969 static bool supportsFloatingPointSqrt()
971 return s_isVFPPresent;
973 static bool supportsFloatingPointAbs() { return false; }
975 void loadFloat(BaseIndex address, FPRegisterID dest)
977 m_assembler.baseIndexTransferFloat(ARMAssembler::LoadFloat, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
980 void loadDouble(ImplicitAddress address, FPRegisterID dest)
982 m_assembler.dataTransferFloat(ARMAssembler::LoadDouble, dest, address.base, address.offset);
985 void loadDouble(BaseIndex address, FPRegisterID dest)
987 m_assembler.baseIndexTransferFloat(ARMAssembler::LoadDouble, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
990 void loadDouble(const void* address, FPRegisterID dest)
992 move(TrustedImm32(reinterpret_cast<ARMWord>(address)), ARMRegisters::S0);
993 m_assembler.fdtr_u(ARMAssembler::LoadDouble, dest, ARMRegisters::S0, 0);
996 void storeFloat(FPRegisterID src, BaseIndex address)
998 m_assembler.baseIndexTransferFloat(ARMAssembler::StoreFloat, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
1001 void storeDouble(FPRegisterID src, ImplicitAddress address)
1003 m_assembler.dataTransferFloat(ARMAssembler::StoreDouble, src, address.base, address.offset);
1006 void storeDouble(FPRegisterID src, BaseIndex address)
1008 m_assembler.baseIndexTransferFloat(ARMAssembler::StoreDouble, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
1011 void storeDouble(FPRegisterID src, const void* address)
1013 move(TrustedImm32(reinterpret_cast<ARMWord>(address)), ARMRegisters::S0);
1014 m_assembler.dataTransferFloat(ARMAssembler::StoreDouble, src, ARMRegisters::S0, 0);
1017 void moveDouble(FPRegisterID src, FPRegisterID dest)
1020 m_assembler.vmov_f64_r(dest, src);
1023 void addDouble(FPRegisterID src, FPRegisterID dest)
1025 m_assembler.vadd_f64_r(dest, dest, src);
1028 void addDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
1030 m_assembler.vadd_f64_r(dest, op1, op2);
1033 void addDouble(Address src, FPRegisterID dest)
1035 loadDouble(src, ARMRegisters::SD0);
1036 addDouble(ARMRegisters::SD0, dest);
1039 void addDouble(AbsoluteAddress address, FPRegisterID dest)
1041 loadDouble(address.m_ptr, ARMRegisters::SD0);
1042 addDouble(ARMRegisters::SD0, dest);
1045 void divDouble(FPRegisterID src, FPRegisterID dest)
1047 m_assembler.vdiv_f64_r(dest, dest, src);
1050 void divDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
1052 m_assembler.vdiv_f64_r(dest, op1, op2);
1055 void divDouble(Address src, FPRegisterID dest)
1057 ASSERT_NOT_REACHED(); // Untested
1058 loadDouble(src, ARMRegisters::SD0);
1059 divDouble(ARMRegisters::SD0, dest);
1062 void subDouble(FPRegisterID src, FPRegisterID dest)
1064 m_assembler.vsub_f64_r(dest, dest, src);
1067 void subDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
1069 m_assembler.vsub_f64_r(dest, op1, op2);
1072 void subDouble(Address src, FPRegisterID dest)
1074 loadDouble(src, ARMRegisters::SD0);
1075 subDouble(ARMRegisters::SD0, dest);
1078 void mulDouble(FPRegisterID src, FPRegisterID dest)
1080 m_assembler.vmul_f64_r(dest, dest, src);
1083 void mulDouble(Address src, FPRegisterID dest)
1085 loadDouble(src, ARMRegisters::SD0);
1086 mulDouble(ARMRegisters::SD0, dest);
1089 void mulDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
1091 m_assembler.vmul_f64_r(dest, op1, op2);
1094 void sqrtDouble(FPRegisterID src, FPRegisterID dest)
1096 m_assembler.vsqrt_f64_r(dest, src);
1099 void absDouble(FPRegisterID src, FPRegisterID dest)
1101 m_assembler.vabs_f64_r(dest, src);
1104 void negateDouble(FPRegisterID src, FPRegisterID dest)
1106 m_assembler.vneg_f64_r(dest, src);
    // dest = (double)src for a signed 32-bit integer in a core register.
    // 'dest << 1' addresses the single-precision register overlapping the
    // double register 'dest' (presumably its low half — the same encoding is
    // used by truncateDoubleToInt32 below); the integer bits are moved there,
    // then VCVT widens them in place to a double.
    void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
    {
        m_assembler.vmov_vfp32_r(dest << 1, src);
        m_assembler.vcvt_f64_s32_r(dest, dest << 1);
    }

    // dest = (double)*src for a 32-bit integer in memory. Clobbers S1.
    void convertInt32ToDouble(Address src, FPRegisterID dest)
    {
        load32(src, ARMRegisters::S1);
        convertInt32ToDouble(ARMRegisters::S1, dest);
    }

    // dest = (double)*src.m_ptr for a 32-bit integer at an absolute address.
    // Clobbers S1 (first as the address, then as the loaded value).
    void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
    {
        move(TrustedImmPtr(src.m_ptr), ARMRegisters::S1);
        load32(Address(ARMRegisters::S1), ARMRegisters::S1);
        convertInt32ToDouble(ARMRegisters::S1, dest);
    }
    // dst = (double)src: widen a single-precision value to double precision.
    void convertFloatToDouble(FPRegisterID src, FPRegisterID dst)
    {
        m_assembler.vcvt_f64_f32_r(dst, src);
    }
    // dst = (float)src: narrow a double-precision value to single precision.
    void convertDoubleToFloat(FPRegisterID src, FPRegisterID dst)
    {
        m_assembler.vcvt_f32_f64_r(dst, src);
    }
    // Compares 'left' against 'right' and returns a jump that is taken when
    // 'cond' holds.
    Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
    {
        m_assembler.vcmp_f64_r(left, right);
        // Transfer the FP comparison flags into the APSR so an ordinary
        // conditional branch can test them.
        m_assembler.vmrs_apsr();
        // Conditions tagged with DoubleConditionBitSpecial need extra
        // handling for the unordered (NaN) case: when the V flag is set, the
        // VS-predicated self-compare forces Z so the subsequent branch sees
        // "equal". (NOTE(review): relies on the DoubleCondition encodings
        // declared at the top of this class.)
        if (cond & DoubleConditionBitSpecial)
            m_assembler.cmp_r(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::VS);
        // Mask off the encoding-helper bits before handing the raw ARM
        // condition field to jmp().
        return Jump(m_assembler.jmp(static_cast<ARMAssembler::Condition>(cond & ~DoubleConditionMask)));
    }
    // Truncates 'src' to an integer, and places the resulting value in 'dest'.
    // If the result is not representable as a 32 bit value, branch.
    // May also branch for some values that are representable in 32 bits
    // (specifically, in this case, INT_MIN).
    enum BranchTruncateType { BranchIfTruncateFailed, BranchIfTruncateSuccessful };
    Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest, BranchTruncateType branchType = BranchIfTruncateFailed)
    {
        truncateDoubleToInt32(src, dest);

        // VCVT saturates out-of-range inputs to INT_MIN/INT_MAX. Detect both
        // saturation values with one comparison: S0 = (dest + 1) & ~1 maps
        // 0x7fffffff -> 0x80000000 and 0x80000000 -> 0x80000000, so either
        // extreme compares equal to 0x80000000 below.
        m_assembler.add_r(ARMRegisters::S0, dest, ARMAssembler::getOp2Byte(1));
        m_assembler.bic_r(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(1));

        ARMWord w = ARMAssembler::getOp2(0x80000000);
        ASSERT(w != ARMAssembler::INVALID_IMM);
        m_assembler.cmp_r(ARMRegisters::S0, w);
        // EQ (saturated) means the truncation failed; NE means it succeeded.
        return Jump(m_assembler.jmp(branchType == BranchIfTruncateFailed ? ARMAssembler::EQ : ARMAssembler::NE));
    }
    // Truncates 'src' to an unsigned integer, placing the result in 'dest',
    // and branches according to 'branchType'. Unsigned VCVT saturates
    // out-of-range inputs to 0/0xffffffff; S0 = (dest + 1) & ~1 maps both
    // saturation values (and only values adjacent to them) to 0, so the
    // compare-with-zero below flags them as failures.
    Jump branchTruncateDoubleToUint32(FPRegisterID src, RegisterID dest, BranchTruncateType branchType = BranchIfTruncateFailed)
    {
        truncateDoubleToUint32(src, dest);

        m_assembler.add_r(ARMRegisters::S0, dest, ARMAssembler::getOp2Byte(1));
        m_assembler.bic_r(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(1));

        m_assembler.cmp_r(ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        // EQ (possibly saturated) means failure; NE means success.
        return Jump(m_assembler.jmp(branchType == BranchIfTruncateFailed ? ARMAssembler::EQ : ARMAssembler::NE));
    }
    // Result is undefined if the value is outside of the integer range.
    // dest = (int32_t)src, rounding toward zero. Uses the single-precision
    // register overlapping SD0 ('SD0 << 1') as scratch for the converted bits.
    void truncateDoubleToInt32(FPRegisterID src, RegisterID dest)
    {
        m_assembler.vcvt_s32_f64_r(ARMRegisters::SD0 << 1, src);
        m_assembler.vmov_arm32_r(dest, ARMRegisters::SD0 << 1);
    }
    // dest = (uint32_t)src, rounding toward zero; result is undefined outside
    // the unsigned 32-bit range. Uses the single-precision register
    // overlapping SD0 ('SD0 << 1') as scratch for the converted bits.
    void truncateDoubleToUint32(FPRegisterID src, RegisterID dest)
    {
        m_assembler.vcvt_u32_f64_r(ARMRegisters::SD0 << 1, src);
        m_assembler.vmov_arm32_r(dest, ARMRegisters::SD0 << 1);
    }
    // Convert 'src' to an integer, and places the resulting value in 'dest'.
    // If the result is not representable as a 32 bit value, branch.
    // May also branch for some values that are representable in 32 bits
    // (specifically, in this case, 0).
    void branchConvertDoubleToInt32(FPRegisterID src, RegisterID dest, JumpList& failureCases, FPRegisterID fpTemp)
    {
        // Truncate into dest, keeping the raw converted bits in SD0's low
        // single-precision half for the round-trip check below.
        m_assembler.vcvt_s32_f64_r(ARMRegisters::SD0 << 1, src);
        m_assembler.vmov_arm32_r(dest, ARMRegisters::SD0 << 1);

        // Convert the integer result back to float & compare to the original value - if not equal or unordered (NaN) then jump.
        m_assembler.vcvt_f64_s32_r(ARMRegisters::SD0, ARMRegisters::SD0 << 1);
        failureCases.append(branchDouble(DoubleNotEqualOrUnordered, src, ARMRegisters::SD0));

        // If the result is zero, it might have been -0.0, and 0.0 equals to -0.0
        failureCases.append(branchTest32(Zero, dest));
    }
    // Returns a jump taken when 'reg' != 0.0. Materializes 0.0 in 'scratch'
    // (via an integer zero in S0) and compares against it; NaN does not take
    // the branch here since DoubleNotEqual is the ordered inequality.
    Jump branchDoubleNonZero(FPRegisterID reg, FPRegisterID scratch)
    {
        m_assembler.mov_r(ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        convertInt32ToDouble(ARMRegisters::S0, scratch);
        return branchDouble(DoubleNotEqual, reg, scratch);
    }
    // Returns a jump taken when 'reg' == 0.0 or is NaN (unordered).
    // Materializes 0.0 in 'scratch' and compares with the unordered-inclusive
    // condition.
    Jump branchDoubleZeroOrNaN(FPRegisterID reg, FPRegisterID scratch)
    {
        m_assembler.mov_r(ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        convertInt32ToDouble(ARMRegisters::S0, scratch);
        return branchDouble(DoubleEqualOrUnordered, reg, scratch);
    }
    // Invert a relational condition, e.g. == becomes !=, < becomes >=, etc.
    // ARM condition codes live in bits 28-31 and are paired such that XORing
    // bit 28 yields the logical inverse (EQ<->NE, LT<->GE, ...); the assert
    // checks the value is a pure condition field below AL (which has no pair).
    static RelationalCondition invert(RelationalCondition cond)
    {
        ASSERT((static_cast<uint32_t>(cond & 0x0fffffff)) == 0 && static_cast<uint32_t>(cond) < static_cast<uint32_t>(ARMAssembler::AL));
        return static_cast<RelationalCondition>(cond ^ 0x10000000);
    }
    // Reads back the destination of a previously linked call instruction,
    // delegating the instruction decoding to the assembler layer.
    static FunctionPtr readCallTarget(CodeLocationCall call)
    {
        return FunctionPtr(reinterpret_cast<void(*)()>(ARMAssembler::readCallTarget(call.dataLocation())));
    }
    // Overwrites the instruction(s) at 'instructionStart' with a jump to
    // 'destination' (see maxJumpReplacementSize() for the space required).
    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
    {
        ARMAssembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
    }
1242 static ptrdiff_t maxJumpReplacementSize()
1244 ARMAssembler::maxJumpReplacementSize();
    // Narrows a relational condition to the raw ARM condition field; the
    // enum values are the assembler's condition codes, so a cast suffices.
    ARMAssembler::Condition ARMCondition(RelationalCondition cond)
    {
        return static_cast<ARMAssembler::Condition>(cond);
    }

    // Same narrowing for result (flag-testing) conditions.
    ARMAssembler::Condition ARMCondition(ResultCondition cond)
    {
        return static_cast<ARMAssembler::Condition>(cond);
    }
    // Guarantees room for 'insnSpace' bytes of instructions and 'constSpace'
    // bytes of constant-pool data before the pool must be flushed.
    void ensureSpace(int insnSpace, int constSpace)
    {
        m_assembler.ensureSpace(insnSpace, constSpace);
    }
    // Returns the current size of the assembler's pending constant pool.
    int sizeOfConstantPool()
    {
        return m_assembler.sizeOfConstantPool();
    }
    // Indirect call through a function pointer stored at [base + offset]:
    // loads the target into the S1 scratch register and BLX-es through it.
    void call32(RegisterID base, int32_t offset)
    {
        load32(Address(base, offset), ARMRegisters::S1);
        m_assembler.blx(ARMRegisters::S1);
    }
1276 friend class LinkBuffer;
1277 friend class RepatchBuffer;
    // Finalizes a call recorded during assembly, pointing it at 'function'
    // within the generated code buffer 'code'.
    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        ARMAssembler::linkCall(code, call.m_label, function.value());
    }
    // Retargets an already-linked call instruction at 'destination' (label form).
    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }

    // Retargets an already-linked call instruction at 'destination' (function form).
    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }
1294 static const bool s_isVFPPresent;
1299 #endif // ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)
1301 #endif // MacroAssemblerARM_h