/*
 * Copyright (C) 2009-2011 STMicroelectronics. All rights reserved.
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
27 #ifndef MacroAssemblerSH4_h
28 #define MacroAssemblerSH4_h
30 #if ENABLE(ASSEMBLER) && CPU(SH4)
32 #include "SH4Assembler.h"
33 #include "AbstractMacroAssembler.h"
34 #include <wtf/Assertions.h>
38 class MacroAssemblerSH4 : public AbstractMacroAssembler<SH4Assembler> {
40 typedef SH4Assembler::FPRegisterID FPRegisterID;
42 static const Scale ScalePtr = TimesFour;
43 static const FPRegisterID fscratch = SH4Registers::fr10;
44 static const RegisterID stackPointerRegister = SH4Registers::sp;
45 static const RegisterID linkRegister = SH4Registers::pr;
46 static const RegisterID scratchReg3 = SH4Registers::r13;
48 static const int MaximumCompactPtrAlignedAddressOffset = 60;
50 enum RelationalCondition {
51 Equal = SH4Assembler::EQ,
52 NotEqual = SH4Assembler::NE,
53 Above = SH4Assembler::HI,
54 AboveOrEqual = SH4Assembler::HS,
55 Below = SH4Assembler::LI,
56 BelowOrEqual = SH4Assembler::LS,
57 GreaterThan = SH4Assembler::GT,
58 GreaterThanOrEqual = SH4Assembler::GE,
59 LessThan = SH4Assembler::LT,
60 LessThanOrEqual = SH4Assembler::LE
63 enum ResultCondition {
64 Overflow = SH4Assembler::OF,
65 Signed = SH4Assembler::SI,
66 Zero = SH4Assembler::EQ,
67 NonZero = SH4Assembler::NE
70 enum DoubleCondition {
71 // These conditions will only evaluate to true if the comparison is ordered - i.e. neither operand is NaN.
72 DoubleEqual = SH4Assembler::EQ,
73 DoubleNotEqual = SH4Assembler::NE,
74 DoubleGreaterThan = SH4Assembler::GT,
75 DoubleGreaterThanOrEqual = SH4Assembler::GE,
76 DoubleLessThan = SH4Assembler::LT,
77 DoubleLessThanOrEqual = SH4Assembler::LE,
78 // If either operand is NaN, these conditions always evaluate to true.
79 DoubleEqualOrUnordered = SH4Assembler::EQU,
80 DoubleNotEqualOrUnordered = SH4Assembler::NEU,
81 DoubleGreaterThanOrUnordered = SH4Assembler::GTU,
82 DoubleGreaterThanOrEqualOrUnordered = SH4Assembler::GEU,
83 DoubleLessThanOrUnordered = SH4Assembler::LTU,
84 DoubleLessThanOrEqualOrUnordered = SH4Assembler::LEU,
87 RegisterID claimScratch()
89 return m_assembler.claimScratch();
92 void releaseScratch(RegisterID reg)
94 m_assembler.releaseScratch(reg);
97 // Integer arithmetic operations
99 void add32(RegisterID src, RegisterID dest)
101 m_assembler.addlRegReg(src, dest);
104 void add32(TrustedImm32 imm, RegisterID dest)
106 if (m_assembler.isImmediate(imm.m_value)) {
107 m_assembler.addlImm8r(imm.m_value, dest);
111 RegisterID scr = claimScratch();
112 m_assembler.loadConstant(imm.m_value, scr);
113 m_assembler.addlRegReg(scr, dest);
117 void add32(TrustedImm32 imm, RegisterID src, RegisterID dest)
120 m_assembler.movlRegReg(src, dest);
124 void add32(TrustedImm32 imm, Address address)
126 RegisterID scr = claimScratch();
127 load32(address, scr);
129 store32(scr, address);
133 void add32(Address src, RegisterID dest)
135 RegisterID scr = claimScratch();
137 m_assembler.addlRegReg(scr, dest);
141 void and32(RegisterID src, RegisterID dest)
143 m_assembler.andlRegReg(src, dest);
146 void and32(TrustedImm32 imm, RegisterID dest)
148 if ((imm.m_value <= 255) && (imm.m_value >= 0) && (dest == SH4Registers::r0)) {
149 m_assembler.andlImm8r(imm.m_value, dest);
153 RegisterID scr = claimScratch();
154 m_assembler.loadConstant((imm.m_value), scr);
155 m_assembler.andlRegReg(scr, dest);
159 void and32(TrustedImm32 imm, RegisterID src, RegisterID dest)
170 void lshift32(RegisterID shiftamount, RegisterID dest)
172 if (shiftamount == SH4Registers::r0)
173 m_assembler.andlImm8r(0x1f, shiftamount);
175 RegisterID scr = claimScratch();
176 m_assembler.loadConstant(0x1f, scr);
177 m_assembler.andlRegReg(scr, shiftamount);
180 m_assembler.shllRegReg(dest, shiftamount);
183 void rshift32(int imm, RegisterID dest)
185 RegisterID scr = claimScratch();
186 m_assembler.loadConstant(-imm, scr);
187 m_assembler.shaRegReg(dest, scr);
191 void lshift32(TrustedImm32 imm, RegisterID dest)
196 if ((imm.m_value == 1) || (imm.m_value == 2) || (imm.m_value == 8) || (imm.m_value == 16)) {
197 m_assembler.shllImm8r(imm.m_value, dest);
201 RegisterID scr = claimScratch();
202 m_assembler.loadConstant((imm.m_value & 0x1f) , scr);
203 m_assembler.shllRegReg(dest, scr);
207 void lshift32(RegisterID src, TrustedImm32 shiftamount, RegisterID dest)
212 lshift32(shiftamount, dest);
215 void mul32(RegisterID src, RegisterID dest)
217 m_assembler.imullRegReg(src, dest);
218 m_assembler.stsmacl(dest);
221 void mul32(TrustedImm32 imm, RegisterID src, RegisterID dest)
223 RegisterID scr = claimScratch();
231 void or32(RegisterID src, RegisterID dest)
233 m_assembler.orlRegReg(src, dest);
236 void or32(TrustedImm32 imm, RegisterID dest)
238 if ((imm.m_value <= 255) && (imm.m_value >= 0) && (dest == SH4Registers::r0)) {
239 m_assembler.orlImm8r(imm.m_value, dest);
243 RegisterID scr = claimScratch();
244 m_assembler.loadConstant(imm.m_value, scr);
245 m_assembler.orlRegReg(scr, dest);
249 void or32(RegisterID op1, RegisterID op2, RegisterID dest)
253 else if (op1 == dest)
262 void or32(TrustedImm32 imm, RegisterID src, RegisterID dest)
273 void xor32(TrustedImm32 imm, RegisterID src, RegisterID dest)
284 void rshift32(RegisterID shiftamount, RegisterID dest)
286 if (shiftamount == SH4Registers::r0)
287 m_assembler.andlImm8r(0x1f, shiftamount);
289 RegisterID scr = claimScratch();
290 m_assembler.loadConstant(0x1f, scr);
291 m_assembler.andlRegReg(scr, shiftamount);
294 m_assembler.neg(shiftamount, shiftamount);
295 m_assembler.shaRegReg(dest, shiftamount);
298 void rshift32(TrustedImm32 imm, RegisterID dest)
300 if (imm.m_value & 0x1f)
301 rshift32(imm.m_value & 0x1f, dest);
304 void rshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
311 void sub32(RegisterID src, RegisterID dest)
313 m_assembler.sublRegReg(src, dest);
316 void sub32(TrustedImm32 imm, AbsoluteAddress address, RegisterID scratchReg)
318 RegisterID result = claimScratch();
320 m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scratchReg);
321 m_assembler.movlMemReg(scratchReg, result);
323 if (m_assembler.isImmediate(-imm.m_value))
324 m_assembler.addlImm8r(-imm.m_value, result);
326 m_assembler.loadConstant(imm.m_value, scratchReg3);
327 m_assembler.sublRegReg(scratchReg3, result);
330 store32(result, scratchReg);
331 releaseScratch(result);
334 void sub32(TrustedImm32 imm, AbsoluteAddress address)
336 RegisterID result = claimScratch();
337 RegisterID scratchReg = claimScratch();
339 m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scratchReg);
340 m_assembler.movlMemReg(scratchReg, result);
342 if (m_assembler.isImmediate(-imm.m_value))
343 m_assembler.addlImm8r(-imm.m_value, result);
345 m_assembler.loadConstant(imm.m_value, scratchReg3);
346 m_assembler.sublRegReg(scratchReg3, result);
349 store32(result, scratchReg);
350 releaseScratch(result);
351 releaseScratch(scratchReg);
354 void add32(TrustedImm32 imm, AbsoluteAddress address, RegisterID scratchReg)
356 RegisterID result = claimScratch();
358 m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scratchReg);
359 m_assembler.movlMemReg(scratchReg, result);
361 if (m_assembler.isImmediate(imm.m_value))
362 m_assembler.addlImm8r(imm.m_value, result);
364 m_assembler.loadConstant(imm.m_value, scratchReg3);
365 m_assembler.addlRegReg(scratchReg3, result);
368 store32(result, scratchReg);
369 releaseScratch(result);
372 void add32(TrustedImm32 imm, AbsoluteAddress address)
374 RegisterID result = claimScratch();
375 RegisterID scratchReg = claimScratch();
377 m_assembler.loadConstant(reinterpret_cast<uint32_t>(address.m_ptr), scratchReg);
378 m_assembler.movlMemReg(scratchReg, result);
380 if (m_assembler.isImmediate(imm.m_value))
381 m_assembler.addlImm8r(imm.m_value, result);
383 m_assembler.loadConstant(imm.m_value, scratchReg3);
384 m_assembler.addlRegReg(scratchReg3, result);
387 store32(result, scratchReg);
388 releaseScratch(result);
389 releaseScratch(scratchReg);
392 void sub32(TrustedImm32 imm, RegisterID dest)
394 if (m_assembler.isImmediate(-imm.m_value)) {
395 m_assembler.addlImm8r(-imm.m_value, dest);
399 RegisterID scr = claimScratch();
400 m_assembler.loadConstant(imm.m_value, scr);
401 m_assembler.sublRegReg(scr, dest);
405 void sub32(Address src, RegisterID dest)
407 RegisterID scr = claimScratch();
409 m_assembler.sublRegReg(scr, dest);
413 void xor32(RegisterID src, RegisterID dest)
415 m_assembler.xorlRegReg(src, dest);
418 void xor32(TrustedImm32 imm, RegisterID srcDest)
420 if (imm.m_value == -1) {
421 m_assembler.notlReg(srcDest, srcDest);
425 if ((srcDest != SH4Registers::r0) || (imm.m_value > 255) || (imm.m_value < 0)) {
426 RegisterID scr = claimScratch();
427 m_assembler.loadConstant((imm.m_value), scr);
428 m_assembler.xorlRegReg(scr, srcDest);
433 m_assembler.xorlImm8r(imm.m_value, srcDest);
436 void compare32(int imm, RegisterID dst, RelationalCondition cond)
438 if (((cond == Equal) || (cond == NotEqual)) && (dst == SH4Registers::r0) && m_assembler.isImmediate(imm)) {
439 m_assembler.cmpEqImmR0(imm, dst);
443 RegisterID scr = claimScratch();
444 m_assembler.loadConstant(imm, scr);
445 m_assembler.cmplRegReg(scr, dst, SH4Condition(cond));
449 void compare32(int offset, RegisterID base, RegisterID left, RelationalCondition cond)
451 RegisterID scr = claimScratch();
453 m_assembler.movlMemReg(base, scr);
454 m_assembler.cmplRegReg(scr, left, SH4Condition(cond));
459 if ((offset < 0) || (offset >= 64)) {
460 m_assembler.loadConstant(offset, scr);
461 m_assembler.addlRegReg(base, scr);
462 m_assembler.movlMemReg(scr, scr);
463 m_assembler.cmplRegReg(scr, left, SH4Condition(cond));
468 m_assembler.movlMemReg(offset >> 2, base, scr);
469 m_assembler.cmplRegReg(scr, left, SH4Condition(cond));
473 void testImm(int imm, int offset, RegisterID base)
475 RegisterID scr = claimScratch();
476 RegisterID scr1 = claimScratch();
478 if ((offset < 0) || (offset >= 64)) {
479 m_assembler.loadConstant(offset, scr);
480 m_assembler.addlRegReg(base, scr);
481 m_assembler.movlMemReg(scr, scr);
483 m_assembler.movlMemReg(offset >> 2, base, scr);
485 m_assembler.movlMemReg(base, scr);
486 if (m_assembler.isImmediate(imm))
487 m_assembler.movImm8(imm, scr1);
489 m_assembler.loadConstant(imm, scr1);
491 m_assembler.testlRegReg(scr, scr1);
493 releaseScratch(scr1);
496 void testlImm(int imm, RegisterID dst)
498 if ((dst == SH4Registers::r0) && (imm <= 255) && (imm >= 0)) {
499 m_assembler.testlImm8r(imm, dst);
503 RegisterID scr = claimScratch();
504 m_assembler.loadConstant(imm, scr);
505 m_assembler.testlRegReg(scr, dst);
509 void compare32(RegisterID right, int offset, RegisterID base, RelationalCondition cond)
512 RegisterID scr = claimScratch();
513 m_assembler.movlMemReg(base, scr);
514 m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
519 if ((offset < 0) || (offset >= 64)) {
520 RegisterID scr = claimScratch();
521 m_assembler.loadConstant(offset, scr);
522 m_assembler.addlRegReg(base, scr);
523 m_assembler.movlMemReg(scr, scr);
524 m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
529 RegisterID scr = claimScratch();
530 m_assembler.movlMemReg(offset >> 2, base, scr);
531 m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
535 void compare32(int imm, int offset, RegisterID base, RelationalCondition cond)
538 RegisterID scr = claimScratch();
539 RegisterID scr1 = claimScratch();
540 m_assembler.movlMemReg(base, scr);
541 m_assembler.loadConstant(imm, scr1);
542 m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
543 releaseScratch(scr1);
548 if ((offset < 0) || (offset >= 64)) {
549 RegisterID scr = claimScratch();
550 RegisterID scr1 = claimScratch();
551 m_assembler.loadConstant(offset, scr);
552 m_assembler.addlRegReg(base, scr);
553 m_assembler.movlMemReg(scr, scr);
554 m_assembler.loadConstant(imm, scr1);
555 m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
556 releaseScratch(scr1);
561 RegisterID scr = claimScratch();
562 RegisterID scr1 = claimScratch();
563 m_assembler.movlMemReg(offset >> 2, base, scr);
564 m_assembler.loadConstant(imm, scr1);
565 m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
566 releaseScratch(scr1);
570 // Memory access operation
572 void load32(ImplicitAddress address, RegisterID dest)
574 load32(address.base, address.offset, dest);
577 void load8(ImplicitAddress address, RegisterID dest)
579 load8(address.base, address.offset, dest);
582 void load8(BaseIndex address, RegisterID dest)
584 RegisterID scr = claimScratch();
585 move(address.index, scr);
586 lshift32(TrustedImm32(address.scale), scr);
587 add32(address.base, scr);
588 load8(scr, address.offset, dest);
592 void load32(BaseIndex address, RegisterID dest)
594 RegisterID scr = claimScratch();
595 move(address.index, scr);
596 lshift32(TrustedImm32(address.scale), scr);
597 add32(address.base, scr);
598 load32(scr, address.offset, dest);
602 void load32(const void* address, RegisterID dest)
604 m_assembler.loadConstant(reinterpret_cast<uint32_t>(const_cast<void*>(address)), dest);
605 m_assembler.movlMemReg(dest, dest);
608 void load32(RegisterID base, int offset, RegisterID dest)
611 m_assembler.movlMemReg(base, dest);
615 if ((offset >= 0) && (offset < 64)) {
616 m_assembler.movlMemReg(offset >> 2, base, dest);
620 if ((dest == SH4Registers::r0) && (dest != base)) {
621 m_assembler.loadConstant((offset), dest);
622 m_assembler.movlR0mr(base, dest);
628 scr = claimScratch();
631 m_assembler.loadConstant((offset), scr);
632 m_assembler.addlRegReg(base, scr);
633 m_assembler.movlMemReg(scr, dest);
639 void load8(RegisterID base, int offset, RegisterID dest)
642 m_assembler.movbMemReg(base, dest);
643 m_assembler.extub(dest, dest);
647 if ((offset > 0) && (offset < 64) && (dest == SH4Registers::r0)) {
648 m_assembler.movbMemReg(offset, base, dest);
649 m_assembler.extub(dest, dest);
654 m_assembler.loadConstant((offset), dest);
655 m_assembler.addlRegReg(base, dest);
656 m_assembler.movbMemReg(dest, dest);
657 m_assembler.extub(dest, dest);
661 RegisterID scr = claimScratch();
662 m_assembler.loadConstant((offset), scr);
663 m_assembler.addlRegReg(base, scr);
664 m_assembler.movbMemReg(scr, dest);
665 m_assembler.extub(dest, dest);
669 void load32(RegisterID r0, RegisterID src, RegisterID dst)
671 ASSERT(r0 == SH4Registers::r0);
672 m_assembler.movlR0mr(src, dst);
675 void load32(RegisterID src, RegisterID dst)
677 m_assembler.movlMemReg(src, dst);
680 void load16(ImplicitAddress address, RegisterID dest)
682 if (!address.offset) {
683 m_assembler.movwMemReg(address.base, dest);
688 if ((address.offset > 0) && (address.offset < 64) && (dest == SH4Registers::r0)) {
689 m_assembler.movwMemReg(address.offset, address.base, dest);
694 if (address.base != dest) {
695 m_assembler.loadConstant((address.offset), dest);
696 m_assembler.addlRegReg(address.base, dest);
697 m_assembler.movwMemReg(dest, dest);
702 RegisterID scr = claimScratch();
703 m_assembler.loadConstant((address.offset), scr);
704 m_assembler.addlRegReg(address.base, scr);
705 m_assembler.movwMemReg(scr, dest);
710 void load16Unaligned(BaseIndex address, RegisterID dest)
713 RegisterID scr = claimScratch();
714 RegisterID scr1 = claimScratch();
716 move(address.index, scr);
717 lshift32(TrustedImm32(address.scale), scr);
720 add32(TrustedImm32(address.offset), scr);
722 add32(address.base, scr);
724 add32(TrustedImm32(1), scr);
726 m_assembler.shllImm8r(8, dest);
730 releaseScratch(scr1);
733 void load16(RegisterID src, RegisterID dest)
735 m_assembler.movwMemReg(src, dest);
739 void load16(RegisterID r0, RegisterID src, RegisterID dest)
741 ASSERT(r0 == SH4Registers::r0);
742 m_assembler.movwR0mr(src, dest);
746 void load16(BaseIndex address, RegisterID dest)
748 RegisterID scr = claimScratch();
750 move(address.index, scr);
751 lshift32(TrustedImm32(address.scale), scr);
754 add32(TrustedImm32(address.offset), scr);
755 if (address.base == SH4Registers::r0)
756 load16(address.base, scr, dest);
758 add32(address.base, scr);
765 void store32(RegisterID src, ImplicitAddress address)
767 RegisterID scr = claimScratch();
768 store32(src, address.offset, address.base, scr);
772 void store32(RegisterID src, int offset, RegisterID base, RegisterID scr)
775 m_assembler.movlRegMem(src, base);
779 if ((offset >=0) && (offset < 64)) {
780 m_assembler.movlRegMem(src, offset >> 2, base);
784 m_assembler.loadConstant((offset), scr);
785 if (scr == SH4Registers::r0) {
786 m_assembler.movlRegMemr0(src, base);
790 m_assembler.addlRegReg(base, scr);
791 m_assembler.movlRegMem(src, scr);
794 void store32(RegisterID src, RegisterID offset, RegisterID base)
796 ASSERT(offset == SH4Registers::r0);
797 m_assembler.movlRegMemr0(src, base);
800 void store32(RegisterID src, RegisterID dst)
802 m_assembler.movlRegMem(src, dst);
805 void store32(TrustedImm32 imm, ImplicitAddress address)
807 RegisterID scr = claimScratch();
808 RegisterID scr1 = claimScratch();
809 m_assembler.loadConstant((imm.m_value), scr);
810 store32(scr, address.offset, address.base, scr1);
812 releaseScratch(scr1);
815 void store32(RegisterID src, BaseIndex address)
817 RegisterID scr = claimScratch();
819 move(address.index, scr);
820 lshift32(TrustedImm32(address.scale), scr);
821 add32(address.base, scr);
822 store32(src, Address(scr, address.offset));
827 void store32(TrustedImm32 imm, void* address)
829 RegisterID scr = claimScratch();
830 RegisterID scr1 = claimScratch();
831 m_assembler.loadConstant((imm.m_value), scr);
832 m_assembler.loadConstant(reinterpret_cast<uint32_t>(address), scr1);
833 m_assembler.movlRegMem(scr, scr1);
835 releaseScratch(scr1);
838 void store32(RegisterID src, void* address)
840 RegisterID scr = claimScratch();
841 m_assembler.loadConstant(reinterpret_cast<uint32_t>(address), scr);
842 m_assembler.movlRegMem(src, scr);
846 DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
848 RegisterID scr = claimScratch();
849 DataLabel32 label(this);
850 m_assembler.loadConstantUnReusable(address.offset, scr);
851 m_assembler.addlRegReg(address.base, scr);
852 m_assembler.movlMemReg(scr, dest);
857 DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
859 RegisterID scr = claimScratch();
860 DataLabel32 label(this);
861 m_assembler.loadConstantUnReusable(address.offset, scr);
862 m_assembler.addlRegReg(address.base, scr);
863 m_assembler.movlRegMem(src, scr);
868 DataLabelCompact load32WithCompactAddressOffsetPatch(Address address, RegisterID dest)
870 DataLabelCompact dataLabel(this);
871 ASSERT(address.offset <= MaximumCompactPtrAlignedAddressOffset);
872 ASSERT(address.offset >= 0);
873 m_assembler.movlMemRegCompact(address.offset >> 2, address.base, dest);
// Floating-point operations

static bool supportsFloatingPoint() { return true; }
static bool supportsFloatingPointTruncate() { return true; }
static bool supportsFloatingPointSqrt() { return true; }
static bool supportsFloatingPointAbs() { return false; }
884 void loadDouble(ImplicitAddress address, FPRegisterID dest)
886 RegisterID scr = claimScratch();
888 m_assembler.loadConstant(address.offset, scr);
889 if (address.base == SH4Registers::r0) {
890 m_assembler.fmovsReadr0r(scr, (FPRegisterID)(dest + 1));
891 m_assembler.addlImm8r(4, scr);
892 m_assembler.fmovsReadr0r(scr, dest);
897 m_assembler.addlRegReg(address.base, scr);
898 m_assembler.fmovsReadrminc(scr, (FPRegisterID)(dest + 1));
899 m_assembler.fmovsReadrm(scr, dest);
903 void loadDouble(const void* address, FPRegisterID dest)
905 RegisterID scr = claimScratch();
906 m_assembler.loadConstant(reinterpret_cast<uint32_t>(address), scr);
907 m_assembler.fmovsReadrminc(scr, (FPRegisterID)(dest + 1));
908 m_assembler.fmovsReadrm(scr, dest);
912 void storeDouble(FPRegisterID src, ImplicitAddress address)
914 RegisterID scr = claimScratch();
915 m_assembler.loadConstant(address.offset, scr);
916 m_assembler.addlRegReg(address.base, scr);
917 m_assembler.fmovsWriterm((FPRegisterID)(src + 1), scr);
918 m_assembler.addlImm8r(4, scr);
919 m_assembler.fmovsWriterm(src, scr);
923 void addDouble(FPRegisterID src, FPRegisterID dest)
925 m_assembler.daddRegReg(src, dest);
928 void addDouble(Address address, FPRegisterID dest)
930 loadDouble(address, fscratch);
931 addDouble(fscratch, dest);
934 void subDouble(FPRegisterID src, FPRegisterID dest)
936 m_assembler.dsubRegReg(src, dest);
939 void subDouble(Address address, FPRegisterID dest)
941 loadDouble(address, fscratch);
942 subDouble(fscratch, dest);
945 void mulDouble(FPRegisterID src, FPRegisterID dest)
947 m_assembler.dmulRegReg(src, dest);
950 void mulDouble(Address address, FPRegisterID dest)
952 loadDouble(address, fscratch);
953 mulDouble(fscratch, dest);
956 void divDouble(FPRegisterID src, FPRegisterID dest)
958 m_assembler.ddivRegReg(src, dest);
961 void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
963 m_assembler.ldsrmfpul(src);
964 m_assembler.floatfpulDreg(dest);
967 void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
969 RegisterID scr = claimScratch();
970 m_assembler.loadConstant(reinterpret_cast<uint32_t>(src.m_ptr), scr);
971 convertInt32ToDouble(scr, dest);
975 void convertInt32ToDouble(Address src, FPRegisterID dest)
977 RegisterID scr = claimScratch();
979 convertInt32ToDouble(scr, dest);
983 void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
985 RegisterID scr = claimScratch();
986 RegisterID scr1 = claimScratch();
990 if (dest != SH4Registers::r0)
991 move(SH4Registers::r0, scr1);
993 move(address.index, scr);
994 lshift32(TrustedImm32(address.scale), scr);
995 add32(address.base, scr);
998 add32(TrustedImm32(address.offset), scr);
1000 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 68, sizeof(uint32_t));
1001 move(scr, SH4Registers::r0);
1002 m_assembler.andlImm8r(0x3, SH4Registers::r0);
1003 m_assembler.cmpEqImmR0(0x0, SH4Registers::r0);
1004 m_jump = Jump(m_assembler.jne(), SH4Assembler::JumpNear);
1005 if (dest != SH4Registers::r0)
1006 move(scr1, SH4Registers::r0);
1009 end.append(Jump(m_assembler.bra(), SH4Assembler::JumpNear));
1012 m_assembler.andlImm8r(0x1, SH4Registers::r0);
1013 m_assembler.cmpEqImmR0(0x0, SH4Registers::r0);
1015 if (dest != SH4Registers::r0)
1016 move(scr1, SH4Registers::r0);
1018 m_jump = Jump(m_assembler.jne(), SH4Assembler::JumpNear);
1020 add32(TrustedImm32(2), scr);
1022 m_assembler.shllImm8r(16, dest);
1024 end.append(Jump(m_assembler.bra(), SH4Assembler::JumpNear));
1028 add32(TrustedImm32(1), scr);
1030 m_assembler.shllImm8r(8, dest);
1032 add32(TrustedImm32(2), scr);
1034 m_assembler.shllImm8r(8, dest);
1035 m_assembler.shllImm8r(16, dest);
1039 releaseScratch(scr);
1040 releaseScratch(scr1);
1043 Jump branch32WithUnalignedHalfWords(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
1045 RegisterID scr = scratchReg3;
1046 load32WithUnalignedHalfWords(left, scr);
1047 if (((cond == Equal) || (cond == NotEqual)) && !right.m_value)
1048 m_assembler.testlRegReg(scr, scr);
1050 compare32(right.m_value, scr, cond);
1052 if (cond == NotEqual)
1053 return branchFalse();
1054 return branchTrue();
1057 Jump branchDoubleNonZero(FPRegisterID reg, FPRegisterID scratch)
1059 m_assembler.movImm8(0, scratchReg3);
1060 convertInt32ToDouble(scratchReg3, scratch);
1061 return branchDouble(DoubleNotEqual, reg, scratch);
1064 Jump branchDoubleZeroOrNaN(FPRegisterID reg, FPRegisterID scratch)
1066 m_assembler.movImm8(0, scratchReg3);
1067 convertInt32ToDouble(scratchReg3, scratch);
1068 return branchDouble(DoubleEqualOrUnordered, reg, scratch);
1071 Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
1073 if (cond == DoubleEqual) {
1074 m_assembler.dcmppeq(right, left);
1075 return branchTrue();
1078 if (cond == DoubleNotEqual) {
1079 RegisterID scr = claimScratch();
1081 m_assembler.loadConstant(0x7fbfffff, scratchReg3);
1082 m_assembler.dcnvds(right);
1083 m_assembler.stsfpulReg(scr);
1084 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1085 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
1086 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1087 m_assembler.dcnvds(left);
1088 m_assembler.stsfpulReg(scr);
1089 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1090 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1091 m_assembler.dcmppeq(right, left);
1092 releaseScratch(scr);
1093 Jump m_jump = branchFalse();
1098 if (cond == DoubleGreaterThan) {
1099 m_assembler.dcmppgt(right, left);
1100 return branchTrue();
1103 if (cond == DoubleGreaterThanOrEqual) {
1104 m_assembler.dcmppgt(left, right);
1105 return branchFalse();
1108 if (cond == DoubleLessThan) {
1109 m_assembler.dcmppgt(left, right);
1110 return branchTrue();
1113 if (cond == DoubleLessThanOrEqual) {
1114 m_assembler.dcmppgt(right, left);
1115 return branchFalse();
1118 if (cond == DoubleEqualOrUnordered) {
1119 RegisterID scr = claimScratch();
1121 m_assembler.loadConstant(0x7fbfffff, scratchReg3);
1122 m_assembler.dcnvds(right);
1123 m_assembler.stsfpulReg(scr);
1124 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1125 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
1126 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1127 m_assembler.dcnvds(left);
1128 m_assembler.stsfpulReg(scr);
1129 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1130 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1131 m_assembler.dcmppeq(left, right);
1132 Jump m_jump = Jump(m_assembler.je());
1134 m_assembler.extraInstrForBranch(scr);
1135 releaseScratch(scr);
1139 if (cond == DoubleGreaterThanOrUnordered) {
1140 RegisterID scr = claimScratch();
1142 m_assembler.loadConstant(0x7fbfffff, scratchReg3);
1143 m_assembler.dcnvds(right);
1144 m_assembler.stsfpulReg(scr);
1145 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1146 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
1147 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1148 m_assembler.dcnvds(left);
1149 m_assembler.stsfpulReg(scr);
1150 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1151 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1152 m_assembler.dcmppgt(right, left);
1153 Jump m_jump = Jump(m_assembler.je());
1155 m_assembler.extraInstrForBranch(scr);
1156 releaseScratch(scr);
1160 if (cond == DoubleGreaterThanOrEqualOrUnordered) {
1161 RegisterID scr = claimScratch();
1163 m_assembler.loadConstant(0x7fbfffff, scratchReg3);
1164 m_assembler.dcnvds(right);
1165 m_assembler.stsfpulReg(scr);
1166 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1167 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
1168 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1169 m_assembler.dcnvds(left);
1170 m_assembler.stsfpulReg(scr);
1171 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1172 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1173 m_assembler.dcmppgt(left, right);
1174 Jump m_jump = Jump(m_assembler.jne());
1176 m_assembler.extraInstrForBranch(scr);
1177 releaseScratch(scr);
1181 if (cond == DoubleLessThanOrUnordered) {
1182 RegisterID scr = claimScratch();
1184 m_assembler.loadConstant(0x7fbfffff, scratchReg3);
1185 m_assembler.dcnvds(right);
1186 m_assembler.stsfpulReg(scr);
1187 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1188 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
1189 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1190 m_assembler.dcnvds(left);
1191 m_assembler.stsfpulReg(scr);
1192 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1193 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1194 m_assembler.dcmppgt(left, right);
1195 Jump m_jump = Jump(m_assembler.je());
1197 m_assembler.extraInstrForBranch(scr);
1198 releaseScratch(scr);
1202 if (cond == DoubleLessThanOrEqualOrUnordered) {
1203 RegisterID scr = claimScratch();
1205 m_assembler.loadConstant(0x7fbfffff, scratchReg3);
1206 m_assembler.dcnvds(right);
1207 m_assembler.stsfpulReg(scr);
1208 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1209 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
1210 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1211 m_assembler.dcnvds(left);
1212 m_assembler.stsfpulReg(scr);
1213 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1214 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1215 m_assembler.dcmppgt(right, left);
1216 Jump m_jump = Jump(m_assembler.jne());
1218 m_assembler.extraInstrForBranch(scr);
1219 releaseScratch(scr);
1223 ASSERT(cond == DoubleNotEqualOrUnordered);
1224 RegisterID scr = claimScratch();
1226 m_assembler.loadConstant(0x7fbfffff, scratchReg3);
1227 m_assembler.dcnvds(right);
1228 m_assembler.stsfpulReg(scr);
1229 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1230 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 22, sizeof(uint32_t));
1231 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1232 m_assembler.dcnvds(left);
1233 m_assembler.stsfpulReg(scr);
1234 m_assembler.cmplRegReg(scratchReg3, scr, SH4Condition(Equal));
1235 end.append(Jump(m_assembler.je(), SH4Assembler::JumpNear));
1236 m_assembler.dcmppeq(right, left);
1237 Jump m_jump = Jump(m_assembler.jne());
1239 m_assembler.extraInstrForBranch(scr);
1240 releaseScratch(scr);
1246 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 6, sizeof(uint32_t));
1247 Jump m_jump = Jump(m_assembler.je());
1248 m_assembler.extraInstrForBranch(scratchReg3);
1254 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 6, sizeof(uint32_t));
1255 Jump m_jump = Jump(m_assembler.jne());
1256 m_assembler.extraInstrForBranch(scratchReg3);
1260 Jump branch32(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
1262 RegisterID scr = claimScratch();
1263 move(left.index, scr);
1264 lshift32(TrustedImm32(left.scale), scr);
1265 add32(left.base, scr);
1266 load32(scr, left.offset, scr);
1267 compare32(right.m_value, scr, cond);
1268 releaseScratch(scr);
1270 if (cond == NotEqual)
1271 return branchFalse();
1272 return branchTrue();
1275 void sqrtDouble(FPRegisterID src, FPRegisterID dest)
1278 m_assembler.dmovRegReg(src, dest);
1279 m_assembler.dsqrt(dest);
1282 void absDouble(FPRegisterID, FPRegisterID)
1284 ASSERT_NOT_REACHED();
// Branch on (byte at address) & mask; cond must be Zero or NonZero
// (asserted inside branchTest32).
// NOTE(review): the trailing "return jmp;" line is elided from this extract.
1287 Jump branchTest8(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
1289 RegisterID addressTempRegister = claimScratch();
1290 load8(address, addressTempRegister);
1291 Jump jmp = branchTest32(cond, addressTempRegister, mask);
1292 releaseScratch(addressTempRegister);
// Same as branchTest8(Address, ...) but for an absolute pointer: materialize
// the pointer, load the byte it addresses, then test.
// NOTE(review): the trailing "return jmp;" line is elided from this extract.
1296 Jump branchTest8(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
1298 RegisterID addressTempRegister = claimScratch();
1299 move(TrustedImmPtr(address.m_ptr), addressTempRegister);
1300 load8(Address(addressTempRegister), addressTempRegister);
1301 Jump jmp = branchTest32(cond, addressTempRegister, mask);
1302 releaseScratch(addressTempRegister);
// Pointers are 32-bit on SH4, so this is expected to reduce to a plain move.
// NOTE(review): the body is elided from this extract — confirm in the file.
1306 void signExtend32ToPtr(RegisterID src, RegisterID dest)
// Compare the byte at 'left' (zero/sign handling per load8) with an
// immediate and branch on the relational condition.
// NOTE(review): the trailing "return jmp;" line is elided from this extract.
1312 Jump branch8(RelationalCondition cond, Address left, TrustedImm32 right)
1314 RegisterID addressTempRegister = claimScratch();
1315 load8(left, addressTempRegister);
1316 Jump jmp = branch32(cond, addressTempRegister, right);
1317 releaseScratch(addressTempRegister);
// dest = ((byte at left) cond right) ? 1 : 0, via the 32-bit compare path.
1321 void compare8(RelationalCondition cond, Address left, TrustedImm32 right, RegisterID dest)
1323 RegisterID addressTempRegister = claimScratch();
1324 load8(left, addressTempRegister);
1325 compare32(cond, addressTempRegister, right, dest);
1326 releaseScratch(addressTempRegister);
// Truncate src to a 32-bit integer in dest (FTRC via FPUL). Returns a jump
// taken on failure, i.e. when FTRC saturated to 0x7fffffff or 0x80000000.
1329 Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest)
1331 m_assembler.ftrcdrmfpul(src);
1332 m_assembler.stsfpulReg(dest);
// First check against INT_MAX...
1333 m_assembler.loadConstant(0x7fffffff, scratchReg3);
1334 m_assembler.cmplRegReg(dest, scratchReg3, SH4Condition(Equal));
1335 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 14, sizeof(uint32_t));
// ...if it matched, BT skips the next two instructions with T still set;
// otherwise also test INT_MIN (0x7fffffff + 1 == 0x80000000).
1336 m_assembler.branch(BT_OPCODE, 2);
1337 m_assembler.addlImm8r(1, scratchReg3);
1338 m_assembler.cmplRegReg(dest, scratchReg3, SH4Condition(Equal));
1339 return branchTrue();
1342 // Stack manipulation operations
// Pop the top of the stack into dest.
1344 void pop(RegisterID dest)
1346 m_assembler.popReg(dest);
// Push src onto the stack.
1349 void push(RegisterID src)
1351 m_assembler.pushReg(src);
1354 void push(Address address)
1356 if (!address.offset) {
1361 if ((address.offset < 0) || (address.offset >= 64)) {
1362 RegisterID scr = claimScratch();
1363 m_assembler.loadConstant(address.offset, scr);
1364 m_assembler.addlRegReg(address.base, scr);
1365 m_assembler.movlMemReg(scr, SH4Registers::sp);
1366 m_assembler.addlImm8r(-4, SH4Registers::sp);
1367 releaseScratch(scr);
1371 m_assembler.movlMemReg(address.offset >> 2, address.base, SH4Registers::sp);
1372 m_assembler.addlImm8r(-4, SH4Registers::sp);
// Push a constant: materialize it in a scratch register, then push.
// NOTE(review): the line that actually pushes (between loadConstant and
// releaseScratch) is elided from this extract — presumably pushReg(scr).
1375 void push(TrustedImm32 imm)
1377 RegisterID scr = claimScratch();
1378 m_assembler.loadConstant(imm.m_value, scr);
1380 releaseScratch(scr);
1383 // Register move operations
// Load a 32-bit constant into dest.
1385 void move(TrustedImm32 imm, RegisterID dest)
1387 m_assembler.loadConstant(imm.m_value, dest);
// Emit a constant load whose value can be repatched later; the returned
// DataLabelPtr marks the location to patch. loadConstantUnReusable
// presumably keeps the constant out of any shared pool so patching is safe.
// NOTE(review): the "return dataLabel;" line is elided from this extract.
1390 DataLabelPtr moveWithPatch(TrustedImmPtr initialValue, RegisterID dest)
1392 m_assembler.ensureSpace(m_assembler.maxInstructionSize, sizeof(uint32_t));
1393 DataLabelPtr dataLabel(this);
1394 m_assembler.loadConstantUnReusable(reinterpret_cast<uint32_t>(initialValue.m_value), dest);
// Register-to-register move.
// NOTE(review): one line before movlRegReg is elided from this extract —
// presumably an "if (src != dest)" guard; confirm in the file.
1398 void move(RegisterID src, RegisterID dest)
1401 m_assembler.movlRegReg(src, dest);
// Load a pointer constant into dest (pointers are 32-bit on SH4).
1404 void move(TrustedImmPtr imm, RegisterID dest)
1406 m_assembler.loadConstant(imm.asIntptr(), dest);
// Zero-extend the low 16 bits of src into dst (SH4 EXTU.W).
1409 void extuw(RegisterID src, RegisterID dst)
1411 m_assembler.extuw(src, dst);
// dest = (left cond right) ? 1 : 0.
1414 void compare32(RelationalCondition cond, RegisterID left, RegisterID right, RegisterID dest)
1416 m_assembler.cmplRegReg(right, left, SH4Condition(cond));
// For directly-computable conditions the T bit already holds the answer;
// copy it out. (An early "return" after movt appears to be elided here.)
1417 if (cond != NotEqual) {
1418 m_assembler.movt(dest);
// NotEqual: the hardware tested Equal, so materialize the inverse of T:
// dest = 0; if T (equal) skip; else dest = 1.
1422 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 4);
1423 m_assembler.movImm8(0, dest);
1424 m_assembler.branch(BT_OPCODE, 0);
1425 m_assembler.movImm8(1, dest);
// dest = (left cond imm) ? 1 : 0. The immediate is first materialized in a
// register, reusing dest when possible to avoid claiming a scratch.
// NOTE(review): the surrounding control flow (the condition selecting the
// dest-reuse path vs. the scratch path, and the move of 'right' into the
// temporary) is elided from this extract.
1428 void compare32(RelationalCondition cond, RegisterID left, TrustedImm32 right, RegisterID dest)
1432 compare32(cond, left, dest, dest);
1436 RegisterID scr = claimScratch();
1438 compare32(cond, left, scr, dest);
1439 releaseScratch(scr);
// dest = (((byte at address) & mask) cond 0) ? 1 : 0, cond in {Zero, NonZero}.
1442 void test8(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
1444 ASSERT((cond == Zero) || (cond == NonZero));
1446 load8(address, dest);
// mask == -1 means "test the whole byte": compare against zero directly.
// (An "else" line appears to be elided before testlImm.)
1447 if (mask.m_value == -1)
1448 compare32(0, dest, static_cast<RelationalCondition>(cond));
1450 testlImm(mask.m_value, dest);
// Zero: T holds the answer, copy it. (An early "return" appears elided.)
1451 if (cond != NonZero) {
1452 m_assembler.movt(dest);
// NonZero: materialize the inverse of T, as in compare32 above.
1456 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 4);
1457 m_assembler.movImm8(0, dest);
1458 m_assembler.branch(BT_OPCODE, 0);
1459 m_assembler.movImm8(1, dest);
// Load the word at 'address' into the PR (procedure return) link register.
1462 void loadPtrLinkReg(ImplicitAddress address)
1464 RegisterID scr = claimScratch();
1465 load32(address, scr);
1466 m_assembler.ldspr(scr);
1467 releaseScratch(scr);
// Compare two registers and branch on the relational condition.
1470 Jump branch32(RelationalCondition cond, RegisterID left, RegisterID right)
1472 m_assembler.cmplRegReg(right, left, SH4Condition(cond));
// NotEqual cannot be tested directly by CMP/xx, so it branches on the
// inverse of the Equal test (BF instead of BT); all other conditions
// branch on T set.
1478 if (cond == NotEqual)
1479 return branchFalse();
1480 return branchTrue();
// Compare a register against an immediate and branch on the condition.
// Comparing (in)equality with zero uses TST reg,reg to avoid materializing
// the constant. (An "else" line appears elided before compare32.)
1483 Jump branch32(RelationalCondition cond, RegisterID left, TrustedImm32 right)
1485 if (((cond == Equal) || (cond == NotEqual)) && !right.m_value)
1486 m_assembler.testlRegReg(left, left);
1488 compare32(right.m_value, left, cond);
1490 if (cond == NotEqual)
1491 return branchFalse();
1492 return branchTrue();
// Compare a register against the word at 'right' and branch.
1495 Jump branch32(RelationalCondition cond, RegisterID left, Address right)
1497 compare32(right.offset, right.base, left, cond);
1498 if (cond == NotEqual)
1499 return branchFalse();
1500 return branchTrue();
// Compare the word at 'left' against a register and branch.
1503 Jump branch32(RelationalCondition cond, Address left, RegisterID right)
1505 compare32(right, left.offset, left.base, cond);
1506 if (cond == NotEqual)
1507 return branchFalse();
1508 return branchTrue();
// Compare the word at 'left' against an immediate and branch.
1511 Jump branch32(RelationalCondition cond, Address left, TrustedImm32 right)
1513 compare32(right.m_value, left.offset, left.base, cond);
1514 if (cond == NotEqual)
1515 return branchFalse();
1516 return branchTrue();
1519 Jump branch32(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
1521 RegisterID scr = claimScratch();
1523 move(TrustedImm32(reinterpret_cast<uint32_t>(left.m_ptr)), scr);
1524 m_assembler.cmplRegReg(right, scr, SH4Condition(cond));
1525 releaseScratch(scr);
1527 if (cond == NotEqual)
1528 return branchFalse();
1529 return branchTrue();
// Compare the 32-bit value stored at left.m_ptr against an immediate:
// materialize the pointer, dereference it, then compare.
1532 Jump branch32(RelationalCondition cond, AbsoluteAddress left, TrustedImm32 right)
1534 RegisterID addressTempRegister = claimScratch();
1536 m_assembler.loadConstant(reinterpret_cast<uint32_t>(left.m_ptr), addressTempRegister);
1537 m_assembler.movlMemReg(addressTempRegister, addressTempRegister);
1538 compare32(right.m_value, addressTempRegister, cond);
1539 releaseScratch(addressTempRegister);
1541 if (cond == NotEqual)
1542 return branchFalse();
1543 return branchTrue();
// Compare the byte at [base + (index << scale) + offset] with an immediate
// (must fit in a byte) and branch.
// NOTE(review): a line between add32(left.base, scr) and the scr1 claim is
// elided from this extract — presumably the load8 of the byte into scr.
1546 Jump branch8(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
1548 ASSERT(!(right.m_value & 0xFFFFFF00));
1549 RegisterID scr = claimScratch();
1551 move(left.index, scr);
1552 lshift32(TrustedImm32(left.scale), scr);
1555 add32(TrustedImm32(left.offset), scr);
1556 add32(left.base, scr);
1558 RegisterID scr1 = claimScratch();
1559 m_assembler.loadConstant(right.m_value, scr1);
// The scratches are released before use below; presumably safe only because
// branch32(cond, Reg, Reg) claims no scratch itself — verify if modifying.
1560 releaseScratch(scr);
1561 releaseScratch(scr1);
1563 return branch32(cond, scr, scr1);
1566 Jump branchTest32(ResultCondition cond, RegisterID reg, RegisterID mask)
1568 ASSERT((cond == Zero) || (cond == NonZero));
1570 m_assembler.testlRegReg(reg, mask);
1572 if (cond == NotEqual)
1573 return branchFalse();
1574 return branchTrue();
// Branch on (reg & mask) being zero / non-zero; mask == -1 tests the whole
// register with TST reg,reg. (An "else" line appears elided before testlImm.)
1577 Jump branchTest32(ResultCondition cond, RegisterID reg, TrustedImm32 mask = TrustedImm32(-1))
1579 ASSERT((cond == Zero) || (cond == NonZero));
1581 if (mask.m_value == -1)
1582 m_assembler.testlRegReg(reg, reg);
1584 testlImm(mask.m_value, reg);
1586 if (cond == NotEqual)
1587 return branchFalse();
1588 return branchTrue();
// Branch on ((word at address) & mask) being zero / non-zero.
// (An "else" line appears elided before testImm.)
1591 Jump branchTest32(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
1593 ASSERT((cond == Zero) || (cond == NonZero));
1595 if (mask.m_value == -1)
1596 compare32(0, address.offset, address.base, static_cast<RelationalCondition>(cond));
1598 testImm(mask.m_value, address.offset, address.base);
1600 if (cond == NotEqual)
1601 return branchFalse();
1602 return branchTrue();
// Branch on ((word at base + (index << scale) + offset) & mask).
// (An "else" line appears elided before testlImm.)
1605 Jump branchTest32(ResultCondition cond, BaseIndex address, TrustedImm32 mask = TrustedImm32(-1))
1607 RegisterID scr = claimScratch();
// Compute the effective address, then load the word into the same scratch.
1609 move(address.index, scr);
1610 lshift32(TrustedImm32(address.scale), scr);
1611 add32(address.base, scr);
1612 load32(scr, address.offset, scr);
1614 if (mask.m_value == -1)
1615 m_assembler.testlRegReg(scr, scr);
1617 testlImm(mask.m_value, scr);
1619 releaseScratch(scr);
1621 if (cond == NotEqual)
1622 return branchFalse();
1623 return branchTrue();
1628 return Jump(m_assembler.jmp());
// Indirect jump through a register.
1631 void jump(RegisterID target)
1633 m_assembler.jmpReg(target);
// Load a code pointer from memory and jump through it.
1636 void jump(Address address)
1638 RegisterID scr = claimScratch();
// Large/negative offsets need the address computed in a register; small
// positive ones use the scaled displacement form. (An "else" line for the
// zero-offset case appears elided before line 1647.)
1640 if ((address.offset < 0) || (address.offset >= 64)) {
1641 m_assembler.loadConstant(address.offset, scr);
1642 m_assembler.addlRegReg(address.base, scr);
1643 m_assembler.movlMemReg(scr, scr);
1644 } else if (address.offset)
1645 m_assembler.movlMemReg(address.offset >> 2, address.base, scr);
1647 m_assembler.movlMemReg(address.base, scr);
1648 m_assembler.jmpReg(scr);
// Releasing after the jump is only scratch-pool bookkeeping.
1650 releaseScratch(scr);
1653 // Arithmetic control flow operations
// dest += src; branch on Overflow / Signed / Zero / NonZero.
1655 Jump branchAdd32(ResultCondition cond, RegisterID src, RegisterID dest)
1657 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
// ADDV sets T on signed overflow.
1659 if (cond == Overflow) {
1660 m_assembler.addvlRegReg(src, dest);
1661 return branchTrue();
1664 if (cond == Signed) {
1665 m_assembler.addlRegReg(src, dest);
1666 // Check if dest is negative
// CMP/PZ sets T when dest >= 0, so branch when T is clear.
1667 m_assembler.cmppz(dest);
1668 return branchFalse();
// Zero / NonZero: add, then compare the result against zero.
1671 m_assembler.addlRegReg(src, dest);
1672 compare32(0, dest, Equal);
1674 if (cond == NotEqual)
1675 return branchFalse();
1676 return branchTrue();
// dest += imm, via the register variant. Note: clobbers scratchReg3 (r13).
1679 Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
1681 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
1683 move(imm, scratchReg3);
1684 return branchAdd32(cond, scratchReg3, dest);
// dest = src + imm; branch on Overflow / Signed / Zero / NonZero.
// Clobbers scratchReg3 on the Overflow path.
// NOTE(review): several lines are elided from this extract — presumably the
// copy of src into dest and the actual add on the non-overflow paths;
// confirm against the full file before modifying.
1687 Jump branchAdd32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
1689 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
1694 if (cond == Overflow) {
1695 move(imm, scratchReg3);
1696 m_assembler.addvlRegReg(scratchReg3, dest);
1697 return branchTrue();
1702 if (cond == Signed) {
1703 m_assembler.cmppz(dest);
1704 return branchFalse();
1707 compare32(0, dest, Equal);
1709 if (cond == NotEqual)
1710 return branchFalse();
1711 return branchTrue();
// dest *= src; branch on Overflow / Signed / Zero / NonZero.
1714 Jump branchMul32(ResultCondition cond, RegisterID src, RegisterID dest)
1716 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
// Overflow: do a full 64-bit multiply (result in MACH:MACL), then the
// product fits 32 bits iff MACH equals the sign-extension of MACL, i.e.
// MACL >> 31 (arithmetic). Branch when they differ.
1718 if (cond == Overflow) {
1719 RegisterID scr1 = claimScratch();
1720 RegisterID scr = claimScratch();
1721 m_assembler.dmullRegReg(src, dest);
1722 m_assembler.stsmacl(dest);
// scr1 = dest >> 31 arithmetic (SHAD with a count of -31).
1723 m_assembler.movImm8(-31, scr);
1724 m_assembler.movlRegReg(dest, scr1);
1725 m_assembler.shaRegReg(scr1, scr);
1726 m_assembler.stsmach(scr);
1727 m_assembler.cmplRegReg(scr, scr1, SH4Condition(Equal));
1728 releaseScratch(scr1);
1729 releaseScratch(scr);
// T set means "no overflow", so branch on T clear.
1730 return branchFalse();
// Other conditions: 32-bit multiply, result via MACL.
1733 m_assembler.imullRegReg(src, dest);
1734 m_assembler.stsmacl(dest);
1735 if (cond == Signed) {
1736 // Check if dest is negative
1737 m_assembler.cmppz(dest);
1738 return branchFalse();
1741 compare32(0, dest, static_cast<RelationalCondition>(cond));
1743 if (cond == NotEqual)
1744 return branchFalse();
1745 return branchTrue();
// dest = src * imm, via the register variant. Clobbers scratchReg3.
// NOTE(review): lines between the move and the return are elided from this
// extract — presumably the copy of src into dest; confirm in the file.
1748 Jump branchMul32(ResultCondition cond, TrustedImm32 imm, RegisterID src, RegisterID dest)
1750 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
1752 move(imm, scratchReg3);
1756 return branchMul32(cond, scratchReg3, dest);
// dest -= src; branch on Overflow / Signed / Zero / NonZero.
1759 Jump branchSub32(ResultCondition cond, RegisterID src, RegisterID dest)
1761 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
// SUBV sets T on signed underflow/overflow.
1763 if (cond == Overflow) {
1764 m_assembler.subvlRegReg(src, dest);
1765 return branchTrue();
1768 if (cond == Signed) {
1769 // Check if dest is negative
1770 m_assembler.sublRegReg(src, dest);
1771 compare32(0, dest, LessThan);
1772 return branchTrue();
// Zero / NonZero. (The subtract for this path appears elided before 1776.)
1776 compare32(0, dest, static_cast<RelationalCondition>(cond));
1778 if (cond == NotEqual)
1779 return branchFalse();
1780 return branchTrue();
// dest -= imm, via the register variant. Clobbers scratchReg3.
1783 Jump branchSub32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
1785 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
1787 move(imm, scratchReg3);
1788 return branchSub32(cond, scratchReg3, dest);
// dest = src - imm. Clobbers scratchReg3.
// NOTE(review): lines between the move and the return are elided from this
// extract — presumably "if (src != dest) move(src, dest);"; confirm.
1791 Jump branchSub32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
1793 move(imm, scratchReg3);
1796 return branchSub32(cond, scratchReg3, dest);
// dest = src1 - src2.
// NOTE(review): the body before the return is elided from this extract —
// presumably "if (src1 != dest) move(src1, dest);"; confirm in the file.
1799 Jump branchSub32(ResultCondition cond, RegisterID src1, RegisterID src2, RegisterID dest)
1803 return branchSub32(cond, src2, dest);
// dest |= src; branch on Signed / Zero / NonZero. (The OR itself appears
// elided before each compare32 in this extract.)
1806 Jump branchOr32(ResultCondition cond, RegisterID src, RegisterID dest)
1808 ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
1810 if (cond == Signed) {
1812 compare32(0, dest, static_cast<RelationalCondition>(LessThan));
1813 return branchTrue();
1817 compare32(0, dest, static_cast<RelationalCondition>(cond));
1819 if (cond == NotEqual)
1820 return branchFalse();
1821 return branchTrue();
// Convert src to int32 in dest, appending failure jumps for lossy cases:
// truncate, convert back, and fail if the round trip is inexact/unordered,
// or if the result is 0 (0 is ambiguous — it may come from -0.0).
// NOTE(review): the fpTemp parameter is unused in the visible lines
// (fscratch is used instead) — confirm against the full file.
1824 void branchConvertDoubleToInt32(FPRegisterID src, RegisterID dest, JumpList& failureCases, FPRegisterID fpTemp)
1826 m_assembler.ftrcdrmfpul(src);
1827 m_assembler.stsfpulReg(dest);
1828 convertInt32ToDouble(dest, fscratch);
1829 failureCases.append(branchDouble(DoubleNotEqualOrUnordered, fscratch, src));
// Compare dest against zero; r0 has a dedicated compare-immediate form.
// (An "else" line appears elided before line 1834.)
1831 if (dest == SH4Registers::r0)
1832 m_assembler.cmpEqImmR0(0, dest);
1834 m_assembler.movImm8(0, scratchReg3);
1835 m_assembler.cmplRegReg(scratchReg3, dest, SH4Condition(Equal));
1837 failureCases.append(branchTrue());
// dst = -dst (two's-complement negate).
1840 void neg32(RegisterID dst)
1842 m_assembler.neg(dst, dst);
// Logical right shift: dest >>= (shiftamount & 0x1f), zero-filling.
// SHLD with a negative count shifts right, hence the negate.
// NOTE(review): this clobbers the shiftamount register (masked then
// negated in place) — callers must not rely on its value afterwards.
// (An "else {" line appears elided before the scratch path.)
1845 void urshift32(RegisterID shiftamount, RegisterID dest)
1847 if (shiftamount == SH4Registers::r0)
1848 m_assembler.andlImm8r(0x1f, shiftamount);
1850 RegisterID scr = claimScratch();
1851 m_assembler.loadConstant(0x1f, scr);
1852 m_assembler.andlRegReg(scr, shiftamount);
1853 releaseScratch(scr);
1855 m_assembler.neg(shiftamount, shiftamount);
1856 m_assembler.shllRegReg(dest, shiftamount);
1859 void urshift32(TrustedImm32 imm, RegisterID dest)
1861 RegisterID scr = claimScratch();
1862 m_assembler.loadConstant(-(imm.m_value & 0x1f), scr);
1863 m_assembler.shaRegReg(dest, scr);
1864 releaseScratch(scr);
// dest = src >> imm (logical), three-operand form.
// NOTE(review): lines before the call are elided from this extract —
// presumably "if (src != dest) move(src, dest);"; confirm in the file.
1867 void urshift32(RegisterID src, TrustedImm32 shiftamount, RegisterID dest)
1872 urshift32(shiftamount, dest);
1877 return Call(m_assembler.call(), Call::Linkable);
1882 return Call(m_assembler.call(), Call::LinkableNear);
// Indirect call through a register; not linkable afterwards (Call::None).
1885 Call call(RegisterID target)
1887 return Call(m_assembler.call(target), Call::None);
// Load a function pointer from [address] into 'target' and JSR through it.
1890 void call(Address address, RegisterID target)
1892 load32(address.base, address.offset, target);
1893 m_assembler.ensureSpace(m_assembler.maxInstructionSize + 2);
1894 m_assembler.branch(JSR_OPCODE, target);
// Compare 'left' against a patchable pointer constant; 'dataLabel' receives
// the location of the constant so it can be repatched later.
1904 Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
1906 RegisterID dataTempRegister = claimScratch();
1908 dataLabel = moveWithPatch(initialRightValue, dataTempRegister);
1909 m_assembler.cmplRegReg(dataTempRegister, left, SH4Condition(cond));
1910 releaseScratch(dataTempRegister);
1912 if (cond == NotEqual)
1913 return branchFalse();
1914 return branchTrue();
// Compare the word at 'left' against a patchable pointer constant;
// 'dataLabel' receives the patch location.
1917 Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
1919 RegisterID scr = claimScratch();
// Compute the effective address and dereference it into scr.
1921 m_assembler.loadConstant(left.offset, scr);
1922 m_assembler.addlRegReg(left.base, scr);
1923 m_assembler.movlMemReg(scr, scr);
1924 RegisterID scr1 = claimScratch();
1925 dataLabel = moveWithPatch(initialRightValue, scr1);
1926 m_assembler.cmplRegReg(scr1, scr, SH4Condition(cond));
1927 releaseScratch(scr);
1928 releaseScratch(scr1);
1930 if (cond == NotEqual)
1931 return branchFalse();
1932 return branchTrue();
// Store a patchable pointer constant to 'address'; the returned label marks
// the constant for later repatching.
// NOTE(review): the "return label;" line is elided from this extract.
1941 DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
1943 RegisterID scr = claimScratch();
1944 DataLabelPtr label = moveWithPatch(initialValue, scr);
1945 store32(scr, address);
1946 releaseScratch(scr);
// Convenience overload: store a null placeholder to be patched later.
1950 DataLabelPtr storePtrWithPatch(ImplicitAddress address) { return storePtrWithPatch(TrustedImmPtr(0), address); }
// Current size of the assembler's pending constant pool.
1952 int sizeOfConstantPool()
1954 return m_assembler.sizeOfConstantPool();
// Emit a tail call: a patchable (initially 0) target constant followed by
// an indirect jump through it, wrapped as a linkable Call.
1957 Call tailRecursiveCall()
1959 RegisterID scr = claimScratch();
// 'true' presumably forces a non-pooled constant so the target is patchable.
1961 m_assembler.loadConstantUnReusable(0x0, scr, true);
1962 Jump m_jump = Jump(m_assembler.jmp(scr));
1963 releaseScratch(scr);
1965 return Call::fromTailJump(m_jump);
// Turn an existing jump into a tail call.
// NOTE(review): the body before the return is elided from this extract —
// presumably "oldJump.link(this);"; confirm in the file.
1968 Call makeTailRecursiveCall(Jump oldJump)
1971 return tailRecursiveCall();
// Read back the target of a previously linked call instruction.
1979 static FunctionPtr readCallTarget(CodeLocationCall call)
1981 return FunctionPtr(reinterpret_cast<void(*)()>(SH4Assembler::readCallTarget(call.dataLocation())));
// In-place jump replacement is not supported by the SH4 port.
1984 static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
1986 ASSERT_NOT_REACHED();
// Unsupported on SH4 (pairs with replaceWithJump above).
// NOTE(review): the return statement is elided from this extract.
1989 static ptrdiff_t maxJumpReplacementSize()
1991 ASSERT_NOT_REACHED();
// RelationalCondition values mirror the assembler's Condition encoding, so
// a cast suffices.
1996 SH4Assembler::Condition SH4Condition(RelationalCondition cond)
1998 return static_cast<SH4Assembler::Condition>(cond);
// ResultCondition likewise maps one-to-one onto the assembler's Condition.
2001 SH4Assembler::Condition SH4Condition(ResultCondition cond)
2003 return static_cast<SH4Assembler::Condition>(cond);
// LinkBuffer/RepatchBuffer need access to the private link/repatch helpers.
2006 friend class LinkBuffer;
2007 friend class RepatchBuffer;
2009 static void linkCall(void*, Call, FunctionPtr);
2010 static void repatchCall(CodeLocationCall, CodeLocationLabel);
2011 static void repatchCall(CodeLocationCall, FunctionPtr);
2016 #endif // ENABLE(ASSEMBLER)
2018 #endif // MacroAssemblerSH4_h