1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2013 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (IP_REGNUM 12) ; Scratch register
34 (SP_REGNUM 13) ; Stack pointer
35 (LR_REGNUM 14) ; Return address register
36 (PC_REGNUM 15) ; Program counter
37 (LAST_ARM_REGNUM 15) ;
38 (CC_REGNUM 100) ; Condition code pseudo register
39 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
42 ;; 3rd operand to select_dominance_cc_mode
49 ;; conditional compare combination
60 ;;---------------------------------------------------------------------------
63 ;; Processor type. This is created automatically from arm-cores.def.
64 (include "arm-tune.md")
66 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
67 ; generating ARM code. This is used to control the length of some insn
68 ; patterns that share the same RTL in both ARM and Thumb code.
69 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
71 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
72 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
74 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
75 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
77 ;; Operand number of an input operand that is shifted. Zero if the
78 ;; given instruction does not shift one of its input operands.
79 (define_attr "shift" "" (const_int 0))
81 ; Floating Point Unit. If we only have floating point emulation, then there
82 ; is no point in scheduling the floating point insns. (Well, for best
83 ; performance we should try and group them together).
84 (define_attr "fpu" "none,vfp"
85 (const (symbol_ref "arm_fpu_attr")))
87 ; LENGTH of an instruction (in bytes)
88 (define_attr "length" ""
91 ; The architecture which supports the instruction (or alternative).
92 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
93 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
94 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
95 ; arm_arch6. This attribute is used to compute attribute "enabled",
96 ; use type "any" to enable an alternative in all cases.
97 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,neon_onlya8,nota8,neon_nota8,iwmmxt,iwmmxt2"
100 (define_attr "arch_enabled" "no,yes"
101 (cond [(eq_attr "arch" "any")
104 (and (eq_attr "arch" "a")
105 (match_test "TARGET_ARM"))
108 (and (eq_attr "arch" "t")
109 (match_test "TARGET_THUMB"))
112 (and (eq_attr "arch" "t1")
113 (match_test "TARGET_THUMB1"))
116 (and (eq_attr "arch" "t2")
117 (match_test "TARGET_THUMB2"))
120 (and (eq_attr "arch" "32")
121 (match_test "TARGET_32BIT"))
124 (and (eq_attr "arch" "v6")
125 (match_test "TARGET_32BIT && arm_arch6"))
128 (and (eq_attr "arch" "nov6")
129 (match_test "TARGET_32BIT && !arm_arch6"))
132 (and (eq_attr "arch" "onlya8")
133 (eq_attr "tune" "cortexa8"))
136 (and (eq_attr "arch" "neon_onlya8")
137 (eq_attr "tune" "cortexa8")
138 (match_test "TARGET_NEON"))
141 (and (eq_attr "arch" "nota8")
142 (not (eq_attr "tune" "cortexa8")))
145 (and (eq_attr "arch" "neon_nota8")
146 (not (eq_attr "tune" "cortexa8"))
147 (match_test "TARGET_NEON"))
150 (and (eq_attr "arch" "iwmmxt2")
151 (match_test "TARGET_REALLY_IWMMXT2"))
152 (const_string "yes")]
154 (const_string "no")))
156 (define_attr "opt" "any,speed,size"
157 (const_string "any"))
159 (define_attr "opt_enabled" "no,yes"
160 (cond [(eq_attr "opt" "any")
163 (and (eq_attr "opt" "speed")
164 (match_test "optimize_function_for_speed_p (cfun)"))
167 (and (eq_attr "opt" "size")
168 (match_test "optimize_function_for_size_p (cfun)"))
169 (const_string "yes")]
170 (const_string "no")))
172 ; Allows an insn to disable certain alternatives for reasons other than
174 (define_attr "insn_enabled" "no,yes"
175 (const_string "yes"))
177 ; Enable all alternatives that are both arch_enabled and insn_enabled.
178 (define_attr "enabled" "no,yes"
179 (cond [(eq_attr "insn_enabled" "no")
182 (eq_attr "arch_enabled" "no")
185 (eq_attr "opt_enabled" "no")
187 (const_string "yes")))
189 ; POOL_RANGE is how far away from a constant pool entry that this insn
190 ; can be placed. If the distance is zero, then this insn will never
191 ; reference the pool.
192 ; Note that for Thumb constant pools the PC value is rounded down to the
193 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
194 ; Thumb insns) should be set to <max_range> - 2.
195 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
196 ; before its address. It is set to <max_range> - (8 + <data_size>).
197 (define_attr "arm_pool_range" "" (const_int 0))
198 (define_attr "thumb2_pool_range" "" (const_int 0))
199 (define_attr "arm_neg_pool_range" "" (const_int 0))
200 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
202 (define_attr "pool_range" ""
203 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
204 (attr "arm_pool_range")))
205 (define_attr "neg_pool_range" ""
206 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
207 (attr "arm_neg_pool_range")))
209 ; An assembler sequence may clobber the condition codes without us knowing.
210 ; If such an insn references the pool, then we have no way of knowing how,
211 ; so use the most conservative value for pool_range.
212 (define_asm_attributes
213 [(set_attr "conds" "clob")
214 (set_attr "length" "4")
215 (set_attr "pool_range" "250")])
217 ;; The instruction used to implement a particular pattern. This
218 ;; information is used by pipeline descriptions to provide accurate
219 ;; scheduling information.
222 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,sat,other"
223 (const_string "other"))
225 ; TYPE attribute is used to detect floating point instructions which, if
226 ; running on a co-processor can run in parallel with other, basic instructions
227 ; If write-buffer scheduling is enabled then it can also be used in the
228 ; scheduling of writes.
230 ; Classification of each insn
231 ; Note: vfp.md has different meanings for some of these, and some further
232 ; types as well. See that file for details.
233 ; simple_alu_imm a simple alu instruction that doesn't hit memory or fp
234 ; regs or have a shifted source operand and has an immediate
235 ; operand. This currently only tracks very basic immediate
237 ; alu_reg any alu instruction that doesn't hit memory or fp
238 ; regs or have a shifted source operand
239 ; and does not have an immediate operand. This is
241 ; simple_alu_shift covers UXTH, UXTB, SXTH, SXTB
242 ; alu_shift any data instruction that doesn't hit memory or fp
243 ; regs, but has a source operand shifted by a constant
244 ; alu_shift_reg any data instruction that doesn't hit memory or fp
245 ; regs, but has a source operand shifted by a register value
246 ; mult a multiply instruction
247 ; block blockage insn, this blocks all functional units
248 ; float a floating point arithmetic operation (subject to expansion)
249 ; fdivd DFmode floating point division
250 ; fdivs SFmode floating point division
251 ; f_load[sd] A single/double load from memory. Used for VFP unit.
252 ; f_store[sd] A single/double store to memory. Used for VFP unit.
253 ; f_flag a transfer of co-processor flags to the CPSR
254 ; f_2_r transfer float to core (no memory needed)
255 ; r_2_f transfer core to float
256 ; f_cvt convert floating<->integral
258 ; call a subroutine call
259 ; load_byte load byte(s) from memory to arm registers
260 ; load1 load 1 word from memory to arm registers
261 ; load2 load 2 words from memory to arm registers
262 ; load3 load 3 words from memory to arm registers
263 ; load4 load 4 words from memory to arm registers
264 ; store store 1 word to memory from arm registers
265 ; store2 store 2 words
266 ; store3 store 3 words
267 ; store4 store 4 (or more) words
320 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,\
321 umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
322 (const_string "mult")
323 (const_string "alu_reg")))
325 ; Is this an (integer side) multiply with a 64-bit result?
326 (define_attr "mul64" "no,yes"
329 "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
331 (const_string "no")))
333 ; wtype for WMMX insn scheduling purposes.
335 "none,wor,wxor,wand,wandn,wmov,tmcrr,tmrrc,wldr,wstr,tmcr,tmrc,wadd,wsub,wmul,wmac,wavg2,tinsr,textrm,wshufh,wcmpeq,wcmpgt,wmax,wmin,wpack,wunpckih,wunpckil,wunpckeh,wunpckel,wror,wsra,wsrl,wsll,wmadd,tmia,tmiaph,tmiaxy,tbcst,tmovmsk,wacc,waligni,walignr,tandc,textrc,torc,torvsc,wsad,wabs,wabsdiff,waddsubhx,wsubaddhx,wavg4,wmulw,wqmulm,wqmulwm,waddbhus,wqmiaxy,wmiaxy,wmiawxy,wmerge" (const_string "none"))
337 ; Load scheduling, set from the arm_ld_sched variable
338 ; initialized by arm_option_override()
339 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
341 ;; Classification of NEON instructions for scheduling purposes.
342 (define_attr "neon_type"
353 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
354 neon_mul_qqq_8_16_32_ddd_32,\
355 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
356 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
358 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
359 neon_mla_qqq_32_qqd_32_scalar,\
360 neon_mul_ddd_16_scalar_32_16_long_scalar,\
361 neon_mul_qqd_32_scalar,\
362 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
367 neon_vqshl_vrshl_vqrshl_qqq,\
369 neon_fp_vadd_ddd_vabs_dd,\
370 neon_fp_vadd_qqq_vabs_qq,\
376 neon_fp_vmla_ddd_scalar,\
377 neon_fp_vmla_qqq_scalar,\
378 neon_fp_vrecps_vrsqrts_ddd,\
379 neon_fp_vrecps_vrsqrts_qqq,\
387 neon_vld2_2_regs_vld1_vld2_all_lanes,\
390 neon_vst1_1_2_regs_vst2_2_regs,\
392 neon_vst2_4_regs_vst3_vst4,\
394 neon_vld1_vld2_lane,\
395 neon_vld3_vld4_lane,\
396 neon_vst1_vst2_lane,\
397 neon_vst3_vst4_lane,\
398 neon_vld3_vld4_all_lanes,\
406 (const_string "none"))
408 ; condition codes: this one is used by final_prescan_insn to speed up
409 ; conditionalizing instructions. It saves having to scan the rtl to see if
410 ; it uses or alters the condition codes.
412 ; USE means that the condition codes are used by the insn in the process of
413 ; outputting code, this means (at present) that we can't use the insn in
416 ; SET means that the purpose of the insn is to set the condition codes in a
417 ; well defined manner.
419 ; CLOB means that the condition codes are altered in an undefined manner, if
420 ; they are altered at all
422 ; UNCONDITIONAL means the instruction can not be conditionally executed and
423 ; that the instruction does not use or alter the condition codes.
425 ; NOCOND means that the instruction does not use or alter the condition
426 ; codes but can be converted into a conditionally exectuted instruction.
428 (define_attr "conds" "use,set,clob,unconditional,nocond"
430 (ior (eq_attr "is_thumb1" "yes")
431 (eq_attr "type" "call"))
432 (const_string "clob")
433 (if_then_else (eq_attr "neon_type" "none")
434 (const_string "nocond")
435 (const_string "unconditional"))))
437 ; Predicable means that the insn can be conditionally executed based on
438 ; an automatically added predicate (additional patterns are generated by
439 ; gen...). We default to 'no' because no Thumb patterns match this rule
440 ; and not all ARM patterns do.
441 (define_attr "predicable" "no,yes" (const_string "no"))
443 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
444 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
445 ; suffer blockages enough to warrant modelling this (and it can adversely
446 ; affect the schedule).
447 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
449 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
450 ; to stall the processor. Used with model_wbuf above.
451 (define_attr "write_conflict" "no,yes"
452 (if_then_else (eq_attr "type"
455 (const_string "no")))
457 ; Classify the insns into those that take one cycle and those that take more
458 ; than one on the main cpu execution unit.
459 (define_attr "core_cycles" "single,multi"
460 (if_then_else (eq_attr "type"
461 "simple_alu_imm,alu_reg,\
462 simple_alu_shift,alu_shift,\
464 (const_string "single")
465 (const_string "multi")))
467 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
468 ;; distant label. Only applicable to Thumb code.
469 (define_attr "far_jump" "yes,no" (const_string "no"))
472 ;; The number of machine instructions this pattern expands to.
473 ;; Used for Thumb-2 conditional execution.
474 (define_attr "ce_count" "" (const_int 1))
476 ;;---------------------------------------------------------------------------
479 (include "unspecs.md")
481 ;;---------------------------------------------------------------------------
484 (include "iterators.md")
486 ;;---------------------------------------------------------------------------
489 (include "predicates.md")
490 (include "constraints.md")
492 ;;---------------------------------------------------------------------------
493 ;; Pipeline descriptions
495 (define_attr "tune_cortexr4" "yes,no"
497 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
499 (const_string "no"))))
501 ;; True if the generic scheduling description should be used.
503 (define_attr "generic_sched" "yes,no"
505 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexm4,marvell_pj4")
506 (eq_attr "tune_cortexr4" "yes"))
508 (const_string "yes"))))
510 (define_attr "generic_vfp" "yes,no"
512 (and (eq_attr "fpu" "vfp")
513 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexm4,marvell_pj4")
514 (eq_attr "tune_cortexr4" "no"))
516 (const_string "no"))))
518 (include "marvell-f-iwmmxt.md")
519 (include "arm-generic.md")
520 (include "arm926ejs.md")
521 (include "arm1020e.md")
522 (include "arm1026ejs.md")
523 (include "arm1136jfs.md")
525 (include "fa606te.md")
526 (include "fa626te.md")
527 (include "fmp626.md")
528 (include "fa726te.md")
529 (include "cortex-a5.md")
530 (include "cortex-a7.md")
531 (include "cortex-a8.md")
532 (include "cortex-a9.md")
533 (include "cortex-a15.md")
534 (include "cortex-r4.md")
535 (include "cortex-r4f.md")
536 (include "cortex-m4.md")
537 (include "cortex-m4-fpu.md")
539 (include "marvell-pj4.md")
542 ;;---------------------------------------------------------------------------
547 ;; Note: For DImode insns, there is normally no reason why operands should
548 ;; not be in the same register, what we don't want is for something being
549 ;; written to partially overlap something that is an input.
551 (define_expand "adddi3"
553 [(set (match_operand:DI 0 "s_register_operand" "")
554 (plus:DI (match_operand:DI 1 "s_register_operand" "")
555 (match_operand:DI 2 "arm_adddi_operand" "")))
556 (clobber (reg:CC CC_REGNUM))])]
561 if (!REG_P (operands[1]))
562 operands[1] = force_reg (DImode, operands[1]);
563 if (!REG_P (operands[2]))
564 operands[2] = force_reg (DImode, operands[2]);
569 (define_insn "*thumb1_adddi3"
570 [(set (match_operand:DI 0 "register_operand" "=l")
571 (plus:DI (match_operand:DI 1 "register_operand" "%0")
572 (match_operand:DI 2 "register_operand" "l")))
573 (clobber (reg:CC CC_REGNUM))
576 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
577 [(set_attr "length" "4")]
580 (define_insn_and_split "*arm_adddi3"
581 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
582 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
583 (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
584 (clobber (reg:CC CC_REGNUM))]
585 "TARGET_32BIT && !TARGET_NEON"
587 "TARGET_32BIT && reload_completed
588 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
589 [(parallel [(set (reg:CC_C CC_REGNUM)
590 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
592 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
593 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
594 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
597 operands[3] = gen_highpart (SImode, operands[0]);
598 operands[0] = gen_lowpart (SImode, operands[0]);
599 operands[4] = gen_highpart (SImode, operands[1]);
600 operands[1] = gen_lowpart (SImode, operands[1]);
601 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
602 operands[2] = gen_lowpart (SImode, operands[2]);
604 [(set_attr "conds" "clob")
605 (set_attr "length" "8")]
608 (define_insn_and_split "*adddi_sesidi_di"
609 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
610 (plus:DI (sign_extend:DI
611 (match_operand:SI 2 "s_register_operand" "r,r"))
612 (match_operand:DI 1 "s_register_operand" "0,r")))
613 (clobber (reg:CC CC_REGNUM))]
616 "TARGET_32BIT && reload_completed"
617 [(parallel [(set (reg:CC_C CC_REGNUM)
618 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
620 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
621 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
624 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
627 operands[3] = gen_highpart (SImode, operands[0]);
628 operands[0] = gen_lowpart (SImode, operands[0]);
629 operands[4] = gen_highpart (SImode, operands[1]);
630 operands[1] = gen_lowpart (SImode, operands[1]);
631 operands[2] = gen_lowpart (SImode, operands[2]);
633 [(set_attr "conds" "clob")
634 (set_attr "length" "8")]
637 (define_insn_and_split "*adddi_zesidi_di"
638 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
639 (plus:DI (zero_extend:DI
640 (match_operand:SI 2 "s_register_operand" "r,r"))
641 (match_operand:DI 1 "s_register_operand" "0,r")))
642 (clobber (reg:CC CC_REGNUM))]
645 "TARGET_32BIT && reload_completed"
646 [(parallel [(set (reg:CC_C CC_REGNUM)
647 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
649 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
650 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
651 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
654 operands[3] = gen_highpart (SImode, operands[0]);
655 operands[0] = gen_lowpart (SImode, operands[0]);
656 operands[4] = gen_highpart (SImode, operands[1]);
657 operands[1] = gen_lowpart (SImode, operands[1]);
658 operands[2] = gen_lowpart (SImode, operands[2]);
660 [(set_attr "conds" "clob")
661 (set_attr "length" "8")]
664 (define_expand "addsi3"
665 [(set (match_operand:SI 0 "s_register_operand" "")
666 (plus:SI (match_operand:SI 1 "s_register_operand" "")
667 (match_operand:SI 2 "reg_or_int_operand" "")))]
670 if (TARGET_32BIT && CONST_INT_P (operands[2]))
672 arm_split_constant (PLUS, SImode, NULL_RTX,
673 INTVAL (operands[2]), operands[0], operands[1],
674 optimize && can_create_pseudo_p ());
680 ; If there is a scratch available, this will be faster than synthesizing the
683 [(match_scratch:SI 3 "r")
684 (set (match_operand:SI 0 "arm_general_register_operand" "")
685 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
686 (match_operand:SI 2 "const_int_operand" "")))]
688 !(const_ok_for_arm (INTVAL (operands[2]))
689 || const_ok_for_arm (-INTVAL (operands[2])))
690 && const_ok_for_arm (~INTVAL (operands[2]))"
691 [(set (match_dup 3) (match_dup 2))
692 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
696 ;; The r/r/k alternative is required when reloading the address
697 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
698 ;; put the duplicated register first, and not try the commutative version.
699 (define_insn_and_split "*arm_addsi3"
700 [(set (match_operand:SI 0 "s_register_operand" "=rk, r,k, r,r, k, r, k,k,r, k, r")
701 (plus:SI (match_operand:SI 1 "s_register_operand" "%0, rk,k, r,rk,k, rk,k,r,rk,k, rk")
702 (match_operand:SI 2 "reg_or_int_operand" "rk, rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
714 subw%?\\t%0, %1, #%n2
715 subw%?\\t%0, %1, #%n2
718 && CONST_INT_P (operands[2])
719 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
720 && (reload_completed || !arm_eliminable_register (operands[1]))"
721 [(clobber (const_int 0))]
723 arm_split_constant (PLUS, SImode, curr_insn,
724 INTVAL (operands[2]), operands[0],
728 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,16")
729 (set_attr "predicable" "yes")
730 (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
731 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
732 (const_string "simple_alu_imm")
733 (const_string "alu_reg")))
737 (define_insn_and_split "*thumb1_addsi3"
738 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
739 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
740 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
743 static const char * const asms[] =
745 \"add\\t%0, %0, %2\",
746 \"sub\\t%0, %0, #%n2\",
747 \"add\\t%0, %1, %2\",
748 \"add\\t%0, %0, %2\",
749 \"add\\t%0, %0, %2\",
750 \"add\\t%0, %1, %2\",
751 \"add\\t%0, %1, %2\",
756 if ((which_alternative == 2 || which_alternative == 6)
757 && CONST_INT_P (operands[2])
758 && INTVAL (operands[2]) < 0)
759 return \"sub\\t%0, %1, #%n2\";
760 return asms[which_alternative];
762 "&& reload_completed && CONST_INT_P (operands[2])
763 && ((operands[1] != stack_pointer_rtx
764 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
765 || (operands[1] == stack_pointer_rtx
766 && INTVAL (operands[2]) > 1020))"
767 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
768 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
770 HOST_WIDE_INT offset = INTVAL (operands[2]);
771 if (operands[1] == stack_pointer_rtx)
777 else if (offset < -255)
780 operands[3] = GEN_INT (offset);
781 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
783 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
786 ;; Reloading and elimination of the frame pointer can
787 ;; sometimes cause this optimization to be missed.
789 [(set (match_operand:SI 0 "arm_general_register_operand" "")
790 (match_operand:SI 1 "const_int_operand" ""))
792 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
794 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
795 && (INTVAL (operands[1]) & 3) == 0"
796 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
800 (define_insn "addsi3_compare0"
801 [(set (reg:CC_NOOV CC_REGNUM)
803 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
804 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
807 (plus:SI (match_dup 1) (match_dup 2)))]
813 [(set_attr "conds" "set")
814 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")]
817 (define_insn "*addsi3_compare0_scratch"
818 [(set (reg:CC_NOOV CC_REGNUM)
820 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
821 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
828 [(set_attr "conds" "set")
829 (set_attr "predicable" "yes")
830 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")
834 (define_insn "*compare_negsi_si"
835 [(set (reg:CC_Z CC_REGNUM)
837 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
838 (match_operand:SI 1 "s_register_operand" "r")))]
841 [(set_attr "conds" "set")
842 (set_attr "predicable" "yes")]
845 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
846 ;; addend is a constant.
847 (define_insn "*cmpsi2_addneg"
848 [(set (reg:CC CC_REGNUM)
850 (match_operand:SI 1 "s_register_operand" "r,r")
851 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
852 (set (match_operand:SI 0 "s_register_operand" "=r,r")
853 (plus:SI (match_dup 1)
854 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
855 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
858 sub%.\\t%0, %1, #%n3"
859 [(set_attr "conds" "set")]
862 ;; Convert the sequence
864 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
868 ;; bcs dest ((unsigned)rn >= 1)
869 ;; similarly for the beq variant using bcc.
870 ;; This is a common looping idiom (while (n--))
872 [(set (match_operand:SI 0 "arm_general_register_operand" "")
873 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
875 (set (match_operand 2 "cc_register" "")
876 (compare (match_dup 0) (const_int -1)))
878 (if_then_else (match_operator 3 "equality_operator"
879 [(match_dup 2) (const_int 0)])
880 (match_operand 4 "" "")
881 (match_operand 5 "" "")))]
882 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
886 (match_dup 1) (const_int 1)))
887 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
889 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
892 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
893 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
896 operands[2], const0_rtx);"
899 ;; The next four insns work because they compare the result with one of
900 ;; the operands, and we know that the use of the condition code is
901 ;; either GEU or LTU, so we can use the carry flag from the addition
902 ;; instead of doing the compare a second time.
903 (define_insn "*addsi3_compare_op1"
904 [(set (reg:CC_C CC_REGNUM)
906 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
907 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
909 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
910 (plus:SI (match_dup 1) (match_dup 2)))]
916 [(set_attr "conds" "set")
917 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
920 (define_insn "*addsi3_compare_op2"
921 [(set (reg:CC_C CC_REGNUM)
923 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
924 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
926 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
927 (plus:SI (match_dup 1) (match_dup 2)))]
932 sub%.\\t%0, %1, #%n2"
933 [(set_attr "conds" "set")
934 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
937 (define_insn "*compare_addsi2_op0"
938 [(set (reg:CC_C CC_REGNUM)
940 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
941 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
948 [(set_attr "conds" "set")
949 (set_attr "predicable" "yes")
950 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
953 (define_insn "*compare_addsi2_op1"
954 [(set (reg:CC_C CC_REGNUM)
956 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
957 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
964 [(set_attr "conds" "set")
965 (set_attr "predicable" "yes")
966 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
969 (define_insn "*addsi3_carryin_<optab>"
970 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
971 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r")
972 (match_operand:SI 2 "arm_not_operand" "rI,K"))
973 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
977 sbc%?\\t%0, %1, #%B2"
978 [(set_attr "conds" "use")]
981 (define_insn "*addsi3_carryin_alt2_<optab>"
982 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
983 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
984 (match_operand:SI 1 "s_register_operand" "%r,r"))
985 (match_operand:SI 2 "arm_rhs_operand" "rI,K")))]
989 sbc%?\\t%0, %1, #%B2"
990 [(set_attr "conds" "use")]
993 (define_insn "*addsi3_carryin_shift_<optab>"
994 [(set (match_operand:SI 0 "s_register_operand" "=r")
996 (match_operator:SI 2 "shift_operator"
997 [(match_operand:SI 3 "s_register_operand" "r")
998 (match_operand:SI 4 "reg_or_int_operand" "rM")])
999 (match_operand:SI 1 "s_register_operand" "r"))
1000 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1002 "adc%?\\t%0, %1, %3%S2"
1003 [(set_attr "conds" "use")
1004 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1005 (const_string "alu_shift")
1006 (const_string "alu_shift_reg")))]
1009 (define_insn "*addsi3_carryin_clobercc_<optab>"
1010 [(set (match_operand:SI 0 "s_register_operand" "=r")
1011 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1012 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1013 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1014 (clobber (reg:CC CC_REGNUM))]
1016 "adc%.\\t%0, %1, %2"
1017 [(set_attr "conds" "set")]
1020 (define_expand "incscc"
1021 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1022 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1023 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1024 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1029 (define_insn "*arm_incscc"
1030 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1031 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1032 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1033 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1037 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1038 [(set_attr "conds" "use")
1039 (set_attr "length" "4,8")]
1042 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1044 [(set (match_operand:SI 0 "s_register_operand" "")
1045 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1046 (match_operand:SI 2 "s_register_operand" ""))
1048 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1050 [(set (match_dup 3) (match_dup 1))
1051 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1053 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1056 (define_expand "addsf3"
1057 [(set (match_operand:SF 0 "s_register_operand" "")
1058 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1059 (match_operand:SF 2 "s_register_operand" "")))]
1060 "TARGET_32BIT && TARGET_HARD_FLOAT"
1064 (define_expand "adddf3"
1065 [(set (match_operand:DF 0 "s_register_operand" "")
1066 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1067 (match_operand:DF 2 "s_register_operand" "")))]
1068 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1072 (define_expand "subdi3"
1074 [(set (match_operand:DI 0 "s_register_operand" "")
1075 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1076 (match_operand:DI 2 "s_register_operand" "")))
1077 (clobber (reg:CC CC_REGNUM))])]
1082 if (!REG_P (operands[1]))
1083 operands[1] = force_reg (DImode, operands[1]);
1084 if (!REG_P (operands[2]))
1085 operands[2] = force_reg (DImode, operands[2]);
1090 (define_insn "*arm_subdi3"
1091 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1092 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1093 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1094 (clobber (reg:CC CC_REGNUM))]
1095 "TARGET_32BIT && !TARGET_NEON"
1096 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1097 [(set_attr "conds" "clob")
1098 (set_attr "length" "8")]
1101 (define_insn "*thumb_subdi3"
1102 [(set (match_operand:DI 0 "register_operand" "=l")
1103 (minus:DI (match_operand:DI 1 "register_operand" "0")
1104 (match_operand:DI 2 "register_operand" "l")))
1105 (clobber (reg:CC CC_REGNUM))]
1107 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1108 [(set_attr "length" "4")]
1111 (define_insn "*subdi_di_zesidi"
1112 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1113 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1115 (match_operand:SI 2 "s_register_operand" "r,r"))))
1116 (clobber (reg:CC CC_REGNUM))]
1118 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1119 [(set_attr "conds" "clob")
1120 (set_attr "length" "8")]
1123 (define_insn "*subdi_di_sesidi"
1124 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1125 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1127 (match_operand:SI 2 "s_register_operand" "r,r"))))
1128 (clobber (reg:CC CC_REGNUM))]
1130 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1131 [(set_attr "conds" "clob")
1132 (set_attr "length" "8")]
1135 (define_insn "*subdi_zesidi_di"
1136 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1137 (minus:DI (zero_extend:DI
1138 (match_operand:SI 2 "s_register_operand" "r,r"))
1139 (match_operand:DI 1 "s_register_operand" "0,r")))
1140 (clobber (reg:CC CC_REGNUM))]
1142 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1143 [(set_attr "conds" "clob")
1144 (set_attr "length" "8")]
1147 (define_insn "*subdi_sesidi_di"
1148 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1149 (minus:DI (sign_extend:DI
1150 (match_operand:SI 2 "s_register_operand" "r,r"))
1151 (match_operand:DI 1 "s_register_operand" "0,r")))
1152 (clobber (reg:CC CC_REGNUM))]
1154 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1155 [(set_attr "conds" "clob")
1156 (set_attr "length" "8")]
1159 (define_insn "*subdi_zesidi_zesidi"
1160 [(set (match_operand:DI 0 "s_register_operand" "=r")
1161 (minus:DI (zero_extend:DI
1162 (match_operand:SI 1 "s_register_operand" "r"))
1164 (match_operand:SI 2 "s_register_operand" "r"))))
1165 (clobber (reg:CC CC_REGNUM))]
1167 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1168 [(set_attr "conds" "clob")
1169 (set_attr "length" "8")]
1172 (define_expand "subsi3"
1173 [(set (match_operand:SI 0 "s_register_operand" "")
1174 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1175 (match_operand:SI 2 "s_register_operand" "")))]
1178 if (CONST_INT_P (operands[1]))
1182 arm_split_constant (MINUS, SImode, NULL_RTX,
1183 INTVAL (operands[1]), operands[0],
1184 operands[2], optimize && can_create_pseudo_p ());
1187 else /* TARGET_THUMB1 */
1188 operands[1] = force_reg (SImode, operands[1]);
1193 (define_insn "thumb1_subsi3_insn"
1194 [(set (match_operand:SI 0 "register_operand" "=l")
1195 (minus:SI (match_operand:SI 1 "register_operand" "l")
1196 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1199 [(set_attr "length" "2")
1200 (set_attr "conds" "set")])
1202 ; ??? Check Thumb-2 split length
1203 (define_insn_and_split "*arm_subsi3_insn"
1204 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,rk,r")
1205 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,r,k,?n")
1206 (match_operand:SI 2 "reg_or_int_operand" "r,I,r,r, r")))]
1214 "&& (CONST_INT_P (operands[1])
1215 && !const_ok_for_arm (INTVAL (operands[1])))"
1216 [(clobber (const_int 0))]
1218 arm_split_constant (MINUS, SImode, curr_insn,
1219 INTVAL (operands[1]), operands[0], operands[2], 0);
1222 [(set_attr "length" "4,4,4,4,16")
1223 (set_attr "predicable" "yes")
1224 (set_attr "type" "*,simple_alu_imm,*,*,*")]
1228 [(match_scratch:SI 3 "r")
1229 (set (match_operand:SI 0 "arm_general_register_operand" "")
1230 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1231 (match_operand:SI 2 "arm_general_register_operand" "")))]
1233 && !const_ok_for_arm (INTVAL (operands[1]))
1234 && const_ok_for_arm (~INTVAL (operands[1]))"
1235 [(set (match_dup 3) (match_dup 1))
1236 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1240 (define_insn "*subsi3_compare0"
1241 [(set (reg:CC_NOOV CC_REGNUM)
1243 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1244 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1246 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1247 (minus:SI (match_dup 1) (match_dup 2)))]
1253 [(set_attr "conds" "set")
1254 (set_attr "type" "simple_alu_imm,*,*")]
1257 (define_insn "*subsi3_compare"
1258 [(set (reg:CC CC_REGNUM)
1259 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1260 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1261 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1262 (minus:SI (match_dup 1) (match_dup 2)))]
1268 [(set_attr "conds" "set")
1269 (set_attr "type" "simple_alu_imm,*,*")]
1272 (define_expand "decscc"
1273 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1274 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1275 (match_operator:SI 2 "arm_comparison_operator"
1276 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1281 (define_insn "*arm_decscc"
1282 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1283 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1284 (match_operator:SI 2 "arm_comparison_operator"
1285 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1289 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1290 [(set_attr "conds" "use")
1291 (set_attr "length" "*,8")
1292 (set_attr "type" "simple_alu_imm,*")]
1295 (define_expand "subsf3"
1296 [(set (match_operand:SF 0 "s_register_operand" "")
1297 (minus:SF (match_operand:SF 1 "s_register_operand" "")
1298 (match_operand:SF 2 "s_register_operand" "")))]
1299 "TARGET_32BIT && TARGET_HARD_FLOAT"
1303 (define_expand "subdf3"
1304 [(set (match_operand:DF 0 "s_register_operand" "")
1305 (minus:DF (match_operand:DF 1 "s_register_operand" "")
1306 (match_operand:DF 2 "s_register_operand" "")))]
1307 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1312 ;; Multiplication insns
1314 (define_expand "mulsi3"
1315 [(set (match_operand:SI 0 "s_register_operand" "")
1316 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1317 (match_operand:SI 1 "s_register_operand" "")))]
1322 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1323 (define_insn "*arm_mulsi3"
1324 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1325 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1326 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1327 "TARGET_32BIT && !arm_arch6"
1328 "mul%?\\t%0, %2, %1"
1329 [(set_attr "insn" "mul")
1330 (set_attr "predicable" "yes")]
1333 (define_insn "*arm_mulsi3_v6"
1334 [(set (match_operand:SI 0 "s_register_operand" "=r")
1335 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1336 (match_operand:SI 2 "s_register_operand" "r")))]
1337 "TARGET_32BIT && arm_arch6"
1338 "mul%?\\t%0, %1, %2"
1339 [(set_attr "insn" "mul")
1340 (set_attr "predicable" "yes")]
1343 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1344 ; 1 and 2; are the same, because reload will make operand 0 match
1345 ; operand 1 without realizing that this conflicts with operand 2. We fix
1346 ; this by adding another alternative to match this case, and then `reload'
1347 ; it ourselves. This alternative must come first.
1348 (define_insn "*thumb_mulsi3"
1349 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1350 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1351 (match_operand:SI 2 "register_operand" "l,l,l")))]
1352 "TARGET_THUMB1 && !arm_arch6"
1354 if (which_alternative < 2)
1355 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1357 return \"mul\\t%0, %2\";
1359 [(set_attr "length" "4,4,2")
1360 (set_attr "insn" "mul")]
1363 (define_insn "*thumb_mulsi3_v6"
1364 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1365 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1366 (match_operand:SI 2 "register_operand" "l,0,0")))]
1367 "TARGET_THUMB1 && arm_arch6"
1372 [(set_attr "length" "2")
1373 (set_attr "insn" "mul")]
1376 (define_insn "*mulsi3_compare0"
1377 [(set (reg:CC_NOOV CC_REGNUM)
1378 (compare:CC_NOOV (mult:SI
1379 (match_operand:SI 2 "s_register_operand" "r,r")
1380 (match_operand:SI 1 "s_register_operand" "%0,r"))
1382 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1383 (mult:SI (match_dup 2) (match_dup 1)))]
1384 "TARGET_ARM && !arm_arch6"
1385 "mul%.\\t%0, %2, %1"
1386 [(set_attr "conds" "set")
1387 (set_attr "insn" "muls")]
1390 (define_insn "*mulsi3_compare0_v6"
1391 [(set (reg:CC_NOOV CC_REGNUM)
1392 (compare:CC_NOOV (mult:SI
1393 (match_operand:SI 2 "s_register_operand" "r")
1394 (match_operand:SI 1 "s_register_operand" "r"))
1396 (set (match_operand:SI 0 "s_register_operand" "=r")
1397 (mult:SI (match_dup 2) (match_dup 1)))]
1398 "TARGET_ARM && arm_arch6 && optimize_size"
1399 "mul%.\\t%0, %2, %1"
1400 [(set_attr "conds" "set")
1401 (set_attr "insn" "muls")]
1404 (define_insn "*mulsi_compare0_scratch"
1405 [(set (reg:CC_NOOV CC_REGNUM)
1406 (compare:CC_NOOV (mult:SI
1407 (match_operand:SI 2 "s_register_operand" "r,r")
1408 (match_operand:SI 1 "s_register_operand" "%0,r"))
1410 (clobber (match_scratch:SI 0 "=&r,&r"))]
1411 "TARGET_ARM && !arm_arch6"
1412 "mul%.\\t%0, %2, %1"
1413 [(set_attr "conds" "set")
1414 (set_attr "insn" "muls")]
1417 (define_insn "*mulsi_compare0_scratch_v6"
1418 [(set (reg:CC_NOOV CC_REGNUM)
1419 (compare:CC_NOOV (mult:SI
1420 (match_operand:SI 2 "s_register_operand" "r")
1421 (match_operand:SI 1 "s_register_operand" "r"))
1423 (clobber (match_scratch:SI 0 "=r"))]
1424 "TARGET_ARM && arm_arch6 && optimize_size"
1425 "mul%.\\t%0, %2, %1"
1426 [(set_attr "conds" "set")
1427 (set_attr "insn" "muls")]
1430 ;; Unnamed templates to match MLA instruction.
1432 (define_insn "*mulsi3addsi"
1433 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1435 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1436 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1437 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1438 "TARGET_32BIT && !arm_arch6"
1439 "mla%?\\t%0, %2, %1, %3"
1440 [(set_attr "insn" "mla")
1441 (set_attr "predicable" "yes")]
1444 (define_insn "*mulsi3addsi_v6"
1445 [(set (match_operand:SI 0 "s_register_operand" "=r")
1447 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1448 (match_operand:SI 1 "s_register_operand" "r"))
1449 (match_operand:SI 3 "s_register_operand" "r")))]
1450 "TARGET_32BIT && arm_arch6"
1451 "mla%?\\t%0, %2, %1, %3"
1452 [(set_attr "insn" "mla")
1453 (set_attr "predicable" "yes")]
1456 (define_insn "*mulsi3addsi_compare0"
1457 [(set (reg:CC_NOOV CC_REGNUM)
1460 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1461 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1462 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1464 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1465 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1467 "TARGET_ARM && arm_arch6"
1468 "mla%.\\t%0, %2, %1, %3"
1469 [(set_attr "conds" "set")
1470 (set_attr "insn" "mlas")]
1473 (define_insn "*mulsi3addsi_compare0_v6"
1474 [(set (reg:CC_NOOV CC_REGNUM)
1477 (match_operand:SI 2 "s_register_operand" "r")
1478 (match_operand:SI 1 "s_register_operand" "r"))
1479 (match_operand:SI 3 "s_register_operand" "r"))
1481 (set (match_operand:SI 0 "s_register_operand" "=r")
1482 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1484 "TARGET_ARM && arm_arch6 && optimize_size"
1485 "mla%.\\t%0, %2, %1, %3"
1486 [(set_attr "conds" "set")
1487 (set_attr "insn" "mlas")]
1490 (define_insn "*mulsi3addsi_compare0_scratch"
1491 [(set (reg:CC_NOOV CC_REGNUM)
1494 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1495 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1496 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1498 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1499 "TARGET_ARM && !arm_arch6"
1500 "mla%.\\t%0, %2, %1, %3"
1501 [(set_attr "conds" "set")
1502 (set_attr "insn" "mlas")]
1505 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1506 [(set (reg:CC_NOOV CC_REGNUM)
1509 (match_operand:SI 2 "s_register_operand" "r")
1510 (match_operand:SI 1 "s_register_operand" "r"))
1511 (match_operand:SI 3 "s_register_operand" "r"))
1513 (clobber (match_scratch:SI 0 "=r"))]
1514 "TARGET_ARM && arm_arch6 && optimize_size"
1515 "mla%.\\t%0, %2, %1, %3"
1516 [(set_attr "conds" "set")
1517 (set_attr "insn" "mlas")]
1520 (define_insn "*mulsi3subsi"
1521 [(set (match_operand:SI 0 "s_register_operand" "=r")
1523 (match_operand:SI 3 "s_register_operand" "r")
1524 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1525 (match_operand:SI 1 "s_register_operand" "r"))))]
1526 "TARGET_32BIT && arm_arch_thumb2"
1527 "mls%?\\t%0, %2, %1, %3"
1528 [(set_attr "insn" "mla")
1529 (set_attr "predicable" "yes")]
1532 (define_expand "maddsidi4"
1533 [(set (match_operand:DI 0 "s_register_operand" "")
1536 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1537 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1538 (match_operand:DI 3 "s_register_operand" "")))]
1539 "TARGET_32BIT && arm_arch3m"
1542 (define_insn "*mulsidi3adddi"
1543 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1546 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1547 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1548 (match_operand:DI 1 "s_register_operand" "0")))]
1549 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1550 "smlal%?\\t%Q0, %R0, %3, %2"
1551 [(set_attr "insn" "smlal")
1552 (set_attr "predicable" "yes")]
1555 (define_insn "*mulsidi3adddi_v6"
1556 [(set (match_operand:DI 0 "s_register_operand" "=r")
1559 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1560 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1561 (match_operand:DI 1 "s_register_operand" "0")))]
1562 "TARGET_32BIT && arm_arch6"
1563 "smlal%?\\t%Q0, %R0, %3, %2"
1564 [(set_attr "insn" "smlal")
1565 (set_attr "predicable" "yes")]
1568 ;; 32x32->64 widening multiply.
1569 ;; As with mulsi3, the only difference between the v3-5 and v6+
1570 ;; versions of these patterns is the requirement that the output not
1571 ;; overlap the inputs, but that still means we have to have a named
1572 ;; expander and two different starred insns.
1574 (define_expand "mulsidi3"
1575 [(set (match_operand:DI 0 "s_register_operand" "")
1577 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1578 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1579 "TARGET_32BIT && arm_arch3m"
1583 (define_insn "*mulsidi3_nov6"
1584 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1586 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1587 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1588 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1589 "smull%?\\t%Q0, %R0, %1, %2"
1590 [(set_attr "insn" "smull")
1591 (set_attr "predicable" "yes")]
1594 (define_insn "*mulsidi3_v6"
1595 [(set (match_operand:DI 0 "s_register_operand" "=r")
1597 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1598 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1599 "TARGET_32BIT && arm_arch6"
1600 "smull%?\\t%Q0, %R0, %1, %2"
1601 [(set_attr "insn" "smull")
1602 (set_attr "predicable" "yes")]
1605 (define_expand "umulsidi3"
1606 [(set (match_operand:DI 0 "s_register_operand" "")
1608 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1609 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1610 "TARGET_32BIT && arm_arch3m"
1614 (define_insn "*umulsidi3_nov6"
1615 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1617 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1618 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1619 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1620 "umull%?\\t%Q0, %R0, %1, %2"
1621 [(set_attr "insn" "umull")
1622 (set_attr "predicable" "yes")]
1625 (define_insn "*umulsidi3_v6"
1626 [(set (match_operand:DI 0 "s_register_operand" "=r")
1628 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1629 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1630 "TARGET_32BIT && arm_arch6"
1631 "umull%?\\t%Q0, %R0, %1, %2"
1632 [(set_attr "insn" "umull")
1633 (set_attr "predicable" "yes")]
1636 (define_expand "umaddsidi4"
1637 [(set (match_operand:DI 0 "s_register_operand" "")
1640 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1641 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1642 (match_operand:DI 3 "s_register_operand" "")))]
1643 "TARGET_32BIT && arm_arch3m"
1646 (define_insn "*umulsidi3adddi"
1647 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1650 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1651 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1652 (match_operand:DI 1 "s_register_operand" "0")))]
1653 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1654 "umlal%?\\t%Q0, %R0, %3, %2"
1655 [(set_attr "insn" "umlal")
1656 (set_attr "predicable" "yes")]
1659 (define_insn "*umulsidi3adddi_v6"
1660 [(set (match_operand:DI 0 "s_register_operand" "=r")
1663 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1664 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1665 (match_operand:DI 1 "s_register_operand" "0")))]
1666 "TARGET_32BIT && arm_arch6"
1667 "umlal%?\\t%Q0, %R0, %3, %2"
1668 [(set_attr "insn" "umlal")
1669 (set_attr "predicable" "yes")]
1672 (define_expand "smulsi3_highpart"
1674 [(set (match_operand:SI 0 "s_register_operand" "")
1678 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1679 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1681 (clobber (match_scratch:SI 3 ""))])]
1682 "TARGET_32BIT && arm_arch3m"
1686 (define_insn "*smulsi3_highpart_nov6"
1687 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1691 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1692 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1694 (clobber (match_scratch:SI 3 "=&r,&r"))]
1695 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1696 "smull%?\\t%3, %0, %2, %1"
1697 [(set_attr "insn" "smull")
1698 (set_attr "predicable" "yes")]
1701 (define_insn "*smulsi3_highpart_v6"
1702 [(set (match_operand:SI 0 "s_register_operand" "=r")
1706 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1707 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1709 (clobber (match_scratch:SI 3 "=r"))]
1710 "TARGET_32BIT && arm_arch6"
1711 "smull%?\\t%3, %0, %2, %1"
1712 [(set_attr "insn" "smull")
1713 (set_attr "predicable" "yes")]
1716 (define_expand "umulsi3_highpart"
1718 [(set (match_operand:SI 0 "s_register_operand" "")
1722 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1723 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1725 (clobber (match_scratch:SI 3 ""))])]
1726 "TARGET_32BIT && arm_arch3m"
1730 (define_insn "*umulsi3_highpart_nov6"
1731 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1735 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1736 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1738 (clobber (match_scratch:SI 3 "=&r,&r"))]
1739 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1740 "umull%?\\t%3, %0, %2, %1"
1741 [(set_attr "insn" "umull")
1742 (set_attr "predicable" "yes")]
1745 (define_insn "*umulsi3_highpart_v6"
1746 [(set (match_operand:SI 0 "s_register_operand" "=r")
1750 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1751 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1753 (clobber (match_scratch:SI 3 "=r"))]
1754 "TARGET_32BIT && arm_arch6"
1755 "umull%?\\t%3, %0, %2, %1"
1756 [(set_attr "insn" "umull")
1757 (set_attr "predicable" "yes")]
1760 (define_insn "mulhisi3"
1761 [(set (match_operand:SI 0 "s_register_operand" "=r")
1762 (mult:SI (sign_extend:SI
1763 (match_operand:HI 1 "s_register_operand" "%r"))
1765 (match_operand:HI 2 "s_register_operand" "r"))))]
1766 "TARGET_DSP_MULTIPLY"
1767 "smulbb%?\\t%0, %1, %2"
1768 [(set_attr "insn" "smulxy")
1769 (set_attr "predicable" "yes")]
1772 (define_insn "*mulhisi3tb"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1774 (mult:SI (ashiftrt:SI
1775 (match_operand:SI 1 "s_register_operand" "r")
1778 (match_operand:HI 2 "s_register_operand" "r"))))]
1779 "TARGET_DSP_MULTIPLY"
1780 "smultb%?\\t%0, %1, %2"
1781 [(set_attr "insn" "smulxy")
1782 (set_attr "predicable" "yes")]
1785 (define_insn "*mulhisi3bt"
1786 [(set (match_operand:SI 0 "s_register_operand" "=r")
1787 (mult:SI (sign_extend:SI
1788 (match_operand:HI 1 "s_register_operand" "r"))
1790 (match_operand:SI 2 "s_register_operand" "r")
1792 "TARGET_DSP_MULTIPLY"
1793 "smulbt%?\\t%0, %1, %2"
1794 [(set_attr "insn" "smulxy")
1795 (set_attr "predicable" "yes")]
1798 (define_insn "*mulhisi3tt"
1799 [(set (match_operand:SI 0 "s_register_operand" "=r")
1800 (mult:SI (ashiftrt:SI
1801 (match_operand:SI 1 "s_register_operand" "r")
1804 (match_operand:SI 2 "s_register_operand" "r")
1806 "TARGET_DSP_MULTIPLY"
1807 "smultt%?\\t%0, %1, %2"
1808 [(set_attr "insn" "smulxy")
1809 (set_attr "predicable" "yes")]
1812 (define_insn "maddhisi4"
1813 [(set (match_operand:SI 0 "s_register_operand" "=r")
1814 (plus:SI (mult:SI (sign_extend:SI
1815 (match_operand:HI 1 "s_register_operand" "r"))
1817 (match_operand:HI 2 "s_register_operand" "r")))
1818 (match_operand:SI 3 "s_register_operand" "r")))]
1819 "TARGET_DSP_MULTIPLY"
1820 "smlabb%?\\t%0, %1, %2, %3"
1821 [(set_attr "insn" "smlaxy")
1822 (set_attr "predicable" "yes")]
1825 ;; Note: there is no maddhisi4ibt because this one is canonical form
1826 (define_insn "*maddhisi4tb"
1827 [(set (match_operand:SI 0 "s_register_operand" "=r")
1828 (plus:SI (mult:SI (ashiftrt:SI
1829 (match_operand:SI 1 "s_register_operand" "r")
1832 (match_operand:HI 2 "s_register_operand" "r")))
1833 (match_operand:SI 3 "s_register_operand" "r")))]
1834 "TARGET_DSP_MULTIPLY"
1835 "smlatb%?\\t%0, %1, %2, %3"
1836 [(set_attr "insn" "smlaxy")
1837 (set_attr "predicable" "yes")]
1840 (define_insn "*maddhisi4tt"
1841 [(set (match_operand:SI 0 "s_register_operand" "=r")
1842 (plus:SI (mult:SI (ashiftrt:SI
1843 (match_operand:SI 1 "s_register_operand" "r")
1846 (match_operand:SI 2 "s_register_operand" "r")
1848 (match_operand:SI 3 "s_register_operand" "r")))]
1849 "TARGET_DSP_MULTIPLY"
1850 "smlatt%?\\t%0, %1, %2, %3"
1851 [(set_attr "insn" "smlaxy")
1852 (set_attr "predicable" "yes")]
1855 (define_insn "maddhidi4"
1856 [(set (match_operand:DI 0 "s_register_operand" "=r")
1858 (mult:DI (sign_extend:DI
1859 (match_operand:HI 1 "s_register_operand" "r"))
1861 (match_operand:HI 2 "s_register_operand" "r")))
1862 (match_operand:DI 3 "s_register_operand" "0")))]
1863 "TARGET_DSP_MULTIPLY"
1864 "smlalbb%?\\t%Q0, %R0, %1, %2"
1865 [(set_attr "insn" "smlalxy")
1866 (set_attr "predicable" "yes")])
1868 ;; Note: there is no maddhidi4ibt because this one is canonical form
1869 (define_insn "*maddhidi4tb"
1870 [(set (match_operand:DI 0 "s_register_operand" "=r")
1872 (mult:DI (sign_extend:DI
1874 (match_operand:SI 1 "s_register_operand" "r")
1877 (match_operand:HI 2 "s_register_operand" "r")))
1878 (match_operand:DI 3 "s_register_operand" "0")))]
1879 "TARGET_DSP_MULTIPLY"
1880 "smlaltb%?\\t%Q0, %R0, %1, %2"
1881 [(set_attr "insn" "smlalxy")
1882 (set_attr "predicable" "yes")])
1884 (define_insn "*maddhidi4tt"
1885 [(set (match_operand:DI 0 "s_register_operand" "=r")
1887 (mult:DI (sign_extend:DI
1889 (match_operand:SI 1 "s_register_operand" "r")
1893 (match_operand:SI 2 "s_register_operand" "r")
1895 (match_operand:DI 3 "s_register_operand" "0")))]
1896 "TARGET_DSP_MULTIPLY"
1897 "smlaltt%?\\t%Q0, %R0, %1, %2"
1898 [(set_attr "insn" "smlalxy")
1899 (set_attr "predicable" "yes")])
1901 (define_expand "mulsf3"
1902 [(set (match_operand:SF 0 "s_register_operand" "")
1903 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1904 (match_operand:SF 2 "s_register_operand" "")))]
1905 "TARGET_32BIT && TARGET_HARD_FLOAT"
1909 (define_expand "muldf3"
1910 [(set (match_operand:DF 0 "s_register_operand" "")
1911 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1912 (match_operand:DF 2 "s_register_operand" "")))]
1913 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1919 (define_expand "divsf3"
1920 [(set (match_operand:SF 0 "s_register_operand" "")
1921 (div:SF (match_operand:SF 1 "s_register_operand" "")
1922 (match_operand:SF 2 "s_register_operand" "")))]
1923 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
1926 (define_expand "divdf3"
1927 [(set (match_operand:DF 0 "s_register_operand" "")
1928 (div:DF (match_operand:DF 1 "s_register_operand" "")
1929 (match_operand:DF 2 "s_register_operand" "")))]
1930 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1933 ;; Boolean and,ior,xor insns
1935 ;; Split up double word logical operations
1937 ;; Split up simple DImode logical operations. Simply perform the logical
1938 ;; operation on the upper and lower halves of the registers.
1940 [(set (match_operand:DI 0 "s_register_operand" "")
1941 (match_operator:DI 6 "logical_binary_operator"
1942 [(match_operand:DI 1 "s_register_operand" "")
1943 (match_operand:DI 2 "s_register_operand" "")]))]
1944 "TARGET_32BIT && reload_completed
1945 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1946 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1947 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1948 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1951 operands[3] = gen_highpart (SImode, operands[0]);
1952 operands[0] = gen_lowpart (SImode, operands[0]);
1953 operands[4] = gen_highpart (SImode, operands[1]);
1954 operands[1] = gen_lowpart (SImode, operands[1]);
1955 operands[5] = gen_highpart (SImode, operands[2]);
1956 operands[2] = gen_lowpart (SImode, operands[2]);
1961 [(set (match_operand:DI 0 "s_register_operand" "")
1962 (match_operator:DI 6 "logical_binary_operator"
1963 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1964 (match_operand:DI 1 "s_register_operand" "")]))]
1965 "TARGET_32BIT && reload_completed"
1966 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1967 (set (match_dup 3) (match_op_dup:SI 6
1968 [(ashiftrt:SI (match_dup 2) (const_int 31))
1972 operands[3] = gen_highpart (SImode, operands[0]);
1973 operands[0] = gen_lowpart (SImode, operands[0]);
1974 operands[4] = gen_highpart (SImode, operands[1]);
1975 operands[1] = gen_lowpart (SImode, operands[1]);
1976 operands[5] = gen_highpart (SImode, operands[2]);
1977 operands[2] = gen_lowpart (SImode, operands[2]);
1981 ;; The zero extend of operand 2 means we can just copy the high part of
1982 ;; operand1 into operand0.
1984 [(set (match_operand:DI 0 "s_register_operand" "")
1986 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1987 (match_operand:DI 1 "s_register_operand" "")))]
1988 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1989 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1990 (set (match_dup 3) (match_dup 4))]
1993 operands[4] = gen_highpart (SImode, operands[1]);
1994 operands[3] = gen_highpart (SImode, operands[0]);
1995 operands[0] = gen_lowpart (SImode, operands[0]);
1996 operands[1] = gen_lowpart (SImode, operands[1]);
2000 ;; The zero extend of operand 2 means we can just copy the high part of
2001 ;; operand1 into operand0.
2003 [(set (match_operand:DI 0 "s_register_operand" "")
2005 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2006 (match_operand:DI 1 "s_register_operand" "")))]
2007 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2008 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2009 (set (match_dup 3) (match_dup 4))]
2012 operands[4] = gen_highpart (SImode, operands[1]);
2013 operands[3] = gen_highpart (SImode, operands[0]);
2014 operands[0] = gen_lowpart (SImode, operands[0]);
2015 operands[1] = gen_lowpart (SImode, operands[1]);
2019 (define_expand "anddi3"
2020 [(set (match_operand:DI 0 "s_register_operand" "")
2021 (and:DI (match_operand:DI 1 "s_register_operand" "")
2022 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2027 (define_insn "*anddi3_insn"
2028 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2029 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2030 (match_operand:DI 2 "s_register_operand" "r,r")))]
2031 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2033 [(set_attr "length" "8")]
2036 (define_insn_and_split "*anddi_zesidi_di"
2037 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2038 (and:DI (zero_extend:DI
2039 (match_operand:SI 2 "s_register_operand" "r,r"))
2040 (match_operand:DI 1 "s_register_operand" "0,r")))]
2043 "TARGET_32BIT && reload_completed"
2044 ; The zero extend of operand 2 clears the high word of the output
2046 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2047 (set (match_dup 3) (const_int 0))]
2050 operands[3] = gen_highpart (SImode, operands[0]);
2051 operands[0] = gen_lowpart (SImode, operands[0]);
2052 operands[1] = gen_lowpart (SImode, operands[1]);
2054 [(set_attr "length" "8")]
2057 (define_insn "*anddi_sesdi_di"
2058 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2059 (and:DI (sign_extend:DI
2060 (match_operand:SI 2 "s_register_operand" "r,r"))
2061 (match_operand:DI 1 "s_register_operand" "0,r")))]
2064 [(set_attr "length" "8")]
2067 (define_expand "andsi3"
2068 [(set (match_operand:SI 0 "s_register_operand" "")
2069 (and:SI (match_operand:SI 1 "s_register_operand" "")
2070 (match_operand:SI 2 "reg_or_int_operand" "")))]
2075 if (CONST_INT_P (operands[2]))
2077 if (INTVAL (operands[2]) == 255 && arm_arch6)
2079 operands[1] = convert_to_mode (QImode, operands[1], 1);
2080 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2084 arm_split_constant (AND, SImode, NULL_RTX,
2085 INTVAL (operands[2]), operands[0],
2087 optimize && can_create_pseudo_p ());
2092 else /* TARGET_THUMB1 */
2094 if (!CONST_INT_P (operands[2]))
2096 rtx tmp = force_reg (SImode, operands[2]);
2097 if (rtx_equal_p (operands[0], operands[1]))
2101 operands[2] = operands[1];
2109 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2111 operands[2] = force_reg (SImode,
2112 GEN_INT (~INTVAL (operands[2])));
2114 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2119 for (i = 9; i <= 31; i++)
2121 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2123 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2127 else if ((((HOST_WIDE_INT) 1) << i) - 1
2128 == ~INTVAL (operands[2]))
2130 rtx shift = GEN_INT (i);
2131 rtx reg = gen_reg_rtx (SImode);
2133 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2134 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2140 operands[2] = force_reg (SImode, operands[2]);
2146 ; ??? Check split length for Thumb-2
2147 (define_insn_and_split "*arm_andsi3_insn"
2148 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
2149 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r,r")
2150 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2154 bic%?\\t%0, %1, #%B2
2158 && CONST_INT_P (operands[2])
2159 && !(const_ok_for_arm (INTVAL (operands[2]))
2160 || const_ok_for_arm (~INTVAL (operands[2])))"
2161 [(clobber (const_int 0))]
2163 arm_split_constant (AND, SImode, curr_insn,
2164 INTVAL (operands[2]), operands[0], operands[1], 0);
2167 [(set_attr "length" "4,4,4,16")
2168 (set_attr "predicable" "yes")
2169 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,simple_alu_imm")]
2172 (define_insn "*thumb1_andsi3_insn"
2173 [(set (match_operand:SI 0 "register_operand" "=l")
2174 (and:SI (match_operand:SI 1 "register_operand" "%0")
2175 (match_operand:SI 2 "register_operand" "l")))]
2178 [(set_attr "length" "2")
2179 (set_attr "type" "simple_alu_imm")
2180 (set_attr "conds" "set")])
2182 (define_insn "*andsi3_compare0"
2183 [(set (reg:CC_NOOV CC_REGNUM)
2185 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2186 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2188 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2189 (and:SI (match_dup 1) (match_dup 2)))]
2193 bic%.\\t%0, %1, #%B2
2195 [(set_attr "conds" "set")
2196 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2199 (define_insn "*andsi3_compare0_scratch"
2200 [(set (reg:CC_NOOV CC_REGNUM)
2202 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2203 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2205 (clobber (match_scratch:SI 2 "=X,r,X"))]
2209 bic%.\\t%2, %0, #%B1
2211 [(set_attr "conds" "set")
2212 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2215 (define_insn "*zeroextractsi_compare0_scratch"
2216 [(set (reg:CC_NOOV CC_REGNUM)
2217 (compare:CC_NOOV (zero_extract:SI
2218 (match_operand:SI 0 "s_register_operand" "r")
2219 (match_operand 1 "const_int_operand" "n")
2220 (match_operand 2 "const_int_operand" "n"))
2223 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2224 && INTVAL (operands[1]) > 0
2225 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2226 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2228 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2229 << INTVAL (operands[2]));
2230 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2233 [(set_attr "conds" "set")
2234 (set_attr "predicable" "yes")
2235 (set_attr "type" "simple_alu_imm")]
2238 (define_insn_and_split "*ne_zeroextractsi"
2239 [(set (match_operand:SI 0 "s_register_operand" "=r")
2240 (ne:SI (zero_extract:SI
2241 (match_operand:SI 1 "s_register_operand" "r")
2242 (match_operand:SI 2 "const_int_operand" "n")
2243 (match_operand:SI 3 "const_int_operand" "n"))
2245 (clobber (reg:CC CC_REGNUM))]
2247 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2248 && INTVAL (operands[2]) > 0
2249 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2250 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2253 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2254 && INTVAL (operands[2]) > 0
2255 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2256 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2257 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2258 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2260 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2262 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2263 (match_dup 0) (const_int 1)))]
2265 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2266 << INTVAL (operands[3]));
2268 [(set_attr "conds" "clob")
2269 (set (attr "length")
2270 (if_then_else (eq_attr "is_thumb" "yes")
2275 (define_insn_and_split "*ne_zeroextractsi_shifted"
2276 [(set (match_operand:SI 0 "s_register_operand" "=r")
2277 (ne:SI (zero_extract:SI
2278 (match_operand:SI 1 "s_register_operand" "r")
2279 (match_operand:SI 2 "const_int_operand" "n")
2282 (clobber (reg:CC CC_REGNUM))]
2286 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2287 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2289 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2291 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2292 (match_dup 0) (const_int 1)))]
2294 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2296 [(set_attr "conds" "clob")
2297 (set_attr "length" "8")]
2300 (define_insn_and_split "*ite_ne_zeroextractsi"
2301 [(set (match_operand:SI 0 "s_register_operand" "=r")
2302 (if_then_else:SI (ne (zero_extract:SI
2303 (match_operand:SI 1 "s_register_operand" "r")
2304 (match_operand:SI 2 "const_int_operand" "n")
2305 (match_operand:SI 3 "const_int_operand" "n"))
2307 (match_operand:SI 4 "arm_not_operand" "rIK")
2309 (clobber (reg:CC CC_REGNUM))]
2311 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2312 && INTVAL (operands[2]) > 0
2313 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2314 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2315 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2318 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2319 && INTVAL (operands[2]) > 0
2320 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2321 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2322 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2323 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2324 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2326 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2328 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2329 (match_dup 0) (match_dup 4)))]
2331 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2332 << INTVAL (operands[3]));
2334 [(set_attr "conds" "clob")
2335 (set_attr "length" "8")]
2338 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2339 [(set (match_operand:SI 0 "s_register_operand" "=r")
2340 (if_then_else:SI (ne (zero_extract:SI
2341 (match_operand:SI 1 "s_register_operand" "r")
2342 (match_operand:SI 2 "const_int_operand" "n")
2345 (match_operand:SI 3 "arm_not_operand" "rIK")
2347 (clobber (reg:CC CC_REGNUM))]
2348 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2350 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2351 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2352 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2354 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2356 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2357 (match_dup 0) (match_dup 3)))]
2359 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2361 [(set_attr "conds" "clob")
2362 (set_attr "length" "8")]
2366 [(set (match_operand:SI 0 "s_register_operand" "")
2367 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2368 (match_operand:SI 2 "const_int_operand" "")
2369 (match_operand:SI 3 "const_int_operand" "")))
2370 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2372 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2373 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2375 HOST_WIDE_INT temp = INTVAL (operands[2]);
2377 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2378 operands[3] = GEN_INT (32 - temp);
2382 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2384 [(set (match_operand:SI 0 "s_register_operand" "")
2385 (match_operator:SI 1 "shiftable_operator"
2386 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2387 (match_operand:SI 3 "const_int_operand" "")
2388 (match_operand:SI 4 "const_int_operand" ""))
2389 (match_operand:SI 5 "s_register_operand" "")]))
2390 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2392 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2395 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2398 HOST_WIDE_INT temp = INTVAL (operands[3]);
2400 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2401 operands[4] = GEN_INT (32 - temp);
2406 [(set (match_operand:SI 0 "s_register_operand" "")
2407 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2408 (match_operand:SI 2 "const_int_operand" "")
2409 (match_operand:SI 3 "const_int_operand" "")))]
2411 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2412 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2414 HOST_WIDE_INT temp = INTVAL (operands[2]);
2416 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2417 operands[3] = GEN_INT (32 - temp);
2422 [(set (match_operand:SI 0 "s_register_operand" "")
2423 (match_operator:SI 1 "shiftable_operator"
2424 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2425 (match_operand:SI 3 "const_int_operand" "")
2426 (match_operand:SI 4 "const_int_operand" ""))
2427 (match_operand:SI 5 "s_register_operand" "")]))
2428 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2430 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2433 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2436 HOST_WIDE_INT temp = INTVAL (operands[3]);
2438 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2439 operands[4] = GEN_INT (32 - temp);
2443 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2444 ;;; represented by the bitfield, then this will produce incorrect results.
2445 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2446 ;;; which have a real bit-field insert instruction, the truncation happens
2447 ;;; in the bit-field insert instruction itself. Since arm does not have a
2448 ;;; bit-field insert instruction, we would have to emit code here to truncate
2449 ;;; the value before we insert. This loses some of the advantage of having
2450 ;;; this insv pattern, so this pattern needs to be reevalutated.
2452 (define_expand "insv"
2453 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2454 (match_operand 1 "general_operand" "")
2455 (match_operand 2 "general_operand" ""))
2456 (match_operand 3 "reg_or_int_operand" ""))]
2457 "TARGET_ARM || arm_arch_thumb2"
2460 int start_bit = INTVAL (operands[2]);
2461 int width = INTVAL (operands[1]);
2462 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2463 rtx target, subtarget;
2465 if (arm_arch_thumb2)
2467 if (unaligned_access && MEM_P (operands[0])
2468 && s_register_operand (operands[3], GET_MODE (operands[3]))
2469 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2473 if (BYTES_BIG_ENDIAN)
2474 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2479 base_addr = adjust_address (operands[0], SImode,
2480 start_bit / BITS_PER_UNIT);
2481 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2485 rtx tmp = gen_reg_rtx (HImode);
2487 base_addr = adjust_address (operands[0], HImode,
2488 start_bit / BITS_PER_UNIT);
2489 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2490 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2494 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2496 bool use_bfi = TRUE;
2498 if (CONST_INT_P (operands[3]))
2500 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2504 emit_insn (gen_insv_zero (operands[0], operands[1],
2509 /* See if the set can be done with a single orr instruction. */
2510 if (val == mask && const_ok_for_arm (val << start_bit))
2516 if (!REG_P (operands[3]))
2517 operands[3] = force_reg (SImode, operands[3]);
2519 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2528 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2531 target = copy_rtx (operands[0]);
2532 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2533 subreg as the final target. */
2534 if (GET_CODE (target) == SUBREG)
2536 subtarget = gen_reg_rtx (SImode);
2537 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2538 < GET_MODE_SIZE (SImode))
2539 target = SUBREG_REG (target);
2544 if (CONST_INT_P (operands[3]))
2546 /* Since we are inserting a known constant, we may be able to
2547 reduce the number of bits that we have to clear so that
2548 the mask becomes simple. */
2549 /* ??? This code does not check to see if the new mask is actually
2550 simpler. It may not be. */
2551 rtx op1 = gen_reg_rtx (SImode);
2552 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2553 start of this pattern. */
2554 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2555 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2557 emit_insn (gen_andsi3 (op1, operands[0],
2558 gen_int_mode (~mask2, SImode)));
2559 emit_insn (gen_iorsi3 (subtarget, op1,
2560 gen_int_mode (op3_value << start_bit, SImode)));
2562 else if (start_bit == 0
2563 && !(const_ok_for_arm (mask)
2564 || const_ok_for_arm (~mask)))
2566 /* A Trick, since we are setting the bottom bits in the word,
2567 we can shift operand[3] up, operand[0] down, OR them together
2568 and rotate the result back again. This takes 3 insns, and
2569 the third might be mergeable into another op. */
2570 /* The shift up copes with the possibility that operand[3] is
2571 wider than the bitfield. */
2572 rtx op0 = gen_reg_rtx (SImode);
2573 rtx op1 = gen_reg_rtx (SImode);
2575 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2576 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2577 emit_insn (gen_iorsi3 (op1, op1, op0));
2578 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2580 else if ((width + start_bit == 32)
2581 && !(const_ok_for_arm (mask)
2582 || const_ok_for_arm (~mask)))
2584 /* Similar trick, but slightly less efficient. */
2586 rtx op0 = gen_reg_rtx (SImode);
2587 rtx op1 = gen_reg_rtx (SImode);
2589 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2590 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2591 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2592 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2596 rtx op0 = gen_int_mode (mask, SImode);
2597 rtx op1 = gen_reg_rtx (SImode);
2598 rtx op2 = gen_reg_rtx (SImode);
2600 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2602 rtx tmp = gen_reg_rtx (SImode);
2604 emit_insn (gen_movsi (tmp, op0));
2608 /* Mask out any bits in operand[3] that are not needed. */
2609 emit_insn (gen_andsi3 (op1, operands[3], op0));
2611 if (CONST_INT_P (op0)
2612 && (const_ok_for_arm (mask << start_bit)
2613 || const_ok_for_arm (~(mask << start_bit))))
2615 op0 = gen_int_mode (~(mask << start_bit), SImode);
2616 emit_insn (gen_andsi3 (op2, operands[0], op0));
2620 if (CONST_INT_P (op0))
2622 rtx tmp = gen_reg_rtx (SImode);
2624 emit_insn (gen_movsi (tmp, op0));
2629 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2631 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2635 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2637 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2640 if (subtarget != target)
2642 /* If TARGET is still a SUBREG, then it must be wider than a word,
2643 so we must be careful only to set the subword we were asked to. */
2644 if (GET_CODE (target) == SUBREG)
2645 emit_move_insn (target, subtarget);
2647 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2654 (define_insn "insv_zero"
2655 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2656 (match_operand:SI 1 "const_int_operand" "M")
2657 (match_operand:SI 2 "const_int_operand" "M"))
2661 [(set_attr "length" "4")
2662 (set_attr "predicable" "yes")]
2665 (define_insn "insv_t2"
2666 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2667 (match_operand:SI 1 "const_int_operand" "M")
2668 (match_operand:SI 2 "const_int_operand" "M"))
2669 (match_operand:SI 3 "s_register_operand" "r"))]
2671 "bfi%?\t%0, %3, %2, %1"
2672 [(set_attr "length" "4")
2673 (set_attr "predicable" "yes")]
2676 ; constants for op 2 will never be given to these patterns.
2677 (define_insn_and_split "*anddi_notdi_di"
2678 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2679 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2680 (match_operand:DI 2 "s_register_operand" "r,0")))]
2683 "TARGET_32BIT && reload_completed
2684 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2685 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2686 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2687 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2690 operands[3] = gen_highpart (SImode, operands[0]);
2691 operands[0] = gen_lowpart (SImode, operands[0]);
2692 operands[4] = gen_highpart (SImode, operands[1]);
2693 operands[1] = gen_lowpart (SImode, operands[1]);
2694 operands[5] = gen_highpart (SImode, operands[2]);
2695 operands[2] = gen_lowpart (SImode, operands[2]);
2697 [(set_attr "length" "8")
2698 (set_attr "predicable" "yes")]
2701 (define_insn_and_split "*anddi_notzesidi_di"
2702 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2703 (and:DI (not:DI (zero_extend:DI
2704 (match_operand:SI 2 "s_register_operand" "r,r")))
2705 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2708 bic%?\\t%Q0, %Q1, %2
2710 ; (not (zero_extend ...)) allows us to just copy the high word from
2711 ; operand1 to operand0.
2714 && operands[0] != operands[1]"
2715 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2716 (set (match_dup 3) (match_dup 4))]
2719 operands[3] = gen_highpart (SImode, operands[0]);
2720 operands[0] = gen_lowpart (SImode, operands[0]);
2721 operands[4] = gen_highpart (SImode, operands[1]);
2722 operands[1] = gen_lowpart (SImode, operands[1]);
2724 [(set_attr "length" "4,8")
2725 (set_attr "predicable" "yes")]
2728 (define_insn_and_split "*anddi_notsesidi_di"
2729 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2730 (and:DI (not:DI (sign_extend:DI
2731 (match_operand:SI 2 "s_register_operand" "r,r")))
2732 (match_operand:DI 1 "s_register_operand" "0,r")))]
2735 "TARGET_32BIT && reload_completed"
2736 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2737 (set (match_dup 3) (and:SI (not:SI
2738 (ashiftrt:SI (match_dup 2) (const_int 31)))
2742 operands[3] = gen_highpart (SImode, operands[0]);
2743 operands[0] = gen_lowpart (SImode, operands[0]);
2744 operands[4] = gen_highpart (SImode, operands[1]);
2745 operands[1] = gen_lowpart (SImode, operands[1]);
2747 [(set_attr "length" "8")
2748 (set_attr "predicable" "yes")]
2751 (define_insn "andsi_notsi_si"
2752 [(set (match_operand:SI 0 "s_register_operand" "=r")
2753 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2754 (match_operand:SI 1 "s_register_operand" "r")))]
2756 "bic%?\\t%0, %1, %2"
2757 [(set_attr "predicable" "yes")]
2760 (define_insn "thumb1_bicsi3"
2761 [(set (match_operand:SI 0 "register_operand" "=l")
2762 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2763 (match_operand:SI 2 "register_operand" "0")))]
2766 [(set_attr "length" "2")
2767 (set_attr "conds" "set")])
2769 (define_insn "andsi_not_shiftsi_si"
2770 [(set (match_operand:SI 0 "s_register_operand" "=r")
2771 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2772 [(match_operand:SI 2 "s_register_operand" "r")
2773 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2774 (match_operand:SI 1 "s_register_operand" "r")))]
2776 "bic%?\\t%0, %1, %2%S4"
2777 [(set_attr "predicable" "yes")
2778 (set_attr "shift" "2")
2779 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2780 (const_string "alu_shift")
2781 (const_string "alu_shift_reg")))]
2784 (define_insn "*andsi_notsi_si_compare0"
2785 [(set (reg:CC_NOOV CC_REGNUM)
2787 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2788 (match_operand:SI 1 "s_register_operand" "r"))
2790 (set (match_operand:SI 0 "s_register_operand" "=r")
2791 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2793 "bic%.\\t%0, %1, %2"
2794 [(set_attr "conds" "set")]
2797 (define_insn "*andsi_notsi_si_compare0_scratch"
2798 [(set (reg:CC_NOOV CC_REGNUM)
2800 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2801 (match_operand:SI 1 "s_register_operand" "r"))
2803 (clobber (match_scratch:SI 0 "=r"))]
2805 "bic%.\\t%0, %1, %2"
2806 [(set_attr "conds" "set")]
2809 (define_expand "iordi3"
2810 [(set (match_operand:DI 0 "s_register_operand" "")
2811 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2812 (match_operand:DI 2 "neon_logic_op2" "")))]
2817 (define_insn "*iordi3_insn"
2818 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2819 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2820 (match_operand:DI 2 "s_register_operand" "r,r")))]
2821 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2823 [(set_attr "length" "8")
2824 (set_attr "predicable" "yes")]
2827 (define_insn "*iordi_zesidi_di"
2828 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2829 (ior:DI (zero_extend:DI
2830 (match_operand:SI 2 "s_register_operand" "r,r"))
2831 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2834 orr%?\\t%Q0, %Q1, %2
2836 [(set_attr "length" "4,8")
2837 (set_attr "predicable" "yes")]
2840 (define_insn "*iordi_sesidi_di"
2841 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2842 (ior:DI (sign_extend:DI
2843 (match_operand:SI 2 "s_register_operand" "r,r"))
2844 (match_operand:DI 1 "s_register_operand" "0,r")))]
2847 [(set_attr "length" "8")
2848 (set_attr "predicable" "yes")]
2851 (define_expand "iorsi3"
2852 [(set (match_operand:SI 0 "s_register_operand" "")
2853 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2854 (match_operand:SI 2 "reg_or_int_operand" "")))]
2857 if (CONST_INT_P (operands[2]))
2861 arm_split_constant (IOR, SImode, NULL_RTX,
2862 INTVAL (operands[2]), operands[0], operands[1],
2863 optimize && can_create_pseudo_p ());
2866 else /* TARGET_THUMB1 */
2868 rtx tmp = force_reg (SImode, operands[2]);
2869 if (rtx_equal_p (operands[0], operands[1]))
2873 operands[2] = operands[1];
2881 (define_insn_and_split "*iorsi3_insn"
2882 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
2883 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r,r")
2884 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2888 orn%?\\t%0, %1, #%B2
2892 && CONST_INT_P (operands[2])
2893 && !(const_ok_for_arm (INTVAL (operands[2]))
2894 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2895 [(clobber (const_int 0))]
2897 arm_split_constant (IOR, SImode, curr_insn,
2898 INTVAL (operands[2]), operands[0], operands[1], 0);
2901 [(set_attr "length" "4,4,4,16")
2902 (set_attr "arch" "32,t2,32,32")
2903 (set_attr "predicable" "yes")
2904 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,*")]
2907 (define_insn "*thumb1_iorsi3_insn"
2908 [(set (match_operand:SI 0 "register_operand" "=l")
2909 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2910 (match_operand:SI 2 "register_operand" "l")))]
2913 [(set_attr "length" "2")
2914 (set_attr "conds" "set")])
2917 [(match_scratch:SI 3 "r")
2918 (set (match_operand:SI 0 "arm_general_register_operand" "")
2919 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2920 (match_operand:SI 2 "const_int_operand" "")))]
2922 && !const_ok_for_arm (INTVAL (operands[2]))
2923 && const_ok_for_arm (~INTVAL (operands[2]))"
2924 [(set (match_dup 3) (match_dup 2))
2925 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2929 (define_insn "*iorsi3_compare0"
2930 [(set (reg:CC_NOOV CC_REGNUM)
2931 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
2932 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2934 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2935 (ior:SI (match_dup 1) (match_dup 2)))]
2937 "orr%.\\t%0, %1, %2"
2938 [(set_attr "conds" "set")
2939 (set_attr "type" "simple_alu_imm,*")]
2942 (define_insn "*iorsi3_compare0_scratch"
2943 [(set (reg:CC_NOOV CC_REGNUM)
2944 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
2945 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2947 (clobber (match_scratch:SI 0 "=r,r"))]
2949 "orr%.\\t%0, %1, %2"
2950 [(set_attr "conds" "set")
2951 (set_attr "type" "simple_alu_imm, *")]
2954 (define_expand "xordi3"
2955 [(set (match_operand:DI 0 "s_register_operand" "")
2956 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2957 (match_operand:DI 2 "s_register_operand" "")))]
2962 (define_insn "*xordi3_insn"
2963 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2964 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2965 (match_operand:DI 2 "s_register_operand" "r,r")))]
2966 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2968 [(set_attr "length" "8")
2969 (set_attr "predicable" "yes")]
2972 (define_insn "*xordi_zesidi_di"
2973 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2974 (xor:DI (zero_extend:DI
2975 (match_operand:SI 2 "s_register_operand" "r,r"))
2976 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2979 eor%?\\t%Q0, %Q1, %2
2981 [(set_attr "length" "4,8")
2982 (set_attr "predicable" "yes")]
2985 (define_insn "*xordi_sesidi_di"
2986 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2987 (xor:DI (sign_extend:DI
2988 (match_operand:SI 2 "s_register_operand" "r,r"))
2989 (match_operand:DI 1 "s_register_operand" "0,r")))]
2992 [(set_attr "length" "8")
2993 (set_attr "predicable" "yes")]
2996 (define_expand "xorsi3"
2997 [(set (match_operand:SI 0 "s_register_operand" "")
2998 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2999 (match_operand:SI 2 "reg_or_int_operand" "")))]
3001 "if (CONST_INT_P (operands[2]))
3005 arm_split_constant (XOR, SImode, NULL_RTX,
3006 INTVAL (operands[2]), operands[0], operands[1],
3007 optimize && can_create_pseudo_p ());
3010 else /* TARGET_THUMB1 */
3012 rtx tmp = force_reg (SImode, operands[2]);
3013 if (rtx_equal_p (operands[0], operands[1]))
3017 operands[2] = operands[1];
3024 (define_insn_and_split "*arm_xorsi3"
3025 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3026 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
3027 (match_operand:SI 2 "reg_or_int_operand" "I,r,?n")))]
3034 && CONST_INT_P (operands[2])
3035 && !const_ok_for_arm (INTVAL (operands[2]))"
3036 [(clobber (const_int 0))]
3038 arm_split_constant (XOR, SImode, curr_insn,
3039 INTVAL (operands[2]), operands[0], operands[1], 0);
3042 [(set_attr "length" "4,4,16")
3043 (set_attr "predicable" "yes")
3044 (set_attr "type" "simple_alu_imm,*,*")]
3047 (define_insn "*thumb1_xorsi3_insn"
3048 [(set (match_operand:SI 0 "register_operand" "=l")
3049 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3050 (match_operand:SI 2 "register_operand" "l")))]
3053 [(set_attr "length" "2")
3054 (set_attr "conds" "set")
3055 (set_attr "type" "simple_alu_imm")]
3058 (define_insn "*xorsi3_compare0"
3059 [(set (reg:CC_NOOV CC_REGNUM)
3060 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3061 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3063 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3064 (xor:SI (match_dup 1) (match_dup 2)))]
3066 "eor%.\\t%0, %1, %2"
3067 [(set_attr "conds" "set")
3068 (set_attr "type" "simple_alu_imm,*")]
3071 (define_insn "*xorsi3_compare0_scratch"
3072 [(set (reg:CC_NOOV CC_REGNUM)
3073 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3074 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3078 [(set_attr "conds" "set")
3079 (set_attr "type" "simple_alu_imm, *")]
3082 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3083 ; (NOT D) we can sometimes merge the final NOT into one of the following
3087 [(set (match_operand:SI 0 "s_register_operand" "")
3088 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3089 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3090 (match_operand:SI 3 "arm_rhs_operand" "")))
3091 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3093 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3094 (not:SI (match_dup 3))))
3095 (set (match_dup 0) (not:SI (match_dup 4)))]
3099 (define_insn "*andsi_iorsi3_notsi"
3100 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3101 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3102 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3103 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3105 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3106 [(set_attr "length" "8")
3107 (set_attr "ce_count" "2")
3108 (set_attr "predicable" "yes")]
3111 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3112 ; insns are available?
3114 [(set (match_operand:SI 0 "s_register_operand" "")
3115 (match_operator:SI 1 "logical_binary_operator"
3116 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3117 (match_operand:SI 3 "const_int_operand" "")
3118 (match_operand:SI 4 "const_int_operand" ""))
3119 (match_operator:SI 9 "logical_binary_operator"
3120 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3121 (match_operand:SI 6 "const_int_operand" ""))
3122 (match_operand:SI 7 "s_register_operand" "")])]))
3123 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3125 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3126 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3129 [(ashift:SI (match_dup 2) (match_dup 4))
3133 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3136 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3140 [(set (match_operand:SI 0 "s_register_operand" "")
3141 (match_operator:SI 1 "logical_binary_operator"
3142 [(match_operator:SI 9 "logical_binary_operator"
3143 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3144 (match_operand:SI 6 "const_int_operand" ""))
3145 (match_operand:SI 7 "s_register_operand" "")])
3146 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3147 (match_operand:SI 3 "const_int_operand" "")
3148 (match_operand:SI 4 "const_int_operand" ""))]))
3149 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3151 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3152 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3155 [(ashift:SI (match_dup 2) (match_dup 4))
3159 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3162 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3166 [(set (match_operand:SI 0 "s_register_operand" "")
3167 (match_operator:SI 1 "logical_binary_operator"
3168 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3169 (match_operand:SI 3 "const_int_operand" "")
3170 (match_operand:SI 4 "const_int_operand" ""))
3171 (match_operator:SI 9 "logical_binary_operator"
3172 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3173 (match_operand:SI 6 "const_int_operand" ""))
3174 (match_operand:SI 7 "s_register_operand" "")])]))
3175 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3177 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3178 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3181 [(ashift:SI (match_dup 2) (match_dup 4))
3185 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3188 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3192 [(set (match_operand:SI 0 "s_register_operand" "")
3193 (match_operator:SI 1 "logical_binary_operator"
3194 [(match_operator:SI 9 "logical_binary_operator"
3195 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3196 (match_operand:SI 6 "const_int_operand" ""))
3197 (match_operand:SI 7 "s_register_operand" "")])
3198 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3199 (match_operand:SI 3 "const_int_operand" "")
3200 (match_operand:SI 4 "const_int_operand" ""))]))
3201 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3203 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3204 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3207 [(ashift:SI (match_dup 2) (match_dup 4))
3211 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3214 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3218 ;; Minimum and maximum insns
3220 (define_expand "smaxsi3"
3222 (set (match_operand:SI 0 "s_register_operand" "")
3223 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3224 (match_operand:SI 2 "arm_rhs_operand" "")))
3225 (clobber (reg:CC CC_REGNUM))])]
3228 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3230 /* No need for a clobber of the condition code register here. */
3231 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3232 gen_rtx_SMAX (SImode, operands[1],
3238 (define_insn "*smax_0"
3239 [(set (match_operand:SI 0 "s_register_operand" "=r")
3240 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3243 "bic%?\\t%0, %1, %1, asr #31"
3244 [(set_attr "predicable" "yes")]
3247 (define_insn "*smax_m1"
3248 [(set (match_operand:SI 0 "s_register_operand" "=r")
3249 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3252 "orr%?\\t%0, %1, %1, asr #31"
3253 [(set_attr "predicable" "yes")]
3256 (define_insn "*arm_smax_insn"
3257 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3258 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3259 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3260 (clobber (reg:CC CC_REGNUM))]
3263 cmp\\t%1, %2\;movlt\\t%0, %2
3264 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3265 [(set_attr "conds" "clob")
3266 (set_attr "length" "8,12")]
3269 (define_expand "sminsi3"
3271 (set (match_operand:SI 0 "s_register_operand" "")
3272 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3273 (match_operand:SI 2 "arm_rhs_operand" "")))
3274 (clobber (reg:CC CC_REGNUM))])]
3277 if (operands[2] == const0_rtx)
3279 /* No need for a clobber of the condition code register here. */
3280 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3281 gen_rtx_SMIN (SImode, operands[1],
3287 (define_insn "*smin_0"
3288 [(set (match_operand:SI 0 "s_register_operand" "=r")
3289 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3292 "and%?\\t%0, %1, %1, asr #31"
3293 [(set_attr "predicable" "yes")]
3296 (define_insn "*arm_smin_insn"
3297 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3298 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3299 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3300 (clobber (reg:CC CC_REGNUM))]
3303 cmp\\t%1, %2\;movge\\t%0, %2
3304 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3305 [(set_attr "conds" "clob")
3306 (set_attr "length" "8,12")]
3309 (define_expand "umaxsi3"
3311 (set (match_operand:SI 0 "s_register_operand" "")
3312 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3313 (match_operand:SI 2 "arm_rhs_operand" "")))
3314 (clobber (reg:CC CC_REGNUM))])]
3319 (define_insn "*arm_umaxsi3"
3320 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3321 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3322 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3323 (clobber (reg:CC CC_REGNUM))]
3326 cmp\\t%1, %2\;movcc\\t%0, %2
3327 cmp\\t%1, %2\;movcs\\t%0, %1
3328 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3329 [(set_attr "conds" "clob")
3330 (set_attr "length" "8,8,12")]
3333 (define_expand "uminsi3"
3335 (set (match_operand:SI 0 "s_register_operand" "")
3336 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3337 (match_operand:SI 2 "arm_rhs_operand" "")))
3338 (clobber (reg:CC CC_REGNUM))])]
3343 (define_insn "*arm_uminsi3"
3344 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3345 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3346 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3347 (clobber (reg:CC CC_REGNUM))]
3350 cmp\\t%1, %2\;movcs\\t%0, %2
3351 cmp\\t%1, %2\;movcc\\t%0, %1
3352 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3353 [(set_attr "conds" "clob")
3354 (set_attr "length" "8,8,12")]
3357 (define_insn "*store_minmaxsi"
3358 [(set (match_operand:SI 0 "memory_operand" "=m")
3359 (match_operator:SI 3 "minmax_operator"
3360 [(match_operand:SI 1 "s_register_operand" "r")
3361 (match_operand:SI 2 "s_register_operand" "r")]))
3362 (clobber (reg:CC CC_REGNUM))]
3365 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3366 operands[1], operands[2]);
3367 output_asm_insn (\"cmp\\t%1, %2\", operands);
3369 output_asm_insn (\"ite\t%d3\", operands);
3370 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3371 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3374 [(set_attr "conds" "clob")
3375 (set (attr "length")
3376 (if_then_else (eq_attr "is_thumb" "yes")
3379 (set_attr "type" "store1")]
3382 ; Reject the frame pointer in operand[1], since reloading this after
3383 ; it has been eliminated can cause carnage.
3384 (define_insn "*minmax_arithsi"
3385 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3386 (match_operator:SI 4 "shiftable_operator"
3387 [(match_operator:SI 5 "minmax_operator"
3388 [(match_operand:SI 2 "s_register_operand" "r,r")
3389 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3390 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3391 (clobber (reg:CC CC_REGNUM))]
3392 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3395 enum rtx_code code = GET_CODE (operands[4]);
3398 if (which_alternative != 0 || operands[3] != const0_rtx
3399 || (code != PLUS && code != IOR && code != XOR))
3404 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3405 operands[2], operands[3]);
3406 output_asm_insn (\"cmp\\t%2, %3\", operands);
3410 output_asm_insn (\"ite\\t%d5\", operands);
3412 output_asm_insn (\"it\\t%d5\", operands);
3414 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3416 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3419 [(set_attr "conds" "clob")
3420 (set (attr "length")
3421 (if_then_else (eq_attr "is_thumb" "yes")
3426 (define_code_iterator SAT [smin smax])
3427 (define_code_iterator SATrev [smin smax])
3428 (define_code_attr SATlo [(smin "1") (smax "2")])
3429 (define_code_attr SAThi [(smin "2") (smax "1")])
3431 (define_insn "*satsi_<SAT:code>"
3432 [(set (match_operand:SI 0 "s_register_operand" "=r")
3433 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
3434 (match_operand:SI 1 "const_int_operand" "i"))
3435 (match_operand:SI 2 "const_int_operand" "i")))]
3436 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3437 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3441 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3442 &mask, &signed_sat))
3445 operands[1] = GEN_INT (mask);
3447 return "ssat%?\t%0, %1, %3";
3449 return "usat%?\t%0, %1, %3";
3451 [(set_attr "predicable" "yes")
3452 (set_attr "insn" "sat")])
3454 (define_insn "*satsi_<SAT:code>_shift"
3455 [(set (match_operand:SI 0 "s_register_operand" "=r")
3456 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
3457 [(match_operand:SI 4 "s_register_operand" "r")
3458 (match_operand:SI 5 "const_int_operand" "i")])
3459 (match_operand:SI 1 "const_int_operand" "i"))
3460 (match_operand:SI 2 "const_int_operand" "i")))]
3461 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3462 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3466 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3467 &mask, &signed_sat))
3470 operands[1] = GEN_INT (mask);
3472 return "ssat%?\t%0, %1, %4%S3";
3474 return "usat%?\t%0, %1, %4%S3";
3476 [(set_attr "predicable" "yes")
3477 (set_attr "insn" "sat")
3478 (set_attr "shift" "3")
3479 (set_attr "type" "alu_shift")])
3481 ;; Shift and rotation insns
3483 (define_expand "ashldi3"
3484 [(set (match_operand:DI 0 "s_register_operand" "")
3485 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3486 (match_operand:SI 2 "general_operand" "")))]
3491 /* Delay the decision whether to use NEON or core-regs until
3492 register allocation. */
3493 emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
3498 /* Only the NEON case can handle in-memory shift counts. */
3499 if (!reg_or_int_operand (operands[2], SImode))
3500 operands[2] = force_reg (SImode, operands[2]);
3503 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3504 ; /* No special preparation statements; expand pattern as above. */
3507 rtx scratch1, scratch2;
3509 if (CONST_INT_P (operands[2])
3510 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3512 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3516 /* Ideally we should use iwmmxt here if we could know that operands[1]
3517 ends up already living in an iwmmxt register. Otherwise it's
3518 cheaper to have the alternate code being generated than moving
3519 values to iwmmxt regs and back. */
3521 /* If we're optimizing for size, we prefer the libgcc calls. */
3522 if (optimize_function_for_size_p (cfun))
3525 /* Expand operation using core-registers.
3526 'FAIL' would achieve the same thing, but this is a bit smarter. */
3527 scratch1 = gen_reg_rtx (SImode);
3528 scratch2 = gen_reg_rtx (SImode);
3529 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3530 operands[2], scratch1, scratch2);
3536 (define_insn "arm_ashldi3_1bit"
3537 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3538 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3540 (clobber (reg:CC CC_REGNUM))]
3542 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3543 [(set_attr "conds" "clob")
3544 (set_attr "length" "8")]
3547 (define_expand "ashlsi3"
3548 [(set (match_operand:SI 0 "s_register_operand" "")
3549 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3550 (match_operand:SI 2 "arm_rhs_operand" "")))]
3553 if (CONST_INT_P (operands[2])
3554 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3556 emit_insn (gen_movsi (operands[0], const0_rtx));
3562 (define_insn "*thumb1_ashlsi3"
3563 [(set (match_operand:SI 0 "register_operand" "=l,l")
3564 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3565 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3568 [(set_attr "length" "2")
3569 (set_attr "conds" "set")])
3571 (define_expand "ashrdi3"
3572 [(set (match_operand:DI 0 "s_register_operand" "")
3573 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3574 (match_operand:SI 2 "reg_or_int_operand" "")))]
3579 /* Delay the decision whether to use NEON or core-regs until
3580 register allocation. */
3581 emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
3585 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3586 ; /* No special preparation statements; expand pattern as above. */
3589 rtx scratch1, scratch2;
3591 if (CONST_INT_P (operands[2])
3592 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3594 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3598 /* Ideally we should use iwmmxt here if we could know that operands[1]
3599 ends up already living in an iwmmxt register. Otherwise it's
3600 cheaper to have the alternate code being generated than moving
3601 values to iwmmxt regs and back. */
3603 /* If we're optimizing for size, we prefer the libgcc calls. */
3604 if (optimize_function_for_size_p (cfun))
3607 /* Expand operation using core-registers.
3608 'FAIL' would achieve the same thing, but this is a bit smarter. */
3609 scratch1 = gen_reg_rtx (SImode);
3610 scratch2 = gen_reg_rtx (SImode);
3611 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3612 operands[2], scratch1, scratch2);
3618 (define_insn "arm_ashrdi3_1bit"
3619 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3620 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3622 (clobber (reg:CC CC_REGNUM))]
3624 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3625 [(set_attr "conds" "clob")
3626 (set_attr "insn" "mov")
3627 (set_attr "length" "8")]
3630 (define_expand "ashrsi3"
3631 [(set (match_operand:SI 0 "s_register_operand" "")
3632 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3633 (match_operand:SI 2 "arm_rhs_operand" "")))]
3636 if (CONST_INT_P (operands[2])
3637 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3638 operands[2] = GEN_INT (31);
3642 (define_insn "*thumb1_ashrsi3"
3643 [(set (match_operand:SI 0 "register_operand" "=l,l")
3644 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3645 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3648 [(set_attr "length" "2")
3649 (set_attr "conds" "set")])
3651 (define_expand "lshrdi3"
3652 [(set (match_operand:DI 0 "s_register_operand" "")
3653 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3654 (match_operand:SI 2 "reg_or_int_operand" "")))]
3659 /* Delay the decision whether to use NEON or core-regs until
3660 register allocation. */
3661 emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
3665 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3666 ; /* No special preparation statements; expand pattern as above. */
3669 rtx scratch1, scratch2;
3671 if (CONST_INT_P (operands[2])
3672 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3674 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3678 /* Ideally we should use iwmmxt here if we could know that operands[1]
3679 ends up already living in an iwmmxt register. Otherwise it's
3680 cheaper to have the alternate code being generated than moving
3681 values to iwmmxt regs and back. */
3683 /* If we're optimizing for size, we prefer the libgcc calls. */
3684 if (optimize_function_for_size_p (cfun))
3687 /* Expand operation using core-registers.
3688 'FAIL' would achieve the same thing, but this is a bit smarter. */
3689 scratch1 = gen_reg_rtx (SImode);
3690 scratch2 = gen_reg_rtx (SImode);
3691 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3692 operands[2], scratch1, scratch2);
3698 (define_insn "arm_lshrdi3_1bit"
3699 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3700 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3702 (clobber (reg:CC CC_REGNUM))]
3704 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3705 [(set_attr "conds" "clob")
3706 (set_attr "insn" "mov")
3707 (set_attr "length" "8")]
3710 (define_expand "lshrsi3"
3711 [(set (match_operand:SI 0 "s_register_operand" "")
3712 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3713 (match_operand:SI 2 "arm_rhs_operand" "")))]
3716 if (CONST_INT_P (operands[2])
3717 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3719 emit_insn (gen_movsi (operands[0], const0_rtx));
3725 (define_insn "*thumb1_lshrsi3"
3726 [(set (match_operand:SI 0 "register_operand" "=l,l")
3727 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3728 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3731 [(set_attr "length" "2")
3732 (set_attr "conds" "set")])
3734 (define_expand "rotlsi3"
3735 [(set (match_operand:SI 0 "s_register_operand" "")
3736 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3737 (match_operand:SI 2 "reg_or_int_operand" "")))]
3740 if (CONST_INT_P (operands[2]))
3741 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3744 rtx reg = gen_reg_rtx (SImode);
3745 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3751 (define_expand "rotrsi3"
3752 [(set (match_operand:SI 0 "s_register_operand" "")
3753 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3754 (match_operand:SI 2 "arm_rhs_operand" "")))]
3759 if (CONST_INT_P (operands[2])
3760 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3761 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3763 else /* TARGET_THUMB1 */
3765 if (CONST_INT_P (operands [2]))
3766 operands [2] = force_reg (SImode, operands[2]);
3771 (define_insn "*thumb1_rotrsi3"
3772 [(set (match_operand:SI 0 "register_operand" "=l")
3773 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3774 (match_operand:SI 2 "register_operand" "l")))]
3777 [(set_attr "length" "2")]
3780 (define_insn "*arm_shiftsi3"
3781 [(set (match_operand:SI 0 "s_register_operand" "=r")
3782 (match_operator:SI 3 "shift_operator"
3783 [(match_operand:SI 1 "s_register_operand" "r")
3784 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3786 "* return arm_output_shift(operands, 0);"
3787 [(set_attr "predicable" "yes")
3788 (set_attr "shift" "1")
3789 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3790 (const_string "alu_shift")
3791 (const_string "alu_shift_reg")))]
3794 (define_insn "*shiftsi3_compare0"
3795 [(set (reg:CC_NOOV CC_REGNUM)
3796 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3797 [(match_operand:SI 1 "s_register_operand" "r")
3798 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3800 (set (match_operand:SI 0 "s_register_operand" "=r")
3801 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3803 "* return arm_output_shift(operands, 1);"
3804 [(set_attr "conds" "set")
3805 (set_attr "shift" "1")
3806 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3807 (const_string "alu_shift")
3808 (const_string "alu_shift_reg")))]
3811 (define_insn "*shiftsi3_compare0_scratch"
3812 [(set (reg:CC_NOOV CC_REGNUM)
3813 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3814 [(match_operand:SI 1 "s_register_operand" "r")
3815 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3817 (clobber (match_scratch:SI 0 "=r"))]
3819 "* return arm_output_shift(operands, 1);"
3820 [(set_attr "conds" "set")
3821 (set_attr "shift" "1")]
3824 (define_insn "*not_shiftsi"
3825 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3826 (not:SI (match_operator:SI 3 "shift_operator"
3827 [(match_operand:SI 1 "s_register_operand" "r,r")
3828 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3831 [(set_attr "predicable" "yes")
3832 (set_attr "shift" "1")
3833 (set_attr "insn" "mvn")
3834 (set_attr "arch" "32,a")
3835 (set_attr "type" "alu_shift,alu_shift_reg")])
3837 (define_insn "*not_shiftsi_compare0"
3838 [(set (reg:CC_NOOV CC_REGNUM)
3840 (not:SI (match_operator:SI 3 "shift_operator"
3841 [(match_operand:SI 1 "s_register_operand" "r,r")
3842 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3844 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3845 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3848 [(set_attr "conds" "set")
3849 (set_attr "shift" "1")
3850 (set_attr "insn" "mvn")
3851 (set_attr "arch" "32,a")
3852 (set_attr "type" "alu_shift,alu_shift_reg")])
3854 (define_insn "*not_shiftsi_compare0_scratch"
3855 [(set (reg:CC_NOOV CC_REGNUM)
3857 (not:SI (match_operator:SI 3 "shift_operator"
3858 [(match_operand:SI 1 "s_register_operand" "r,r")
3859 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3861 (clobber (match_scratch:SI 0 "=r,r"))]
3864 [(set_attr "conds" "set")
3865 (set_attr "shift" "1")
3866 (set_attr "insn" "mvn")
3867 (set_attr "arch" "32,a")
3868 (set_attr "type" "alu_shift,alu_shift_reg")])
3870 ;; We don't really have extzv, but defining this using shifts helps
3871 ;; to reduce register pressure later on.
3873 (define_expand "extzv"
3874 [(set (match_operand 0 "s_register_operand" "")
3875 (zero_extract (match_operand 1 "nonimmediate_operand" "")
3876 (match_operand 2 "const_int_operand" "")
3877 (match_operand 3 "const_int_operand" "")))]
3878 "TARGET_THUMB1 || arm_arch_thumb2"
3881 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3882 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3884 if (arm_arch_thumb2)
3886 HOST_WIDE_INT width = INTVAL (operands[2]);
3887 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3889 if (unaligned_access && MEM_P (operands[1])
3890 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3894 if (BYTES_BIG_ENDIAN)
3895 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3900 base_addr = adjust_address (operands[1], SImode,
3901 bitpos / BITS_PER_UNIT);
3902 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3906 rtx dest = operands[0];
3907 rtx tmp = gen_reg_rtx (SImode);
3909 /* We may get a paradoxical subreg here. Strip it off. */
3910 if (GET_CODE (dest) == SUBREG
3911 && GET_MODE (dest) == SImode
3912 && GET_MODE (SUBREG_REG (dest)) == HImode)
3913 dest = SUBREG_REG (dest);
3915 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3918 base_addr = adjust_address (operands[1], HImode,
3919 bitpos / BITS_PER_UNIT);
3920 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3921 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3925 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3927 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3935 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3938 operands[3] = GEN_INT (rshift);
3942 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3946 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3947 operands[3], gen_reg_rtx (SImode)));
3952 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3954 (define_expand "extzv_t1"
3955 [(set (match_operand:SI 4 "s_register_operand" "")
3956 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
3957 (match_operand:SI 2 "const_int_operand" "")))
3958 (set (match_operand:SI 0 "s_register_operand" "")
3959 (lshiftrt:SI (match_dup 4)
3960 (match_operand:SI 3 "const_int_operand" "")))]
3964 (define_expand "extv"
3965 [(set (match_operand 0 "s_register_operand" "")
3966 (sign_extract (match_operand 1 "nonimmediate_operand" "")
3967 (match_operand 2 "const_int_operand" "")
3968 (match_operand 3 "const_int_operand" "")))]
3971 HOST_WIDE_INT width = INTVAL (operands[2]);
3972 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3974 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3975 && (bitpos % BITS_PER_UNIT) == 0)
3979 if (BYTES_BIG_ENDIAN)
3980 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3984 base_addr = adjust_address (operands[1], SImode,
3985 bitpos / BITS_PER_UNIT);
3986 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3990 rtx dest = operands[0];
3991 rtx tmp = gen_reg_rtx (SImode);
3993 /* We may get a paradoxical subreg here. Strip it off. */
3994 if (GET_CODE (dest) == SUBREG
3995 && GET_MODE (dest) == SImode
3996 && GET_MODE (SUBREG_REG (dest)) == HImode)
3997 dest = SUBREG_REG (dest);
3999 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4002 base_addr = adjust_address (operands[1], HImode,
4003 bitpos / BITS_PER_UNIT);
4004 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4005 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4010 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4012 else if (GET_MODE (operands[0]) == SImode
4013 && GET_MODE (operands[1]) == SImode)
4015 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4023 ; Helper to expand register forms of extv with the proper modes.
4025 (define_expand "extv_regsi"
4026 [(set (match_operand:SI 0 "s_register_operand" "")
4027 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4028 (match_operand 2 "const_int_operand" "")
4029 (match_operand 3 "const_int_operand" "")))]
4034 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4036 (define_insn "unaligned_loadsi"
4037 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4038 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4039 UNSPEC_UNALIGNED_LOAD))]
4040 "unaligned_access && TARGET_32BIT"
4041 "ldr%?\t%0, %1\t@ unaligned"
4042 [(set_attr "arch" "t2,any")
4043 (set_attr "length" "2,4")
4044 (set_attr "predicable" "yes")
4045 (set_attr "type" "load1")])
4047 (define_insn "unaligned_loadhis"
4048 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4050 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4051 UNSPEC_UNALIGNED_LOAD)))]
4052 "unaligned_access && TARGET_32BIT"
4053 "ldr%(sh%)\t%0, %1\t@ unaligned"
4054 [(set_attr "arch" "t2,any")
4055 (set_attr "length" "2,4")
4056 (set_attr "predicable" "yes")
4057 (set_attr "type" "load_byte")])
4059 (define_insn "unaligned_loadhiu"
4060 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4062 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4063 UNSPEC_UNALIGNED_LOAD)))]
4064 "unaligned_access && TARGET_32BIT"
4065 "ldr%(h%)\t%0, %1\t@ unaligned"
4066 [(set_attr "arch" "t2,any")
4067 (set_attr "length" "2,4")
4068 (set_attr "predicable" "yes")
4069 (set_attr "type" "load_byte")])
4071 (define_insn "unaligned_storesi"
4072 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4073 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4074 UNSPEC_UNALIGNED_STORE))]
4075 "unaligned_access && TARGET_32BIT"
4076 "str%?\t%1, %0\t@ unaligned"
4077 [(set_attr "arch" "t2,any")
4078 (set_attr "length" "2,4")
4079 (set_attr "predicable" "yes")
4080 (set_attr "type" "store1")])
4082 (define_insn "unaligned_storehi"
4083 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4084 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4085 UNSPEC_UNALIGNED_STORE))]
4086 "unaligned_access && TARGET_32BIT"
4087 "str%(h%)\t%1, %0\t@ unaligned"
4088 [(set_attr "arch" "t2,any")
4089 (set_attr "length" "2,4")
4090 (set_attr "predicable" "yes")
4091 (set_attr "type" "store1")])
4093 (define_insn "*extv_reg"
4094 [(set (match_operand:SI 0 "s_register_operand" "=r")
4095 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4096 (match_operand:SI 2 "const_int_operand" "M")
4097 (match_operand:SI 3 "const_int_operand" "M")))]
4099 "sbfx%?\t%0, %1, %3, %2"
4100 [(set_attr "length" "4")
4101 (set_attr "predicable" "yes")]
4104 (define_insn "extzv_t2"
4105 [(set (match_operand:SI 0 "s_register_operand" "=r")
4106 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4107 (match_operand:SI 2 "const_int_operand" "M")
4108 (match_operand:SI 3 "const_int_operand" "M")))]
4110 "ubfx%?\t%0, %1, %3, %2"
4111 [(set_attr "length" "4")
4112 (set_attr "predicable" "yes")]
4116 ;; Division instructions
4117 (define_insn "divsi3"
4118 [(set (match_operand:SI 0 "s_register_operand" "=r")
4119 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4120 (match_operand:SI 2 "s_register_operand" "r")))]
4122 "sdiv%?\t%0, %1, %2"
4123 [(set_attr "predicable" "yes")
4124 (set_attr "insn" "sdiv")]
4127 (define_insn "udivsi3"
4128 [(set (match_operand:SI 0 "s_register_operand" "=r")
4129 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4130 (match_operand:SI 2 "s_register_operand" "r")))]
4132 "udiv%?\t%0, %1, %2"
4133 [(set_attr "predicable" "yes")
4134 (set_attr "insn" "udiv")]
4138 ;; Unary arithmetic insns
4140 (define_expand "negdi2"
4142 [(set (match_operand:DI 0 "s_register_operand" "")
4143 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4144 (clobber (reg:CC CC_REGNUM))])]
4149 emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4155 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4156 ;; The first alternative allows the common case of a *full* overlap.
4157 (define_insn "*arm_negdi2"
4158 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4159 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4160 (clobber (reg:CC CC_REGNUM))]
4162 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4163 [(set_attr "conds" "clob")
4164 (set_attr "length" "8")]
4167 (define_insn "*thumb1_negdi2"
4168 [(set (match_operand:DI 0 "register_operand" "=&l")
4169 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4170 (clobber (reg:CC CC_REGNUM))]
4172 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4173 [(set_attr "length" "6")]
4176 (define_expand "negsi2"
4177 [(set (match_operand:SI 0 "s_register_operand" "")
4178 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4183 (define_insn "*arm_negsi2"
4184 [(set (match_operand:SI 0 "s_register_operand" "=r")
4185 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4187 "rsb%?\\t%0, %1, #0"
4188 [(set_attr "predicable" "yes")]
4191 (define_insn "*thumb1_negsi2"
4192 [(set (match_operand:SI 0 "register_operand" "=l")
4193 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4196 [(set_attr "length" "2")]
4199 (define_expand "negsf2"
4200 [(set (match_operand:SF 0 "s_register_operand" "")
4201 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4202 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4206 (define_expand "negdf2"
4207 [(set (match_operand:DF 0 "s_register_operand" "")
4208 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4209 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4212 ;; abssi2 doesn't really clobber the condition codes if a different register
4213 ;; is being set. To keep things simple, assume during rtl manipulations that
4214 ;; it does, but tell the final scan operator the truth. Similarly for
4217 (define_expand "abssi2"
4219 [(set (match_operand:SI 0 "s_register_operand" "")
4220 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4221 (clobber (match_dup 2))])]
4225 operands[2] = gen_rtx_SCRATCH (SImode);
4227 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4230 (define_insn "*arm_abssi2"
4231 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4232 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4233 (clobber (reg:CC CC_REGNUM))]
4236 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4237 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
4238 [(set_attr "conds" "clob,*")
4239 (set_attr "shift" "1")
4240 (set_attr "predicable" "no, yes")
4241 (set_attr "length" "8")]
4244 (define_insn_and_split "*thumb1_abssi2"
4245 [(set (match_operand:SI 0 "s_register_operand" "=l")
4246 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4247 (clobber (match_scratch:SI 2 "=&l"))]
4250 "TARGET_THUMB1 && reload_completed"
4251 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4252 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4253 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4255 [(set_attr "length" "6")]
4258 (define_insn "*arm_neg_abssi2"
4259 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4260 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4261 (clobber (reg:CC CC_REGNUM))]
4264 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4265 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
4266 [(set_attr "conds" "clob,*")
4267 (set_attr "shift" "1")
4268 (set_attr "predicable" "no, yes")
4269 (set_attr "length" "8")]
4272 (define_insn_and_split "*thumb1_neg_abssi2"
4273 [(set (match_operand:SI 0 "s_register_operand" "=l")
4274 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4275 (clobber (match_scratch:SI 2 "=&l"))]
4278 "TARGET_THUMB1 && reload_completed"
4279 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4280 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4281 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4283 [(set_attr "length" "6")]
4286 (define_expand "abssf2"
4287 [(set (match_operand:SF 0 "s_register_operand" "")
4288 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4289 "TARGET_32BIT && TARGET_HARD_FLOAT"
4292 (define_expand "absdf2"
4293 [(set (match_operand:DF 0 "s_register_operand" "")
4294 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4295 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4298 (define_expand "sqrtsf2"
4299 [(set (match_operand:SF 0 "s_register_operand" "")
4300 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4301 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4304 (define_expand "sqrtdf2"
4305 [(set (match_operand:DF 0 "s_register_operand" "")
4306 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4307 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4310 (define_insn_and_split "one_cmpldi2"
4311 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
4312 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
4319 "TARGET_32BIT && reload_completed
4320 && arm_general_register_operand (operands[0], DImode)"
4321 [(set (match_dup 0) (not:SI (match_dup 1)))
4322 (set (match_dup 2) (not:SI (match_dup 3)))]
4325 operands[2] = gen_highpart (SImode, operands[0]);
4326 operands[0] = gen_lowpart (SImode, operands[0]);
4327 operands[3] = gen_highpart (SImode, operands[1]);
4328 operands[1] = gen_lowpart (SImode, operands[1]);
4330 [(set_attr "length" "*,8,8,*")
4331 (set_attr "predicable" "no,yes,yes,no")
4332 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
4333 (set_attr "arch" "neon_nota8,*,*,neon_onlya8")]
4336 (define_expand "one_cmplsi2"
4337 [(set (match_operand:SI 0 "s_register_operand" "")
4338 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
4343 (define_insn "*arm_one_cmplsi2"
4344 [(set (match_operand:SI 0 "s_register_operand" "=r")
4345 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
4348 [(set_attr "predicable" "yes")
4349 (set_attr "insn" "mvn")]
4352 (define_insn "*thumb1_one_cmplsi2"
4353 [(set (match_operand:SI 0 "register_operand" "=l")
4354 (not:SI (match_operand:SI 1 "register_operand" "l")))]
4357 [(set_attr "length" "2")
4358 (set_attr "insn" "mvn")]
4361 (define_insn "*notsi_compare0"
4362 [(set (reg:CC_NOOV CC_REGNUM)
4363 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4365 (set (match_operand:SI 0 "s_register_operand" "=r")
4366 (not:SI (match_dup 1)))]
4369 [(set_attr "conds" "set")
4370 (set_attr "insn" "mvn")]
4373 (define_insn "*notsi_compare0_scratch"
4374 [(set (reg:CC_NOOV CC_REGNUM)
4375 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4377 (clobber (match_scratch:SI 0 "=r"))]
4380 [(set_attr "conds" "set")
4381 (set_attr "insn" "mvn")]
4384 ;; Fixed <--> Floating conversion insns
4386 (define_expand "floatsihf2"
4387 [(set (match_operand:HF 0 "general_operand" "")
4388 (float:HF (match_operand:SI 1 "general_operand" "")))]
4392 rtx op1 = gen_reg_rtx (SFmode);
4393 expand_float (op1, operands[1], 0);
4394 op1 = convert_to_mode (HFmode, op1, 0);
4395 emit_move_insn (operands[0], op1);
4400 (define_expand "floatdihf2"
4401 [(set (match_operand:HF 0 "general_operand" "")
4402 (float:HF (match_operand:DI 1 "general_operand" "")))]
4406 rtx op1 = gen_reg_rtx (SFmode);
4407 expand_float (op1, operands[1], 0);
4408 op1 = convert_to_mode (HFmode, op1, 0);
4409 emit_move_insn (operands[0], op1);
4414 (define_expand "floatsisf2"
4415 [(set (match_operand:SF 0 "s_register_operand" "")
4416 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
4417 "TARGET_32BIT && TARGET_HARD_FLOAT"
4421 (define_expand "floatsidf2"
4422 [(set (match_operand:DF 0 "s_register_operand" "")
4423 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
4424 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4428 (define_expand "fix_trunchfsi2"
4429 [(set (match_operand:SI 0 "general_operand" "")
4430 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4434 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4435 expand_fix (operands[0], op1, 0);
4440 (define_expand "fix_trunchfdi2"
4441 [(set (match_operand:DI 0 "general_operand" "")
4442 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4446 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4447 expand_fix (operands[0], op1, 0);
4452 (define_expand "fix_truncsfsi2"
4453 [(set (match_operand:SI 0 "s_register_operand" "")
4454 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
4455 "TARGET_32BIT && TARGET_HARD_FLOAT"
4459 (define_expand "fix_truncdfsi2"
4460 [(set (match_operand:SI 0 "s_register_operand" "")
4461 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4462 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4468 (define_expand "truncdfsf2"
4469 [(set (match_operand:SF 0 "s_register_operand" "")
4471 (match_operand:DF 1 "s_register_operand" "")))]
4472 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4476 /* DFmode -> HFmode conversions have to go through SFmode. */
4477 (define_expand "truncdfhf2"
4478 [(set (match_operand:HF 0 "general_operand" "")
4480 (match_operand:DF 1 "general_operand" "")))]
4485 op1 = convert_to_mode (SFmode, operands[1], 0);
4486 op1 = convert_to_mode (HFmode, op1, 0);
4487 emit_move_insn (operands[0], op1);
4492 ;; Zero and sign extension instructions.
4494 (define_insn "zero_extend<mode>di2"
4495 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
4496 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
4497 "<qhs_zextenddi_cstr>")))]
4498 "TARGET_32BIT <qhs_zextenddi_cond>"
4500 [(set_attr "length" "8,4,8,8")
4501 (set_attr "arch" "neon_nota8,*,*,neon_onlya8")
4502 (set_attr "ce_count" "2")
4503 (set_attr "predicable" "yes")]
4506 (define_insn "extend<mode>di2"
4507 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
4508 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4509 "<qhs_extenddi_cstr>")))]
4510 "TARGET_32BIT <qhs_sextenddi_cond>"
4512 [(set_attr "length" "8,4,8,8,8")
4513 (set_attr "ce_count" "2")
4514 (set_attr "shift" "1")
4515 (set_attr "predicable" "yes")
4516 (set_attr "arch" "neon_nota8,*,a,t,neon_onlya8")]
4519 ;; Splits for all extensions to DImode
4521 [(set (match_operand:DI 0 "s_register_operand" "")
4522 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4523 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
4524 [(set (match_dup 0) (match_dup 1))]
4526 rtx lo_part = gen_lowpart (SImode, operands[0]);
4527 enum machine_mode src_mode = GET_MODE (operands[1]);
4529 if (REG_P (operands[0])
4530 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4531 emit_clobber (operands[0]);
4532 if (!REG_P (lo_part) || src_mode != SImode
4533 || !rtx_equal_p (lo_part, operands[1]))
4535 if (src_mode == SImode)
4536 emit_move_insn (lo_part, operands[1]);
4538 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4539 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4540 operands[1] = lo_part;
4542 operands[0] = gen_highpart (SImode, operands[0]);
4543 operands[1] = const0_rtx;
4547 [(set (match_operand:DI 0 "s_register_operand" "")
4548 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4549 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
4550 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4552 rtx lo_part = gen_lowpart (SImode, operands[0]);
4553 enum machine_mode src_mode = GET_MODE (operands[1]);
4555 if (REG_P (operands[0])
4556 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4557 emit_clobber (operands[0]);
4559 if (!REG_P (lo_part) || src_mode != SImode
4560 || !rtx_equal_p (lo_part, operands[1]))
4562 if (src_mode == SImode)
4563 emit_move_insn (lo_part, operands[1]);
4565 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4566 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4567 operands[1] = lo_part;
4569 operands[0] = gen_highpart (SImode, operands[0]);
4572 (define_expand "zero_extendhisi2"
4573 [(set (match_operand:SI 0 "s_register_operand" "")
4574 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4577 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4579 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4582 if (!arm_arch6 && !MEM_P (operands[1]))
4584 rtx t = gen_lowpart (SImode, operands[1]);
4585 rtx tmp = gen_reg_rtx (SImode);
4586 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4587 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4593 [(set (match_operand:SI 0 "s_register_operand" "")
4594 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4595 "!TARGET_THUMB2 && !arm_arch6"
4596 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4597 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4599 operands[2] = gen_lowpart (SImode, operands[1]);
4602 (define_insn "*thumb1_zero_extendhisi2"
4603 [(set (match_operand:SI 0 "register_operand" "=l,l")
4604 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4609 if (which_alternative == 0 && arm_arch6)
4610 return "uxth\t%0, %1";
4611 if (which_alternative == 0)
4614 mem = XEXP (operands[1], 0);
4616 if (GET_CODE (mem) == CONST)
4617 mem = XEXP (mem, 0);
4619 if (GET_CODE (mem) == PLUS)
4621 rtx a = XEXP (mem, 0);
4623 /* This can happen due to bugs in reload. */
4624 if (REG_P (a) && REGNO (a) == SP_REGNUM)
4627 ops[0] = operands[0];
4630 output_asm_insn ("mov\t%0, %1", ops);
4632 XEXP (mem, 0) = operands[0];
4636 return "ldrh\t%0, %1";
4638 [(set_attr_alternative "length"
4639 [(if_then_else (eq_attr "is_arch6" "yes")
4640 (const_int 2) (const_int 4))
4642 (set_attr "type" "simple_alu_shift, load_byte")]
4645 (define_insn "*arm_zero_extendhisi2"
4646 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4647 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4648 "TARGET_ARM && arm_arch4 && !arm_arch6"
4652 [(set_attr "type" "alu_shift,load_byte")
4653 (set_attr "predicable" "yes")]
4656 (define_insn "*arm_zero_extendhisi2_v6"
4657 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4658 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4659 "TARGET_ARM && arm_arch6"
4663 [(set_attr "predicable" "yes")
4664 (set_attr "type" "simple_alu_shift,load_byte")]
4667 (define_insn "*arm_zero_extendhisi2addsi"
4668 [(set (match_operand:SI 0 "s_register_operand" "=r")
4669 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4670 (match_operand:SI 2 "s_register_operand" "r")))]
4672 "uxtah%?\\t%0, %2, %1"
4673 [(set_attr "type" "alu_shift")
4674 (set_attr "predicable" "yes")]
4677 (define_expand "zero_extendqisi2"
4678 [(set (match_operand:SI 0 "s_register_operand" "")
4679 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4682 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4684 emit_insn (gen_andsi3 (operands[0],
4685 gen_lowpart (SImode, operands[1]),
4689 if (!arm_arch6 && !MEM_P (operands[1]))
4691 rtx t = gen_lowpart (SImode, operands[1]);
4692 rtx tmp = gen_reg_rtx (SImode);
4693 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4694 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4700 [(set (match_operand:SI 0 "s_register_operand" "")
4701 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4703 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4704 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4706 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4709 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4714 (define_insn "*thumb1_zero_extendqisi2"
4715 [(set (match_operand:SI 0 "register_operand" "=l,l")
4716 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4717 "TARGET_THUMB1 && !arm_arch6"
4721 [(set_attr "length" "4,2")
4722 (set_attr "type" "alu_shift,load_byte")
4723 (set_attr "pool_range" "*,32")]
4726 (define_insn "*thumb1_zero_extendqisi2_v6"
4727 [(set (match_operand:SI 0 "register_operand" "=l,l")
4728 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4729 "TARGET_THUMB1 && arm_arch6"
4733 [(set_attr "length" "2")
4734 (set_attr "type" "simple_alu_shift,load_byte")]
4737 (define_insn "*arm_zero_extendqisi2"
4738 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4739 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4740 "TARGET_ARM && !arm_arch6"
4743 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4744 [(set_attr "length" "8,4")
4745 (set_attr "type" "alu_shift,load_byte")
4746 (set_attr "predicable" "yes")]
4749 (define_insn "*arm_zero_extendqisi2_v6"
4750 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4751 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4752 "TARGET_ARM && arm_arch6"
4755 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4756 [(set_attr "type" "simple_alu_shift,load_byte")
4757 (set_attr "predicable" "yes")]
4760 (define_insn "*arm_zero_extendqisi2addsi"
4761 [(set (match_operand:SI 0 "s_register_operand" "=r")
4762 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4763 (match_operand:SI 2 "s_register_operand" "r")))]
4765 "uxtab%?\\t%0, %2, %1"
4766 [(set_attr "predicable" "yes")
4767 (set_attr "insn" "xtab")
4768 (set_attr "type" "alu_shift")]
4772 [(set (match_operand:SI 0 "s_register_operand" "")
4773 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4774 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4775 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4776 [(set (match_dup 2) (match_dup 1))
4777 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4782 [(set (match_operand:SI 0 "s_register_operand" "")
4783 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4784 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4785 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4786 [(set (match_dup 2) (match_dup 1))
4787 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4793 [(set (match_operand:SI 0 "s_register_operand" "")
4794 (ior_xor:SI (and:SI (ashift:SI
4795 (match_operand:SI 1 "s_register_operand" "")
4796 (match_operand:SI 2 "const_int_operand" ""))
4797 (match_operand:SI 3 "const_int_operand" ""))
4799 (match_operator 5 "subreg_lowpart_operator"
4800 [(match_operand:SI 4 "s_register_operand" "")]))))]
4802 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4803 == (GET_MODE_MASK (GET_MODE (operands[5]))
4804 & (GET_MODE_MASK (GET_MODE (operands[5]))
4805 << (INTVAL (operands[2])))))"
4806 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4808 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4809 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4812 (define_insn "*compareqi_eq0"
4813 [(set (reg:CC_Z CC_REGNUM)
4814 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4818 [(set_attr "conds" "set")
4819 (set_attr "predicable" "yes")]
4822 (define_expand "extendhisi2"
4823 [(set (match_operand:SI 0 "s_register_operand" "")
4824 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4829 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4832 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4834 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4838 if (!arm_arch6 && !MEM_P (operands[1]))
4840 rtx t = gen_lowpart (SImode, operands[1]);
4841 rtx tmp = gen_reg_rtx (SImode);
4842 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4843 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4850 [(set (match_operand:SI 0 "register_operand" "")
4851 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4852 (clobber (match_scratch:SI 2 ""))])]
4854 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4855 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4857 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4860 ;; We used to have an early-clobber on the scratch register here.
4861 ;; However, there's a bug somewhere in reload which means that this
4862 ;; can be partially ignored during spill allocation if the memory
4863 ;; address also needs reloading; this causes us to die later on when
4864 ;; we try to verify the operands. Fortunately, we don't really need
4865 ;; the early-clobber: we can always use operand 0 if operand 2
4866 ;; overlaps the address.
4867 (define_insn "thumb1_extendhisi2"
4868 [(set (match_operand:SI 0 "register_operand" "=l,l")
4869 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4870 (clobber (match_scratch:SI 2 "=X,l"))]
4877 if (which_alternative == 0 && !arm_arch6)
4879 if (which_alternative == 0)
4880 return \"sxth\\t%0, %1\";
4882 mem = XEXP (operands[1], 0);
4884 /* This code used to try to use 'V', and fix the address only if it was
4885 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4886 range of QImode offsets, and offsettable_address_p does a QImode
4889 if (GET_CODE (mem) == CONST)
4890 mem = XEXP (mem, 0);
4892 if (GET_CODE (mem) == LABEL_REF)
4893 return \"ldr\\t%0, %1\";
4895 if (GET_CODE (mem) == PLUS)
4897 rtx a = XEXP (mem, 0);
4898 rtx b = XEXP (mem, 1);
4900 if (GET_CODE (a) == LABEL_REF
4902 return \"ldr\\t%0, %1\";
4905 return \"ldrsh\\t%0, %1\";
4913 ops[2] = const0_rtx;
4916 gcc_assert (REG_P (ops[1]));
4918 ops[0] = operands[0];
4919 if (reg_mentioned_p (operands[2], ops[1]))
4922 ops[3] = operands[2];
4923 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4926 [(set_attr_alternative "length"
4927 [(if_then_else (eq_attr "is_arch6" "yes")
4928 (const_int 2) (const_int 4))
4930 (set_attr "type" "simple_alu_shift,load_byte")
4931 (set_attr "pool_range" "*,1018")]
4934 ;; This pattern will only be used when ldsh is not available
4935 (define_expand "extendhisi2_mem"
4936 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4938 (zero_extend:SI (match_dup 7)))
4939 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4940 (set (match_operand:SI 0 "" "")
4941 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4946 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4948 mem1 = change_address (operands[1], QImode, addr);
4949 mem2 = change_address (operands[1], QImode,
4950 plus_constant (Pmode, addr, 1));
4951 operands[0] = gen_lowpart (SImode, operands[0]);
4953 operands[2] = gen_reg_rtx (SImode);
4954 operands[3] = gen_reg_rtx (SImode);
4955 operands[6] = gen_reg_rtx (SImode);
4958 if (BYTES_BIG_ENDIAN)
4960 operands[4] = operands[2];
4961 operands[5] = operands[3];
4965 operands[4] = operands[3];
4966 operands[5] = operands[2];
4972 [(set (match_operand:SI 0 "register_operand" "")
4973 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4975 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4976 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4978 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4981 (define_insn "*arm_extendhisi2"
4982 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4983 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4984 "TARGET_ARM && arm_arch4 && !arm_arch6"
4988 [(set_attr "length" "8,4")
4989 (set_attr "type" "alu_shift,load_byte")
4990 (set_attr "predicable" "yes")
4991 (set_attr "pool_range" "*,256")
4992 (set_attr "neg_pool_range" "*,244")]
4995 ;; ??? Check Thumb-2 pool range
4996 (define_insn "*arm_extendhisi2_v6"
4997 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4998 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4999 "TARGET_32BIT && arm_arch6"
5003 [(set_attr "type" "simple_alu_shift,load_byte")
5004 (set_attr "predicable" "yes")
5005 (set_attr "pool_range" "*,256")
5006 (set_attr "neg_pool_range" "*,244")]
5009 (define_insn "*arm_extendhisi2addsi"
5010 [(set (match_operand:SI 0 "s_register_operand" "=r")
5011 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5012 (match_operand:SI 2 "s_register_operand" "r")))]
5014 "sxtah%?\\t%0, %2, %1"
5017 (define_expand "extendqihi2"
5019 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5021 (set (match_operand:HI 0 "s_register_operand" "")
5022 (ashiftrt:SI (match_dup 2)
5027 if (arm_arch4 && MEM_P (operands[1]))
5029 emit_insn (gen_rtx_SET (VOIDmode,
5031 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5034 if (!s_register_operand (operands[1], QImode))
5035 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5036 operands[0] = gen_lowpart (SImode, operands[0]);
5037 operands[1] = gen_lowpart (SImode, operands[1]);
5038 operands[2] = gen_reg_rtx (SImode);
5042 (define_insn "*arm_extendqihi_insn"
5043 [(set (match_operand:HI 0 "s_register_operand" "=r")
5044 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5045 "TARGET_ARM && arm_arch4"
5046 "ldr%(sb%)\\t%0, %1"
5047 [(set_attr "type" "load_byte")
5048 (set_attr "predicable" "yes")
5049 (set_attr "pool_range" "256")
5050 (set_attr "neg_pool_range" "244")]
5053 (define_expand "extendqisi2"
5054 [(set (match_operand:SI 0 "s_register_operand" "")
5055 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
5058 if (!arm_arch4 && MEM_P (operands[1]))
5059 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5061 if (!arm_arch6 && !MEM_P (operands[1]))
5063 rtx t = gen_lowpart (SImode, operands[1]);
5064 rtx tmp = gen_reg_rtx (SImode);
5065 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5066 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5072 [(set (match_operand:SI 0 "register_operand" "")
5073 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5075 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5076 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5078 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5081 (define_insn "*arm_extendqisi"
5082 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5083 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5084 "TARGET_ARM && arm_arch4 && !arm_arch6"
5088 [(set_attr "length" "8,4")
5089 (set_attr "type" "alu_shift,load_byte")
5090 (set_attr "predicable" "yes")
5091 (set_attr "pool_range" "*,256")
5092 (set_attr "neg_pool_range" "*,244")]
5095 (define_insn "*arm_extendqisi_v6"
5096 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5098 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5099 "TARGET_ARM && arm_arch6"
5103 [(set_attr "type" "simple_alu_shift,load_byte")
5104 (set_attr "predicable" "yes")
5105 (set_attr "pool_range" "*,256")
5106 (set_attr "neg_pool_range" "*,244")]
5109 (define_insn "*arm_extendqisi2addsi"
5110 [(set (match_operand:SI 0 "s_register_operand" "=r")
5111 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5112 (match_operand:SI 2 "s_register_operand" "r")))]
5114 "sxtab%?\\t%0, %2, %1"
5115 [(set_attr "type" "alu_shift")
5116 (set_attr "insn" "xtab")
5117 (set_attr "predicable" "yes")]
5121 [(set (match_operand:SI 0 "register_operand" "")
5122 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5123 "TARGET_THUMB1 && reload_completed"
5124 [(set (match_dup 0) (match_dup 2))
5125 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5127 rtx addr = XEXP (operands[1], 0);
5129 if (GET_CODE (addr) == CONST)
5130 addr = XEXP (addr, 0);
5132 if (GET_CODE (addr) == PLUS
5133 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5134 /* No split necessary. */
5137 if (GET_CODE (addr) == PLUS
5138 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5141 if (reg_overlap_mentioned_p (operands[0], addr))
5143 rtx t = gen_lowpart (QImode, operands[0]);
5144 emit_move_insn (t, operands[1]);
5145 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5151 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5152 operands[2] = const0_rtx;
5154 else if (GET_CODE (addr) != PLUS)
5156 else if (REG_P (XEXP (addr, 0)))
5158 operands[2] = XEXP (addr, 1);
5159 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5163 operands[2] = XEXP (addr, 0);
5164 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5167 operands[3] = change_address (operands[1], QImode, addr);
5171 [(set (match_operand:SI 0 "register_operand" "")
5172 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5173 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5174 (set (match_operand:SI 3 "register_operand" "")
5175 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5177 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5178 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5179 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5180 && (peep2_reg_dead_p (3, operands[0])
5181 || rtx_equal_p (operands[0], operands[3]))
5182 && (peep2_reg_dead_p (3, operands[2])
5183 || rtx_equal_p (operands[2], operands[3]))"
5184 [(set (match_dup 2) (match_dup 1))
5185 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5187 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5188 operands[4] = change_address (operands[4], QImode, addr);
5191 (define_insn "thumb1_extendqisi2"
5192 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5193 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5198 if (which_alternative == 0 && arm_arch6)
5199 return "sxtb\\t%0, %1";
5200 if (which_alternative == 0)
5203 addr = XEXP (operands[1], 0);
5204 if (GET_CODE (addr) == PLUS
5205 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5206 return "ldrsb\\t%0, %1";
5210 [(set_attr_alternative "length"
5211 [(if_then_else (eq_attr "is_arch6" "yes")
5212 (const_int 2) (const_int 4))
5214 (if_then_else (eq_attr "is_arch6" "yes")
5215 (const_int 4) (const_int 6))])
5216 (set_attr "type" "simple_alu_shift,load_byte,load_byte")]
5219 (define_expand "extendsfdf2"
5220 [(set (match_operand:DF 0 "s_register_operand" "")
5221 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5222 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5226 /* HFmode -> DFmode conversions have to go through SFmode. */
5227 (define_expand "extendhfdf2"
5228 [(set (match_operand:DF 0 "general_operand" "")
5229 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5234 op1 = convert_to_mode (SFmode, operands[1], 0);
5235 op1 = convert_to_mode (DFmode, op1, 0);
5236 emit_insn (gen_movdf (operands[0], op1));
5241 ;; Move insns (including loads and stores)
5243 ;; XXX Just some ideas about movti.
5244 ;; I don't think these are a good idea on the arm, there just aren't enough
5246 ;;(define_expand "loadti"
5247 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5248 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5251 ;;(define_expand "storeti"
5252 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5253 ;; (match_operand:TI 1 "s_register_operand" ""))]
5256 ;;(define_expand "movti"
5257 ;; [(set (match_operand:TI 0 "general_operand" "")
5258 ;; (match_operand:TI 1 "general_operand" ""))]
5264 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5265 ;; operands[1] = copy_to_reg (operands[1]);
5266 ;; if (MEM_P (operands[0]))
5267 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5268 ;; else if (MEM_P (operands[1]))
5269 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5273 ;; emit_insn (insn);
5277 ;; Recognize garbage generated above.
5280 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5281 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5285 ;; register mem = (which_alternative < 3);
5286 ;; register const char *template;
5288 ;; operands[mem] = XEXP (operands[mem], 0);
5289 ;; switch (which_alternative)
5291 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5292 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5293 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5294 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5295 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5296 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5298 ;; output_asm_insn (template, operands);
5302 (define_expand "movdi"
5303 [(set (match_operand:DI 0 "general_operand" "")
5304 (match_operand:DI 1 "general_operand" ""))]
5307 if (can_create_pseudo_p ())
5309 if (!REG_P (operands[0]))
5310 operands[1] = force_reg (DImode, operands[1]);
5315 (define_insn "*arm_movdi"
5316 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5317 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5319 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5321 && ( register_operand (operands[0], DImode)
5322 || register_operand (operands[1], DImode))"
5324 switch (which_alternative)
5331 return output_move_double (operands, true, NULL);
5334 [(set_attr "length" "8,12,16,8,8")
5335 (set_attr "type" "*,*,*,load2,store2")
5336 (set_attr "arm_pool_range" "*,*,*,1020,*")
5337 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5338 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5339 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5343 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5344 (match_operand:ANY64 1 "const_double_operand" ""))]
5347 && (arm_const_double_inline_cost (operands[1])
5348 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5351 arm_split_constant (SET, SImode, curr_insn,
5352 INTVAL (gen_lowpart (SImode, operands[1])),
5353 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5354 arm_split_constant (SET, SImode, curr_insn,
5355 INTVAL (gen_highpart_mode (SImode,
5356 GET_MODE (operands[0]),
5358 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5363 ; If optimizing for size, or if we have load delay slots, then
5364 ; we want to split the constant into two separate operations.
5365 ; In both cases this may split a trivial part into a single data op
5366 ; leaving a single complex constant to load. We can also get longer
5367 ; offsets in a LDR which means we get better chances of sharing the pool
5368 ; entries. Finally, we can normally do a better job of scheduling
5369 ; LDR instructions than we can with LDM.
5370 ; This pattern will only match if the one above did not.
5372 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5373 (match_operand:ANY64 1 "const_double_operand" ""))]
5374 "TARGET_ARM && reload_completed
5375 && arm_const_double_by_parts (operands[1])"
5376 [(set (match_dup 0) (match_dup 1))
5377 (set (match_dup 2) (match_dup 3))]
5379 operands[2] = gen_highpart (SImode, operands[0]);
5380 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5382 operands[0] = gen_lowpart (SImode, operands[0]);
5383 operands[1] = gen_lowpart (SImode, operands[1]);
5388 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5389 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5390 "TARGET_EITHER && reload_completed"
5391 [(set (match_dup 0) (match_dup 1))
5392 (set (match_dup 2) (match_dup 3))]
5394 operands[2] = gen_highpart (SImode, operands[0]);
5395 operands[3] = gen_highpart (SImode, operands[1]);
5396 operands[0] = gen_lowpart (SImode, operands[0]);
5397 operands[1] = gen_lowpart (SImode, operands[1]);
5399 /* Handle a partial overlap. */
5400 if (rtx_equal_p (operands[0], operands[3]))
5402 rtx tmp0 = operands[0];
5403 rtx tmp1 = operands[1];
5405 operands[0] = operands[2];
5406 operands[1] = operands[3];
5413 ;; We can't actually do base+index doubleword loads if the index and
5414 ;; destination overlap. Split here so that we at least have chance to
5417 [(set (match_operand:DI 0 "s_register_operand" "")
5418 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5419 (match_operand:SI 2 "s_register_operand" ""))))]
5421 && reg_overlap_mentioned_p (operands[0], operands[1])
5422 && reg_overlap_mentioned_p (operands[0], operands[2])"
5424 (plus:SI (match_dup 1)
5427 (mem:DI (match_dup 4)))]
5429 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5433 ;;; ??? This should have alternatives for constants.
5434 ;;; ??? This was originally identical to the movdf_insn pattern.
5435 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5436 ;;; thumb_reorg with a memory reference.
5437 (define_insn "*thumb1_movdi_insn"
5438 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5439 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5441 && ( register_operand (operands[0], DImode)
5442 || register_operand (operands[1], DImode))"
5445 switch (which_alternative)
5449 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5450 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5451 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5453 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5455 operands[1] = GEN_INT (- INTVAL (operands[1]));
5456 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5458 return \"ldmia\\t%1, {%0, %H0}\";
5460 return \"stmia\\t%0, {%1, %H1}\";
5462 return thumb_load_double_from_address (operands);
5464 operands[2] = gen_rtx_MEM (SImode,
5465 plus_constant (Pmode, XEXP (operands[0], 0), 4));
5466 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5469 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5470 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5471 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5474 [(set_attr "length" "4,4,6,2,2,6,4,4")
5475 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5476 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5477 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
5480 (define_expand "movsi"
5481 [(set (match_operand:SI 0 "general_operand" "")
5482 (match_operand:SI 1 "general_operand" ""))]
5486 rtx base, offset, tmp;
5490 /* Everything except mem = const or mem = mem can be done easily. */
5491 if (MEM_P (operands[0]))
5492 operands[1] = force_reg (SImode, operands[1]);
5493 if (arm_general_register_operand (operands[0], SImode)
5494 && CONST_INT_P (operands[1])
5495 && !(const_ok_for_arm (INTVAL (operands[1]))
5496 || const_ok_for_arm (~INTVAL (operands[1]))))
5498 arm_split_constant (SET, SImode, NULL_RTX,
5499 INTVAL (operands[1]), operands[0], NULL_RTX,
5500 optimize && can_create_pseudo_p ());
5504 else /* TARGET_THUMB1... */
5506 if (can_create_pseudo_p ())
5508 if (!REG_P (operands[0]))
5509 operands[1] = force_reg (SImode, operands[1]);
5513 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5515 split_const (operands[1], &base, &offset);
5516 if (GET_CODE (base) == SYMBOL_REF
5517 && !offset_within_block_p (base, INTVAL (offset)))
5519 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5520 emit_move_insn (tmp, base);
5521 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5526 /* Recognize the case where operand[1] is a reference to thread-local
5527 data and load its address to a register. */
5528 if (arm_tls_referenced_p (operands[1]))
5530 rtx tmp = operands[1];
5533 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5535 addend = XEXP (XEXP (tmp, 0), 1);
5536 tmp = XEXP (XEXP (tmp, 0), 0);
5539 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5540 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5542 tmp = legitimize_tls_address (tmp,
5543 !can_create_pseudo_p () ? operands[0] : 0);
5546 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5547 tmp = force_operand (tmp, operands[0]);
5552 && (CONSTANT_P (operands[1])
5553 || symbol_mentioned_p (operands[1])
5554 || label_mentioned_p (operands[1])))
5555 operands[1] = legitimize_pic_address (operands[1], SImode,
5556 (!can_create_pseudo_p ()
5563 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5564 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5565 ;; so this does not matter.
5566 (define_insn "*arm_movt"
5567 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5568 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5569 (match_operand:SI 2 "general_operand" "i")))]
5571 "movt%?\t%0, #:upper16:%c2"
5572 [(set_attr "predicable" "yes")
5573 (set_attr "length" "4")]
5576 (define_insn "*arm_movsi_insn"
5577 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5578 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5579 "TARGET_ARM && ! TARGET_IWMMXT
5580 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5581 && ( register_operand (operands[0], SImode)
5582 || register_operand (operands[1], SImode))"
5590 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,simple_alu_imm,load1,store1")
5591 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5592 (set_attr "predicable" "yes")
5593 (set_attr "pool_range" "*,*,*,*,4096,*")
5594 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5598 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5599 (match_operand:SI 1 "const_int_operand" ""))]
5601 && (!(const_ok_for_arm (INTVAL (operands[1]))
5602 || const_ok_for_arm (~INTVAL (operands[1]))))"
5603 [(clobber (const_int 0))]
5605 arm_split_constant (SET, SImode, NULL_RTX,
5606 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5611 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5612 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5613 ;; and lo_sum would be merged back into memory load at cprop. However,
5614 ;; if the default is to prefer movt/movw rather than a load from the constant
5615 ;; pool, the performance is better.
5617 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5618 (match_operand:SI 1 "general_operand" ""))]
5620 && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5621 && !flag_pic && !target_word_relocations
5622 && !arm_tls_referenced_p (operands[1])"
5623 [(clobber (const_int 0))]
5625 arm_emit_movpair (operands[0], operands[1]);
5629 (define_insn "*thumb1_movsi_insn"
5630 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5631 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5633 && ( register_operand (operands[0], SImode)
5634 || register_operand (operands[1], SImode))"
5645 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5646 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5647 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
5648 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5651 [(set (match_operand:SI 0 "register_operand" "")
5652 (match_operand:SI 1 "const_int_operand" ""))]
5653 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5654 [(set (match_dup 2) (match_dup 1))
5655 (set (match_dup 0) (neg:SI (match_dup 2)))]
5658 operands[1] = GEN_INT (- INTVAL (operands[1]));
5659 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5664 [(set (match_operand:SI 0 "register_operand" "")
5665 (match_operand:SI 1 "const_int_operand" ""))]
5666 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5667 [(set (match_dup 2) (match_dup 1))
5668 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5671 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5672 unsigned HOST_WIDE_INT mask = 0xff;
5675 for (i = 0; i < 25; i++)
5676 if ((val & (mask << i)) == val)
5679 /* Don't split if the shift is zero. */
5683 operands[1] = GEN_INT (val >> i);
5684 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5685 operands[3] = GEN_INT (i);
5689 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
5691 [(set (match_operand:SI 0 "register_operand" "")
5692 (match_operand:SI 1 "const_int_operand" ""))]
5693 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
5694 [(set (match_dup 2) (match_dup 1))
5695 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
5698 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
5699 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5700 operands[3] = GEN_INT (255);
5704 ;; When generating pic, we need to load the symbol offset into a register.
5705 ;; So that the optimizer does not confuse this with a normal symbol load
5706 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5707 ;; since that is the only type of relocation we can use.
5709 ;; Wrap calculation of the whole PIC address in a single pattern for the
5710 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5711 ;; a PIC address involves two loads from memory, so we want to CSE it
5712 ;; as often as possible.
5713 ;; This pattern will be split into one of the pic_load_addr_* patterns
5714 ;; and a move after GCSE optimizations.
5716 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5717 (define_expand "calculate_pic_address"
5718 [(set (match_operand:SI 0 "register_operand" "")
5719 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5720 (unspec:SI [(match_operand:SI 2 "" "")]
5725 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5727 [(set (match_operand:SI 0 "register_operand" "")
5728 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5729 (unspec:SI [(match_operand:SI 2 "" "")]
5732 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5733 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5734 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5737 ;; operand1 is the memory address to go into
5738 ;; pic_load_addr_32bit.
5739 ;; operand2 is the PIC label to be emitted
5740 ;; from pic_add_dot_plus_eight.
5741 ;; We do this to allow hoisting of the entire insn.
5742 (define_insn_and_split "pic_load_addr_unified"
5743 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5744 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5745 (match_operand:SI 2 "" "")]
5746 UNSPEC_PIC_UNIFIED))]
5749 "&& reload_completed"
5750 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5751 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5752 (match_dup 2)] UNSPEC_PIC_BASE))]
5753 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5754 [(set_attr "type" "load1,load1,load1")
5755 (set_attr "pool_range" "4096,4094,1022")
5756 (set_attr "neg_pool_range" "4084,0,0")
5757 (set_attr "arch" "a,t2,t1")
5758 (set_attr "length" "8,6,4")]
5761 ;; The rather odd constraints on the following are to force reload to leave
5762 ;; the insn alone, and to force the minipool generation pass to then move
5763 ;; the GOT symbol to memory.
5765 (define_insn "pic_load_addr_32bit"
5766 [(set (match_operand:SI 0 "s_register_operand" "=r")
5767 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5768 "TARGET_32BIT && flag_pic"
5770 [(set_attr "type" "load1")
5771 (set (attr "pool_range")
5772 (if_then_else (eq_attr "is_thumb" "no")
5775 (set (attr "neg_pool_range")
5776 (if_then_else (eq_attr "is_thumb" "no")
5781 (define_insn "pic_load_addr_thumb1"
5782 [(set (match_operand:SI 0 "s_register_operand" "=l")
5783 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5784 "TARGET_THUMB1 && flag_pic"
5786 [(set_attr "type" "load1")
5787 (set (attr "pool_range") (const_int 1018))]
5790 (define_insn "pic_add_dot_plus_four"
5791 [(set (match_operand:SI 0 "register_operand" "=r")
5792 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5794 (match_operand 2 "" "")]
5798 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5799 INTVAL (operands[2]));
5800 return \"add\\t%0, %|pc\";
5802 [(set_attr "length" "2")]
5805 (define_insn "pic_add_dot_plus_eight"
5806 [(set (match_operand:SI 0 "register_operand" "=r")
5807 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5809 (match_operand 2 "" "")]
5813 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5814 INTVAL (operands[2]));
5815 return \"add%?\\t%0, %|pc, %1\";
5817 [(set_attr "predicable" "yes")]
5820 (define_insn "tls_load_dot_plus_eight"
5821 [(set (match_operand:SI 0 "register_operand" "=r")
5822 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5824 (match_operand 2 "" "")]
5828 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5829 INTVAL (operands[2]));
5830 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5832 [(set_attr "predicable" "yes")]
5835 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5836 ;; followed by a load. These sequences can be crunched down to
5837 ;; tls_load_dot_plus_eight by a peephole.
5840 [(set (match_operand:SI 0 "register_operand" "")
5841 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5843 (match_operand 1 "" "")]
5845 (set (match_operand:SI 2 "arm_general_register_operand" "")
5846 (mem:SI (match_dup 0)))]
5847 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5849 (mem:SI (unspec:SI [(match_dup 3)
5856 (define_insn "pic_offset_arm"
5857 [(set (match_operand:SI 0 "register_operand" "=r")
5858 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5859 (unspec:SI [(match_operand:SI 2 "" "X")]
5860 UNSPEC_PIC_OFFSET))))]
5861 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5862 "ldr%?\\t%0, [%1,%2]"
5863 [(set_attr "type" "load1")]
5866 (define_expand "builtin_setjmp_receiver"
5867 [(label_ref (match_operand 0 "" ""))]
5871 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5873 if (arm_pic_register != INVALID_REGNUM)
5874 arm_load_pic_register (1UL << 3);
5878 ;; If copying one reg to another we can set the condition codes according to
5879 ;; its value. Such a move is common after a return from subroutine and the
5880 ;; result is being tested against zero.
5882 (define_insn "*movsi_compare0"
5883 [(set (reg:CC CC_REGNUM)
5884 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5886 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5892 [(set_attr "conds" "set")
5893 (set_attr "type" "simple_alu_imm,simple_alu_imm")]
5896 ;; Subroutine to store a half word from a register into memory.
5897 ;; Operand 0 is the source register (HImode)
5898 ;; Operand 1 is the destination address in a register (SImode)
5900 ;; In both this routine and the next, we must be careful not to spill
5901 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5902 ;; can generate unrecognizable rtl.
5904 (define_expand "storehi"
5905 [;; store the low byte
5906 (set (match_operand 1 "" "") (match_dup 3))
5907 ;; extract the high byte
5909 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5910 ;; store the high byte
5911 (set (match_dup 4) (match_dup 5))]
5915 rtx op1 = operands[1];
5916 rtx addr = XEXP (op1, 0);
5917 enum rtx_code code = GET_CODE (addr);
5919 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5921 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5923 operands[4] = adjust_address (op1, QImode, 1);
5924 operands[1] = adjust_address (operands[1], QImode, 0);
5925 operands[3] = gen_lowpart (QImode, operands[0]);
5926 operands[0] = gen_lowpart (SImode, operands[0]);
5927 operands[2] = gen_reg_rtx (SImode);
5928 operands[5] = gen_lowpart (QImode, operands[2]);
5932 (define_expand "storehi_bigend"
5933 [(set (match_dup 4) (match_dup 3))
5935 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5936 (set (match_operand 1 "" "") (match_dup 5))]
5940 rtx op1 = operands[1];
5941 rtx addr = XEXP (op1, 0);
5942 enum rtx_code code = GET_CODE (addr);
5944 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5946 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5948 operands[4] = adjust_address (op1, QImode, 1);
5949 operands[1] = adjust_address (operands[1], QImode, 0);
5950 operands[3] = gen_lowpart (QImode, operands[0]);
5951 operands[0] = gen_lowpart (SImode, operands[0]);
5952 operands[2] = gen_reg_rtx (SImode);
5953 operands[5] = gen_lowpart (QImode, operands[2]);
5957 ;; Subroutine to store a half word integer constant into memory.
5958 (define_expand "storeinthi"
5959 [(set (match_operand 0 "" "")
5960 (match_operand 1 "" ""))
5961 (set (match_dup 3) (match_dup 2))]
5965 HOST_WIDE_INT value = INTVAL (operands[1]);
5966 rtx addr = XEXP (operands[0], 0);
5967 rtx op0 = operands[0];
5968 enum rtx_code code = GET_CODE (addr);
5970 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5972 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5974 operands[1] = gen_reg_rtx (SImode);
5975 if (BYTES_BIG_ENDIAN)
5977 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5978 if ((value & 255) == ((value >> 8) & 255))
5979 operands[2] = operands[1];
5982 operands[2] = gen_reg_rtx (SImode);
5983 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5988 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5989 if ((value & 255) == ((value >> 8) & 255))
5990 operands[2] = operands[1];
5993 operands[2] = gen_reg_rtx (SImode);
5994 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5998 operands[3] = adjust_address (op0, QImode, 1);
5999 operands[0] = adjust_address (operands[0], QImode, 0);
6000 operands[2] = gen_lowpart (QImode, operands[2]);
6001 operands[1] = gen_lowpart (QImode, operands[1]);
6005 (define_expand "storehi_single_op"
6006 [(set (match_operand:HI 0 "memory_operand" "")
6007 (match_operand:HI 1 "general_operand" ""))]
6008 "TARGET_32BIT && arm_arch4"
6010 if (!s_register_operand (operands[1], HImode))
6011 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6015 (define_expand "movhi"
6016 [(set (match_operand:HI 0 "general_operand" "")
6017 (match_operand:HI 1 "general_operand" ""))]
6022 if (can_create_pseudo_p ())
6024 if (MEM_P (operands[0]))
6028 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6031 if (CONST_INT_P (operands[1]))
6032 emit_insn (gen_storeinthi (operands[0], operands[1]));
6035 if (MEM_P (operands[1]))
6036 operands[1] = force_reg (HImode, operands[1]);
6037 if (BYTES_BIG_ENDIAN)
6038 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6040 emit_insn (gen_storehi (operands[1], operands[0]));
6044 /* Sign extend a constant, and keep it in an SImode reg. */
6045 else if (CONST_INT_P (operands[1]))
6047 rtx reg = gen_reg_rtx (SImode);
6048 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6050 /* If the constant is already valid, leave it alone. */
6051 if (!const_ok_for_arm (val))
6053 /* If setting all the top bits will make the constant
6054 loadable in a single instruction, then set them.
6055 Otherwise, sign extend the number. */
6057 if (const_ok_for_arm (~(val | ~0xffff)))
6059 else if (val & 0x8000)
6063 emit_insn (gen_movsi (reg, GEN_INT (val)));
6064 operands[1] = gen_lowpart (HImode, reg);
6066 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6067 && MEM_P (operands[1]))
6069 rtx reg = gen_reg_rtx (SImode);
6071 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6072 operands[1] = gen_lowpart (HImode, reg);
6074 else if (!arm_arch4)
6076 if (MEM_P (operands[1]))
6079 rtx offset = const0_rtx;
6080 rtx reg = gen_reg_rtx (SImode);
6082 if ((REG_P (base = XEXP (operands[1], 0))
6083 || (GET_CODE (base) == PLUS
6084 && (CONST_INT_P (offset = XEXP (base, 1)))
6085 && ((INTVAL(offset) & 1) != 1)
6086 && REG_P (base = XEXP (base, 0))))
6087 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6091 new_rtx = widen_memory_access (operands[1], SImode,
6092 ((INTVAL (offset) & ~3)
6093 - INTVAL (offset)));
6094 emit_insn (gen_movsi (reg, new_rtx));
6095 if (((INTVAL (offset) & 2) != 0)
6096 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6098 rtx reg2 = gen_reg_rtx (SImode);
6100 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6105 emit_insn (gen_movhi_bytes (reg, operands[1]));
6107 operands[1] = gen_lowpart (HImode, reg);
6111 /* Handle loading a large integer during reload. */
6112 else if (CONST_INT_P (operands[1])
6113 && !const_ok_for_arm (INTVAL (operands[1]))
6114 && !const_ok_for_arm (~INTVAL (operands[1])))
6116 /* Writing a constant to memory needs a scratch, which should
6117 be handled with SECONDARY_RELOADs. */
6118 gcc_assert (REG_P (operands[0]));
6120 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6121 emit_insn (gen_movsi (operands[0], operands[1]));
6125 else if (TARGET_THUMB2)
6127 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6128 if (can_create_pseudo_p ())
6130 if (!REG_P (operands[0]))
6131 operands[1] = force_reg (HImode, operands[1]);
6132 /* Zero extend a constant, and keep it in an SImode reg. */
6133 else if (CONST_INT_P (operands[1]))
6135 rtx reg = gen_reg_rtx (SImode);
6136 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6138 emit_insn (gen_movsi (reg, GEN_INT (val)));
6139 operands[1] = gen_lowpart (HImode, reg);
6143 else /* TARGET_THUMB1 */
6145 if (can_create_pseudo_p ())
6147 if (CONST_INT_P (operands[1]))
6149 rtx reg = gen_reg_rtx (SImode);
6151 emit_insn (gen_movsi (reg, operands[1]));
6152 operands[1] = gen_lowpart (HImode, reg);
6155 /* ??? We shouldn't really get invalid addresses here, but this can
6156 happen if we are passed a SP (never OK for HImode/QImode) or
6157 virtual register (also rejected as illegitimate for HImode/QImode)
6158 relative address. */
6159 /* ??? This should perhaps be fixed elsewhere, for instance, in
6160 fixup_stack_1, by checking for other kinds of invalid addresses,
6161 e.g. a bare reference to a virtual register. This may confuse the
6162 alpha though, which must handle this case differently. */
6163 if (MEM_P (operands[0])
6164 && !memory_address_p (GET_MODE (operands[0]),
6165 XEXP (operands[0], 0)))
6167 = replace_equiv_address (operands[0],
6168 copy_to_reg (XEXP (operands[0], 0)));
6170 if (MEM_P (operands[1])
6171 && !memory_address_p (GET_MODE (operands[1]),
6172 XEXP (operands[1], 0)))
6174 = replace_equiv_address (operands[1],
6175 copy_to_reg (XEXP (operands[1], 0)));
6177 if (MEM_P (operands[1]) && optimize > 0)
6179 rtx reg = gen_reg_rtx (SImode);
6181 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6182 operands[1] = gen_lowpart (HImode, reg);
6185 if (MEM_P (operands[0]))
6186 operands[1] = force_reg (HImode, operands[1]);
6188 else if (CONST_INT_P (operands[1])
6189 && !satisfies_constraint_I (operands[1]))
6191 /* Handle loading a large integer during reload. */
6193 /* Writing a constant to memory needs a scratch, which should
6194 be handled with SECONDARY_RELOADs. */
6195 gcc_assert (REG_P (operands[0]));
6197 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6198 emit_insn (gen_movsi (operands[0], operands[1]));
6205 (define_insn "*thumb1_movhi_insn"
6206 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6207 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6209 && ( register_operand (operands[0], HImode)
6210 || register_operand (operands[1], HImode))"
6212 switch (which_alternative)
6214 case 0: return \"add %0, %1, #0\";
6215 case 2: return \"strh %1, %0\";
6216 case 3: return \"mov %0, %1\";
6217 case 4: return \"mov %0, %1\";
6218 case 5: return \"mov %0, %1\";
6219 default: gcc_unreachable ();
6221 /* The stack pointer can end up being taken as an index register.
6222 Catch this case here and deal with it. */
6223 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6224 && REG_P (XEXP (XEXP (operands[1], 0), 0))
6225 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6228 ops[0] = operands[0];
6229 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6231 output_asm_insn (\"mov %0, %1\", ops);
6233 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6236 return \"ldrh %0, %1\";
6238 [(set_attr "length" "2,4,2,2,2,2")
6239 (set_attr "type" "*,load1,store1,*,*,*")
6240 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6243 (define_expand "movhi_bytes"
6244 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6246 (zero_extend:SI (match_dup 6)))
6247 (set (match_operand:SI 0 "" "")
6248 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6253 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6255 mem1 = change_address (operands[1], QImode, addr);
6256 mem2 = change_address (operands[1], QImode,
6257 plus_constant (Pmode, addr, 1));
6258 operands[0] = gen_lowpart (SImode, operands[0]);
6260 operands[2] = gen_reg_rtx (SImode);
6261 operands[3] = gen_reg_rtx (SImode);
6264 if (BYTES_BIG_ENDIAN)
6266 operands[4] = operands[2];
6267 operands[5] = operands[3];
6271 operands[4] = operands[3];
6272 operands[5] = operands[2];
6277 (define_expand "movhi_bigend"
6279 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6282 (ashiftrt:SI (match_dup 2) (const_int 16)))
6283 (set (match_operand:HI 0 "s_register_operand" "")
6287 operands[2] = gen_reg_rtx (SImode);
6288 operands[3] = gen_reg_rtx (SImode);
6289 operands[4] = gen_lowpart (HImode, operands[3]);
6293 ;; Pattern to recognize insn generated default case above
6294 (define_insn "*movhi_insn_arch4"
6295 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6296 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6299 && (register_operand (operands[0], HImode)
6300 || register_operand (operands[1], HImode))"
6302 mov%?\\t%0, %1\\t%@ movhi
6303 mvn%?\\t%0, #%B1\\t%@ movhi
6304 str%(h%)\\t%1, %0\\t%@ movhi
6305 ldr%(h%)\\t%0, %1\\t%@ movhi"
6306 [(set_attr "predicable" "yes")
6307 (set_attr "insn" "mov,mvn,*,*")
6308 (set_attr "pool_range" "*,*,*,256")
6309 (set_attr "neg_pool_range" "*,*,*,244")
6310 (set_attr_alternative "type"
6311 [(if_then_else (match_operand 1 "const_int_operand" "")
6312 (const_string "simple_alu_imm" )
6314 (const_string "simple_alu_imm")
6315 (const_string "store1")
6316 (const_string "load1")])]
6319 (define_insn "*movhi_bytes"
6320 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6321 (match_operand:HI 1 "arm_rhs_operand" "I,r,K"))]
6324 mov%?\\t%0, %1\\t%@ movhi
6325 mov%?\\t%0, %1\\t%@ movhi
6326 mvn%?\\t%0, #%B1\\t%@ movhi"
6327 [(set_attr "predicable" "yes")
6328 (set_attr "insn" "mov, mov,mvn")
6329 (set_attr "type" "simple_alu_imm,*,simple_alu_imm")]
6332 (define_expand "thumb_movhi_clobber"
6333 [(set (match_operand:HI 0 "memory_operand" "")
6334 (match_operand:HI 1 "register_operand" ""))
6335 (clobber (match_operand:DI 2 "register_operand" ""))]
6338 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6339 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6341 emit_insn (gen_movhi (operands[0], operands[1]));
6344 /* XXX Fixme, need to handle other cases here as well. */
6349 ;; We use a DImode scratch because we may occasionally need an additional
6350 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6351 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6352 (define_expand "reload_outhi"
6353 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6354 (match_operand:HI 1 "s_register_operand" "r")
6355 (match_operand:DI 2 "s_register_operand" "=&l")])]
6358 arm_reload_out_hi (operands);
6360 thumb_reload_out_hi (operands);
6365 (define_expand "reload_inhi"
6366 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6367 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6368 (match_operand:DI 2 "s_register_operand" "=&r")])]
6372 arm_reload_in_hi (operands);
6374 thumb_reload_out_hi (operands);
6378 (define_expand "movqi"
6379 [(set (match_operand:QI 0 "general_operand" "")
6380 (match_operand:QI 1 "general_operand" ""))]
6383 /* Everything except mem = const or mem = mem can be done easily */
6385 if (can_create_pseudo_p ())
6387 if (CONST_INT_P (operands[1]))
6389 rtx reg = gen_reg_rtx (SImode);
6391 /* For thumb we want an unsigned immediate, then we are more likely
6392 to be able to use a movs insn. */
6394 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6396 emit_insn (gen_movsi (reg, operands[1]));
6397 operands[1] = gen_lowpart (QImode, reg);
6402 /* ??? We shouldn't really get invalid addresses here, but this can
6403 happen if we are passed a SP (never OK for HImode/QImode) or
6404 virtual register (also rejected as illegitimate for HImode/QImode)
6405 relative address. */
6406 /* ??? This should perhaps be fixed elsewhere, for instance, in
6407 fixup_stack_1, by checking for other kinds of invalid addresses,
6408 e.g. a bare reference to a virtual register. This may confuse the
6409 alpha though, which must handle this case differently. */
6410 if (MEM_P (operands[0])
6411 && !memory_address_p (GET_MODE (operands[0]),
6412 XEXP (operands[0], 0)))
6414 = replace_equiv_address (operands[0],
6415 copy_to_reg (XEXP (operands[0], 0)));
6416 if (MEM_P (operands[1])
6417 && !memory_address_p (GET_MODE (operands[1]),
6418 XEXP (operands[1], 0)))
6420 = replace_equiv_address (operands[1],
6421 copy_to_reg (XEXP (operands[1], 0)));
6424 if (MEM_P (operands[1]) && optimize > 0)
6426 rtx reg = gen_reg_rtx (SImode);
6428 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6429 operands[1] = gen_lowpart (QImode, reg);
6432 if (MEM_P (operands[0]))
6433 operands[1] = force_reg (QImode, operands[1]);
6435 else if (TARGET_THUMB
6436 && CONST_INT_P (operands[1])
6437 && !satisfies_constraint_I (operands[1]))
6439 /* Handle loading a large integer during reload. */
6441 /* Writing a constant to memory needs a scratch, which should
6442 be handled with SECONDARY_RELOADs. */
6443 gcc_assert (REG_P (operands[0]));
6445 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6446 emit_insn (gen_movsi (operands[0], operands[1]));
6453 (define_insn "*arm_movqi_insn"
6454 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,Uu,r,m")
6455 (match_operand:QI 1 "general_operand" "r,I,K,Uu,l,m,r"))]
6457 && ( register_operand (operands[0], QImode)
6458 || register_operand (operands[1], QImode))"
6467 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,load1, store1, load1, store1")
6468 (set_attr "insn" "mov,mov,mvn,*,*,*,*")
6469 (set_attr "predicable" "yes")
6470 (set_attr "arch" "any,any,any,t2,t2,any,any")
6471 (set_attr "length" "4,4,4,2,2,4,4")]
6474 (define_insn "*thumb1_movqi_insn"
6475 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6476 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6478 && ( register_operand (operands[0], QImode)
6479 || register_operand (operands[1], QImode))"
6487 [(set_attr "length" "2")
6488 (set_attr "type" "simple_alu_imm,load1,store1,*,*,simple_alu_imm")
6489 (set_attr "insn" "*,*,*,mov,mov,mov")
6490 (set_attr "pool_range" "*,32,*,*,*,*")
6491 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6494 (define_expand "movhf"
6495 [(set (match_operand:HF 0 "general_operand" "")
6496 (match_operand:HF 1 "general_operand" ""))]
6501 if (MEM_P (operands[0]))
6502 operands[1] = force_reg (HFmode, operands[1]);
6504 else /* TARGET_THUMB1 */
6506 if (can_create_pseudo_p ())
6508 if (!REG_P (operands[0]))
6509 operands[1] = force_reg (HFmode, operands[1]);
6515 (define_insn "*arm32_movhf"
6516 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6517 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6518 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6519 && ( s_register_operand (operands[0], HFmode)
6520 || s_register_operand (operands[1], HFmode))"
6522 switch (which_alternative)
6524 case 0: /* ARM register from memory */
6525 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6526 case 1: /* memory from ARM register */
6527 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6528 case 2: /* ARM register from ARM register */
6529 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6530 case 3: /* ARM register from constant */
6536 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6537 bits = real_to_target (NULL, &r, HFmode);
6538 ops[0] = operands[0];
6539 ops[1] = GEN_INT (bits);
6540 ops[2] = GEN_INT (bits & 0xff00);
6541 ops[3] = GEN_INT (bits & 0x00ff);
6543 if (arm_arch_thumb2)
6544 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6546 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6553 [(set_attr "conds" "unconditional")
6554 (set_attr "type" "load1,store1,*,*")
6555 (set_attr "insn" "*,*,mov,mov")
6556 (set_attr "length" "4,4,4,8")
6557 (set_attr "predicable" "yes")]
6560 (define_insn "*thumb1_movhf"
6561 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6562 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6564 && ( s_register_operand (operands[0], HFmode)
6565 || s_register_operand (operands[1], HFmode))"
6567 switch (which_alternative)
6572 gcc_assert (MEM_P (operands[1]));
6573 addr = XEXP (operands[1], 0);
6574 if (GET_CODE (addr) == LABEL_REF
6575 || (GET_CODE (addr) == CONST
6576 && GET_CODE (XEXP (addr, 0)) == PLUS
6577 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6578 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
6580 /* Constant pool entry. */
6581 return \"ldr\\t%0, %1\";
6583 return \"ldrh\\t%0, %1\";
6585 case 2: return \"strh\\t%1, %0\";
6586 default: return \"mov\\t%0, %1\";
6589 [(set_attr "length" "2")
6590 (set_attr "type" "*,load1,store1,*,*")
6591 (set_attr "insn" "mov,*,*,mov,mov")
6592 (set_attr "pool_range" "*,1018,*,*,*")
6593 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6595 (define_expand "movsf"
6596 [(set (match_operand:SF 0 "general_operand" "")
6597 (match_operand:SF 1 "general_operand" ""))]
6602 if (MEM_P (operands[0]))
6603 operands[1] = force_reg (SFmode, operands[1]);
6605 else /* TARGET_THUMB1 */
6607 if (can_create_pseudo_p ())
6609 if (!REG_P (operands[0]))
6610 operands[1] = force_reg (SFmode, operands[1]);
6616 ;; Transform a floating-point move of a constant into a core register into
6617 ;; an SImode operation.
6619 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6620 (match_operand:SF 1 "immediate_operand" ""))]
6623 && CONST_DOUBLE_P (operands[1])"
6624 [(set (match_dup 2) (match_dup 3))]
6626 operands[2] = gen_lowpart (SImode, operands[0]);
6627 operands[3] = gen_lowpart (SImode, operands[1]);
6628 if (operands[2] == 0 || operands[3] == 0)
6633 (define_insn "*arm_movsf_soft_insn"
6634 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6635 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6637 && TARGET_SOFT_FLOAT
6638 && (!MEM_P (operands[0])
6639 || register_operand (operands[1], SFmode))"
6642 ldr%?\\t%0, %1\\t%@ float
6643 str%?\\t%1, %0\\t%@ float"
6644 [(set_attr "predicable" "yes")
6645 (set_attr "type" "*,load1,store1")
6646 (set_attr "insn" "mov,*,*")
6647 (set_attr "arm_pool_range" "*,4096,*")
6648 (set_attr "thumb2_pool_range" "*,4094,*")
6649 (set_attr "arm_neg_pool_range" "*,4084,*")
6650 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6653 ;;; ??? This should have alternatives for constants.
6654 (define_insn "*thumb1_movsf_insn"
6655 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6656 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6658 && ( register_operand (operands[0], SFmode)
6659 || register_operand (operands[1], SFmode))"
6668 [(set_attr "length" "2")
6669 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6670 (set_attr "pool_range" "*,*,*,1018,*,*,*")
6671 (set_attr "insn" "*,*,*,*,*,mov,mov")
6672 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6675 (define_expand "movdf"
6676 [(set (match_operand:DF 0 "general_operand" "")
6677 (match_operand:DF 1 "general_operand" ""))]
6682 if (MEM_P (operands[0]))
6683 operands[1] = force_reg (DFmode, operands[1]);
6685 else /* TARGET_THUMB */
6687 if (can_create_pseudo_p ())
6689 if (!REG_P (operands[0]))
6690 operands[1] = force_reg (DFmode, operands[1]);
6696 ;; Reloading a df mode value stored in integer regs to memory can require a
6698 (define_expand "reload_outdf"
6699 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6700 (match_operand:DF 1 "s_register_operand" "r")
6701 (match_operand:SI 2 "s_register_operand" "=&r")]
6705 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6708 operands[2] = XEXP (operands[0], 0);
6709 else if (code == POST_INC || code == PRE_DEC)
6711 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6712 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6713 emit_insn (gen_movdi (operands[0], operands[1]));
6716 else if (code == PRE_INC)
6718 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6720 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6723 else if (code == POST_DEC)
6724 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6726 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6727 XEXP (XEXP (operands[0], 0), 1)));
6729 emit_insn (gen_rtx_SET (VOIDmode,
6730 replace_equiv_address (operands[0], operands[2]),
6733 if (code == POST_DEC)
6734 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6740 (define_insn "*movdf_soft_insn"
6741 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6742 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6743 "TARGET_32BIT && TARGET_SOFT_FLOAT
6744 && ( register_operand (operands[0], DFmode)
6745 || register_operand (operands[1], DFmode))"
6747 switch (which_alternative)
6754 return output_move_double (operands, true, NULL);
6757 [(set_attr "length" "8,12,16,8,8")
6758 (set_attr "type" "*,*,*,load2,store2")
6759 (set_attr "arm_pool_range" "*,*,*,1020,*")
6760 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6761 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6762 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6765 ;;; ??? This should have alternatives for constants.
6766 ;;; ??? This was originally identical to the movdi_insn pattern.
6767 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6768 ;;; thumb_reorg with a memory reference.
6769 (define_insn "*thumb_movdf_insn"
6770 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6771 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6773 && ( register_operand (operands[0], DFmode)
6774 || register_operand (operands[1], DFmode))"
6776 switch (which_alternative)
6780 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6781 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6782 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6784 return \"ldmia\\t%1, {%0, %H0}\";
6786 return \"stmia\\t%0, {%1, %H1}\";
6788 return thumb_load_double_from_address (operands);
6790 operands[2] = gen_rtx_MEM (SImode,
6791 plus_constant (Pmode,
6792 XEXP (operands[0], 0), 4));
6793 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6796 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6797 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6798 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6801 [(set_attr "length" "4,2,2,6,4,4")
6802 (set_attr "type" "*,load2,store2,load2,store2,*")
6803 (set_attr "insn" "*,*,*,*,*,mov")
6804 (set_attr "pool_range" "*,*,*,1018,*,*")]
6808 ;; load- and store-multiple insns
6809 ;; The arm can load/store any set of registers, provided that they are in
6810 ;; ascending order, but these expanders assume a contiguous set.
6812 (define_expand "load_multiple"
6813 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6814 (match_operand:SI 1 "" ""))
6815 (use (match_operand:SI 2 "" ""))])]
6818 HOST_WIDE_INT offset = 0;
6820 /* Support only fixed point registers. */
6821 if (!CONST_INT_P (operands[2])
6822 || INTVAL (operands[2]) > 14
6823 || INTVAL (operands[2]) < 2
6824 || !MEM_P (operands[1])
6825 || !REG_P (operands[0])
6826 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6827 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6831 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6832 INTVAL (operands[2]),
6833 force_reg (SImode, XEXP (operands[1], 0)),
6834 FALSE, operands[1], &offset);
6837 (define_expand "store_multiple"
6838 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6839 (match_operand:SI 1 "" ""))
6840 (use (match_operand:SI 2 "" ""))])]
6843 HOST_WIDE_INT offset = 0;
6845 /* Support only fixed point registers. */
6846 if (!CONST_INT_P (operands[2])
6847 || INTVAL (operands[2]) > 14
6848 || INTVAL (operands[2]) < 2
6849 || !REG_P (operands[1])
6850 || !MEM_P (operands[0])
6851 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6852 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6856 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6857 INTVAL (operands[2]),
6858 force_reg (SImode, XEXP (operands[0], 0)),
6859 FALSE, operands[0], &offset);
6863 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6864 ;; We could let this apply for blocks of less than this, but it clobbers so
6865 ;; many registers that there is then probably a better way.
6867 (define_expand "movmemqi"
6868 [(match_operand:BLK 0 "general_operand" "")
6869 (match_operand:BLK 1 "general_operand" "")
6870 (match_operand:SI 2 "const_int_operand" "")
6871 (match_operand:SI 3 "const_int_operand" "")]
6876 if (arm_gen_movmemqi (operands))
6880 else /* TARGET_THUMB1 */
6882 if ( INTVAL (operands[3]) != 4
6883 || INTVAL (operands[2]) > 48)
6886 thumb_expand_movmemqi (operands);
6892 ;; Thumb block-move insns
6894 (define_insn "movmem12b"
6895 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6896 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6897 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6898 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6899 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6900 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6901 (set (match_operand:SI 0 "register_operand" "=l")
6902 (plus:SI (match_dup 2) (const_int 12)))
6903 (set (match_operand:SI 1 "register_operand" "=l")
6904 (plus:SI (match_dup 3) (const_int 12)))
6905 (clobber (match_scratch:SI 4 "=&l"))
6906 (clobber (match_scratch:SI 5 "=&l"))
6907 (clobber (match_scratch:SI 6 "=&l"))]
6909 "* return thumb_output_move_mem_multiple (3, operands);"
6910 [(set_attr "length" "4")
6911 ; This isn't entirely accurate... It loads as well, but in terms of
6912 ; scheduling the following insn it is better to consider it as a store
6913 (set_attr "type" "store3")]
6916 (define_insn "movmem8b"
6917 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6918 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6919 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6920 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6921 (set (match_operand:SI 0 "register_operand" "=l")
6922 (plus:SI (match_dup 2) (const_int 8)))
6923 (set (match_operand:SI 1 "register_operand" "=l")
6924 (plus:SI (match_dup 3) (const_int 8)))
6925 (clobber (match_scratch:SI 4 "=&l"))
6926 (clobber (match_scratch:SI 5 "=&l"))]
6928 "* return thumb_output_move_mem_multiple (2, operands);"
6929 [(set_attr "length" "4")
6930 ; This isn't entirely accurate... It loads as well, but in terms of
6931 ; scheduling the following insn it is better to consider it as a store
6932 (set_attr "type" "store2")]
6937 ;; Compare & branch insns
6938 ;; The range calculations are based as follows:
6939 ;; For forward branches, the address calculation returns the address of
6940 ;; the next instruction. This is 2 beyond the branch instruction.
6941 ;; For backward branches, the address calculation returns the address of
6942 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6943 ;; instruction for the shortest sequence, and 4 before the branch instruction
6944 ;; if we have to jump around an unconditional branch.
6945 ;; To the basic branch range the PC offset must be added (this is +4).
6946 ;; So for forward branches we have
6947 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6948 ;; And for backward branches we have
6949 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6951 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6952 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6954 (define_expand "cbranchsi4"
6955 [(set (pc) (if_then_else
6956 (match_operator 0 "expandable_comparison_operator"
6957 [(match_operand:SI 1 "s_register_operand" "")
6958 (match_operand:SI 2 "nonmemory_operand" "")])
6959 (label_ref (match_operand 3 "" ""))
6965 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6967 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6971 if (thumb1_cmpneg_operand (operands[2], SImode))
6973 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6974 operands[3], operands[0]));
6977 if (!thumb1_cmp_operand (operands[2], SImode))
6978 operands[2] = force_reg (SImode, operands[2]);
6981 ;; A pattern to recognize a special situation and optimize for it.
6982 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6983 ;; due to the available addressing modes. Hence, convert a signed comparison
6984 ;; with zero into an unsigned comparison with 127 if possible.
6985 (define_expand "cbranchqi4"
6986 [(set (pc) (if_then_else
6987 (match_operator 0 "lt_ge_comparison_operator"
6988 [(match_operand:QI 1 "memory_operand" "")
6989 (match_operand:QI 2 "const0_operand" "")])
6990 (label_ref (match_operand 3 "" ""))
6995 xops[1] = gen_reg_rtx (SImode);
6996 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6997 xops[2] = GEN_INT (127);
6998 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6999 VOIDmode, xops[1], xops[2]);
7000 xops[3] = operands[3];
7001 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
7005 (define_expand "cbranchsf4"
7006 [(set (pc) (if_then_else
7007 (match_operator 0 "expandable_comparison_operator"
7008 [(match_operand:SF 1 "s_register_operand" "")
7009 (match_operand:SF 2 "arm_float_compare_operand" "")])
7010 (label_ref (match_operand 3 "" ""))
7012 "TARGET_32BIT && TARGET_HARD_FLOAT"
7013 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7014 operands[3])); DONE;"
7017 (define_expand "cbranchdf4"
7018 [(set (pc) (if_then_else
7019 (match_operator 0 "expandable_comparison_operator"
7020 [(match_operand:DF 1 "s_register_operand" "")
7021 (match_operand:DF 2 "arm_float_compare_operand" "")])
7022 (label_ref (match_operand 3 "" ""))
7024 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7025 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7026 operands[3])); DONE;"
7029 (define_expand "cbranchdi4"
7030 [(set (pc) (if_then_else
7031 (match_operator 0 "expandable_comparison_operator"
7032 [(match_operand:DI 1 "s_register_operand" "")
7033 (match_operand:DI 2 "cmpdi_operand" "")])
7034 (label_ref (match_operand 3 "" ""))
7038 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7040 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7046 (define_insn "cbranchsi4_insn"
7047 [(set (pc) (if_then_else
7048 (match_operator 0 "arm_comparison_operator"
7049 [(match_operand:SI 1 "s_register_operand" "l,l*h")
7050 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
7051 (label_ref (match_operand 3 "" ""))
7055 rtx t = cfun->machine->thumb1_cc_insn;
7058 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
7059 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
7061 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
7063 if (!noov_comparison_operator (operands[0], VOIDmode))
7066 else if (cfun->machine->thumb1_cc_mode != CCmode)
7071 output_asm_insn ("cmp\t%1, %2", operands);
7072 cfun->machine->thumb1_cc_insn = insn;
7073 cfun->machine->thumb1_cc_op0 = operands[1];
7074 cfun->machine->thumb1_cc_op1 = operands[2];
7075 cfun->machine->thumb1_cc_mode = CCmode;
7078 /* Ensure we emit the right type of condition code on the jump. */
7079 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
7082 switch (get_attr_length (insn))
7084 case 4: return \"b%d0\\t%l3\";
7085 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7086 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7089 [(set (attr "far_jump")
7091 (eq_attr "length" "8")
7092 (const_string "yes")
7093 (const_string "no")))
7094 (set (attr "length")
7096 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7097 (le (minus (match_dup 3) (pc)) (const_int 256)))
7100 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7101 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7106 (define_insn "cbranchsi4_scratch"
7107 [(set (pc) (if_then_else
7108 (match_operator 4 "arm_comparison_operator"
7109 [(match_operand:SI 1 "s_register_operand" "l,0")
7110 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7111 (label_ref (match_operand 3 "" ""))
7113 (clobber (match_scratch:SI 0 "=l,l"))]
7116 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7118 switch (get_attr_length (insn))
7120 case 4: return \"b%d4\\t%l3\";
7121 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7122 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7125 [(set (attr "far_jump")
7127 (eq_attr "length" "8")
7128 (const_string "yes")
7129 (const_string "no")))
7130 (set (attr "length")
7132 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7133 (le (minus (match_dup 3) (pc)) (const_int 256)))
7136 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7137 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7142 (define_insn "*negated_cbranchsi4"
7145 (match_operator 0 "equality_operator"
7146 [(match_operand:SI 1 "s_register_operand" "l")
7147 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7148 (label_ref (match_operand 3 "" ""))
7152 output_asm_insn (\"cmn\\t%1, %2\", operands);
7153 switch (get_attr_length (insn))
7155 case 4: return \"b%d0\\t%l3\";
7156 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7157 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7160 [(set (attr "far_jump")
7162 (eq_attr "length" "8")
7163 (const_string "yes")
7164 (const_string "no")))
7165 (set (attr "length")
7167 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7168 (le (minus (match_dup 3) (pc)) (const_int 256)))
7171 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7172 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7177 (define_insn "*tbit_cbranch"
7180 (match_operator 0 "equality_operator"
7181 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7183 (match_operand:SI 2 "const_int_operand" "i"))
7185 (label_ref (match_operand 3 "" ""))
7187 (clobber (match_scratch:SI 4 "=l"))]
7192 op[0] = operands[4];
7193 op[1] = operands[1];
7194 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7196 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7197 switch (get_attr_length (insn))
7199 case 4: return \"b%d0\\t%l3\";
7200 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7201 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7204 [(set (attr "far_jump")
7206 (eq_attr "length" "8")
7207 (const_string "yes")
7208 (const_string "no")))
7209 (set (attr "length")
7211 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7212 (le (minus (match_dup 3) (pc)) (const_int 256)))
7215 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7216 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7221 (define_insn "*tlobits_cbranch"
7224 (match_operator 0 "equality_operator"
7225 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7226 (match_operand:SI 2 "const_int_operand" "i")
7229 (label_ref (match_operand 3 "" ""))
7231 (clobber (match_scratch:SI 4 "=l"))]
7236 op[0] = operands[4];
7237 op[1] = operands[1];
7238 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7240 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7241 switch (get_attr_length (insn))
7243 case 4: return \"b%d0\\t%l3\";
7244 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7245 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7248 [(set (attr "far_jump")
7250 (eq_attr "length" "8")
7251 (const_string "yes")
7252 (const_string "no")))
7253 (set (attr "length")
7255 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7256 (le (minus (match_dup 3) (pc)) (const_int 256)))
7259 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7260 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7265 (define_insn "*tstsi3_cbranch"
7268 (match_operator 3 "equality_operator"
7269 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7270 (match_operand:SI 1 "s_register_operand" "l"))
7272 (label_ref (match_operand 2 "" ""))
7277 output_asm_insn (\"tst\\t%0, %1\", operands);
7278 switch (get_attr_length (insn))
7280 case 4: return \"b%d3\\t%l2\";
7281 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7282 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7285 [(set (attr "far_jump")
7287 (eq_attr "length" "8")
7288 (const_string "yes")
7289 (const_string "no")))
7290 (set (attr "length")
7292 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7293 (le (minus (match_dup 2) (pc)) (const_int 256)))
7296 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7297 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7302 (define_insn "*cbranchne_decr1"
7304 (if_then_else (match_operator 3 "equality_operator"
7305 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7307 (label_ref (match_operand 4 "" ""))
7309 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7310 (plus:SI (match_dup 2) (const_int -1)))
7311 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7316 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7318 VOIDmode, operands[2], const1_rtx);
7319 cond[1] = operands[4];
7321 if (which_alternative == 0)
7322 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7323 else if (which_alternative == 1)
7325 /* We must provide an alternative for a hi reg because reload
7326 cannot handle output reloads on a jump instruction, but we
7327 can't subtract into that. Fortunately a mov from lo to hi
7328 does not clobber the condition codes. */
7329 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7330 output_asm_insn (\"mov\\t%0, %1\", operands);
7334 /* Similarly, but the target is memory. */
7335 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7336 output_asm_insn (\"str\\t%1, %0\", operands);
7339 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7342 output_asm_insn (\"b%d0\\t%l1\", cond);
7345 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7346 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7348 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7349 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7353 [(set (attr "far_jump")
7355 (ior (and (eq (symbol_ref ("which_alternative"))
7357 (eq_attr "length" "8"))
7358 (eq_attr "length" "10"))
7359 (const_string "yes")
7360 (const_string "no")))
7361 (set_attr_alternative "length"
7365 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7366 (le (minus (match_dup 4) (pc)) (const_int 256)))
7369 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7370 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7375 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7376 (le (minus (match_dup 4) (pc)) (const_int 256)))
7379 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7380 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7385 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7386 (le (minus (match_dup 4) (pc)) (const_int 256)))
7389 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7390 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7395 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7396 (le (minus (match_dup 4) (pc)) (const_int 256)))
7399 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7400 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7405 (define_insn "*addsi3_cbranch"
7408 (match_operator 4 "arm_comparison_operator"
7410 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7411 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7413 (label_ref (match_operand 5 "" ""))
7416 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7417 (plus:SI (match_dup 2) (match_dup 3)))
7418 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7420 && (GET_CODE (operands[4]) == EQ
7421 || GET_CODE (operands[4]) == NE
7422 || GET_CODE (operands[4]) == GE
7423 || GET_CODE (operands[4]) == LT)"
7428 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7429 cond[1] = operands[2];
7430 cond[2] = operands[3];
7432 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
7433 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7435 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7437 if (which_alternative >= 2
7438 && which_alternative < 4)
7439 output_asm_insn (\"mov\\t%0, %1\", operands);
7440 else if (which_alternative >= 4)
7441 output_asm_insn (\"str\\t%1, %0\", operands);
7443 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7446 return \"b%d4\\t%l5\";
7448 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7450 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7454 [(set (attr "far_jump")
7456 (ior (and (lt (symbol_ref ("which_alternative"))
7458 (eq_attr "length" "8"))
7459 (eq_attr "length" "10"))
7460 (const_string "yes")
7461 (const_string "no")))
7462 (set (attr "length")
7464 (lt (symbol_ref ("which_alternative"))
7467 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7468 (le (minus (match_dup 5) (pc)) (const_int 256)))
7471 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7472 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7476 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7477 (le (minus (match_dup 5) (pc)) (const_int 256)))
7480 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7481 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7486 (define_insn "*addsi3_cbranch_scratch"
7489 (match_operator 3 "arm_comparison_operator"
7491 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7492 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7494 (label_ref (match_operand 4 "" ""))
7496 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7498 && (GET_CODE (operands[3]) == EQ
7499 || GET_CODE (operands[3]) == NE
7500 || GET_CODE (operands[3]) == GE
7501 || GET_CODE (operands[3]) == LT)"
7504 switch (which_alternative)
7507 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7510 output_asm_insn (\"cmn\t%1, %2\", operands);
7513 if (INTVAL (operands[2]) < 0)
7514 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7516 output_asm_insn (\"add\t%0, %1, %2\", operands);
7519 if (INTVAL (operands[2]) < 0)
7520 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7522 output_asm_insn (\"add\t%0, %0, %2\", operands);
7526 switch (get_attr_length (insn))
7529 return \"b%d3\\t%l4\";
7531 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7533 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7537 [(set (attr "far_jump")
7539 (eq_attr "length" "8")
7540 (const_string "yes")
7541 (const_string "no")))
7542 (set (attr "length")
7544 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7545 (le (minus (match_dup 4) (pc)) (const_int 256)))
7548 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7549 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7555 ;; Comparison and test insns
7557 (define_insn "*arm_cmpsi_insn"
7558 [(set (reg:CC CC_REGNUM)
7559 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7560 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7567 [(set_attr "conds" "set")
7568 (set_attr "arch" "t2,t2,any,any")
7569 (set_attr "length" "2,2,4,4")
7570 (set_attr "predicable" "yes")
7571 (set_attr "type" "*,*,*,simple_alu_imm")]
7574 (define_insn "*cmpsi_shiftsi"
7575 [(set (reg:CC CC_REGNUM)
7576 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7577 (match_operator:SI 3 "shift_operator"
7578 [(match_operand:SI 1 "s_register_operand" "r,r")
7579 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7582 [(set_attr "conds" "set")
7583 (set_attr "shift" "1")
7584 (set_attr "arch" "32,a")
7585 (set_attr "type" "alu_shift,alu_shift_reg")])
7587 (define_insn "*cmpsi_shiftsi_swp"
7588 [(set (reg:CC_SWP CC_REGNUM)
7589 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7590 [(match_operand:SI 1 "s_register_operand" "r,r")
7591 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7592 (match_operand:SI 0 "s_register_operand" "r,r")))]
7595 [(set_attr "conds" "set")
7596 (set_attr "shift" "1")
7597 (set_attr "arch" "32,a")
7598 (set_attr "type" "alu_shift,alu_shift_reg")])
7600 (define_insn "*arm_cmpsi_negshiftsi_si"
7601 [(set (reg:CC_Z CC_REGNUM)
7603 (neg:SI (match_operator:SI 1 "shift_operator"
7604 [(match_operand:SI 2 "s_register_operand" "r")
7605 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7606 (match_operand:SI 0 "s_register_operand" "r")))]
7609 [(set_attr "conds" "set")
7610 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7611 (const_string "alu_shift")
7612 (const_string "alu_shift_reg")))
7613 (set_attr "predicable" "yes")]
7616 ;; DImode comparisons. The generic code generates branches that
7617 ;; if-conversion can not reduce to a conditional compare, so we do
7620 (define_insn "*arm_cmpdi_insn"
7621 [(set (reg:CC_NCV CC_REGNUM)
7622 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7623 (match_operand:DI 1 "arm_di_operand" "rDi")))
7624 (clobber (match_scratch:SI 2 "=r"))]
7626 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7627 [(set_attr "conds" "set")
7628 (set_attr "length" "8")]
7631 (define_insn "*arm_cmpdi_unsigned"
7632 [(set (reg:CC_CZ CC_REGNUM)
7633 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7634 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7636 "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
7637 [(set_attr "conds" "set")
7638 (set_attr "length" "8")]
7641 (define_insn "*arm_cmpdi_zero"
7642 [(set (reg:CC_Z CC_REGNUM)
7643 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7645 (clobber (match_scratch:SI 1 "=r"))]
7647 "orr%.\\t%1, %Q0, %R0"
7648 [(set_attr "conds" "set")]
7651 (define_insn "*thumb_cmpdi_zero"
7652 [(set (reg:CC_Z CC_REGNUM)
7653 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7655 (clobber (match_scratch:SI 1 "=l"))]
7657 "orr\\t%1, %Q0, %R0"
7658 [(set_attr "conds" "set")
7659 (set_attr "length" "2")]
7662 ; This insn allows redundant compares to be removed by cse, nothing should
7663 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7664 ; is deleted later on. The match_dup will match the mode here, so that
7665 ; mode changes of the condition codes aren't lost by this even though we don't
7666 ; specify what they are.
7668 (define_insn "*deleted_compare"
7669 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7671 "\\t%@ deleted compare"
7672 [(set_attr "conds" "set")
7673 (set_attr "length" "0")]
7677 ;; Conditional branch insns
7679 (define_expand "cbranch_cc"
7681 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7682 (match_operand 2 "" "")])
7683 (label_ref (match_operand 3 "" ""))
7686 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7687 operands[1], operands[2], NULL_RTX);
7688 operands[2] = const0_rtx;"
7692 ;; Patterns to match conditional branch insns.
7695 (define_insn "arm_cond_branch"
7697 (if_then_else (match_operator 1 "arm_comparison_operator"
7698 [(match_operand 2 "cc_register" "") (const_int 0)])
7699 (label_ref (match_operand 0 "" ""))
7703 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7705 arm_ccfsm_state += 2;
7708 return \"b%d1\\t%l0\";
7710 [(set_attr "conds" "use")
7711 (set_attr "type" "branch")
7712 (set (attr "length")
7714 (and (match_test "TARGET_THUMB2")
7715 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7716 (le (minus (match_dup 0) (pc)) (const_int 256))))
7721 (define_insn "*arm_cond_branch_reversed"
7723 (if_then_else (match_operator 1 "arm_comparison_operator"
7724 [(match_operand 2 "cc_register" "") (const_int 0)])
7726 (label_ref (match_operand 0 "" ""))))]
7729 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7731 arm_ccfsm_state += 2;
7734 return \"b%D1\\t%l0\";
7736 [(set_attr "conds" "use")
7737 (set_attr "type" "branch")
7738 (set (attr "length")
7740 (and (match_test "TARGET_THUMB2")
7741 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7742 (le (minus (match_dup 0) (pc)) (const_int 256))))
7751 (define_expand "cstore_cc"
7752 [(set (match_operand:SI 0 "s_register_operand" "")
7753 (match_operator:SI 1 "" [(match_operand 2 "" "")
7754 (match_operand 3 "" "")]))]
7756 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7757 operands[2], operands[3], NULL_RTX);
7758 operands[3] = const0_rtx;"
7761 (define_insn "*mov_scc"
7762 [(set (match_operand:SI 0 "s_register_operand" "=r")
7763 (match_operator:SI 1 "arm_comparison_operator"
7764 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7766 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7767 [(set_attr "conds" "use")
7768 (set_attr "insn" "mov")
7769 (set_attr "length" "8")]
7772 (define_insn "*mov_negscc"
7773 [(set (match_operand:SI 0 "s_register_operand" "=r")
7774 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7775 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7777 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7778 [(set_attr "conds" "use")
7779 (set_attr "insn" "mov")
7780 (set_attr "length" "8")]
7783 (define_insn "*mov_notscc"
7784 [(set (match_operand:SI 0 "s_register_operand" "=r")
7785 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7786 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7788 "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7789 [(set_attr "conds" "use")
7790 (set_attr "insn" "mov")
7791 (set_attr "length" "8")]
7794 (define_expand "cstoresi4"
7795 [(set (match_operand:SI 0 "s_register_operand" "")
7796 (match_operator:SI 1 "expandable_comparison_operator"
7797 [(match_operand:SI 2 "s_register_operand" "")
7798 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7799 "TARGET_32BIT || TARGET_THUMB1"
7801 rtx op3, scratch, scratch2;
7805 if (!arm_add_operand (operands[3], SImode))
7806 operands[3] = force_reg (SImode, operands[3]);
7807 emit_insn (gen_cstore_cc (operands[0], operands[1],
7808 operands[2], operands[3]));
7812 if (operands[3] == const0_rtx)
7814 switch (GET_CODE (operands[1]))
7817 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7821 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7825 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7826 NULL_RTX, 0, OPTAB_WIDEN);
7827 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7828 NULL_RTX, 0, OPTAB_WIDEN);
7829 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7830 operands[0], 1, OPTAB_WIDEN);
7834 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7836 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7837 NULL_RTX, 1, OPTAB_WIDEN);
7841 scratch = expand_binop (SImode, ashr_optab, operands[2],
7842 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7843 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7844 NULL_RTX, 0, OPTAB_WIDEN);
7845 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7849 /* LT is handled by generic code. No need for unsigned with 0. */
7856 switch (GET_CODE (operands[1]))
7859 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7860 NULL_RTX, 0, OPTAB_WIDEN);
7861 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7865 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7866 NULL_RTX, 0, OPTAB_WIDEN);
7867 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7871 op3 = force_reg (SImode, operands[3]);
7873 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7874 NULL_RTX, 1, OPTAB_WIDEN);
7875 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7876 NULL_RTX, 0, OPTAB_WIDEN);
7877 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7883 if (!thumb1_cmp_operand (op3, SImode))
7884 op3 = force_reg (SImode, op3);
7885 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7886 NULL_RTX, 0, OPTAB_WIDEN);
7887 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7888 NULL_RTX, 1, OPTAB_WIDEN);
7889 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7894 op3 = force_reg (SImode, operands[3]);
7895 scratch = force_reg (SImode, const0_rtx);
7896 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7902 if (!thumb1_cmp_operand (op3, SImode))
7903 op3 = force_reg (SImode, op3);
7904 scratch = force_reg (SImode, const0_rtx);
7905 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7911 if (!thumb1_cmp_operand (op3, SImode))
7912 op3 = force_reg (SImode, op3);
7913 scratch = gen_reg_rtx (SImode);
7914 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7918 op3 = force_reg (SImode, operands[3]);
7919 scratch = gen_reg_rtx (SImode);
7920 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7923 /* No good sequences for GT, LT. */
7930 (define_expand "cstoresf4"
7931 [(set (match_operand:SI 0 "s_register_operand" "")
7932 (match_operator:SI 1 "expandable_comparison_operator"
7933 [(match_operand:SF 2 "s_register_operand" "")
7934 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7935 "TARGET_32BIT && TARGET_HARD_FLOAT"
7936 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7937 operands[2], operands[3])); DONE;"
7940 (define_expand "cstoredf4"
7941 [(set (match_operand:SI 0 "s_register_operand" "")
7942 (match_operator:SI 1 "expandable_comparison_operator"
7943 [(match_operand:DF 2 "s_register_operand" "")
7944 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7945 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7946 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7947 operands[2], operands[3])); DONE;"
7950 (define_expand "cstoredi4"
7951 [(set (match_operand:SI 0 "s_register_operand" "")
7952 (match_operator:SI 1 "expandable_comparison_operator"
7953 [(match_operand:DI 2 "s_register_operand" "")
7954 (match_operand:DI 3 "cmpdi_operand" "")]))]
7957 if (!arm_validize_comparison (&operands[1],
7961 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7967 (define_expand "cstoresi_eq0_thumb1"
7969 [(set (match_operand:SI 0 "s_register_operand" "")
7970 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7972 (clobber (match_dup:SI 2))])]
7974 "operands[2] = gen_reg_rtx (SImode);"
7977 (define_expand "cstoresi_ne0_thumb1"
7979 [(set (match_operand:SI 0 "s_register_operand" "")
7980 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7982 (clobber (match_dup:SI 2))])]
7984 "operands[2] = gen_reg_rtx (SImode);"
7987 (define_insn "*cstoresi_eq0_thumb1_insn"
7988 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7989 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7991 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7994 neg\\t%0, %1\;adc\\t%0, %0, %1
7995 neg\\t%2, %1\;adc\\t%0, %1, %2"
7996 [(set_attr "length" "4")]
7999 (define_insn "*cstoresi_ne0_thumb1_insn"
8000 [(set (match_operand:SI 0 "s_register_operand" "=l")
8001 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8003 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8005 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8006 [(set_attr "length" "4")]
8009 ;; Used as part of the expansion of thumb ltu and gtu sequences
8010 (define_insn "cstoresi_nltu_thumb1"
8011 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8012 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8013 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8015 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8016 [(set_attr "length" "4")]
8019 (define_insn_and_split "cstoresi_ltu_thumb1"
8020 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8021 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8022 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8027 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8028 (set (match_dup 0) (neg:SI (match_dup 3)))]
8029 "operands[3] = gen_reg_rtx (SImode);"
8030 [(set_attr "length" "4")]
8033 ;; Used as part of the expansion of thumb les sequence.
8034 (define_insn "thumb1_addsi3_addgeu"
8035 [(set (match_operand:SI 0 "s_register_operand" "=l")
8036 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8037 (match_operand:SI 2 "s_register_operand" "l"))
8038 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8039 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8041 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8042 [(set_attr "length" "4")]
8046 ;; Conditional move insns
8048 (define_expand "movsicc"
8049 [(set (match_operand:SI 0 "s_register_operand" "")
8050 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8051 (match_operand:SI 2 "arm_not_operand" "")
8052 (match_operand:SI 3 "arm_not_operand" "")))]
8059 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8060 &XEXP (operands[1], 1)))
8063 code = GET_CODE (operands[1]);
8064 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8065 XEXP (operands[1], 1), NULL_RTX);
8066 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8070 (define_expand "movsfcc"
8071 [(set (match_operand:SF 0 "s_register_operand" "")
8072 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8073 (match_operand:SF 2 "s_register_operand" "")
8074 (match_operand:SF 3 "s_register_operand" "")))]
8075 "TARGET_32BIT && TARGET_HARD_FLOAT"
8078 enum rtx_code code = GET_CODE (operands[1]);
8081 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8082 &XEXP (operands[1], 1)))
8085 code = GET_CODE (operands[1]);
8086 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8087 XEXP (operands[1], 1), NULL_RTX);
8088 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8092 (define_expand "movdfcc"
8093 [(set (match_operand:DF 0 "s_register_operand" "")
8094 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8095 (match_operand:DF 2 "s_register_operand" "")
8096 (match_operand:DF 3 "s_register_operand" "")))]
8097 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8100 enum rtx_code code = GET_CODE (operands[1]);
8103 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8104 &XEXP (operands[1], 1)))
8106 code = GET_CODE (operands[1]);
8107 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8108 XEXP (operands[1], 1), NULL_RTX);
8109 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8113 (define_insn "*movsicc_insn"
8114 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8116 (match_operator 3 "arm_comparison_operator"
8117 [(match_operand 4 "cc_register" "") (const_int 0)])
8118 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8119 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8126 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8127 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8128 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8129 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8130 [(set_attr "length" "4,4,4,4,8,8,8,8")
8131 (set_attr "conds" "use")
8132 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")
8133 (set_attr_alternative "type"
8134 [(if_then_else (match_operand 2 "const_int_operand" "")
8135 (const_string "simple_alu_imm")
8137 (const_string "simple_alu_imm")
8138 (if_then_else (match_operand 1 "const_int_operand" "")
8139 (const_string "simple_alu_imm")
8141 (const_string "simple_alu_imm")
8145 (const_string "*")])]
8148 (define_insn "*movsfcc_soft_insn"
8149 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8150 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8151 [(match_operand 4 "cc_register" "") (const_int 0)])
8152 (match_operand:SF 1 "s_register_operand" "0,r")
8153 (match_operand:SF 2 "s_register_operand" "r,0")))]
8154 "TARGET_ARM && TARGET_SOFT_FLOAT"
8158 [(set_attr "conds" "use")
8159 (set_attr "insn" "mov")]
8163 ;; Jump and linkage insns
8165 (define_expand "jump"
8167 (label_ref (match_operand 0 "" "")))]
8172 (define_insn "*arm_jump"
8174 (label_ref (match_operand 0 "" "")))]
8178 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8180 arm_ccfsm_state += 2;
8183 return \"b%?\\t%l0\";
8186 [(set_attr "predicable" "yes")
8187 (set (attr "length")
8189 (and (match_test "TARGET_THUMB2")
8190 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8191 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8196 (define_insn "*thumb_jump"
8198 (label_ref (match_operand 0 "" "")))]
8201 if (get_attr_length (insn) == 2)
8203 return \"bl\\t%l0\\t%@ far jump\";
8205 [(set (attr "far_jump")
8207 (eq_attr "length" "4")
8208 (const_string "yes")
8209 (const_string "no")))
8210 (set (attr "length")
8212 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8213 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8218 (define_expand "call"
8219 [(parallel [(call (match_operand 0 "memory_operand" "")
8220 (match_operand 1 "general_operand" ""))
8221 (use (match_operand 2 "" ""))
8222 (clobber (reg:SI LR_REGNUM))])]
8228 /* In an untyped call, we can get NULL for operand 2. */
8229 if (operands[2] == NULL_RTX)
8230 operands[2] = const0_rtx;
8232 /* Decide if we should generate indirect calls by loading the
8233 32-bit address of the callee into a register before performing the
8235 callee = XEXP (operands[0], 0);
8236 if (GET_CODE (callee) == SYMBOL_REF
8237 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8239 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8241 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8242 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8247 (define_expand "call_internal"
8248 [(parallel [(call (match_operand 0 "memory_operand" "")
8249 (match_operand 1 "general_operand" ""))
8250 (use (match_operand 2 "" ""))
8251 (clobber (reg:SI LR_REGNUM))])])
8253 (define_insn "*call_reg_armv5"
8254 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8255 (match_operand 1 "" ""))
8256 (use (match_operand 2 "" ""))
8257 (clobber (reg:SI LR_REGNUM))]
8258 "TARGET_ARM && arm_arch5"
8260 [(set_attr "type" "call")]
8263 (define_insn "*call_reg_arm"
8264 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8265 (match_operand 1 "" ""))
8266 (use (match_operand 2 "" ""))
8267 (clobber (reg:SI LR_REGNUM))]
8268 "TARGET_ARM && !arm_arch5"
8270 return output_call (operands);
8272 ;; length is worst case, normally it is only two
8273 [(set_attr "length" "12")
8274 (set_attr "type" "call")]
8278 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8279 ;; considered a function call by the branch predictor of some cores (PR40887).
8280 ;; Falls back to blx rN (*call_reg_armv5).
8282 (define_insn "*call_mem"
8283 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8284 (match_operand 1 "" ""))
8285 (use (match_operand 2 "" ""))
8286 (clobber (reg:SI LR_REGNUM))]
8287 "TARGET_ARM && !arm_arch5"
8289 return output_call_mem (operands);
8291 [(set_attr "length" "12")
8292 (set_attr "type" "call")]
8295 (define_insn "*call_reg_thumb1_v5"
8296 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8297 (match_operand 1 "" ""))
8298 (use (match_operand 2 "" ""))
8299 (clobber (reg:SI LR_REGNUM))]
8300 "TARGET_THUMB1 && arm_arch5"
8302 [(set_attr "length" "2")
8303 (set_attr "type" "call")]
8306 (define_insn "*call_reg_thumb1"
8307 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8308 (match_operand 1 "" ""))
8309 (use (match_operand 2 "" ""))
8310 (clobber (reg:SI LR_REGNUM))]
8311 "TARGET_THUMB1 && !arm_arch5"
8314 if (!TARGET_CALLER_INTERWORKING)
8315 return thumb_call_via_reg (operands[0]);
8316 else if (operands[1] == const0_rtx)
8317 return \"bl\\t%__interwork_call_via_%0\";
8318 else if (frame_pointer_needed)
8319 return \"bl\\t%__interwork_r7_call_via_%0\";
8321 return \"bl\\t%__interwork_r11_call_via_%0\";
8323 [(set_attr "type" "call")]
8326 (define_expand "call_value"
8327 [(parallel [(set (match_operand 0 "" "")
8328 (call (match_operand 1 "memory_operand" "")
8329 (match_operand 2 "general_operand" "")))
8330 (use (match_operand 3 "" ""))
8331 (clobber (reg:SI LR_REGNUM))])]
8337 /* In an untyped call, we can get NULL for operand 2. */
8338 if (operands[3] == 0)
8339 operands[3] = const0_rtx;
8341 /* Decide if we should generate indirect calls by loading the
8342 32-bit address of the callee into a register before performing the
8344 callee = XEXP (operands[1], 0);
8345 if (GET_CODE (callee) == SYMBOL_REF
8346 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8348 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8350 pat = gen_call_value_internal (operands[0], operands[1],
8351 operands[2], operands[3]);
8352 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8357 (define_expand "call_value_internal"
8358 [(parallel [(set (match_operand 0 "" "")
8359 (call (match_operand 1 "memory_operand" "")
8360 (match_operand 2 "general_operand" "")))
8361 (use (match_operand 3 "" ""))
8362 (clobber (reg:SI LR_REGNUM))])])
8364 (define_insn "*call_value_reg_armv5"
8365 [(set (match_operand 0 "" "")
8366 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8367 (match_operand 2 "" "")))
8368 (use (match_operand 3 "" ""))
8369 (clobber (reg:SI LR_REGNUM))]
8370 "TARGET_ARM && arm_arch5"
8372 [(set_attr "type" "call")]
8375 (define_insn "*call_value_reg_arm"
8376 [(set (match_operand 0 "" "")
8377 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8378 (match_operand 2 "" "")))
8379 (use (match_operand 3 "" ""))
8380 (clobber (reg:SI LR_REGNUM))]
8381 "TARGET_ARM && !arm_arch5"
8383 return output_call (&operands[1]);
8385 [(set_attr "length" "12")
8386 (set_attr "type" "call")]
8389 ;; Note: see *call_mem
8391 (define_insn "*call_value_mem"
8392 [(set (match_operand 0 "" "")
8393 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8394 (match_operand 2 "" "")))
8395 (use (match_operand 3 "" ""))
8396 (clobber (reg:SI LR_REGNUM))]
8397 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8399 return output_call_mem (&operands[1]);
8401 [(set_attr "length" "12")
8402 (set_attr "type" "call")]
8405 (define_insn "*call_value_reg_thumb1_v5"
8406 [(set (match_operand 0 "" "")
8407 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8408 (match_operand 2 "" "")))
8409 (use (match_operand 3 "" ""))
8410 (clobber (reg:SI LR_REGNUM))]
8411 "TARGET_THUMB1 && arm_arch5"
8413 [(set_attr "length" "2")
8414 (set_attr "type" "call")]
8417 (define_insn "*call_value_reg_thumb1"
8418 [(set (match_operand 0 "" "")
8419 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8420 (match_operand 2 "" "")))
8421 (use (match_operand 3 "" ""))
8422 (clobber (reg:SI LR_REGNUM))]
8423 "TARGET_THUMB1 && !arm_arch5"
8426 if (!TARGET_CALLER_INTERWORKING)
8427 return thumb_call_via_reg (operands[1]);
8428 else if (operands[2] == const0_rtx)
8429 return \"bl\\t%__interwork_call_via_%1\";
8430 else if (frame_pointer_needed)
8431 return \"bl\\t%__interwork_r7_call_via_%1\";
8433 return \"bl\\t%__interwork_r11_call_via_%1\";
8435 [(set_attr "type" "call")]
8438 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8439 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8441 (define_insn "*call_symbol"
8442 [(call (mem:SI (match_operand:SI 0 "" ""))
8443 (match_operand 1 "" ""))
8444 (use (match_operand 2 "" ""))
8445 (clobber (reg:SI LR_REGNUM))]
8447 && (GET_CODE (operands[0]) == SYMBOL_REF)
8448 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8451 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8453 [(set_attr "type" "call")]
8456 (define_insn "*call_value_symbol"
8457 [(set (match_operand 0 "" "")
8458 (call (mem:SI (match_operand:SI 1 "" ""))
8459 (match_operand:SI 2 "" "")))
8460 (use (match_operand 3 "" ""))
8461 (clobber (reg:SI LR_REGNUM))]
8463 && (GET_CODE (operands[1]) == SYMBOL_REF)
8464 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8467 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8469 [(set_attr "type" "call")]
8472 (define_insn "*call_insn"
8473 [(call (mem:SI (match_operand:SI 0 "" ""))
8474 (match_operand:SI 1 "" ""))
8475 (use (match_operand 2 "" ""))
8476 (clobber (reg:SI LR_REGNUM))]
8478 && GET_CODE (operands[0]) == SYMBOL_REF
8479 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8481 [(set_attr "length" "4")
8482 (set_attr "type" "call")]
8485 (define_insn "*call_value_insn"
8486 [(set (match_operand 0 "" "")
8487 (call (mem:SI (match_operand 1 "" ""))
8488 (match_operand 2 "" "")))
8489 (use (match_operand 3 "" ""))
8490 (clobber (reg:SI LR_REGNUM))]
8492 && GET_CODE (operands[1]) == SYMBOL_REF
8493 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8495 [(set_attr "length" "4")
8496 (set_attr "type" "call")]
8499 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8500 (define_expand "sibcall"
8501 [(parallel [(call (match_operand 0 "memory_operand" "")
8502 (match_operand 1 "general_operand" ""))
8504 (use (match_operand 2 "" ""))])]
8508 if (operands[2] == NULL_RTX)
8509 operands[2] = const0_rtx;
8513 (define_expand "sibcall_value"
8514 [(parallel [(set (match_operand 0 "" "")
8515 (call (match_operand 1 "memory_operand" "")
8516 (match_operand 2 "general_operand" "")))
8518 (use (match_operand 3 "" ""))])]
8522 if (operands[3] == NULL_RTX)
8523 operands[3] = const0_rtx;
8527 (define_insn "*sibcall_insn"
8528 [(call (mem:SI (match_operand:SI 0 "" "X"))
8529 (match_operand 1 "" ""))
8531 (use (match_operand 2 "" ""))]
8532 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8534 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8536 [(set_attr "type" "call")]
8539 (define_insn "*sibcall_value_insn"
8540 [(set (match_operand 0 "" "")
8541 (call (mem:SI (match_operand:SI 1 "" "X"))
8542 (match_operand 2 "" "")))
8544 (use (match_operand 3 "" ""))]
8545 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8547 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8549 [(set_attr "type" "call")]
8552 (define_expand "return"
8554 "(TARGET_ARM || (TARGET_THUMB2
8555 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8556 && !IS_STACKALIGN (arm_current_func_type ())))
8557 && USE_RETURN_INSN (FALSE)"
8562 thumb2_expand_return ();
8569 ;; Often the return insn will be the same as loading from memory, so set attr
8570 (define_insn "*arm_return"
8572 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8575 if (arm_ccfsm_state == 2)
8577 arm_ccfsm_state += 2;
8580 return output_return_instruction (const_true_rtx, true, false, false);
8582 [(set_attr "type" "load1")
8583 (set_attr "length" "12")
8584 (set_attr "predicable" "yes")]
8587 (define_insn "*cond_return"
8589 (if_then_else (match_operator 0 "arm_comparison_operator"
8590 [(match_operand 1 "cc_register" "") (const_int 0)])
8593 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8596 if (arm_ccfsm_state == 2)
8598 arm_ccfsm_state += 2;
8601 return output_return_instruction (operands[0], true, false, false);
8603 [(set_attr "conds" "use")
8604 (set_attr "length" "12")
8605 (set_attr "type" "load1")]
8608 (define_insn "*cond_return_inverted"
8610 (if_then_else (match_operator 0 "arm_comparison_operator"
8611 [(match_operand 1 "cc_register" "") (const_int 0)])
8614 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8617 if (arm_ccfsm_state == 2)
8619 arm_ccfsm_state += 2;
8622 return output_return_instruction (operands[0], true, true, false);
8624 [(set_attr "conds" "use")
8625 (set_attr "length" "12")
8626 (set_attr "type" "load1")]
8629 (define_insn "*arm_simple_return"
8634 if (arm_ccfsm_state == 2)
8636 arm_ccfsm_state += 2;
8639 return output_return_instruction (const_true_rtx, true, false, true);
8641 [(set_attr "type" "branch")
8642 (set_attr "length" "4")
8643 (set_attr "predicable" "yes")]
8646 ;; Generate a sequence of instructions to determine if the processor is
8647 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8650 (define_expand "return_addr_mask"
8652 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8654 (set (match_operand:SI 0 "s_register_operand" "")
8655 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8657 (const_int 67108860)))] ; 0x03fffffc
8660 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8663 (define_insn "*check_arch2"
8664 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8665 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8668 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8669 [(set_attr "length" "8")
8670 (set_attr "conds" "set")]
8673 ;; Call subroutine returning any type.
8675 (define_expand "untyped_call"
8676 [(parallel [(call (match_operand 0 "" "")
8678 (match_operand 1 "" "")
8679 (match_operand 2 "" "")])]
8684 rtx par = gen_rtx_PARALLEL (VOIDmode,
8685 rtvec_alloc (XVECLEN (operands[2], 0)));
8686 rtx addr = gen_reg_rtx (Pmode);
8690 emit_move_insn (addr, XEXP (operands[1], 0));
8691 mem = change_address (operands[1], BLKmode, addr);
8693 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8695 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8697 /* Default code only uses r0 as a return value, but we could
8698 be using anything up to 4 registers. */
8699 if (REGNO (src) == R0_REGNUM)
8700 src = gen_rtx_REG (TImode, R0_REGNUM);
8702 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8704 size += GET_MODE_SIZE (GET_MODE (src));
8707 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8712 for (i = 0; i < XVECLEN (par, 0); i++)
8714 HOST_WIDE_INT offset = 0;
8715 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8718 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8720 mem = change_address (mem, GET_MODE (reg), NULL);
8721 if (REGNO (reg) == R0_REGNUM)
8723 /* On thumb we have to use a write-back instruction. */
8724 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8725 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8726 size = TARGET_ARM ? 16 : 0;
8730 emit_move_insn (mem, reg);
8731 size = GET_MODE_SIZE (GET_MODE (reg));
8735 /* The optimizer does not know that the call sets the function value
8736 registers we stored in the result block. We avoid problems by
8737 claiming that all hard registers are used and clobbered at this
8739 emit_insn (gen_blockage ());
8745 (define_expand "untyped_return"
8746 [(match_operand:BLK 0 "memory_operand" "")
8747 (match_operand 1 "" "")]
8752 rtx addr = gen_reg_rtx (Pmode);
8756 emit_move_insn (addr, XEXP (operands[0], 0));
8757 mem = change_address (operands[0], BLKmode, addr);
8759 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8761 HOST_WIDE_INT offset = 0;
8762 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8765 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8767 mem = change_address (mem, GET_MODE (reg), NULL);
8768 if (REGNO (reg) == R0_REGNUM)
8770 /* On thumb we have to use a write-back instruction. */
8771 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8772 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8773 size = TARGET_ARM ? 16 : 0;
8777 emit_move_insn (reg, mem);
8778 size = GET_MODE_SIZE (GET_MODE (reg));
8782 /* Emit USE insns before the return. */
8783 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8784 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8786 /* Construct the return. */
8787 expand_naked_return ();
8793 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8794 ;; all of memory. This blocks insns from being moved across this point.
8796 (define_insn "blockage"
8797 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8800 [(set_attr "length" "0")
8801 (set_attr "type" "block")]
8804 (define_expand "casesi"
8805 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8806 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8807 (match_operand:SI 2 "const_int_operand" "") ; total range
8808 (match_operand:SI 3 "" "") ; table label
8809 (match_operand:SI 4 "" "")] ; Out of range label
8810 "TARGET_32BIT || optimize_size || flag_pic"
8813 enum insn_code code;
8814 if (operands[1] != const0_rtx)
8816 rtx reg = gen_reg_rtx (SImode);
8818 emit_insn (gen_addsi3 (reg, operands[0],
8819 gen_int_mode (-INTVAL (operands[1]),
8825 code = CODE_FOR_arm_casesi_internal;
8826 else if (TARGET_THUMB1)
8827 code = CODE_FOR_thumb1_casesi_internal_pic;
8829 code = CODE_FOR_thumb2_casesi_internal_pic;
8831 code = CODE_FOR_thumb2_casesi_internal;
8833 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8834 operands[2] = force_reg (SImode, operands[2]);
8836 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8837 operands[3], operands[4]));
8842 ;; The USE in this pattern is needed to tell flow analysis that this is
8843 ;; a CASESI insn. It has no other purpose.
8844 (define_insn "arm_casesi_internal"
8845 [(parallel [(set (pc)
8847 (leu (match_operand:SI 0 "s_register_operand" "r")
8848 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8849 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8850 (label_ref (match_operand 2 "" ""))))
8851 (label_ref (match_operand 3 "" ""))))
8852 (clobber (reg:CC CC_REGNUM))
8853 (use (label_ref (match_dup 2)))])]
8857 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8858 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8860 [(set_attr "conds" "clob")
8861 (set_attr "length" "12")]
8864 (define_expand "thumb1_casesi_internal_pic"
8865 [(match_operand:SI 0 "s_register_operand" "")
8866 (match_operand:SI 1 "thumb1_cmp_operand" "")
8867 (match_operand 2 "" "")
8868 (match_operand 3 "" "")]
8872 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8873 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8875 reg0 = gen_rtx_REG (SImode, 0);
8876 emit_move_insn (reg0, operands[0]);
8877 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8882 (define_insn "thumb1_casesi_dispatch"
8883 [(parallel [(set (pc) (unspec [(reg:SI 0)
8884 (label_ref (match_operand 0 "" ""))
8885 ;; (label_ref (match_operand 1 "" ""))
8887 UNSPEC_THUMB1_CASESI))
8888 (clobber (reg:SI IP_REGNUM))
8889 (clobber (reg:SI LR_REGNUM))])]
8891 "* return thumb1_output_casesi(operands);"
8892 [(set_attr "length" "4")]
8895 (define_expand "indirect_jump"
8897 (match_operand:SI 0 "s_register_operand" ""))]
8900 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8901 address and use bx. */
8905 tmp = gen_reg_rtx (SImode);
8906 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8912 ;; NB Never uses BX.
8913 (define_insn "*arm_indirect_jump"
8915 (match_operand:SI 0 "s_register_operand" "r"))]
8917 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8918 [(set_attr "predicable" "yes")]
8921 (define_insn "*load_indirect_jump"
8923 (match_operand:SI 0 "memory_operand" "m"))]
8925 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8926 [(set_attr "type" "load1")
8927 (set_attr "pool_range" "4096")
8928 (set_attr "neg_pool_range" "4084")
8929 (set_attr "predicable" "yes")]
8932 ;; NB Never uses BX.
8933 (define_insn "*thumb1_indirect_jump"
8935 (match_operand:SI 0 "register_operand" "l*r"))]
8938 [(set_attr "conds" "clob")
8939 (set_attr "length" "2")]
8949 if (TARGET_UNIFIED_ASM)
8952 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8953 return \"mov\\tr8, r8\";
8955 [(set (attr "length")
8956 (if_then_else (eq_attr "is_thumb" "yes")
8962 ;; Patterns to allow combination of arithmetic, cond code and shifts
8964 (define_insn "*arith_shiftsi"
8965 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8966 (match_operator:SI 1 "shiftable_operator"
8967 [(match_operator:SI 3 "shift_operator"
8968 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8969 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8970 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8972 "%i1%?\\t%0, %2, %4%S3"
8973 [(set_attr "predicable" "yes")
8974 (set_attr "shift" "4")
8975 (set_attr "arch" "a,t2,t2,a")
8976 ;; Thumb2 doesn't allow the stack pointer to be used for
8977 ;; operand1 for all operations other than add and sub. In this case
8978 ;; the minus operation is a candidate for an rsub and hence needs
8980 ;; We have to make sure to disable the fourth alternative if
8981 ;; the shift_operator is MULT, since otherwise the insn will
8982 ;; also match a multiply_accumulate pattern and validate_change
8983 ;; will allow a replacement of the constant with a register
8984 ;; despite the checks done in shift_operator.
8985 (set_attr_alternative "insn_enabled"
8986 [(const_string "yes")
8988 (match_operand:SI 1 "add_operator" "")
8989 (const_string "yes") (const_string "no"))
8990 (const_string "yes")
8992 (match_operand:SI 3 "mult_operator" "")
8993 (const_string "no") (const_string "yes"))])
8994 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
8997 [(set (match_operand:SI 0 "s_register_operand" "")
8998 (match_operator:SI 1 "shiftable_operator"
8999 [(match_operator:SI 2 "shiftable_operator"
9000 [(match_operator:SI 3 "shift_operator"
9001 [(match_operand:SI 4 "s_register_operand" "")
9002 (match_operand:SI 5 "reg_or_int_operand" "")])
9003 (match_operand:SI 6 "s_register_operand" "")])
9004 (match_operand:SI 7 "arm_rhs_operand" "")]))
9005 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9008 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9011 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9014 (define_insn "*arith_shiftsi_compare0"
9015 [(set (reg:CC_NOOV CC_REGNUM)
9017 (match_operator:SI 1 "shiftable_operator"
9018 [(match_operator:SI 3 "shift_operator"
9019 [(match_operand:SI 4 "s_register_operand" "r,r")
9020 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9021 (match_operand:SI 2 "s_register_operand" "r,r")])
9023 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9024 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9027 "%i1%.\\t%0, %2, %4%S3"
9028 [(set_attr "conds" "set")
9029 (set_attr "shift" "4")
9030 (set_attr "arch" "32,a")
9031 (set_attr "type" "alu_shift,alu_shift_reg")])
9033 (define_insn "*arith_shiftsi_compare0_scratch"
9034 [(set (reg:CC_NOOV CC_REGNUM)
9036 (match_operator:SI 1 "shiftable_operator"
9037 [(match_operator:SI 3 "shift_operator"
9038 [(match_operand:SI 4 "s_register_operand" "r,r")
9039 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9040 (match_operand:SI 2 "s_register_operand" "r,r")])
9042 (clobber (match_scratch:SI 0 "=r,r"))]
9044 "%i1%.\\t%0, %2, %4%S3"
9045 [(set_attr "conds" "set")
9046 (set_attr "shift" "4")
9047 (set_attr "arch" "32,a")
9048 (set_attr "type" "alu_shift,alu_shift_reg")])
9050 (define_insn "*sub_shiftsi"
9051 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9052 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9053 (match_operator:SI 2 "shift_operator"
9054 [(match_operand:SI 3 "s_register_operand" "r,r")
9055 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9057 "sub%?\\t%0, %1, %3%S2"
9058 [(set_attr "predicable" "yes")
9059 (set_attr "shift" "3")
9060 (set_attr "arch" "32,a")
9061 (set_attr "type" "alu_shift,alu_shift_reg")])
9063 (define_insn "*sub_shiftsi_compare0"
9064 [(set (reg:CC_NOOV CC_REGNUM)
9066 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9067 (match_operator:SI 2 "shift_operator"
9068 [(match_operand:SI 3 "s_register_operand" "r,r")
9069 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9071 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9072 (minus:SI (match_dup 1)
9073 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9075 "sub%.\\t%0, %1, %3%S2"
9076 [(set_attr "conds" "set")
9077 (set_attr "shift" "3")
9078 (set_attr "arch" "32,a")
9079 (set_attr "type" "alu_shift,alu_shift_reg")])
9081 (define_insn "*sub_shiftsi_compare0_scratch"
9082 [(set (reg:CC_NOOV CC_REGNUM)
9084 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9085 (match_operator:SI 2 "shift_operator"
9086 [(match_operand:SI 3 "s_register_operand" "r,r")
9087 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9089 (clobber (match_scratch:SI 0 "=r,r"))]
9091 "sub%.\\t%0, %1, %3%S2"
9092 [(set_attr "conds" "set")
9093 (set_attr "shift" "3")
9094 (set_attr "arch" "32,a")
9095 (set_attr "type" "alu_shift,alu_shift_reg")])
9098 (define_insn "*and_scc"
9099 [(set (match_operand:SI 0 "s_register_operand" "=r")
9100 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9101 [(match_operand 3 "cc_register" "") (const_int 0)])
9102 (match_operand:SI 2 "s_register_operand" "r")))]
9104 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9105 [(set_attr "conds" "use")
9106 (set_attr "insn" "mov")
9107 (set_attr "length" "8")]
9110 (define_insn "*ior_scc"
9111 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9112 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9113 [(match_operand 3 "cc_register" "") (const_int 0)])
9114 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9118 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9119 [(set_attr "conds" "use")
9120 (set_attr "length" "4,8")]
9123 ; A series of splitters for the compare_scc pattern below. Note that
9124 ; order is important.
9126 [(set (match_operand:SI 0 "s_register_operand" "")
9127 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9129 (clobber (reg:CC CC_REGNUM))]
9130 "TARGET_32BIT && reload_completed"
9131 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9134 [(set (match_operand:SI 0 "s_register_operand" "")
9135 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9137 (clobber (reg:CC CC_REGNUM))]
9138 "TARGET_32BIT && reload_completed"
9139 [(set (match_dup 0) (not:SI (match_dup 1)))
9140 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9143 [(set (match_operand:SI 0 "s_register_operand" "")
9144 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9146 (clobber (reg:CC CC_REGNUM))]
9147 "TARGET_32BIT && reload_completed"
9149 [(set (reg:CC CC_REGNUM)
9150 (compare:CC (const_int 1) (match_dup 1)))
9152 (minus:SI (const_int 1) (match_dup 1)))])
9153 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9154 (set (match_dup 0) (const_int 0)))])
9157 [(set (match_operand:SI 0 "s_register_operand" "")
9158 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9159 (match_operand:SI 2 "const_int_operand" "")))
9160 (clobber (reg:CC CC_REGNUM))]
9161 "TARGET_32BIT && reload_completed"
9163 [(set (reg:CC CC_REGNUM)
9164 (compare:CC (match_dup 1) (match_dup 2)))
9165 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9166 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9167 (set (match_dup 0) (const_int 1)))]
9169 operands[3] = GEN_INT (-INTVAL (operands[2]));
9173 [(set (match_operand:SI 0 "s_register_operand" "")
9174 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9175 (match_operand:SI 2 "arm_add_operand" "")))
9176 (clobber (reg:CC CC_REGNUM))]
9177 "TARGET_32BIT && reload_completed"
9179 [(set (reg:CC_NOOV CC_REGNUM)
9180 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9182 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9183 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9184 (set (match_dup 0) (const_int 1)))])
9186 (define_insn_and_split "*compare_scc"
9187 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9188 (match_operator:SI 1 "arm_comparison_operator"
9189 [(match_operand:SI 2 "s_register_operand" "r,r")
9190 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9191 (clobber (reg:CC CC_REGNUM))]
9194 "&& reload_completed"
9195 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9196 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9197 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9200 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9201 operands[2], operands[3]);
9202 enum rtx_code rc = GET_CODE (operands[1]);
9204 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9206 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9207 if (mode == CCFPmode || mode == CCFPEmode)
9208 rc = reverse_condition_maybe_unordered (rc);
9210 rc = reverse_condition (rc);
9211 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9214 ;; Attempt to improve the sequence generated by the compare_scc splitters
9215 ;; not to use conditional execution.
9217 [(set (reg:CC CC_REGNUM)
9218 (compare:CC (match_operand:SI 1 "register_operand" "")
9219 (match_operand:SI 2 "arm_rhs_operand" "")))
9220 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9221 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9222 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9223 (set (match_dup 0) (const_int 1)))
9224 (match_scratch:SI 3 "r")]
9227 [(set (reg:CC CC_REGNUM)
9228 (compare:CC (match_dup 1) (match_dup 2)))
9229 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
9231 [(set (reg:CC CC_REGNUM)
9232 (compare:CC (const_int 0) (match_dup 3)))
9233 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9236 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9237 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
9238 (clobber (reg:CC CC_REGNUM))])])
9240 (define_insn "*cond_move"
9241 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9242 (if_then_else:SI (match_operator 3 "equality_operator"
9243 [(match_operator 4 "arm_comparison_operator"
9244 [(match_operand 5 "cc_register" "") (const_int 0)])
9246 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9247 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9250 if (GET_CODE (operands[3]) == NE)
9252 if (which_alternative != 1)
9253 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9254 if (which_alternative != 0)
9255 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9258 if (which_alternative != 0)
9259 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9260 if (which_alternative != 1)
9261 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9264 [(set_attr "conds" "use")
9265 (set_attr "insn" "mov")
9266 (set_attr "length" "4,4,8")]
9269 (define_insn "*cond_arith"
9270 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9271 (match_operator:SI 5 "shiftable_operator"
9272 [(match_operator:SI 4 "arm_comparison_operator"
9273 [(match_operand:SI 2 "s_register_operand" "r,r")
9274 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9275 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9276 (clobber (reg:CC CC_REGNUM))]
9279 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9280 return \"%i5\\t%0, %1, %2, lsr #31\";
9282 output_asm_insn (\"cmp\\t%2, %3\", operands);
9283 if (GET_CODE (operands[5]) == AND)
9284 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9285 else if (GET_CODE (operands[5]) == MINUS)
9286 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9287 else if (which_alternative != 0)
9288 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9289 return \"%i5%d4\\t%0, %1, #1\";
9291 [(set_attr "conds" "clob")
9292 (set_attr "length" "12")]
9295 (define_insn "*cond_sub"
9296 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9297 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9298 (match_operator:SI 4 "arm_comparison_operator"
9299 [(match_operand:SI 2 "s_register_operand" "r,r")
9300 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9301 (clobber (reg:CC CC_REGNUM))]
9304 output_asm_insn (\"cmp\\t%2, %3\", operands);
9305 if (which_alternative != 0)
9306 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9307 return \"sub%d4\\t%0, %1, #1\";
9309 [(set_attr "conds" "clob")
9310 (set_attr "length" "8,12")]
9313 (define_insn "*cmp_ite0"
9314 [(set (match_operand 6 "dominant_cc_register" "")
9317 (match_operator 4 "arm_comparison_operator"
9318 [(match_operand:SI 0 "s_register_operand"
9319 "l,l,l,r,r,r,r,r,r")
9320 (match_operand:SI 1 "arm_add_operand"
9321 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9322 (match_operator:SI 5 "arm_comparison_operator"
9323 [(match_operand:SI 2 "s_register_operand"
9324 "l,r,r,l,l,r,r,r,r")
9325 (match_operand:SI 3 "arm_add_operand"
9326 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9332 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9334 {\"cmp%d5\\t%0, %1\",
9335 \"cmp%d4\\t%2, %3\"},
9336 {\"cmn%d5\\t%0, #%n1\",
9337 \"cmp%d4\\t%2, %3\"},
9338 {\"cmp%d5\\t%0, %1\",
9339 \"cmn%d4\\t%2, #%n3\"},
9340 {\"cmn%d5\\t%0, #%n1\",
9341 \"cmn%d4\\t%2, #%n3\"}
9343 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9348 \"cmn\\t%0, #%n1\"},
9349 {\"cmn\\t%2, #%n3\",
9351 {\"cmn\\t%2, #%n3\",
9354 static const char * const ite[2] =
9359 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9360 CMP_CMP, CMN_CMP, CMP_CMP,
9361 CMN_CMP, CMP_CMN, CMN_CMN};
9363 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9365 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9366 if (TARGET_THUMB2) {
9367 output_asm_insn (ite[swap], operands);
9369 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9372 [(set_attr "conds" "set")
9373 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9374 (set_attr_alternative "length"
9380 (if_then_else (eq_attr "is_thumb" "no")
9383 (if_then_else (eq_attr "is_thumb" "no")
9386 (if_then_else (eq_attr "is_thumb" "no")
9389 (if_then_else (eq_attr "is_thumb" "no")
9394 (define_insn "*cmp_ite1"
9395 [(set (match_operand 6 "dominant_cc_register" "")
9398 (match_operator 4 "arm_comparison_operator"
9399 [(match_operand:SI 0 "s_register_operand"
9400 "l,l,l,r,r,r,r,r,r")
9401 (match_operand:SI 1 "arm_add_operand"
9402 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9403 (match_operator:SI 5 "arm_comparison_operator"
9404 [(match_operand:SI 2 "s_register_operand"
9405 "l,r,r,l,l,r,r,r,r")
9406 (match_operand:SI 3 "arm_add_operand"
9407 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9413 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9417 {\"cmn\\t%0, #%n1\",
9420 \"cmn\\t%2, #%n3\"},
9421 {\"cmn\\t%0, #%n1\",
9424 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9426 {\"cmp%d4\\t%2, %3\",
9427 \"cmp%D5\\t%0, %1\"},
9428 {\"cmp%d4\\t%2, %3\",
9429 \"cmn%D5\\t%0, #%n1\"},
9430 {\"cmn%d4\\t%2, #%n3\",
9431 \"cmp%D5\\t%0, %1\"},
9432 {\"cmn%d4\\t%2, #%n3\",
9433 \"cmn%D5\\t%0, #%n1\"}
9435 static const char * const ite[2] =
9440 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9441 CMP_CMP, CMN_CMP, CMP_CMP,
9442 CMN_CMP, CMP_CMN, CMN_CMN};
9444 comparison_dominates_p (GET_CODE (operands[5]),
9445 reverse_condition (GET_CODE (operands[4])));
9447 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9448 if (TARGET_THUMB2) {
9449 output_asm_insn (ite[swap], operands);
9451 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9454 [(set_attr "conds" "set")
9455 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9456 (set_attr_alternative "length"
9462 (if_then_else (eq_attr "is_thumb" "no")
9465 (if_then_else (eq_attr "is_thumb" "no")
9468 (if_then_else (eq_attr "is_thumb" "no")
9471 (if_then_else (eq_attr "is_thumb" "no")
9476 (define_insn "*cmp_and"
9477 [(set (match_operand 6 "dominant_cc_register" "")
9480 (match_operator 4 "arm_comparison_operator"
9481 [(match_operand:SI 0 "s_register_operand"
9482 "l,l,l,r,r,r,r,r,r")
9483 (match_operand:SI 1 "arm_add_operand"
9484 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9485 (match_operator:SI 5 "arm_comparison_operator"
9486 [(match_operand:SI 2 "s_register_operand"
9487 "l,r,r,l,l,r,r,r,r")
9488 (match_operand:SI 3 "arm_add_operand"
9489 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9494 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9496 {\"cmp%d5\\t%0, %1\",
9497 \"cmp%d4\\t%2, %3\"},
9498 {\"cmn%d5\\t%0, #%n1\",
9499 \"cmp%d4\\t%2, %3\"},
9500 {\"cmp%d5\\t%0, %1\",
9501 \"cmn%d4\\t%2, #%n3\"},
9502 {\"cmn%d5\\t%0, #%n1\",
9503 \"cmn%d4\\t%2, #%n3\"}
9505 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9510 \"cmn\\t%0, #%n1\"},
9511 {\"cmn\\t%2, #%n3\",
9513 {\"cmn\\t%2, #%n3\",
9516 static const char *const ite[2] =
9521 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9522 CMP_CMP, CMN_CMP, CMP_CMP,
9523 CMN_CMP, CMP_CMN, CMN_CMN};
9525 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9527 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9528 if (TARGET_THUMB2) {
9529 output_asm_insn (ite[swap], operands);
9531 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9534 [(set_attr "conds" "set")
9535 (set_attr "predicable" "no")
9536 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9537 (set_attr_alternative "length"
9543 (if_then_else (eq_attr "is_thumb" "no")
9546 (if_then_else (eq_attr "is_thumb" "no")
9549 (if_then_else (eq_attr "is_thumb" "no")
9552 (if_then_else (eq_attr "is_thumb" "no")
9557 (define_insn "*cmp_ior"
9558 [(set (match_operand 6 "dominant_cc_register" "")
9561 (match_operator 4 "arm_comparison_operator"
9562 [(match_operand:SI 0 "s_register_operand"
9563 "l,l,l,r,r,r,r,r,r")
9564 (match_operand:SI 1 "arm_add_operand"
9565 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9566 (match_operator:SI 5 "arm_comparison_operator"
9567 [(match_operand:SI 2 "s_register_operand"
9568 "l,r,r,l,l,r,r,r,r")
9569 (match_operand:SI 3 "arm_add_operand"
9570 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9575 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9579 {\"cmn\\t%0, #%n1\",
9582 \"cmn\\t%2, #%n3\"},
9583 {\"cmn\\t%0, #%n1\",
9586 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9588 {\"cmp%D4\\t%2, %3\",
9589 \"cmp%D5\\t%0, %1\"},
9590 {\"cmp%D4\\t%2, %3\",
9591 \"cmn%D5\\t%0, #%n1\"},
9592 {\"cmn%D4\\t%2, #%n3\",
9593 \"cmp%D5\\t%0, %1\"},
9594 {\"cmn%D4\\t%2, #%n3\",
9595 \"cmn%D5\\t%0, #%n1\"}
9597 static const char *const ite[2] =
9602 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9603 CMP_CMP, CMN_CMP, CMP_CMP,
9604 CMN_CMP, CMP_CMN, CMN_CMN};
9606 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9608 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9609 if (TARGET_THUMB2) {
9610 output_asm_insn (ite[swap], operands);
9612 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9616 [(set_attr "conds" "set")
9617 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9618 (set_attr_alternative "length"
9624 (if_then_else (eq_attr "is_thumb" "no")
9627 (if_then_else (eq_attr "is_thumb" "no")
9630 (if_then_else (eq_attr "is_thumb" "no")
9633 (if_then_else (eq_attr "is_thumb" "no")
9638 (define_insn_and_split "*ior_scc_scc"
9639 [(set (match_operand:SI 0 "s_register_operand" "=r")
9640 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9641 [(match_operand:SI 1 "s_register_operand" "r")
9642 (match_operand:SI 2 "arm_add_operand" "rIL")])
9643 (match_operator:SI 6 "arm_comparison_operator"
9644 [(match_operand:SI 4 "s_register_operand" "r")
9645 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9646 (clobber (reg:CC CC_REGNUM))]
9648 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9651 "TARGET_32BIT && reload_completed"
9655 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9656 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9658 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9660 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9663 [(set_attr "conds" "clob")
9664 (set_attr "length" "16")])
9666 ; If the above pattern is followed by a CMP insn, then the compare is
9667 ; redundant, since we can rework the conditional instruction that follows.
9668 (define_insn_and_split "*ior_scc_scc_cmp"
9669 [(set (match_operand 0 "dominant_cc_register" "")
9670 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9671 [(match_operand:SI 1 "s_register_operand" "r")
9672 (match_operand:SI 2 "arm_add_operand" "rIL")])
9673 (match_operator:SI 6 "arm_comparison_operator"
9674 [(match_operand:SI 4 "s_register_operand" "r")
9675 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9677 (set (match_operand:SI 7 "s_register_operand" "=r")
9678 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9679 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9682 "TARGET_32BIT && reload_completed"
9686 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9687 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9689 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9691 [(set_attr "conds" "set")
9692 (set_attr "length" "16")])
9694 (define_insn_and_split "*and_scc_scc"
9695 [(set (match_operand:SI 0 "s_register_operand" "=r")
9696 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9697 [(match_operand:SI 1 "s_register_operand" "r")
9698 (match_operand:SI 2 "arm_add_operand" "rIL")])
9699 (match_operator:SI 6 "arm_comparison_operator"
9700 [(match_operand:SI 4 "s_register_operand" "r")
9701 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9702 (clobber (reg:CC CC_REGNUM))]
9704 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9707 "TARGET_32BIT && reload_completed
9708 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9713 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9714 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9716 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9718 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9721 [(set_attr "conds" "clob")
9722 (set_attr "length" "16")])
9724 ; If the above pattern is followed by a CMP insn, then the compare is
9725 ; redundant, since we can rework the conditional instruction that follows.
9726 (define_insn_and_split "*and_scc_scc_cmp"
9727 [(set (match_operand 0 "dominant_cc_register" "")
9728 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9729 [(match_operand:SI 1 "s_register_operand" "r")
9730 (match_operand:SI 2 "arm_add_operand" "rIL")])
9731 (match_operator:SI 6 "arm_comparison_operator"
9732 [(match_operand:SI 4 "s_register_operand" "r")
9733 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9735 (set (match_operand:SI 7 "s_register_operand" "=r")
9736 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9737 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9740 "TARGET_32BIT && reload_completed"
9744 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9745 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9747 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9749 [(set_attr "conds" "set")
9750 (set_attr "length" "16")])
9752 ;; If there is no dominance in the comparison, then we can still save an
9753 ;; instruction in the AND case, since we can know that the second compare
9754 ;; need only zero the value if false (if true, then the value is already
9756 (define_insn_and_split "*and_scc_scc_nodom"
9757 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9758 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9759 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9760 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9761 (match_operator:SI 6 "arm_comparison_operator"
9762 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9763 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9764 (clobber (reg:CC CC_REGNUM))]
9766 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9769 "TARGET_32BIT && reload_completed"
9770 [(parallel [(set (match_dup 0)
9771 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9772 (clobber (reg:CC CC_REGNUM))])
9773 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9775 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9778 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9779 operands[4], operands[5]),
9781 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9783 [(set_attr "conds" "clob")
9784 (set_attr "length" "20")])
9787 [(set (reg:CC_NOOV CC_REGNUM)
9788 (compare:CC_NOOV (ior:SI
9789 (and:SI (match_operand:SI 0 "s_register_operand" "")
9791 (match_operator:SI 1 "arm_comparison_operator"
9792 [(match_operand:SI 2 "s_register_operand" "")
9793 (match_operand:SI 3 "arm_add_operand" "")]))
9795 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9798 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9800 (set (reg:CC_NOOV CC_REGNUM)
9801 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9806 [(set (reg:CC_NOOV CC_REGNUM)
9807 (compare:CC_NOOV (ior:SI
9808 (match_operator:SI 1 "arm_comparison_operator"
9809 [(match_operand:SI 2 "s_register_operand" "")
9810 (match_operand:SI 3 "arm_add_operand" "")])
9811 (and:SI (match_operand:SI 0 "s_register_operand" "")
9814 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9817 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9819 (set (reg:CC_NOOV CC_REGNUM)
9820 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9823 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9825 (define_insn "*negscc"
9826 [(set (match_operand:SI 0 "s_register_operand" "=r")
9827 (neg:SI (match_operator 3 "arm_comparison_operator"
9828 [(match_operand:SI 1 "s_register_operand" "r")
9829 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9830 (clobber (reg:CC CC_REGNUM))]
9833 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9834 return \"mov\\t%0, %1, asr #31\";
9836 if (GET_CODE (operands[3]) == NE)
9837 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9839 output_asm_insn (\"cmp\\t%1, %2\", operands);
9840 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9841 return \"mvn%d3\\t%0, #0\";
9843 [(set_attr "conds" "clob")
9844 (set_attr "length" "12")]
9847 (define_insn "movcond"
9848 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9850 (match_operator 5 "arm_comparison_operator"
9851 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9852 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9853 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9854 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9855 (clobber (reg:CC CC_REGNUM))]
9858 if (GET_CODE (operands[5]) == LT
9859 && (operands[4] == const0_rtx))
9861 if (which_alternative != 1 && REG_P (operands[1]))
9863 if (operands[2] == const0_rtx)
9864 return \"and\\t%0, %1, %3, asr #31\";
9865 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9867 else if (which_alternative != 0 && REG_P (operands[2]))
9869 if (operands[1] == const0_rtx)
9870 return \"bic\\t%0, %2, %3, asr #31\";
9871 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9873 /* The only case that falls through to here is when both ops 1 & 2
9877 if (GET_CODE (operands[5]) == GE
9878 && (operands[4] == const0_rtx))
9880 if (which_alternative != 1 && REG_P (operands[1]))
9882 if (operands[2] == const0_rtx)
9883 return \"bic\\t%0, %1, %3, asr #31\";
9884 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9886 else if (which_alternative != 0 && REG_P (operands[2]))
9888 if (operands[1] == const0_rtx)
9889 return \"and\\t%0, %2, %3, asr #31\";
9890 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9892 /* The only case that falls through to here is when both ops 1 & 2
9895 if (CONST_INT_P (operands[4])
9896 && !const_ok_for_arm (INTVAL (operands[4])))
9897 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9899 output_asm_insn (\"cmp\\t%3, %4\", operands);
9900 if (which_alternative != 0)
9901 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9902 if (which_alternative != 1)
9903 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9906 [(set_attr "conds" "clob")
9907 (set_attr "length" "8,8,12")]
9910 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9912 (define_insn "*ifcompare_plus_move"
9913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9914 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9915 [(match_operand:SI 4 "s_register_operand" "r,r")
9916 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9918 (match_operand:SI 2 "s_register_operand" "r,r")
9919 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9920 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9921 (clobber (reg:CC CC_REGNUM))]
9924 [(set_attr "conds" "clob")
9925 (set_attr "length" "8,12")]
9928 (define_insn "*if_plus_move"
9929 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9931 (match_operator 4 "arm_comparison_operator"
9932 [(match_operand 5 "cc_register" "") (const_int 0)])
9934 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9935 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9936 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9940 sub%d4\\t%0, %2, #%n3
9941 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9942 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9943 [(set_attr "conds" "use")
9944 (set_attr "length" "4,4,8,8")
9945 (set_attr_alternative "type"
9946 [(if_then_else (match_operand 3 "const_int_operand" "")
9947 (const_string "simple_alu_imm" )
9949 (const_string "simple_alu_imm")
9951 (const_string "*")])]
9954 (define_insn "*ifcompare_move_plus"
9955 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9956 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9957 [(match_operand:SI 4 "s_register_operand" "r,r")
9958 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9959 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9961 (match_operand:SI 2 "s_register_operand" "r,r")
9962 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9963 (clobber (reg:CC CC_REGNUM))]
9966 [(set_attr "conds" "clob")
9967 (set_attr "length" "8,12")]
9970 (define_insn "*if_move_plus"
9971 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9973 (match_operator 4 "arm_comparison_operator"
9974 [(match_operand 5 "cc_register" "") (const_int 0)])
9975 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9977 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9978 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9982 sub%D4\\t%0, %2, #%n3
9983 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9984 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9985 [(set_attr "conds" "use")
9986 (set_attr "length" "4,4,8,8")
9987 (set_attr_alternative "type"
9988 [(if_then_else (match_operand 3 "const_int_operand" "")
9989 (const_string "simple_alu_imm" )
9991 (const_string "simple_alu_imm")
9993 (const_string "*")])]
9996 (define_insn "*ifcompare_arith_arith"
9997 [(set (match_operand:SI 0 "s_register_operand" "=r")
9998 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9999 [(match_operand:SI 5 "s_register_operand" "r")
10000 (match_operand:SI 6 "arm_add_operand" "rIL")])
10001 (match_operator:SI 8 "shiftable_operator"
10002 [(match_operand:SI 1 "s_register_operand" "r")
10003 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10004 (match_operator:SI 7 "shiftable_operator"
10005 [(match_operand:SI 3 "s_register_operand" "r")
10006 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10007 (clobber (reg:CC CC_REGNUM))]
10010 [(set_attr "conds" "clob")
10011 (set_attr "length" "12")]
10014 (define_insn "*if_arith_arith"
10015 [(set (match_operand:SI 0 "s_register_operand" "=r")
10016 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10017 [(match_operand 8 "cc_register" "") (const_int 0)])
10018 (match_operator:SI 6 "shiftable_operator"
10019 [(match_operand:SI 1 "s_register_operand" "r")
10020 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10021 (match_operator:SI 7 "shiftable_operator"
10022 [(match_operand:SI 3 "s_register_operand" "r")
10023 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10025 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10026 [(set_attr "conds" "use")
10027 (set_attr "length" "8")]
10030 (define_insn "*ifcompare_arith_move"
10031 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10032 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10033 [(match_operand:SI 2 "s_register_operand" "r,r")
10034 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10035 (match_operator:SI 7 "shiftable_operator"
10036 [(match_operand:SI 4 "s_register_operand" "r,r")
10037 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10038 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10039 (clobber (reg:CC CC_REGNUM))]
10042 /* If we have an operation where (op x 0) is the identity operation and
10043 the conditional operator is LT or GE and we are comparing against zero and
10044 everything is in registers then we can do this in two instructions. */
10045 if (operands[3] == const0_rtx
10046 && GET_CODE (operands[7]) != AND
10047 && REG_P (operands[5])
10048 && REG_P (operands[1])
10049 && REGNO (operands[1]) == REGNO (operands[4])
10050 && REGNO (operands[4]) != REGNO (operands[0]))
10052 if (GET_CODE (operands[6]) == LT)
10053 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10054 else if (GET_CODE (operands[6]) == GE)
10055 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10057 if (CONST_INT_P (operands[3])
10058 && !const_ok_for_arm (INTVAL (operands[3])))
10059 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10061 output_asm_insn (\"cmp\\t%2, %3\", operands);
10062 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10063 if (which_alternative != 0)
10064 return \"mov%D6\\t%0, %1\";
10067 [(set_attr "conds" "clob")
10068 (set_attr "length" "8,12")]
10071 (define_insn "*if_arith_move"
10072 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10073 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10074 [(match_operand 6 "cc_register" "") (const_int 0)])
10075 (match_operator:SI 5 "shiftable_operator"
10076 [(match_operand:SI 2 "s_register_operand" "r,r")
10077 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10078 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10081 %I5%d4\\t%0, %2, %3
10082 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10083 [(set_attr "conds" "use")
10084 (set_attr "length" "4,8")
10085 (set_attr "type" "*,*")]
10088 (define_insn "*ifcompare_move_arith"
10089 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10091 [(match_operand:SI 4 "s_register_operand" "r,r")
10092 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10093 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10094 (match_operator:SI 7 "shiftable_operator"
10095 [(match_operand:SI 2 "s_register_operand" "r,r")
10096 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10097 (clobber (reg:CC CC_REGNUM))]
10100 /* If we have an operation where (op x 0) is the identity operation and
10101 the conditional operator is LT or GE and we are comparing against zero and
10102 everything is in registers then we can do this in two instructions */
10103 if (operands[5] == const0_rtx
10104 && GET_CODE (operands[7]) != AND
10105 && REG_P (operands[3])
10106 && REG_P (operands[1])
10107 && REGNO (operands[1]) == REGNO (operands[2])
10108 && REGNO (operands[2]) != REGNO (operands[0]))
10110 if (GET_CODE (operands[6]) == GE)
10111 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10112 else if (GET_CODE (operands[6]) == LT)
10113 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10116 if (CONST_INT_P (operands[5])
10117 && !const_ok_for_arm (INTVAL (operands[5])))
10118 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10120 output_asm_insn (\"cmp\\t%4, %5\", operands);
10122 if (which_alternative != 0)
10123 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10124 return \"%I7%D6\\t%0, %2, %3\";
10126 [(set_attr "conds" "clob")
10127 (set_attr "length" "8,12")]
10130 (define_insn "*if_move_arith"
10131 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10133 (match_operator 4 "arm_comparison_operator"
10134 [(match_operand 6 "cc_register" "") (const_int 0)])
10135 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10136 (match_operator:SI 5 "shiftable_operator"
10137 [(match_operand:SI 2 "s_register_operand" "r,r")
10138 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10141 %I5%D4\\t%0, %2, %3
10142 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10143 [(set_attr "conds" "use")
10144 (set_attr "length" "4,8")
10145 (set_attr "type" "*,*")]
10148 (define_insn "*ifcompare_move_not"
10149 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10151 (match_operator 5 "arm_comparison_operator"
10152 [(match_operand:SI 3 "s_register_operand" "r,r")
10153 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10154 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10156 (match_operand:SI 2 "s_register_operand" "r,r"))))
10157 (clobber (reg:CC CC_REGNUM))]
10160 [(set_attr "conds" "clob")
10161 (set_attr "length" "8,12")]
10164 (define_insn "*if_move_not"
10165 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10167 (match_operator 4 "arm_comparison_operator"
10168 [(match_operand 3 "cc_register" "") (const_int 0)])
10169 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10170 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10174 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10175 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10176 [(set_attr "conds" "use")
10177 (set_attr "insn" "mvn")
10178 (set_attr "length" "4,8,8")]
10181 (define_insn "*ifcompare_not_move"
10182 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10184 (match_operator 5 "arm_comparison_operator"
10185 [(match_operand:SI 3 "s_register_operand" "r,r")
10186 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10188 (match_operand:SI 2 "s_register_operand" "r,r"))
10189 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10190 (clobber (reg:CC CC_REGNUM))]
10193 [(set_attr "conds" "clob")
10194 (set_attr "length" "8,12")]
10197 (define_insn "*if_not_move"
10198 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10200 (match_operator 4 "arm_comparison_operator"
10201 [(match_operand 3 "cc_register" "") (const_int 0)])
10202 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10203 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10207 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10208 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10209 [(set_attr "conds" "use")
10210 (set_attr "insn" "mvn")
10211 (set_attr "length" "4,8,8")]
10214 (define_insn "*ifcompare_shift_move"
10215 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10217 (match_operator 6 "arm_comparison_operator"
10218 [(match_operand:SI 4 "s_register_operand" "r,r")
10219 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10220 (match_operator:SI 7 "shift_operator"
10221 [(match_operand:SI 2 "s_register_operand" "r,r")
10222 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10223 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10224 (clobber (reg:CC CC_REGNUM))]
10227 [(set_attr "conds" "clob")
10228 (set_attr "length" "8,12")]
10231 (define_insn "*if_shift_move"
10232 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10234 (match_operator 5 "arm_comparison_operator"
10235 [(match_operand 6 "cc_register" "") (const_int 0)])
10236 (match_operator:SI 4 "shift_operator"
10237 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10238 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10239 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10243 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10244 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10245 [(set_attr "conds" "use")
10246 (set_attr "shift" "2")
10247 (set_attr "length" "4,8,8")
10248 (set_attr "insn" "mov")
10249 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10250 (const_string "alu_shift")
10251 (const_string "alu_shift_reg")))]
10254 (define_insn "*ifcompare_move_shift"
10255 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10257 (match_operator 6 "arm_comparison_operator"
10258 [(match_operand:SI 4 "s_register_operand" "r,r")
10259 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10260 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10261 (match_operator:SI 7 "shift_operator"
10262 [(match_operand:SI 2 "s_register_operand" "r,r")
10263 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10264 (clobber (reg:CC CC_REGNUM))]
10267 [(set_attr "conds" "clob")
10268 (set_attr "length" "8,12")]
10271 (define_insn "*if_move_shift"
10272 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10274 (match_operator 5 "arm_comparison_operator"
10275 [(match_operand 6 "cc_register" "") (const_int 0)])
10276 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10277 (match_operator:SI 4 "shift_operator"
10278 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10279 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10283 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10284 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10285 [(set_attr "conds" "use")
10286 (set_attr "shift" "2")
10287 (set_attr "length" "4,8,8")
10288 (set_attr "insn" "mov")
10289 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10290 (const_string "alu_shift")
10291 (const_string "alu_shift_reg")))]
10294 (define_insn "*ifcompare_shift_shift"
10295 [(set (match_operand:SI 0 "s_register_operand" "=r")
10297 (match_operator 7 "arm_comparison_operator"
10298 [(match_operand:SI 5 "s_register_operand" "r")
10299 (match_operand:SI 6 "arm_add_operand" "rIL")])
10300 (match_operator:SI 8 "shift_operator"
10301 [(match_operand:SI 1 "s_register_operand" "r")
10302 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10303 (match_operator:SI 9 "shift_operator"
10304 [(match_operand:SI 3 "s_register_operand" "r")
10305 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10306 (clobber (reg:CC CC_REGNUM))]
10309 [(set_attr "conds" "clob")
10310 (set_attr "length" "12")]
10313 (define_insn "*if_shift_shift"
10314 [(set (match_operand:SI 0 "s_register_operand" "=r")
10316 (match_operator 5 "arm_comparison_operator"
10317 [(match_operand 8 "cc_register" "") (const_int 0)])
10318 (match_operator:SI 6 "shift_operator"
10319 [(match_operand:SI 1 "s_register_operand" "r")
10320 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10321 (match_operator:SI 7 "shift_operator"
10322 [(match_operand:SI 3 "s_register_operand" "r")
10323 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10325 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10326 [(set_attr "conds" "use")
10327 (set_attr "shift" "1")
10328 (set_attr "length" "8")
10329 (set_attr "insn" "mov")
10330 (set (attr "type") (if_then_else
10331 (and (match_operand 2 "const_int_operand" "")
10332 (match_operand 4 "const_int_operand" ""))
10333 (const_string "alu_shift")
10334 (const_string "alu_shift_reg")))]
10337 (define_insn "*ifcompare_not_arith"
10338 [(set (match_operand:SI 0 "s_register_operand" "=r")
10340 (match_operator 6 "arm_comparison_operator"
10341 [(match_operand:SI 4 "s_register_operand" "r")
10342 (match_operand:SI 5 "arm_add_operand" "rIL")])
10343 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10344 (match_operator:SI 7 "shiftable_operator"
10345 [(match_operand:SI 2 "s_register_operand" "r")
10346 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10347 (clobber (reg:CC CC_REGNUM))]
10350 [(set_attr "conds" "clob")
10351 (set_attr "length" "12")]
10354 (define_insn "*if_not_arith"
10355 [(set (match_operand:SI 0 "s_register_operand" "=r")
10357 (match_operator 5 "arm_comparison_operator"
10358 [(match_operand 4 "cc_register" "") (const_int 0)])
10359 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10360 (match_operator:SI 6 "shiftable_operator"
10361 [(match_operand:SI 2 "s_register_operand" "r")
10362 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10364 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10365 [(set_attr "conds" "use")
10366 (set_attr "insn" "mvn")
10367 (set_attr "length" "8")]
10370 (define_insn "*ifcompare_arith_not"
10371 [(set (match_operand:SI 0 "s_register_operand" "=r")
10373 (match_operator 6 "arm_comparison_operator"
10374 [(match_operand:SI 4 "s_register_operand" "r")
10375 (match_operand:SI 5 "arm_add_operand" "rIL")])
10376 (match_operator:SI 7 "shiftable_operator"
10377 [(match_operand:SI 2 "s_register_operand" "r")
10378 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10379 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10380 (clobber (reg:CC CC_REGNUM))]
10383 [(set_attr "conds" "clob")
10384 (set_attr "length" "12")]
10387 (define_insn "*if_arith_not"
10388 [(set (match_operand:SI 0 "s_register_operand" "=r")
10390 (match_operator 5 "arm_comparison_operator"
10391 [(match_operand 4 "cc_register" "") (const_int 0)])
10392 (match_operator:SI 6 "shiftable_operator"
10393 [(match_operand:SI 2 "s_register_operand" "r")
10394 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10395 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10397 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10398 [(set_attr "conds" "use")
10399 (set_attr "insn" "mvn")
10400 (set_attr "length" "8")]
10403 (define_insn "*ifcompare_neg_move"
10404 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10406 (match_operator 5 "arm_comparison_operator"
10407 [(match_operand:SI 3 "s_register_operand" "r,r")
10408 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10409 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10410 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10411 (clobber (reg:CC CC_REGNUM))]
10414 [(set_attr "conds" "clob")
10415 (set_attr "length" "8,12")]
10418 (define_insn "*if_neg_move"
10419 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10421 (match_operator 4 "arm_comparison_operator"
10422 [(match_operand 3 "cc_register" "") (const_int 0)])
10423 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10424 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10427 rsb%d4\\t%0, %2, #0
10428 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10429 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10430 [(set_attr "conds" "use")
10431 (set_attr "length" "4,8,8")]
10434 (define_insn "*ifcompare_move_neg"
10435 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10437 (match_operator 5 "arm_comparison_operator"
10438 [(match_operand:SI 3 "s_register_operand" "r,r")
10439 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10440 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10441 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10442 (clobber (reg:CC CC_REGNUM))]
10445 [(set_attr "conds" "clob")
10446 (set_attr "length" "8,12")]
10449 (define_insn "*if_move_neg"
10450 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10452 (match_operator 4 "arm_comparison_operator"
10453 [(match_operand 3 "cc_register" "") (const_int 0)])
10454 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10455 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10458 rsb%D4\\t%0, %2, #0
10459 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10460 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10461 [(set_attr "conds" "use")
10462 (set_attr "length" "4,8,8")]
10465 (define_insn "*arith_adjacentmem"
10466 [(set (match_operand:SI 0 "s_register_operand" "=r")
10467 (match_operator:SI 1 "shiftable_operator"
10468 [(match_operand:SI 2 "memory_operand" "m")
10469 (match_operand:SI 3 "memory_operand" "m")]))
10470 (clobber (match_scratch:SI 4 "=r"))]
10471 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10477 HOST_WIDE_INT val1 = 0, val2 = 0;
10479 if (REGNO (operands[0]) > REGNO (operands[4]))
10481 ldm[1] = operands[4];
10482 ldm[2] = operands[0];
10486 ldm[1] = operands[0];
10487 ldm[2] = operands[4];
10490 base_reg = XEXP (operands[2], 0);
10492 if (!REG_P (base_reg))
10494 val1 = INTVAL (XEXP (base_reg, 1));
10495 base_reg = XEXP (base_reg, 0);
10498 if (!REG_P (XEXP (operands[3], 0)))
10499 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10501 arith[0] = operands[0];
10502 arith[3] = operands[1];
10516 if (val1 !=0 && val2 != 0)
10520 if (val1 == 4 || val2 == 4)
10521 /* Other val must be 8, since we know they are adjacent and neither
10523 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10524 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10526 ldm[0] = ops[0] = operands[4];
10528 ops[2] = GEN_INT (val1);
10529 output_add_immediate (ops);
10531 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10533 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10537 /* Offset is out of range for a single add, so use two ldr. */
10540 ops[2] = GEN_INT (val1);
10541 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10543 ops[2] = GEN_INT (val2);
10544 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10547 else if (val1 != 0)
10550 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10552 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10557 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10559 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10561 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10564 [(set_attr "length" "12")
10565 (set_attr "predicable" "yes")
10566 (set_attr "type" "load1")]
10569 ; This pattern is never tried by combine, so do it as a peephole
10572 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10573 (match_operand:SI 1 "arm_general_register_operand" ""))
10574 (set (reg:CC CC_REGNUM)
10575 (compare:CC (match_dup 1) (const_int 0)))]
10577 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10578 (set (match_dup 0) (match_dup 1))])]
10583 [(set (match_operand:SI 0 "s_register_operand" "")
10584 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10586 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10587 [(match_operand:SI 3 "s_register_operand" "")
10588 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10589 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10591 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10592 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10597 ;; This split can be used because CC_Z mode implies that the following
10598 ;; branch will be an equality, or an unsigned inequality, so the sign
10599 ;; extension is not needed.
10602 [(set (reg:CC_Z CC_REGNUM)
10604 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10606 (match_operand 1 "const_int_operand" "")))
10607 (clobber (match_scratch:SI 2 ""))]
10609 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10610 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10611 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10612 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10614 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10617 ;; ??? Check the patterns above for Thumb-2 usefulness
10619 (define_expand "prologue"
10620 [(clobber (const_int 0))]
10623 arm_expand_prologue ();
10625 thumb1_expand_prologue ();
10630 (define_expand "epilogue"
10631 [(clobber (const_int 0))]
10634 if (crtl->calls_eh_return)
10635 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10638 thumb1_expand_epilogue ();
10639 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10640 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10642 else if (HAVE_return)
10644 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10645 no need for explicit testing again. */
10646 emit_jump_insn (gen_return ());
10648 else if (TARGET_32BIT)
10650 arm_expand_epilogue (true);
10656 (define_insn "prologue_thumb1_interwork"
10657 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10659 "* return thumb1_output_interwork ();"
10660 [(set_attr "length" "8")]
10663 ;; Note - although unspec_volatile's USE all hard registers,
10664 ;; USEs are ignored after relaod has completed. Thus we need
10665 ;; to add an unspec of the link register to ensure that flow
10666 ;; does not think that it is unused by the sibcall branch that
10667 ;; will replace the standard function epilogue.
10668 (define_expand "sibcall_epilogue"
10669 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10670 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10673 arm_expand_epilogue (false);
10678 (define_insn "*epilogue_insns"
10679 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10682 return thumb1_unexpanded_epilogue ();
10684 ; Length is absolute worst case
10685 [(set_attr "length" "44")
10686 (set_attr "type" "block")
10687 ;; We don't clobber the conditions, but the potential length of this
10688 ;; operation is sufficient to make conditionalizing the sequence
10689 ;; unlikely to be profitable.
10690 (set_attr "conds" "clob")]
10693 (define_expand "eh_epilogue"
10694 [(use (match_operand:SI 0 "register_operand" ""))
10695 (use (match_operand:SI 1 "register_operand" ""))
10696 (use (match_operand:SI 2 "register_operand" ""))]
10700 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10701 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10703 rtx ra = gen_rtx_REG (Pmode, 2);
10705 emit_move_insn (ra, operands[2]);
10708 /* This is a hack -- we may have crystalized the function type too
10710 cfun->machine->func_type = 0;
10714 ;; This split is only used during output to reduce the number of patterns
10715 ;; that need assembler instructions adding to them. We allowed the setting
10716 ;; of the conditions to be implicit during rtl generation so that
10717 ;; the conditional compare patterns would work. However this conflicts to
10718 ;; some extent with the conditional data operations, so we have to split them
10721 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10722 ;; conditional execution sufficient?
10725 [(set (match_operand:SI 0 "s_register_operand" "")
10726 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10727 [(match_operand 2 "" "") (match_operand 3 "" "")])
10729 (match_operand 4 "" "")))
10730 (clobber (reg:CC CC_REGNUM))]
10731 "TARGET_ARM && reload_completed"
10732 [(set (match_dup 5) (match_dup 6))
10733 (cond_exec (match_dup 7)
10734 (set (match_dup 0) (match_dup 4)))]
10737 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10738 operands[2], operands[3]);
10739 enum rtx_code rc = GET_CODE (operands[1]);
10741 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10742 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10743 if (mode == CCFPmode || mode == CCFPEmode)
10744 rc = reverse_condition_maybe_unordered (rc);
10746 rc = reverse_condition (rc);
10748 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10753 [(set (match_operand:SI 0 "s_register_operand" "")
10754 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10755 [(match_operand 2 "" "") (match_operand 3 "" "")])
10756 (match_operand 4 "" "")
10758 (clobber (reg:CC CC_REGNUM))]
10759 "TARGET_ARM && reload_completed"
10760 [(set (match_dup 5) (match_dup 6))
10761 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10762 (set (match_dup 0) (match_dup 4)))]
10765 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10766 operands[2], operands[3]);
10768 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10769 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10774 [(set (match_operand:SI 0 "s_register_operand" "")
10775 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10776 [(match_operand 2 "" "") (match_operand 3 "" "")])
10777 (match_operand 4 "" "")
10778 (match_operand 5 "" "")))
10779 (clobber (reg:CC CC_REGNUM))]
10780 "TARGET_ARM && reload_completed"
10781 [(set (match_dup 6) (match_dup 7))
10782 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10783 (set (match_dup 0) (match_dup 4)))
10784 (cond_exec (match_dup 8)
10785 (set (match_dup 0) (match_dup 5)))]
10788 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10789 operands[2], operands[3]);
10790 enum rtx_code rc = GET_CODE (operands[1]);
10792 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10793 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10794 if (mode == CCFPmode || mode == CCFPEmode)
10795 rc = reverse_condition_maybe_unordered (rc);
10797 rc = reverse_condition (rc);
10799 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10804 [(set (match_operand:SI 0 "s_register_operand" "")
10805 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10806 [(match_operand:SI 2 "s_register_operand" "")
10807 (match_operand:SI 3 "arm_add_operand" "")])
10808 (match_operand:SI 4 "arm_rhs_operand" "")
10810 (match_operand:SI 5 "s_register_operand" ""))))
10811 (clobber (reg:CC CC_REGNUM))]
10812 "TARGET_ARM && reload_completed"
10813 [(set (match_dup 6) (match_dup 7))
10814 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10815 (set (match_dup 0) (match_dup 4)))
10816 (cond_exec (match_dup 8)
10817 (set (match_dup 0) (not:SI (match_dup 5))))]
10820 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10821 operands[2], operands[3]);
10822 enum rtx_code rc = GET_CODE (operands[1]);
10824 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10825 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10826 if (mode == CCFPmode || mode == CCFPEmode)
10827 rc = reverse_condition_maybe_unordered (rc);
10829 rc = reverse_condition (rc);
10831 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10835 (define_insn "*cond_move_not"
10836 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10837 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10838 [(match_operand 3 "cc_register" "") (const_int 0)])
10839 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10841 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10845 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10846 [(set_attr "conds" "use")
10847 (set_attr "insn" "mvn")
10848 (set_attr "length" "4,8")]
10851 ;; The next two patterns occur when an AND operation is followed by a
10852 ;; scc insn sequence
10854 (define_insn "*sign_extract_onebit"
10855 [(set (match_operand:SI 0 "s_register_operand" "=r")
10856 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10858 (match_operand:SI 2 "const_int_operand" "n")))
10859 (clobber (reg:CC CC_REGNUM))]
10862 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10863 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10864 return \"mvnne\\t%0, #0\";
10866 [(set_attr "conds" "clob")
10867 (set_attr "length" "8")]
10870 (define_insn "*not_signextract_onebit"
10871 [(set (match_operand:SI 0 "s_register_operand" "=r")
10873 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10875 (match_operand:SI 2 "const_int_operand" "n"))))
10876 (clobber (reg:CC CC_REGNUM))]
10879 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10880 output_asm_insn (\"tst\\t%1, %2\", operands);
10881 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10882 return \"movne\\t%0, #0\";
10884 [(set_attr "conds" "clob")
10885 (set_attr "length" "12")]
10887 ;; ??? The above patterns need auditing for Thumb-2
10889 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10890 ;; expressions. For simplicity, the first register is also in the unspec
10892 ;; To avoid the usage of GNU extension, the length attribute is computed
10893 ;; in a C function arm_attr_length_push_multi.
10894 (define_insn "*push_multi"
10895 [(match_parallel 2 "multi_register_push"
10896 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10897 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10898 UNSPEC_PUSH_MULT))])]
10902 int num_saves = XVECLEN (operands[2], 0);
10904 /* For the StrongARM at least it is faster to
10905 use STR to store only a single register.
10906 In Thumb mode always use push, and the assembler will pick
10907 something appropriate. */
10908 if (num_saves == 1 && TARGET_ARM)
10909 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10916 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
10917 else if (TARGET_THUMB2)
10918 strcpy (pattern, \"push%?\\t{%1\");
10920 strcpy (pattern, \"push\\t{%1\");
10922 for (i = 1; i < num_saves; i++)
10924 strcat (pattern, \", %|\");
10926 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10929 strcat (pattern, \"}\");
10930 output_asm_insn (pattern, operands);
10935 [(set_attr "type" "store4")
10936 (set (attr "length")
10937 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10940 (define_insn "stack_tie"
10941 [(set (mem:BLK (scratch))
10942 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10943 (match_operand:SI 1 "s_register_operand" "rk")]
10947 [(set_attr "length" "0")]
10950 ;; Pop (as used in epilogue RTL)
10952 (define_insn "*load_multiple_with_writeback"
10953 [(match_parallel 0 "load_multiple_operation"
10954 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10955 (plus:SI (match_dup 1)
10956 (match_operand:SI 2 "const_int_operand" "I")))
10957 (set (match_operand:SI 3 "s_register_operand" "=rk")
10958 (mem:SI (match_dup 1)))
10960 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10963 arm_output_multireg_pop (operands, /*return_pc=*/false,
10964 /*cond=*/const_true_rtx,
10970 [(set_attr "type" "load4")
10971 (set_attr "predicable" "yes")]
10974 ;; Pop with return (as used in epilogue RTL)
10976 ;; This instruction is generated when the registers are popped at the end of
10977 ;; epilogue. Here, instead of popping the value into LR and then generating
10978 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
10980 (define_insn "*pop_multiple_with_writeback_and_return"
10981 [(match_parallel 0 "pop_multiple_return"
10983 (set (match_operand:SI 1 "s_register_operand" "+rk")
10984 (plus:SI (match_dup 1)
10985 (match_operand:SI 2 "const_int_operand" "I")))
10986 (set (match_operand:SI 3 "s_register_operand" "=rk")
10987 (mem:SI (match_dup 1)))
10989 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10992 arm_output_multireg_pop (operands, /*return_pc=*/true,
10993 /*cond=*/const_true_rtx,
10999 [(set_attr "type" "load4")
11000 (set_attr "predicable" "yes")]
11003 (define_insn "*pop_multiple_with_return"
11004 [(match_parallel 0 "pop_multiple_return"
11006 (set (match_operand:SI 2 "s_register_operand" "=rk")
11007 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11009 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11012 arm_output_multireg_pop (operands, /*return_pc=*/true,
11013 /*cond=*/const_true_rtx,
11019 [(set_attr "type" "load4")
11020 (set_attr "predicable" "yes")]
11023 ;; Load into PC and return
11024 (define_insn "*ldr_with_return"
11026 (set (reg:SI PC_REGNUM)
11027 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11028 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11029 "ldr%?\t%|pc, [%0], #4"
11030 [(set_attr "type" "load1")
11031 (set_attr "predicable" "yes")]
11033 ;; Pop for floating point registers (as used in epilogue RTL)
11034 (define_insn "*vfp_pop_multiple_with_writeback"
11035 [(match_parallel 0 "pop_multiple_fp"
11036 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11037 (plus:SI (match_dup 1)
11038 (match_operand:SI 2 "const_int_operand" "I")))
11039 (set (match_operand:DF 3 "arm_hard_register_operand" "")
11040 (mem:DF (match_dup 1)))])]
11041 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
11044 int num_regs = XVECLEN (operands[0], 0);
11047 strcpy (pattern, \"fldmfdd\\t\");
11048 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11049 strcat (pattern, \"!, {\");
11050 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11051 strcat (pattern, \"%P0\");
11052 if ((num_regs - 1) > 1)
11054 strcat (pattern, \"-%P1\");
11055 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11058 strcat (pattern, \"}\");
11059 output_asm_insn (pattern, op_list);
11063 [(set_attr "type" "load4")
11064 (set_attr "conds" "unconditional")
11065 (set_attr "predicable" "no")]
11068 ;; Special patterns for dealing with the constant pool
11070 (define_insn "align_4"
11071 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11074 assemble_align (32);
11079 (define_insn "align_8"
11080 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11083 assemble_align (64);
11088 (define_insn "consttable_end"
11089 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11092 making_const_table = FALSE;
11097 (define_insn "consttable_1"
11098 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11101 making_const_table = TRUE;
11102 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11103 assemble_zeros (3);
11106 [(set_attr "length" "4")]
11109 (define_insn "consttable_2"
11110 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11113 making_const_table = TRUE;
11114 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
11115 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11116 assemble_zeros (2);
11119 [(set_attr "length" "4")]
11122 (define_insn "consttable_4"
11123 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11127 rtx x = operands[0];
11128 making_const_table = TRUE;
11129 switch (GET_MODE_CLASS (GET_MODE (x)))
11132 if (GET_MODE (x) == HFmode)
11133 arm_emit_fp16_const (x);
11137 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
11138 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
11142 /* XXX: Sometimes gcc does something really dumb and ends up with
11143 a HIGH in a constant pool entry, usually because it's trying to
11144 load into a VFP register. We know this will always be used in
11145 combination with a LO_SUM which ignores the high bits, so just
11146 strip off the HIGH. */
11147 if (GET_CODE (x) == HIGH)
11149 assemble_integer (x, 4, BITS_PER_WORD, 1);
11150 mark_symbol_refs_as_used (x);
11155 [(set_attr "length" "4")]
11158 (define_insn "consttable_8"
11159 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11163 making_const_table = TRUE;
11164 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11169 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11170 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11174 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11179 [(set_attr "length" "8")]
11182 (define_insn "consttable_16"
11183 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11187 making_const_table = TRUE;
11188 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11193 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11194 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11198 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11203 [(set_attr "length" "16")]
11206 ;; Miscellaneous Thumb patterns
11208 (define_expand "tablejump"
11209 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11210 (use (label_ref (match_operand 1 "" "")))])]
11215 /* Hopefully, CSE will eliminate this copy. */
11216 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11217 rtx reg2 = gen_reg_rtx (SImode);
11219 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11220 operands[0] = reg2;
11225 ;; NB never uses BX.
11226 (define_insn "*thumb1_tablejump"
11227 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11228 (use (label_ref (match_operand 1 "" "")))]
11231 [(set_attr "length" "2")]
11234 ;; V5 Instructions,
11236 (define_insn "clzsi2"
11237 [(set (match_operand:SI 0 "s_register_operand" "=r")
11238 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11239 "TARGET_32BIT && arm_arch5"
11241 [(set_attr "predicable" "yes")
11242 (set_attr "insn" "clz")])
11244 (define_insn "rbitsi2"
11245 [(set (match_operand:SI 0 "s_register_operand" "=r")
11246 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11247 "TARGET_32BIT && arm_arch_thumb2"
11249 [(set_attr "predicable" "yes")
11250 (set_attr "insn" "clz")])
11252 (define_expand "ctzsi2"
11253 [(set (match_operand:SI 0 "s_register_operand" "")
11254 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11255 "TARGET_32BIT && arm_arch_thumb2"
11258 rtx tmp = gen_reg_rtx (SImode);
11259 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11260 emit_insn (gen_clzsi2 (operands[0], tmp));
11266 ;; V5E instructions.
11268 (define_insn "prefetch"
11269 [(prefetch (match_operand:SI 0 "address_operand" "p")
11270 (match_operand:SI 1 "" "")
11271 (match_operand:SI 2 "" ""))]
11272 "TARGET_32BIT && arm_arch5e"
11275 ;; General predication pattern
11278 [(match_operator 0 "arm_comparison_operator"
11279 [(match_operand 1 "cc_register" "")
11285 (define_insn "force_register_use"
11286 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11289 [(set_attr "length" "0")]
11293 ;; Patterns for exception handling
11295 (define_expand "eh_return"
11296 [(use (match_operand 0 "general_operand" ""))]
11301 emit_insn (gen_arm_eh_return (operands[0]));
11303 emit_insn (gen_thumb_eh_return (operands[0]));
11308 ;; We can't expand this before we know where the link register is stored.
11309 (define_insn_and_split "arm_eh_return"
11310 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11312 (clobber (match_scratch:SI 1 "=&r"))]
11315 "&& reload_completed"
11319 arm_set_return_address (operands[0], operands[1]);
11324 (define_insn_and_split "thumb_eh_return"
11325 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11327 (clobber (match_scratch:SI 1 "=&l"))]
11330 "&& reload_completed"
11334 thumb_set_return_address (operands[0], operands[1]);
11342 (define_insn "load_tp_hard"
11343 [(set (match_operand:SI 0 "register_operand" "=r")
11344 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11346 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11347 [(set_attr "predicable" "yes")]
11350 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11351 (define_insn "load_tp_soft"
11352 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11353 (clobber (reg:SI LR_REGNUM))
11354 (clobber (reg:SI IP_REGNUM))
11355 (clobber (reg:CC CC_REGNUM))]
11357 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11358 [(set_attr "conds" "clob")]
11361 ;; tls descriptor call
11362 (define_insn "tlscall"
11363 [(set (reg:SI R0_REGNUM)
11364 (unspec:SI [(reg:SI R0_REGNUM)
11365 (match_operand:SI 0 "" "X")
11366 (match_operand 1 "" "")] UNSPEC_TLS))
11367 (clobber (reg:SI R1_REGNUM))
11368 (clobber (reg:SI LR_REGNUM))
11369 (clobber (reg:SI CC_REGNUM))]
11372 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11373 INTVAL (operands[1]));
11374 return "bl\\t%c0(tlscall)";
11376 [(set_attr "conds" "clob")
11377 (set_attr "length" "4")]
11380 ;; For thread pointer builtin
11381 (define_expand "get_thread_pointersi"
11382 [(match_operand:SI 0 "s_register_operand" "=r")]
11386 arm_load_tp (operands[0]);
11392 ;; We only care about the lower 16 bits of the constant
11393 ;; being inserted into the upper 16 bits of the register.
11394 (define_insn "*arm_movtas_ze"
11395 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11398 (match_operand:SI 1 "const_int_operand" ""))]
11401 [(set_attr "predicable" "yes")
11402 (set_attr "length" "4")]
11405 (define_insn "*arm_rev"
11406 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11407 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11413 [(set_attr "arch" "t1,t2,32")
11414 (set_attr "length" "2,2,4")]
11417 (define_expand "arm_legacy_rev"
11418 [(set (match_operand:SI 2 "s_register_operand" "")
11419 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11423 (lshiftrt:SI (match_dup 2)
11425 (set (match_operand:SI 3 "s_register_operand" "")
11426 (rotatert:SI (match_dup 1)
11429 (and:SI (match_dup 2)
11430 (const_int -65281)))
11431 (set (match_operand:SI 0 "s_register_operand" "")
11432 (xor:SI (match_dup 3)
11438 ;; Reuse temporaries to keep register pressure down.
11439 (define_expand "thumb_legacy_rev"
11440 [(set (match_operand:SI 2 "s_register_operand" "")
11441 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11443 (set (match_operand:SI 3 "s_register_operand" "")
11444 (lshiftrt:SI (match_dup 1)
11447 (ior:SI (match_dup 3)
11449 (set (match_operand:SI 4 "s_register_operand" "")
11451 (set (match_operand:SI 5 "s_register_operand" "")
11452 (rotatert:SI (match_dup 1)
11455 (ashift:SI (match_dup 5)
11458 (lshiftrt:SI (match_dup 5)
11461 (ior:SI (match_dup 5)
11464 (rotatert:SI (match_dup 5)
11466 (set (match_operand:SI 0 "s_register_operand" "")
11467 (ior:SI (match_dup 5)
11473 (define_expand "bswapsi2"
11474 [(set (match_operand:SI 0 "s_register_operand" "=r")
11475 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11476 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11480 rtx op2 = gen_reg_rtx (SImode);
11481 rtx op3 = gen_reg_rtx (SImode);
11485 rtx op4 = gen_reg_rtx (SImode);
11486 rtx op5 = gen_reg_rtx (SImode);
11488 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11489 op2, op3, op4, op5));
11493 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11502 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11503 ;; and unsigned variants, respectively. For rev16, expose
11504 ;; byte-swapping in the lower 16 bits only.
11505 (define_insn "*arm_revsh"
11506 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11507 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11513 [(set_attr "arch" "t1,t2,32")
11514 (set_attr "length" "2,2,4")]
11517 (define_insn "*arm_rev16"
11518 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11519 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11525 [(set_attr "arch" "t1,t2,32")
11526 (set_attr "length" "2,2,4")]
11529 (define_expand "bswaphi2"
11530 [(set (match_operand:HI 0 "s_register_operand" "=r")
11531 (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
11536 ;; Patterns for LDRD/STRD in Thumb2 mode
11538 (define_insn "*thumb2_ldrd"
11539 [(set (match_operand:SI 0 "s_register_operand" "=r")
11540 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11541 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11542 (set (match_operand:SI 3 "s_register_operand" "=r")
11543 (mem:SI (plus:SI (match_dup 1)
11544 (match_operand:SI 4 "const_int_operand" ""))))]
11545 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11546 && current_tune->prefer_ldrd_strd
11547 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11548 && (operands_ok_ldrd_strd (operands[0], operands[3],
11549 operands[1], INTVAL (operands[2]),
11551 "ldrd%?\t%0, %3, [%1, %2]"
11552 [(set_attr "type" "load2")
11553 (set_attr "predicable" "yes")])
11555 (define_insn "*thumb2_ldrd_base"
11556 [(set (match_operand:SI 0 "s_register_operand" "=r")
11557 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11558 (set (match_operand:SI 2 "s_register_operand" "=r")
11559 (mem:SI (plus:SI (match_dup 1)
11561 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11562 && current_tune->prefer_ldrd_strd
11563 && (operands_ok_ldrd_strd (operands[0], operands[2],
11564 operands[1], 0, false, true))"
11565 "ldrd%?\t%0, %2, [%1]"
11566 [(set_attr "type" "load2")
11567 (set_attr "predicable" "yes")])
11569 (define_insn "*thumb2_ldrd_base_neg"
11570 [(set (match_operand:SI 0 "s_register_operand" "=r")
11571 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11573 (set (match_operand:SI 2 "s_register_operand" "=r")
11574 (mem:SI (match_dup 1)))]
11575 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11576 && current_tune->prefer_ldrd_strd
11577 && (operands_ok_ldrd_strd (operands[0], operands[2],
11578 operands[1], -4, false, true))"
11579 "ldrd%?\t%0, %2, [%1, #-4]"
11580 [(set_attr "type" "load2")
11581 (set_attr "predicable" "yes")])
11583 (define_insn "*thumb2_strd"
11584 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11585 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11586 (match_operand:SI 2 "s_register_operand" "r"))
11587 (set (mem:SI (plus:SI (match_dup 0)
11588 (match_operand:SI 3 "const_int_operand" "")))
11589 (match_operand:SI 4 "s_register_operand" "r"))]
11590 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11591 && current_tune->prefer_ldrd_strd
11592 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11593 && (operands_ok_ldrd_strd (operands[2], operands[4],
11594 operands[0], INTVAL (operands[1]),
11596 "strd%?\t%2, %4, [%0, %1]"
11597 [(set_attr "type" "store2")
11598 (set_attr "predicable" "yes")])
11600 (define_insn "*thumb2_strd_base"
11601 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11602 (match_operand:SI 1 "s_register_operand" "r"))
11603 (set (mem:SI (plus:SI (match_dup 0)
11605 (match_operand:SI 2 "s_register_operand" "r"))]
11606 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11607 && current_tune->prefer_ldrd_strd
11608 && (operands_ok_ldrd_strd (operands[1], operands[2],
11609 operands[0], 0, false, false))"
11610 "strd%?\t%1, %2, [%0]"
11611 [(set_attr "type" "store2")
11612 (set_attr "predicable" "yes")])
11614 (define_insn "*thumb2_strd_base_neg"
11615 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11617 (match_operand:SI 1 "s_register_operand" "r"))
11618 (set (mem:SI (match_dup 0))
11619 (match_operand:SI 2 "s_register_operand" "r"))]
11620 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11621 && current_tune->prefer_ldrd_strd
11622 && (operands_ok_ldrd_strd (operands[1], operands[2],
11623 operands[0], -4, false, false))"
11624 "strd%?\t%1, %2, [%0, #-4]"
11625 [(set_attr "type" "store2")
11626 (set_attr "predicable" "yes")])
11629 ;; Load the load/store multiple patterns
11630 (include "ldmstm.md")
11632 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11633 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11634 (define_insn "*load_multiple"
11635 [(match_parallel 0 "load_multiple_operation"
11636 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11637 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11642 arm_output_multireg_pop (operands, /*return_pc=*/false,
11643 /*cond=*/const_true_rtx,
11649 [(set_attr "predicable" "yes")]
11652 ;; Vector bits common to IWMMXT and Neon
11653 (include "vec-common.md")
11654 ;; Load the Intel Wireless Multimedia Extension patterns
11655 (include "iwmmxt.md")
11656 ;; Load the VFP co-processor patterns
11658 ;; Thumb-2 patterns
11659 (include "thumb2.md")
11661 (include "neon.md")
11662 ;; Synchronization Primitives
11663 (include "sync.md")
11664 ;; Fixed-point patterns
11665 (include "arm-fixed.md")