1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (R1_REGNUM 1) ; Second CORE register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (CC_REGNUM 24) ; Condition code pseudo register
40 (LAST_ARM_REGNUM 15) ;
41 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
42 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
45 ;; 3rd operand to select_dominance_cc_mode
52 ;; conditional compare combination
63 ;; Note: sin and cos are no-longer used.
64 ;; Unspec enumerators for Neon are defined in neon.md.
66 (define_c_enum "unspec" [
67 UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
68 ; operand 0 is the result,
69 ; operand 1 the parameter.
70 UNPSEC_COS ; `cos' operation (MODE_FLOAT):
71 ; operand 0 is the result,
72 ; operand 1 the parameter.
73 UNSPEC_PUSH_MULT ; `push multiple' operation:
74 ; operand 0 is the first register,
75 ; subsequent registers are in parallel (use ...)
77 UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
78 ; usage, that is, we will add the pic_register
79 ; value to it before trying to dereference it.
80 UNSPEC_PIC_BASE ; Add PC and all but the last operand together,
81 ; The last operand is the number of a PIC_LABEL
82 ; that points at the containing instruction.
83 UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
84 ; being scheduled before the stack adjustment insn.
85 UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
86 ; this unspec is used to prevent the deletion of
87 ; instructions setting registers for EH handling
88 ; and stack frame generation. Operand 0 is the
90 UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
91 UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
92 UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
93 UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
94 UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
95 UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
96 UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
97 UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
98 UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
99 UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
100 UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
101 UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
102 UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
103 UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
104 UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
105 ; instruction stream.
106 UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
107 ; correctly for PIC usage.
108 UNSPEC_GOTSYM_OFF ; The offset of the start of the GOT from a
109 ; a given symbolic address.
110 UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
111 UNSPEC_RBIT ; rbit operation.
112 UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
113 ; another symbolic address.
114 UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
115 UNSPEC_UNALIGNED_LOAD ; Used to represent ldr/ldrh instructions that access
116 ; unaligned locations, on architectures which support
118 UNSPEC_UNALIGNED_STORE ; Same for str/strh.
119 UNSPEC_PIC_UNIFIED ; Create a common pic addressing form.
122 ;; UNSPEC_VOLATILE Usage:
124 (define_c_enum "unspecv" [
125 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
127 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
128 ; instruction epilogue sequence that isn't expanded
129 ; into normal RTL. Used for both normal and sibcall
131 VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
132 ; modes from arm to thumb.
133 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
134 ; for inlined constants.
135 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
137 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
139 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
141 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
143 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
145 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
147 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
148 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
149 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
150 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
151 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
152 VUNSPEC_WCMP_GT ; Used by the iwMMXT WCMPGT instructions
153 VUNSPEC_EH_RETURN ; Use to override the return address for exception
155 VUNSPEC_ATOMIC_CAS ; Represent an atomic compare swap.
156 VUNSPEC_ATOMIC_XCHG ; Represent an atomic exchange.
157 VUNSPEC_ATOMIC_OP ; Represent an atomic operation.
158 VUNSPEC_LL ; Represent a load-register-exclusive.
159 VUNSPEC_SC ; Represent a store-register-exclusive.
162 ;;---------------------------------------------------------------------------
165 ;; Processor type. This is created automatically from arm-cores.def.
166 (include "arm-tune.md")
168 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
169 ; generating ARM code. This is used to control the length of some insn
170 ; patterns that share the same RTL in both ARM and Thumb code.
171 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
173 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
174 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
176 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
177 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
179 ;; Operand number of an input operand that is shifted. Zero if the
180 ;; given instruction does not shift one of its input operands.
181 (define_attr "shift" "" (const_int 0))
183 ; Floating Point Unit. If we only have floating point emulation, then there
184 ; is no point in scheduling the floating point insns. (Well, for best
185 ; performance we should try and group them together).
186 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
187 (const (symbol_ref "arm_fpu_attr")))
189 ; LENGTH of an instruction (in bytes)
190 (define_attr "length" ""
193 ; The architecture which supports the instruction (or alternative).
194 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
195 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
196 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
197 ; arm_arch6. This attribute is used to compute attribute "enabled",
198 ; use type "any" to enable an alternative in all cases.
199 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
200 (const_string "any"))
202 (define_attr "arch_enabled" "no,yes"
203 (cond [(eq_attr "arch" "any")
206 (and (eq_attr "arch" "a")
207 (match_test "TARGET_ARM"))
210 (and (eq_attr "arch" "t")
211 (match_test "TARGET_THUMB"))
214 (and (eq_attr "arch" "t1")
215 (match_test "TARGET_THUMB1"))
218 (and (eq_attr "arch" "t2")
219 (match_test "TARGET_THUMB2"))
222 (and (eq_attr "arch" "32")
223 (match_test "TARGET_32BIT"))
226 (and (eq_attr "arch" "v6")
227 (match_test "TARGET_32BIT && arm_arch6"))
230 (and (eq_attr "arch" "nov6")
231 (match_test "TARGET_32BIT && !arm_arch6"))
234 (and (eq_attr "arch" "onlya8")
235 (eq_attr "tune" "cortexa8"))
238 (and (eq_attr "arch" "nota8")
239 (not (eq_attr "tune" "cortexa8")))
240 (const_string "yes")]
241 (const_string "no")))
243 ; Allows an insn to disable certain alternatives for reasons other than
245 (define_attr "insn_enabled" "no,yes"
246 (const_string "yes"))
248 ; Enable all alternatives that are both arch_enabled and insn_enabled.
249 (define_attr "enabled" "no,yes"
250 (if_then_else (eq_attr "insn_enabled" "yes")
251 (if_then_else (eq_attr "arch_enabled" "yes")
254 (const_string "no")))
256 ; POOL_RANGE is how far away from a constant pool entry that this insn
257 ; can be placed. If the distance is zero, then this insn will never
258 ; reference the pool.
259 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
260 ; before its address. It is set to <max_range> - (8 + <data_size>).
261 (define_attr "arm_pool_range" "" (const_int 0))
262 (define_attr "thumb2_pool_range" "" (const_int 0))
263 (define_attr "arm_neg_pool_range" "" (const_int 0))
264 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
266 (define_attr "pool_range" ""
267 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
268 (attr "arm_pool_range")))
269 (define_attr "neg_pool_range" ""
270 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
271 (attr "arm_neg_pool_range")))
273 ; An assembler sequence may clobber the condition codes without us knowing.
274 ; If such an insn references the pool, then we have no way of knowing how,
275 ; so use the most conservative value for pool_range.
276 (define_asm_attributes
277 [(set_attr "conds" "clob")
278 (set_attr "length" "4")
279 (set_attr "pool_range" "250")])
281 ;; The instruction used to implement a particular pattern. This
282 ;; information is used by pipeline descriptions to provide accurate
283 ;; scheduling information.
286 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
287 (const_string "other"))
289 ; TYPE attribute is used to detect floating point instructions which, if
290 ; running on a co-processor can run in parallel with other, basic instructions
291 ; If write-buffer scheduling is enabled then it can also be used in the
292 ; scheduling of writes.
294 ; Classification of each insn
295 ; Note: vfp.md has different meanings for some of these, and some further
296 ; types as well. See that file for details.
297 ; alu any alu instruction that doesn't hit memory or fp
298 ; regs or have a shifted source operand
299 ; alu_shift any data instruction that doesn't hit memory or fp
300 ; regs, but has a source operand shifted by a constant
301 ; alu_shift_reg any data instruction that doesn't hit memory or fp
302 ; regs, but has a source operand shifted by a register value
303 ; mult a multiply instruction
304 ; block blockage insn, this blocks all functional units
305 ; float a floating point arithmetic operation (subject to expansion)
306 ; fdivd DFmode floating point division
307 ; fdivs SFmode floating point division
308 ; fmul Floating point multiply
309 ; ffmul Fast floating point multiply
310 ; farith Floating point arithmetic (4 cycle)
311 ; ffarith Fast floating point arithmetic (2 cycle)
312 ; float_em a floating point arithmetic operation that is normally emulated
313 ; even on a machine with an fpa.
314 ; f_fpa_load a floating point load from memory. Only for the FPA.
315 ; f_fpa_store a floating point store to memory. Only for the FPA.
316 ; f_load[sd] A single/double load from memory. Used for VFP unit.
317 ; f_store[sd] A single/double store to memory. Used for VFP unit.
318 ; f_flag a transfer of co-processor flags to the CPSR
319 ; f_mem_r a transfer of a floating point register to a real reg via mem
320 ; r_mem_f the reverse of f_mem_r
321 ; f_2_r fast transfer float to arm (no memory needed)
322 ; r_2_f fast transfer arm to float
323 ; f_cvt convert floating<->integral
325 ; call a subroutine call
326 ; load_byte load byte(s) from memory to arm registers
327 ; load1 load 1 word from memory to arm registers
328 ; load2 load 2 words from memory to arm registers
329 ; load3 load 3 words from memory to arm registers
330 ; load4 load 4 words from memory to arm registers
331 ; store store 1 word to memory from arm registers
332 ; store2 store 2 words
333 ; store3 store 3 words
334 ; store4 store 4 (or more) words
335 ; Additions for Cirrus Maverick co-processor:
336 ; mav_farith Floating point arithmetic (4 cycle)
337 ; mav_dmult Double multiplies (7 cycle)
341 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
343 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
344 (const_string "mult")
345 (const_string "alu")))
347 ; Is this an (integer side) multiply with a 64-bit result?
348 (define_attr "mul64" "no,yes"
350 (eq_attr "insn" "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
352 (const_string "no")))
354 ; Load scheduling, set from the arm_ld_sched variable
355 ; initialized by arm_option_override()
356 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
358 ;; Classification of NEON instructions for scheduling purposes.
359 ;; Do not set this attribute and the "type" attribute together in
360 ;; any one instruction pattern.
361 (define_attr "neon_type"
372 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
373 neon_mul_qqq_8_16_32_ddd_32,\
374 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
375 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
377 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
378 neon_mla_qqq_32_qqd_32_scalar,\
379 neon_mul_ddd_16_scalar_32_16_long_scalar,\
380 neon_mul_qqd_32_scalar,\
381 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
386 neon_vqshl_vrshl_vqrshl_qqq,\
388 neon_fp_vadd_ddd_vabs_dd,\
389 neon_fp_vadd_qqq_vabs_qq,\
395 neon_fp_vmla_ddd_scalar,\
396 neon_fp_vmla_qqq_scalar,\
397 neon_fp_vrecps_vrsqrts_ddd,\
398 neon_fp_vrecps_vrsqrts_qqq,\
406 neon_vld2_2_regs_vld1_vld2_all_lanes,\
409 neon_vst1_1_2_regs_vst2_2_regs,\
411 neon_vst2_4_regs_vst3_vst4,\
413 neon_vld1_vld2_lane,\
414 neon_vld3_vld4_lane,\
415 neon_vst1_vst2_lane,\
416 neon_vst3_vst4_lane,\
417 neon_vld3_vld4_all_lanes,\
425 (const_string "none"))
427 ; condition codes: this one is used by final_prescan_insn to speed up
428 ; conditionalizing instructions. It saves having to scan the rtl to see if
429 ; it uses or alters the condition codes.
431 ; USE means that the condition codes are used by the insn in the process of
432 ; outputting code, this means (at present) that we can't use the insn in
435 ; SET means that the purpose of the insn is to set the condition codes in a
436 ; well defined manner.
438 ; CLOB means that the condition codes are altered in an undefined manner, if
439 ; they are altered at all
441 ; UNCONDITIONAL means the instruction can not be conditionally executed and
442 ; that the instruction does not use or alter the condition codes.
444 ; NOCOND means that the instruction does not use or alter the condition
445 ; codes but can be converted into a conditionally exectuted instruction.
447 (define_attr "conds" "use,set,clob,unconditional,nocond"
449 (ior (eq_attr "is_thumb1" "yes")
450 (eq_attr "type" "call"))
451 (const_string "clob")
452 (if_then_else (eq_attr "neon_type" "none")
453 (const_string "nocond")
454 (const_string "unconditional"))))
456 ; Predicable means that the insn can be conditionally executed based on
457 ; an automatically added predicate (additional patterns are generated by
458 ; gen...). We default to 'no' because no Thumb patterns match this rule
459 ; and not all ARM patterns do.
460 (define_attr "predicable" "no,yes" (const_string "no"))
462 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
463 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
464 ; suffer blockages enough to warrant modelling this (and it can adversely
465 ; affect the schedule).
466 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
468 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
469 ; to stall the processor. Used with model_wbuf above.
470 (define_attr "write_conflict" "no,yes"
471 (if_then_else (eq_attr "type"
472 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
474 (const_string "no")))
476 ; Classify the insns into those that take one cycle and those that take more
477 ; than one on the main cpu execution unit.
478 (define_attr "core_cycles" "single,multi"
479 (if_then_else (eq_attr "type"
480 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
481 (const_string "single")
482 (const_string "multi")))
484 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
485 ;; distant label. Only applicable to Thumb code.
486 (define_attr "far_jump" "yes,no" (const_string "no"))
489 ;; The number of machine instructions this pattern expands to.
490 ;; Used for Thumb-2 conditional execution.
491 (define_attr "ce_count" "" (const_int 1))
493 ;;---------------------------------------------------------------------------
496 (include "iterators.md")
498 ;;---------------------------------------------------------------------------
501 (include "predicates.md")
502 (include "constraints.md")
504 ;;---------------------------------------------------------------------------
505 ;; Pipeline descriptions
507 (define_attr "tune_cortexr4" "yes,no"
509 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
511 (const_string "no"))))
513 ;; True if the generic scheduling description should be used.
515 (define_attr "generic_sched" "yes,no"
517 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexa15,cortexm4")
518 (eq_attr "tune_cortexr4" "yes"))
520 (const_string "yes"))))
522 (define_attr "generic_vfp" "yes,no"
524 (and (eq_attr "fpu" "vfp")
525 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
526 (eq_attr "tune_cortexr4" "no"))
528 (const_string "no"))))
530 (include "arm-generic.md")
531 (include "arm926ejs.md")
532 (include "arm1020e.md")
533 (include "arm1026ejs.md")
534 (include "arm1136jfs.md")
536 (include "fa606te.md")
537 (include "fa626te.md")
538 (include "fmp626.md")
539 (include "fa726te.md")
540 (include "cortex-a5.md")
541 (include "cortex-a8.md")
542 (include "cortex-a9.md")
543 (include "cortex-a15.md")
544 (include "cortex-r4.md")
545 (include "cortex-r4f.md")
546 (include "cortex-m4.md")
547 (include "cortex-m4-fpu.md")
551 ;;---------------------------------------------------------------------------
556 ;; Note: For DImode insns, there is normally no reason why operands should
557 ;; not be in the same register, what we don't want is for something being
558 ;; written to partially overlap something that is an input.
559 ;; Cirrus 64bit additions should not be split because we have a native
560 ;; 64bit addition instructions.
562 (define_expand "adddi3"
564 [(set (match_operand:DI 0 "s_register_operand" "")
565 (plus:DI (match_operand:DI 1 "s_register_operand" "")
566 (match_operand:DI 2 "s_register_operand" "")))
567 (clobber (reg:CC CC_REGNUM))])]
570 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
572 if (!cirrus_fp_register (operands[0], DImode))
573 operands[0] = force_reg (DImode, operands[0]);
574 if (!cirrus_fp_register (operands[1], DImode))
575 operands[1] = force_reg (DImode, operands[1]);
576 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
582 if (GET_CODE (operands[1]) != REG)
583 operands[1] = force_reg (DImode, operands[1]);
584 if (GET_CODE (operands[2]) != REG)
585 operands[2] = force_reg (DImode, operands[2]);
590 (define_insn "*thumb1_adddi3"
591 [(set (match_operand:DI 0 "register_operand" "=l")
592 (plus:DI (match_operand:DI 1 "register_operand" "%0")
593 (match_operand:DI 2 "register_operand" "l")))
594 (clobber (reg:CC CC_REGNUM))
597 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
598 [(set_attr "length" "4")]
601 (define_insn_and_split "*arm_adddi3"
602 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
603 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
604 (match_operand:DI 2 "s_register_operand" "r, 0")))
605 (clobber (reg:CC CC_REGNUM))]
606 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
608 "TARGET_32BIT && reload_completed
609 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
610 [(parallel [(set (reg:CC_C CC_REGNUM)
611 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
613 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
614 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
615 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
618 operands[3] = gen_highpart (SImode, operands[0]);
619 operands[0] = gen_lowpart (SImode, operands[0]);
620 operands[4] = gen_highpart (SImode, operands[1]);
621 operands[1] = gen_lowpart (SImode, operands[1]);
622 operands[5] = gen_highpart (SImode, operands[2]);
623 operands[2] = gen_lowpart (SImode, operands[2]);
625 [(set_attr "conds" "clob")
626 (set_attr "length" "8")]
629 (define_insn_and_split "*adddi_sesidi_di"
630 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
631 (plus:DI (sign_extend:DI
632 (match_operand:SI 2 "s_register_operand" "r,r"))
633 (match_operand:DI 1 "s_register_operand" "0,r")))
634 (clobber (reg:CC CC_REGNUM))]
635 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
637 "TARGET_32BIT && reload_completed"
638 [(parallel [(set (reg:CC_C CC_REGNUM)
639 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
641 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
642 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
645 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
648 operands[3] = gen_highpart (SImode, operands[0]);
649 operands[0] = gen_lowpart (SImode, operands[0]);
650 operands[4] = gen_highpart (SImode, operands[1]);
651 operands[1] = gen_lowpart (SImode, operands[1]);
652 operands[2] = gen_lowpart (SImode, operands[2]);
654 [(set_attr "conds" "clob")
655 (set_attr "length" "8")]
658 (define_insn_and_split "*adddi_zesidi_di"
659 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
660 (plus:DI (zero_extend:DI
661 (match_operand:SI 2 "s_register_operand" "r,r"))
662 (match_operand:DI 1 "s_register_operand" "0,r")))
663 (clobber (reg:CC CC_REGNUM))]
664 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
666 "TARGET_32BIT && reload_completed"
667 [(parallel [(set (reg:CC_C CC_REGNUM)
668 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
670 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
671 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
672 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
675 operands[3] = gen_highpart (SImode, operands[0]);
676 operands[0] = gen_lowpart (SImode, operands[0]);
677 operands[4] = gen_highpart (SImode, operands[1]);
678 operands[1] = gen_lowpart (SImode, operands[1]);
679 operands[2] = gen_lowpart (SImode, operands[2]);
681 [(set_attr "conds" "clob")
682 (set_attr "length" "8")]
685 (define_expand "addsi3"
686 [(set (match_operand:SI 0 "s_register_operand" "")
687 (plus:SI (match_operand:SI 1 "s_register_operand" "")
688 (match_operand:SI 2 "reg_or_int_operand" "")))]
691 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
693 arm_split_constant (PLUS, SImode, NULL_RTX,
694 INTVAL (operands[2]), operands[0], operands[1],
695 optimize && can_create_pseudo_p ());
701 ; If there is a scratch available, this will be faster than synthesizing the
704 [(match_scratch:SI 3 "r")
705 (set (match_operand:SI 0 "arm_general_register_operand" "")
706 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
707 (match_operand:SI 2 "const_int_operand" "")))]
709 !(const_ok_for_arm (INTVAL (operands[2]))
710 || const_ok_for_arm (-INTVAL (operands[2])))
711 && const_ok_for_arm (~INTVAL (operands[2]))"
712 [(set (match_dup 3) (match_dup 2))
713 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
717 ;; The r/r/k alternative is required when reloading the address
718 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
719 ;; put the duplicated register first, and not try the commutative version.
720 (define_insn_and_split "*arm_addsi3"
721 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,r, k, r")
722 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,rk,k, rk")
723 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,PJ,PJ,?n")))]
733 subw%?\\t%0, %1, #%n2
734 subw%?\\t%0, %1, #%n2
737 && GET_CODE (operands[2]) == CONST_INT
738 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
739 && (reload_completed || !arm_eliminable_register (operands[1]))"
740 [(clobber (const_int 0))]
742 arm_split_constant (PLUS, SImode, curr_insn,
743 INTVAL (operands[2]), operands[0],
747 [(set_attr "length" "4,4,4,4,4,4,4,4,4,16")
748 (set_attr "predicable" "yes")
749 (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")]
752 (define_insn_and_split "*thumb1_addsi3"
753 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
754 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
755 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
758 static const char * const asms[] =
760 \"add\\t%0, %0, %2\",
761 \"sub\\t%0, %0, #%n2\",
762 \"add\\t%0, %1, %2\",
763 \"add\\t%0, %0, %2\",
764 \"add\\t%0, %0, %2\",
765 \"add\\t%0, %1, %2\",
766 \"add\\t%0, %1, %2\",
771 if ((which_alternative == 2 || which_alternative == 6)
772 && GET_CODE (operands[2]) == CONST_INT
773 && INTVAL (operands[2]) < 0)
774 return \"sub\\t%0, %1, #%n2\";
775 return asms[which_alternative];
777 "&& reload_completed && CONST_INT_P (operands[2])
778 && ((operands[1] != stack_pointer_rtx
779 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
780 || (operands[1] == stack_pointer_rtx
781 && INTVAL (operands[2]) > 1020))"
782 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
783 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
785 HOST_WIDE_INT offset = INTVAL (operands[2]);
786 if (operands[1] == stack_pointer_rtx)
792 else if (offset < -255)
795 operands[3] = GEN_INT (offset);
796 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
798 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
801 ;; Reloading and elimination of the frame pointer can
802 ;; sometimes cause this optimization to be missed.
804 [(set (match_operand:SI 0 "arm_general_register_operand" "")
805 (match_operand:SI 1 "const_int_operand" ""))
807 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
809 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
810 && (INTVAL (operands[1]) & 3) == 0"
811 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
815 (define_insn "addsi3_compare0"
816 [(set (reg:CC_NOOV CC_REGNUM)
818 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
819 (match_operand:SI 2 "arm_add_operand" "rI,L"))
821 (set (match_operand:SI 0 "s_register_operand" "=r,r")
822 (plus:SI (match_dup 1) (match_dup 2)))]
826 sub%.\\t%0, %1, #%n2"
827 [(set_attr "conds" "set")]
830 (define_insn "*addsi3_compare0_scratch"
831 [(set (reg:CC_NOOV CC_REGNUM)
833 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
834 (match_operand:SI 1 "arm_add_operand" "rI,L"))
840 [(set_attr "conds" "set")
841 (set_attr "predicable" "yes")]
844 (define_insn "*compare_negsi_si"
845 [(set (reg:CC_Z CC_REGNUM)
847 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
848 (match_operand:SI 1 "s_register_operand" "r")))]
851 [(set_attr "conds" "set")
852 (set_attr "predicable" "yes")]
855 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
856 ;; addend is a constant.
857 (define_insn "*cmpsi2_addneg"
858 [(set (reg:CC CC_REGNUM)
860 (match_operand:SI 1 "s_register_operand" "r,r")
861 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
862 (set (match_operand:SI 0 "s_register_operand" "=r,r")
863 (plus:SI (match_dup 1)
864 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
865 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
868 sub%.\\t%0, %1, #%n3"
869 [(set_attr "conds" "set")]
872 ;; Convert the sequence
874 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
878 ;; bcs dest ((unsigned)rn >= 1)
879 ;; similarly for the beq variant using bcc.
880 ;; This is a common looping idiom (while (n--))
882 [(set (match_operand:SI 0 "arm_general_register_operand" "")
883 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
885 (set (match_operand 2 "cc_register" "")
886 (compare (match_dup 0) (const_int -1)))
888 (if_then_else (match_operator 3 "equality_operator"
889 [(match_dup 2) (const_int 0)])
890 (match_operand 4 "" "")
891 (match_operand 5 "" "")))]
892 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
896 (match_dup 1) (const_int 1)))
897 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
899 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
902 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
903 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
906 operands[2], const0_rtx);"
909 ;; The next four insns work because they compare the result with one of
910 ;; the operands, and we know that the use of the condition code is
911 ;; either GEU or LTU, so we can use the carry flag from the addition
912 ;; instead of doing the compare a second time.
913 (define_insn "*addsi3_compare_op1"
914 [(set (reg:CC_C CC_REGNUM)
916 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
917 (match_operand:SI 2 "arm_add_operand" "rI,L"))
919 (set (match_operand:SI 0 "s_register_operand" "=r,r")
920 (plus:SI (match_dup 1) (match_dup 2)))]
924 sub%.\\t%0, %1, #%n2"
925 [(set_attr "conds" "set")]
928 (define_insn "*addsi3_compare_op2"
929 [(set (reg:CC_C CC_REGNUM)
931 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
932 (match_operand:SI 2 "arm_add_operand" "rI,L"))
934 (set (match_operand:SI 0 "s_register_operand" "=r,r")
935 (plus:SI (match_dup 1) (match_dup 2)))]
939 sub%.\\t%0, %1, #%n2"
940 [(set_attr "conds" "set")]
943 (define_insn "*compare_addsi2_op0"
944 [(set (reg:CC_C CC_REGNUM)
946 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
947 (match_operand:SI 1 "arm_add_operand" "rI,L"))
953 [(set_attr "conds" "set")
954 (set_attr "predicable" "yes")]
957 (define_insn "*compare_addsi2_op1"
958 [(set (reg:CC_C CC_REGNUM)
960 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
961 (match_operand:SI 1 "arm_add_operand" "rI,L"))
967 [(set_attr "conds" "set")
968 (set_attr "predicable" "yes")]
971 (define_insn "*addsi3_carryin_<optab>"
972 [(set (match_operand:SI 0 "s_register_operand" "=r")
973 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
974 (match_operand:SI 2 "arm_rhs_operand" "rI"))
975 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
978 [(set_attr "conds" "use")]
981 (define_insn "*addsi3_carryin_alt2_<optab>"
982 [(set (match_operand:SI 0 "s_register_operand" "=r")
983 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
984 (match_operand:SI 1 "s_register_operand" "%r"))
985 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
988 [(set_attr "conds" "use")]
991 (define_insn "*addsi3_carryin_shift_<optab>"
992 [(set (match_operand:SI 0 "s_register_operand" "=r")
994 (match_operator:SI 2 "shift_operator"
995 [(match_operand:SI 3 "s_register_operand" "r")
996 (match_operand:SI 4 "reg_or_int_operand" "rM")])
997 (match_operand:SI 1 "s_register_operand" "r"))
998 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1000 "adc%?\\t%0, %1, %3%S2"
1001 [(set_attr "conds" "use")
1002 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1003 (const_string "alu_shift")
1004 (const_string "alu_shift_reg")))]
1007 (define_insn "*addsi3_carryin_clobercc_<optab>"
1008 [(set (match_operand:SI 0 "s_register_operand" "=r")
1009 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1010 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1011 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1012 (clobber (reg:CC CC_REGNUM))]
1014 "adc%.\\t%0, %1, %2"
1015 [(set_attr "conds" "set")]
1018 (define_expand "incscc"
1019 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1020 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1021 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1022 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1027 (define_insn "*arm_incscc"
1028 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1029 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1030 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1031 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1035 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1036 [(set_attr "conds" "use")
1037 (set_attr "length" "4,8")]
1040 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1042 [(set (match_operand:SI 0 "s_register_operand" "")
1043 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1044 (match_operand:SI 2 "s_register_operand" ""))
1046 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1048 [(set (match_dup 3) (match_dup 1))
1049 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1051 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1054 (define_expand "addsf3"
1055 [(set (match_operand:SF 0 "s_register_operand" "")
1056 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1057 (match_operand:SF 2 "arm_float_add_operand" "")))]
1058 "TARGET_32BIT && TARGET_HARD_FLOAT"
1061 && !cirrus_fp_register (operands[2], SFmode))
1062 operands[2] = force_reg (SFmode, operands[2]);
1065 (define_expand "adddf3"
1066 [(set (match_operand:DF 0 "s_register_operand" "")
1067 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1068 (match_operand:DF 2 "arm_float_add_operand" "")))]
1069 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1072 && !cirrus_fp_register (operands[2], DFmode))
1073 operands[2] = force_reg (DFmode, operands[2]);
1076 (define_expand "subdi3"
1078 [(set (match_operand:DI 0 "s_register_operand" "")
1079 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1080 (match_operand:DI 2 "s_register_operand" "")))
1081 (clobber (reg:CC CC_REGNUM))])]
1084 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1086 && cirrus_fp_register (operands[0], DImode)
1087 && cirrus_fp_register (operands[1], DImode))
1089 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1095 if (GET_CODE (operands[1]) != REG)
1096 operands[1] = force_reg (DImode, operands[1]);
1097 if (GET_CODE (operands[2]) != REG)
1098 operands[2] = force_reg (DImode, operands[2]);
1103 (define_insn "*arm_subdi3"
1104 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1105 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1106 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1107 (clobber (reg:CC CC_REGNUM))]
1108 "TARGET_32BIT && !TARGET_NEON"
1109 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1110 [(set_attr "conds" "clob")
1111 (set_attr "length" "8")]
1114 (define_insn "*thumb_subdi3"
1115 [(set (match_operand:DI 0 "register_operand" "=l")
1116 (minus:DI (match_operand:DI 1 "register_operand" "0")
1117 (match_operand:DI 2 "register_operand" "l")))
1118 (clobber (reg:CC CC_REGNUM))]
1120 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1121 [(set_attr "length" "4")]
1124 (define_insn "*subdi_di_zesidi"
1125 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1126 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1128 (match_operand:SI 2 "s_register_operand" "r,r"))))
1129 (clobber (reg:CC CC_REGNUM))]
1131 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1132 [(set_attr "conds" "clob")
1133 (set_attr "length" "8")]
1136 (define_insn "*subdi_di_sesidi"
1137 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1138 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1140 (match_operand:SI 2 "s_register_operand" "r,r"))))
1141 (clobber (reg:CC CC_REGNUM))]
1143 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1144 [(set_attr "conds" "clob")
1145 (set_attr "length" "8")]
1148 (define_insn "*subdi_zesidi_di"
1149 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1150 (minus:DI (zero_extend:DI
1151 (match_operand:SI 2 "s_register_operand" "r,r"))
1152 (match_operand:DI 1 "s_register_operand" "0,r")))
1153 (clobber (reg:CC CC_REGNUM))]
1155 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1156 [(set_attr "conds" "clob")
1157 (set_attr "length" "8")]
1160 (define_insn "*subdi_sesidi_di"
1161 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1162 (minus:DI (sign_extend:DI
1163 (match_operand:SI 2 "s_register_operand" "r,r"))
1164 (match_operand:DI 1 "s_register_operand" "0,r")))
1165 (clobber (reg:CC CC_REGNUM))]
1167 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1168 [(set_attr "conds" "clob")
1169 (set_attr "length" "8")]
1172 (define_insn "*subdi_zesidi_zesidi"
1173 [(set (match_operand:DI 0 "s_register_operand" "=r")
1174 (minus:DI (zero_extend:DI
1175 (match_operand:SI 1 "s_register_operand" "r"))
1177 (match_operand:SI 2 "s_register_operand" "r"))))
1178 (clobber (reg:CC CC_REGNUM))]
1180 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1181 [(set_attr "conds" "clob")
1182 (set_attr "length" "8")]
1185 (define_expand "subsi3"
1186 [(set (match_operand:SI 0 "s_register_operand" "")
1187 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1188 (match_operand:SI 2 "s_register_operand" "")))]
1191 if (GET_CODE (operands[1]) == CONST_INT)
1195 arm_split_constant (MINUS, SImode, NULL_RTX,
1196 INTVAL (operands[1]), operands[0],
1197 operands[2], optimize && can_create_pseudo_p ());
1200 else /* TARGET_THUMB1 */
1201 operands[1] = force_reg (SImode, operands[1]);
1206 (define_insn "thumb1_subsi3_insn"
1207 [(set (match_operand:SI 0 "register_operand" "=l")
1208 (minus:SI (match_operand:SI 1 "register_operand" "l")
1209 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1212 [(set_attr "length" "2")
1213 (set_attr "conds" "set")])
1215 ; ??? Check Thumb-2 split length
1216 (define_insn_and_split "*arm_subsi3_insn"
1217 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r")
1218 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n")
1219 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r")))]
1226 "&& (GET_CODE (operands[1]) == CONST_INT
1227 && !const_ok_for_arm (INTVAL (operands[1])))"
1228 [(clobber (const_int 0))]
1230 arm_split_constant (MINUS, SImode, curr_insn,
1231 INTVAL (operands[1]), operands[0], operands[2], 0);
1234 [(set_attr "length" "4,4,4,16")
1235 (set_attr "predicable" "yes")]
1239 [(match_scratch:SI 3 "r")
1240 (set (match_operand:SI 0 "arm_general_register_operand" "")
1241 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1242 (match_operand:SI 2 "arm_general_register_operand" "")))]
1244 && !const_ok_for_arm (INTVAL (operands[1]))
1245 && const_ok_for_arm (~INTVAL (operands[1]))"
1246 [(set (match_dup 3) (match_dup 1))
1247 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1251 (define_insn "*subsi3_compare0"
1252 [(set (reg:CC_NOOV CC_REGNUM)
1254 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1255 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1257 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1258 (minus:SI (match_dup 1) (match_dup 2)))]
1263 [(set_attr "conds" "set")]
1266 (define_insn "*subsi3_compare"
1267 [(set (reg:CC CC_REGNUM)
1268 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1269 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1270 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1271 (minus:SI (match_dup 1) (match_dup 2)))]
1276 [(set_attr "conds" "set")]
1279 (define_expand "decscc"
1280 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1281 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1282 (match_operator:SI 2 "arm_comparison_operator"
1283 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1288 (define_insn "*arm_decscc"
1289 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1290 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1291 (match_operator:SI 2 "arm_comparison_operator"
1292 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1296 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1297 [(set_attr "conds" "use")
1298 (set_attr "length" "*,8")]
1301 (define_expand "subsf3"
1302 [(set (match_operand:SF 0 "s_register_operand" "")
1303 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1304 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1305 "TARGET_32BIT && TARGET_HARD_FLOAT"
1307 if (TARGET_MAVERICK)
1309 if (!cirrus_fp_register (operands[1], SFmode))
1310 operands[1] = force_reg (SFmode, operands[1]);
1311 if (!cirrus_fp_register (operands[2], SFmode))
1312 operands[2] = force_reg (SFmode, operands[2]);
1316 (define_expand "subdf3"
1317 [(set (match_operand:DF 0 "s_register_operand" "")
1318 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1319 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1320 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1322 if (TARGET_MAVERICK)
1324 if (!cirrus_fp_register (operands[1], DFmode))
1325 operands[1] = force_reg (DFmode, operands[1]);
1326 if (!cirrus_fp_register (operands[2], DFmode))
1327 operands[2] = force_reg (DFmode, operands[2]);
1332 ;; Multiplication insns
1334 (define_expand "mulsi3"
1335 [(set (match_operand:SI 0 "s_register_operand" "")
1336 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1337 (match_operand:SI 1 "s_register_operand" "")))]
1342 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1343 (define_insn "*arm_mulsi3"
1344 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1345 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1346 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1347 "TARGET_32BIT && !arm_arch6"
1348 "mul%?\\t%0, %2, %1"
1349 [(set_attr "insn" "mul")
1350 (set_attr "predicable" "yes")]
1353 (define_insn "*arm_mulsi3_v6"
1354 [(set (match_operand:SI 0 "s_register_operand" "=r")
1355 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1356 (match_operand:SI 2 "s_register_operand" "r")))]
1357 "TARGET_32BIT && arm_arch6"
1358 "mul%?\\t%0, %1, %2"
1359 [(set_attr "insn" "mul")
1360 (set_attr "predicable" "yes")]
1363 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1364 ; 1 and 2; are the same, because reload will make operand 0 match
1365 ; operand 1 without realizing that this conflicts with operand 2. We fix
1366 ; this by adding another alternative to match this case, and then `reload'
1367 ; it ourselves. This alternative must come first.
1368 (define_insn "*thumb_mulsi3"
1369 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1370 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1371 (match_operand:SI 2 "register_operand" "l,l,l")))]
1372 "TARGET_THUMB1 && !arm_arch6"
1374 if (which_alternative < 2)
1375 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1377 return \"mul\\t%0, %2\";
1379 [(set_attr "length" "4,4,2")
1380 (set_attr "insn" "mul")]
1383 (define_insn "*thumb_mulsi3_v6"
1384 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1385 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1386 (match_operand:SI 2 "register_operand" "l,0,0")))]
1387 "TARGET_THUMB1 && arm_arch6"
1392 [(set_attr "length" "2")
1393 (set_attr "insn" "mul")]
1396 (define_insn "*mulsi3_compare0"
1397 [(set (reg:CC_NOOV CC_REGNUM)
1398 (compare:CC_NOOV (mult:SI
1399 (match_operand:SI 2 "s_register_operand" "r,r")
1400 (match_operand:SI 1 "s_register_operand" "%0,r"))
1402 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1403 (mult:SI (match_dup 2) (match_dup 1)))]
1404 "TARGET_ARM && !arm_arch6"
1405 "mul%.\\t%0, %2, %1"
1406 [(set_attr "conds" "set")
1407 (set_attr "insn" "muls")]
1410 (define_insn "*mulsi3_compare0_v6"
1411 [(set (reg:CC_NOOV CC_REGNUM)
1412 (compare:CC_NOOV (mult:SI
1413 (match_operand:SI 2 "s_register_operand" "r")
1414 (match_operand:SI 1 "s_register_operand" "r"))
1416 (set (match_operand:SI 0 "s_register_operand" "=r")
1417 (mult:SI (match_dup 2) (match_dup 1)))]
1418 "TARGET_ARM && arm_arch6 && optimize_size"
1419 "mul%.\\t%0, %2, %1"
1420 [(set_attr "conds" "set")
1421 (set_attr "insn" "muls")]
1424 (define_insn "*mulsi_compare0_scratch"
1425 [(set (reg:CC_NOOV CC_REGNUM)
1426 (compare:CC_NOOV (mult:SI
1427 (match_operand:SI 2 "s_register_operand" "r,r")
1428 (match_operand:SI 1 "s_register_operand" "%0,r"))
1430 (clobber (match_scratch:SI 0 "=&r,&r"))]
1431 "TARGET_ARM && !arm_arch6"
1432 "mul%.\\t%0, %2, %1"
1433 [(set_attr "conds" "set")
1434 (set_attr "insn" "muls")]
1437 (define_insn "*mulsi_compare0_scratch_v6"
1438 [(set (reg:CC_NOOV CC_REGNUM)
1439 (compare:CC_NOOV (mult:SI
1440 (match_operand:SI 2 "s_register_operand" "r")
1441 (match_operand:SI 1 "s_register_operand" "r"))
1443 (clobber (match_scratch:SI 0 "=r"))]
1444 "TARGET_ARM && arm_arch6 && optimize_size"
1445 "mul%.\\t%0, %2, %1"
1446 [(set_attr "conds" "set")
1447 (set_attr "insn" "muls")]
1450 ;; Unnamed templates to match MLA instruction.
1452 (define_insn "*mulsi3addsi"
1453 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1455 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1456 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1457 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1458 "TARGET_32BIT && !arm_arch6"
1459 "mla%?\\t%0, %2, %1, %3"
1460 [(set_attr "insn" "mla")
1461 (set_attr "predicable" "yes")]
1464 (define_insn "*mulsi3addsi_v6"
1465 [(set (match_operand:SI 0 "s_register_operand" "=r")
1467 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1468 (match_operand:SI 1 "s_register_operand" "r"))
1469 (match_operand:SI 3 "s_register_operand" "r")))]
1470 "TARGET_32BIT && arm_arch6"
1471 "mla%?\\t%0, %2, %1, %3"
1472 [(set_attr "insn" "mla")
1473 (set_attr "predicable" "yes")]
1476 (define_insn "*mulsi3addsi_compare0"
1477 [(set (reg:CC_NOOV CC_REGNUM)
1480 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1481 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1482 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1484 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1485 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1487 "TARGET_ARM && arm_arch6"
1488 "mla%.\\t%0, %2, %1, %3"
1489 [(set_attr "conds" "set")
1490 (set_attr "insn" "mlas")]
1493 (define_insn "*mulsi3addsi_compare0_v6"
1494 [(set (reg:CC_NOOV CC_REGNUM)
1497 (match_operand:SI 2 "s_register_operand" "r")
1498 (match_operand:SI 1 "s_register_operand" "r"))
1499 (match_operand:SI 3 "s_register_operand" "r"))
1501 (set (match_operand:SI 0 "s_register_operand" "=r")
1502 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1504 "TARGET_ARM && arm_arch6 && optimize_size"
1505 "mla%.\\t%0, %2, %1, %3"
1506 [(set_attr "conds" "set")
1507 (set_attr "insn" "mlas")]
1510 (define_insn "*mulsi3addsi_compare0_scratch"
1511 [(set (reg:CC_NOOV CC_REGNUM)
1514 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1515 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1516 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1518 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1519 "TARGET_ARM && !arm_arch6"
1520 "mla%.\\t%0, %2, %1, %3"
1521 [(set_attr "conds" "set")
1522 (set_attr "insn" "mlas")]
1525 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1526 [(set (reg:CC_NOOV CC_REGNUM)
1529 (match_operand:SI 2 "s_register_operand" "r")
1530 (match_operand:SI 1 "s_register_operand" "r"))
1531 (match_operand:SI 3 "s_register_operand" "r"))
1533 (clobber (match_scratch:SI 0 "=r"))]
1534 "TARGET_ARM && arm_arch6 && optimize_size"
1535 "mla%.\\t%0, %2, %1, %3"
1536 [(set_attr "conds" "set")
1537 (set_attr "insn" "mlas")]
1540 (define_insn "*mulsi3subsi"
1541 [(set (match_operand:SI 0 "s_register_operand" "=r")
1543 (match_operand:SI 3 "s_register_operand" "r")
1544 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1545 (match_operand:SI 1 "s_register_operand" "r"))))]
1546 "TARGET_32BIT && arm_arch_thumb2"
1547 "mls%?\\t%0, %2, %1, %3"
1548 [(set_attr "insn" "mla")
1549 (set_attr "predicable" "yes")]
1552 (define_expand "maddsidi4"
1553 [(set (match_operand:DI 0 "s_register_operand" "")
1556 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1557 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1558 (match_operand:DI 3 "s_register_operand" "")))]
1559 "TARGET_32BIT && arm_arch3m"
1562 (define_insn "*mulsidi3adddi"
1563 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1566 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1567 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1568 (match_operand:DI 1 "s_register_operand" "0")))]
1569 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1570 "smlal%?\\t%Q0, %R0, %3, %2"
1571 [(set_attr "insn" "smlal")
1572 (set_attr "predicable" "yes")]
1575 (define_insn "*mulsidi3adddi_v6"
1576 [(set (match_operand:DI 0 "s_register_operand" "=r")
1579 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1580 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1581 (match_operand:DI 1 "s_register_operand" "0")))]
1582 "TARGET_32BIT && arm_arch6"
1583 "smlal%?\\t%Q0, %R0, %3, %2"
1584 [(set_attr "insn" "smlal")
1585 (set_attr "predicable" "yes")]
1588 ;; 32x32->64 widening multiply.
1589 ;; As with mulsi3, the only difference between the v3-5 and v6+
1590 ;; versions of these patterns is the requirement that the output not
1591 ;; overlap the inputs, but that still means we have to have a named
1592 ;; expander and two different starred insns.
1594 (define_expand "mulsidi3"
1595 [(set (match_operand:DI 0 "s_register_operand" "")
1597 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1598 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1599 "TARGET_32BIT && arm_arch3m"
1603 (define_insn "*mulsidi3_nov6"
1604 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1606 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1607 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1608 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1609 "smull%?\\t%Q0, %R0, %1, %2"
1610 [(set_attr "insn" "smull")
1611 (set_attr "predicable" "yes")]
1614 (define_insn "*mulsidi3_v6"
1615 [(set (match_operand:DI 0 "s_register_operand" "=r")
1617 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1618 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1619 "TARGET_32BIT && arm_arch6"
1620 "smull%?\\t%Q0, %R0, %1, %2"
1621 [(set_attr "insn" "smull")
1622 (set_attr "predicable" "yes")]
1625 (define_expand "umulsidi3"
1626 [(set (match_operand:DI 0 "s_register_operand" "")
1628 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1629 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1630 "TARGET_32BIT && arm_arch3m"
1634 (define_insn "*umulsidi3_nov6"
1635 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1637 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1638 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1639 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1640 "umull%?\\t%Q0, %R0, %1, %2"
1641 [(set_attr "insn" "umull")
1642 (set_attr "predicable" "yes")]
1645 (define_insn "*umulsidi3_v6"
1646 [(set (match_operand:DI 0 "s_register_operand" "=r")
1648 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1649 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1650 "TARGET_32BIT && arm_arch6"
1651 "umull%?\\t%Q0, %R0, %1, %2"
1652 [(set_attr "insn" "umull")
1653 (set_attr "predicable" "yes")]
1656 (define_expand "umaddsidi4"
1657 [(set (match_operand:DI 0 "s_register_operand" "")
1660 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1661 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1662 (match_operand:DI 3 "s_register_operand" "")))]
1663 "TARGET_32BIT && arm_arch3m"
1666 (define_insn "*umulsidi3adddi"
1667 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1670 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1671 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1672 (match_operand:DI 1 "s_register_operand" "0")))]
1673 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1674 "umlal%?\\t%Q0, %R0, %3, %2"
1675 [(set_attr "insn" "umlal")
1676 (set_attr "predicable" "yes")]
1679 (define_insn "*umulsidi3adddi_v6"
1680 [(set (match_operand:DI 0 "s_register_operand" "=r")
1683 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1684 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1685 (match_operand:DI 1 "s_register_operand" "0")))]
1686 "TARGET_32BIT && arm_arch6"
1687 "umlal%?\\t%Q0, %R0, %3, %2"
1688 [(set_attr "insn" "umlal")
1689 (set_attr "predicable" "yes")]
1692 (define_expand "smulsi3_highpart"
1694 [(set (match_operand:SI 0 "s_register_operand" "")
1698 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1699 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1701 (clobber (match_scratch:SI 3 ""))])]
1702 "TARGET_32BIT && arm_arch3m"
1706 (define_insn "*smulsi3_highpart_nov6"
1707 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1711 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1712 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1714 (clobber (match_scratch:SI 3 "=&r,&r"))]
1715 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1716 "smull%?\\t%3, %0, %2, %1"
1717 [(set_attr "insn" "smull")
1718 (set_attr "predicable" "yes")]
1721 (define_insn "*smulsi3_highpart_v6"
1722 [(set (match_operand:SI 0 "s_register_operand" "=r")
1726 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1727 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1729 (clobber (match_scratch:SI 3 "=r"))]
1730 "TARGET_32BIT && arm_arch6"
1731 "smull%?\\t%3, %0, %2, %1"
1732 [(set_attr "insn" "smull")
1733 (set_attr "predicable" "yes")]
1736 (define_expand "umulsi3_highpart"
1738 [(set (match_operand:SI 0 "s_register_operand" "")
1742 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1743 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1745 (clobber (match_scratch:SI 3 ""))])]
1746 "TARGET_32BIT && arm_arch3m"
1750 (define_insn "*umulsi3_highpart_nov6"
1751 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1755 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1756 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1758 (clobber (match_scratch:SI 3 "=&r,&r"))]
1759 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1760 "umull%?\\t%3, %0, %2, %1"
1761 [(set_attr "insn" "umull")
1762 (set_attr "predicable" "yes")]
1765 (define_insn "*umulsi3_highpart_v6"
1766 [(set (match_operand:SI 0 "s_register_operand" "=r")
1770 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1771 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1773 (clobber (match_scratch:SI 3 "=r"))]
1774 "TARGET_32BIT && arm_arch6"
1775 "umull%?\\t%3, %0, %2, %1"
1776 [(set_attr "insn" "umull")
1777 (set_attr "predicable" "yes")]
1780 (define_insn "mulhisi3"
1781 [(set (match_operand:SI 0 "s_register_operand" "=r")
1782 (mult:SI (sign_extend:SI
1783 (match_operand:HI 1 "s_register_operand" "%r"))
1785 (match_operand:HI 2 "s_register_operand" "r"))))]
1786 "TARGET_DSP_MULTIPLY"
1787 "smulbb%?\\t%0, %1, %2"
1788 [(set_attr "insn" "smulxy")
1789 (set_attr "predicable" "yes")]
1792 (define_insn "*mulhisi3tb"
1793 [(set (match_operand:SI 0 "s_register_operand" "=r")
1794 (mult:SI (ashiftrt:SI
1795 (match_operand:SI 1 "s_register_operand" "r")
1798 (match_operand:HI 2 "s_register_operand" "r"))))]
1799 "TARGET_DSP_MULTIPLY"
1800 "smultb%?\\t%0, %1, %2"
1801 [(set_attr "insn" "smulxy")
1802 (set_attr "predicable" "yes")]
1805 (define_insn "*mulhisi3bt"
1806 [(set (match_operand:SI 0 "s_register_operand" "=r")
1807 (mult:SI (sign_extend:SI
1808 (match_operand:HI 1 "s_register_operand" "r"))
1810 (match_operand:SI 2 "s_register_operand" "r")
1812 "TARGET_DSP_MULTIPLY"
1813 "smulbt%?\\t%0, %1, %2"
1814 [(set_attr "insn" "smulxy")
1815 (set_attr "predicable" "yes")]
1818 (define_insn "*mulhisi3tt"
1819 [(set (match_operand:SI 0 "s_register_operand" "=r")
1820 (mult:SI (ashiftrt:SI
1821 (match_operand:SI 1 "s_register_operand" "r")
1824 (match_operand:SI 2 "s_register_operand" "r")
1826 "TARGET_DSP_MULTIPLY"
1827 "smultt%?\\t%0, %1, %2"
1828 [(set_attr "insn" "smulxy")
1829 (set_attr "predicable" "yes")]
1832 (define_insn "maddhisi4"
1833 [(set (match_operand:SI 0 "s_register_operand" "=r")
1834 (plus:SI (mult:SI (sign_extend:SI
1835 (match_operand:HI 1 "s_register_operand" "r"))
1837 (match_operand:HI 2 "s_register_operand" "r")))
1838 (match_operand:SI 3 "s_register_operand" "r")))]
1839 "TARGET_DSP_MULTIPLY"
1840 "smlabb%?\\t%0, %1, %2, %3"
1841 [(set_attr "insn" "smlaxy")
1842 (set_attr "predicable" "yes")]
1845 ;; Note: there is no maddhisi4ibt because this one is canonical form
1846 (define_insn "*maddhisi4tb"
1847 [(set (match_operand:SI 0 "s_register_operand" "=r")
1848 (plus:SI (mult:SI (ashiftrt:SI
1849 (match_operand:SI 1 "s_register_operand" "r")
1852 (match_operand:HI 2 "s_register_operand" "r")))
1853 (match_operand:SI 3 "s_register_operand" "r")))]
1854 "TARGET_DSP_MULTIPLY"
1855 "smlatb%?\\t%0, %1, %2, %3"
1856 [(set_attr "insn" "smlaxy")
1857 (set_attr "predicable" "yes")]
1860 (define_insn "*maddhisi4tt"
1861 [(set (match_operand:SI 0 "s_register_operand" "=r")
1862 (plus:SI (mult:SI (ashiftrt:SI
1863 (match_operand:SI 1 "s_register_operand" "r")
1866 (match_operand:SI 2 "s_register_operand" "r")
1868 (match_operand:SI 3 "s_register_operand" "r")))]
1869 "TARGET_DSP_MULTIPLY"
1870 "smlatt%?\\t%0, %1, %2, %3"
1871 [(set_attr "insn" "smlaxy")
1872 (set_attr "predicable" "yes")]
1875 (define_insn "maddhidi4"
1876 [(set (match_operand:DI 0 "s_register_operand" "=r")
1878 (mult:DI (sign_extend:DI
1879 (match_operand:HI 1 "s_register_operand" "r"))
1881 (match_operand:HI 2 "s_register_operand" "r")))
1882 (match_operand:DI 3 "s_register_operand" "0")))]
1883 "TARGET_DSP_MULTIPLY"
1884 "smlalbb%?\\t%Q0, %R0, %1, %2"
1885 [(set_attr "insn" "smlalxy")
1886 (set_attr "predicable" "yes")])
1888 ;; Note: there is no maddhidi4ibt because this one is canonical form
1889 (define_insn "*maddhidi4tb"
1890 [(set (match_operand:DI 0 "s_register_operand" "=r")
1892 (mult:DI (sign_extend:DI
1894 (match_operand:SI 1 "s_register_operand" "r")
1897 (match_operand:HI 2 "s_register_operand" "r")))
1898 (match_operand:DI 3 "s_register_operand" "0")))]
1899 "TARGET_DSP_MULTIPLY"
1900 "smlaltb%?\\t%Q0, %R0, %1, %2"
1901 [(set_attr "insn" "smlalxy")
1902 (set_attr "predicable" "yes")])
1904 (define_insn "*maddhidi4tt"
1905 [(set (match_operand:DI 0 "s_register_operand" "=r")
1907 (mult:DI (sign_extend:DI
1909 (match_operand:SI 1 "s_register_operand" "r")
1913 (match_operand:SI 2 "s_register_operand" "r")
1915 (match_operand:DI 3 "s_register_operand" "0")))]
1916 "TARGET_DSP_MULTIPLY"
1917 "smlaltt%?\\t%Q0, %R0, %1, %2"
1918 [(set_attr "insn" "smlalxy")
1919 (set_attr "predicable" "yes")])
1921 (define_expand "mulsf3"
1922 [(set (match_operand:SF 0 "s_register_operand" "")
1923 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1924 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1925 "TARGET_32BIT && TARGET_HARD_FLOAT"
1928 && !cirrus_fp_register (operands[2], SFmode))
1929 operands[2] = force_reg (SFmode, operands[2]);
1932 (define_expand "muldf3"
1933 [(set (match_operand:DF 0 "s_register_operand" "")
1934 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1935 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1936 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1939 && !cirrus_fp_register (operands[2], DFmode))
1940 operands[2] = force_reg (DFmode, operands[2]);
1945 (define_expand "divsf3"
1946 [(set (match_operand:SF 0 "s_register_operand" "")
1947 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1948 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1949 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1952 (define_expand "divdf3"
1953 [(set (match_operand:DF 0 "s_register_operand" "")
1954 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1955 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1956 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1961 (define_expand "modsf3"
1962 [(set (match_operand:SF 0 "s_register_operand" "")
1963 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1964 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1965 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1968 (define_expand "moddf3"
1969 [(set (match_operand:DF 0 "s_register_operand" "")
1970 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1971 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1972 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1975 ;; Boolean and,ior,xor insns
1977 ;; Split up double word logical operations
1979 ;; Split up simple DImode logical operations. Simply perform the logical
1980 ;; operation on the upper and lower halves of the registers.
1982 [(set (match_operand:DI 0 "s_register_operand" "")
1983 (match_operator:DI 6 "logical_binary_operator"
1984 [(match_operand:DI 1 "s_register_operand" "")
1985 (match_operand:DI 2 "s_register_operand" "")]))]
1986 "TARGET_32BIT && reload_completed
1987 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1988 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1989 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1990 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1993 operands[3] = gen_highpart (SImode, operands[0]);
1994 operands[0] = gen_lowpart (SImode, operands[0]);
1995 operands[4] = gen_highpart (SImode, operands[1]);
1996 operands[1] = gen_lowpart (SImode, operands[1]);
1997 operands[5] = gen_highpart (SImode, operands[2]);
1998 operands[2] = gen_lowpart (SImode, operands[2]);
2003 [(set (match_operand:DI 0 "s_register_operand" "")
2004 (match_operator:DI 6 "logical_binary_operator"
2005 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2006 (match_operand:DI 1 "s_register_operand" "")]))]
2007 "TARGET_32BIT && reload_completed"
2008 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2009 (set (match_dup 3) (match_op_dup:SI 6
2010 [(ashiftrt:SI (match_dup 2) (const_int 31))
2014 operands[3] = gen_highpart (SImode, operands[0]);
2015 operands[0] = gen_lowpart (SImode, operands[0]);
2016 operands[4] = gen_highpart (SImode, operands[1]);
2017 operands[1] = gen_lowpart (SImode, operands[1]);
2018 operands[5] = gen_highpart (SImode, operands[2]);
2019 operands[2] = gen_lowpart (SImode, operands[2]);
2023 ;; The zero extend of operand 2 means we can just copy the high part of
2024 ;; operand1 into operand0.
2026 [(set (match_operand:DI 0 "s_register_operand" "")
2028 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2029 (match_operand:DI 1 "s_register_operand" "")))]
2030 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2031 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2032 (set (match_dup 3) (match_dup 4))]
2035 operands[4] = gen_highpart (SImode, operands[1]);
2036 operands[3] = gen_highpart (SImode, operands[0]);
2037 operands[0] = gen_lowpart (SImode, operands[0]);
2038 operands[1] = gen_lowpart (SImode, operands[1]);
2042 ;; The zero extend of operand 2 means we can just copy the high part of
2043 ;; operand1 into operand0.
2045 [(set (match_operand:DI 0 "s_register_operand" "")
2047 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2048 (match_operand:DI 1 "s_register_operand" "")))]
2049 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2050 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2051 (set (match_dup 3) (match_dup 4))]
2054 operands[4] = gen_highpart (SImode, operands[1]);
2055 operands[3] = gen_highpart (SImode, operands[0]);
2056 operands[0] = gen_lowpart (SImode, operands[0]);
2057 operands[1] = gen_lowpart (SImode, operands[1]);
2061 (define_expand "anddi3"
2062 [(set (match_operand:DI 0 "s_register_operand" "")
2063 (and:DI (match_operand:DI 1 "s_register_operand" "")
2064 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2069 (define_insn "*anddi3_insn"
2070 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2071 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2072 (match_operand:DI 2 "s_register_operand" "r,r")))]
2073 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2075 [(set_attr "length" "8")]
2078 (define_insn_and_split "*anddi_zesidi_di"
2079 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2080 (and:DI (zero_extend:DI
2081 (match_operand:SI 2 "s_register_operand" "r,r"))
2082 (match_operand:DI 1 "s_register_operand" "0,r")))]
2085 "TARGET_32BIT && reload_completed"
2086 ; The zero extend of operand 2 clears the high word of the output
2088 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2089 (set (match_dup 3) (const_int 0))]
2092 operands[3] = gen_highpart (SImode, operands[0]);
2093 operands[0] = gen_lowpart (SImode, operands[0]);
2094 operands[1] = gen_lowpart (SImode, operands[1]);
2096 [(set_attr "length" "8")]
2099 (define_insn "*anddi_sesdi_di"
2100 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2101 (and:DI (sign_extend:DI
2102 (match_operand:SI 2 "s_register_operand" "r,r"))
2103 (match_operand:DI 1 "s_register_operand" "0,r")))]
2106 [(set_attr "length" "8")]
2109 (define_expand "andsi3"
2110 [(set (match_operand:SI 0 "s_register_operand" "")
2111 (and:SI (match_operand:SI 1 "s_register_operand" "")
2112 (match_operand:SI 2 "reg_or_int_operand" "")))]
2117 if (GET_CODE (operands[2]) == CONST_INT)
2119 if (INTVAL (operands[2]) == 255 && arm_arch6)
2121 operands[1] = convert_to_mode (QImode, operands[1], 1);
2122 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2126 arm_split_constant (AND, SImode, NULL_RTX,
2127 INTVAL (operands[2]), operands[0],
2129 optimize && can_create_pseudo_p ());
2134 else /* TARGET_THUMB1 */
2136 if (GET_CODE (operands[2]) != CONST_INT)
2138 rtx tmp = force_reg (SImode, operands[2]);
2139 if (rtx_equal_p (operands[0], operands[1]))
2143 operands[2] = operands[1];
2151 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2153 operands[2] = force_reg (SImode,
2154 GEN_INT (~INTVAL (operands[2])));
2156 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2161 for (i = 9; i <= 31; i++)
2163 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2165 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2169 else if ((((HOST_WIDE_INT) 1) << i) - 1
2170 == ~INTVAL (operands[2]))
2172 rtx shift = GEN_INT (i);
2173 rtx reg = gen_reg_rtx (SImode);
2175 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2176 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2182 operands[2] = force_reg (SImode, operands[2]);
2188 ; ??? Check split length for Thumb-2
2189 (define_insn_and_split "*arm_andsi3_insn"
2190 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2191 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2192 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2196 bic%?\\t%0, %1, #%B2
2199 && GET_CODE (operands[2]) == CONST_INT
2200 && !(const_ok_for_arm (INTVAL (operands[2]))
2201 || const_ok_for_arm (~INTVAL (operands[2])))"
2202 [(clobber (const_int 0))]
2204 arm_split_constant (AND, SImode, curr_insn,
2205 INTVAL (operands[2]), operands[0], operands[1], 0);
2208 [(set_attr "length" "4,4,16")
2209 (set_attr "predicable" "yes")]
2212 (define_insn "*thumb1_andsi3_insn"
2213 [(set (match_operand:SI 0 "register_operand" "=l")
2214 (and:SI (match_operand:SI 1 "register_operand" "%0")
2215 (match_operand:SI 2 "register_operand" "l")))]
2218 [(set_attr "length" "2")
2219 (set_attr "conds" "set")])
2221 (define_insn "*andsi3_compare0"
2222 [(set (reg:CC_NOOV CC_REGNUM)
2224 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2225 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2227 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2228 (and:SI (match_dup 1) (match_dup 2)))]
2232 bic%.\\t%0, %1, #%B2"
2233 [(set_attr "conds" "set")]
2236 (define_insn "*andsi3_compare0_scratch"
2237 [(set (reg:CC_NOOV CC_REGNUM)
2239 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2240 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2242 (clobber (match_scratch:SI 2 "=X,r"))]
2246 bic%.\\t%2, %0, #%B1"
2247 [(set_attr "conds" "set")]
2250 (define_insn "*zeroextractsi_compare0_scratch"
2251 [(set (reg:CC_NOOV CC_REGNUM)
2252 (compare:CC_NOOV (zero_extract:SI
2253 (match_operand:SI 0 "s_register_operand" "r")
2254 (match_operand 1 "const_int_operand" "n")
2255 (match_operand 2 "const_int_operand" "n"))
2258 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2259 && INTVAL (operands[1]) > 0
2260 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2261 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2263 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2264 << INTVAL (operands[2]));
2265 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2268 [(set_attr "conds" "set")
2269 (set_attr "predicable" "yes")]
2272 (define_insn_and_split "*ne_zeroextractsi"
2273 [(set (match_operand:SI 0 "s_register_operand" "=r")
2274 (ne:SI (zero_extract:SI
2275 (match_operand:SI 1 "s_register_operand" "r")
2276 (match_operand:SI 2 "const_int_operand" "n")
2277 (match_operand:SI 3 "const_int_operand" "n"))
2279 (clobber (reg:CC CC_REGNUM))]
2281 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2282 && INTVAL (operands[2]) > 0
2283 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2284 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2287 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2288 && INTVAL (operands[2]) > 0
2289 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2290 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2291 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2292 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2294 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2296 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2297 (match_dup 0) (const_int 1)))]
2299 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2300 << INTVAL (operands[3]));
2302 [(set_attr "conds" "clob")
2303 (set (attr "length")
2304 (if_then_else (eq_attr "is_thumb" "yes")
2309 (define_insn_and_split "*ne_zeroextractsi_shifted"
2310 [(set (match_operand:SI 0 "s_register_operand" "=r")
2311 (ne:SI (zero_extract:SI
2312 (match_operand:SI 1 "s_register_operand" "r")
2313 (match_operand:SI 2 "const_int_operand" "n")
2316 (clobber (reg:CC CC_REGNUM))]
2320 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2321 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2323 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2325 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2326 (match_dup 0) (const_int 1)))]
2328 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2330 [(set_attr "conds" "clob")
2331 (set_attr "length" "8")]
2334 (define_insn_and_split "*ite_ne_zeroextractsi"
2335 [(set (match_operand:SI 0 "s_register_operand" "=r")
2336 (if_then_else:SI (ne (zero_extract:SI
2337 (match_operand:SI 1 "s_register_operand" "r")
2338 (match_operand:SI 2 "const_int_operand" "n")
2339 (match_operand:SI 3 "const_int_operand" "n"))
2341 (match_operand:SI 4 "arm_not_operand" "rIK")
2343 (clobber (reg:CC CC_REGNUM))]
2345 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2346 && INTVAL (operands[2]) > 0
2347 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2348 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2349 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2352 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2353 && INTVAL (operands[2]) > 0
2354 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2355 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2356 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2357 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2358 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2360 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2362 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2363 (match_dup 0) (match_dup 4)))]
2365 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2366 << INTVAL (operands[3]));
2368 [(set_attr "conds" "clob")
2369 (set_attr "length" "8")]
2372 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2373 [(set (match_operand:SI 0 "s_register_operand" "=r")
2374 (if_then_else:SI (ne (zero_extract:SI
2375 (match_operand:SI 1 "s_register_operand" "r")
2376 (match_operand:SI 2 "const_int_operand" "n")
2379 (match_operand:SI 3 "arm_not_operand" "rIK")
2381 (clobber (reg:CC CC_REGNUM))]
2382 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2384 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2385 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2386 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2388 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2390 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2391 (match_dup 0) (match_dup 3)))]
2393 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2395 [(set_attr "conds" "clob")
2396 (set_attr "length" "8")]
2400 [(set (match_operand:SI 0 "s_register_operand" "")
2401 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2402 (match_operand:SI 2 "const_int_operand" "")
2403 (match_operand:SI 3 "const_int_operand" "")))
2404 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2406 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2407 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2409 HOST_WIDE_INT temp = INTVAL (operands[2]);
2411 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2412 operands[3] = GEN_INT (32 - temp);
2416 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2418 [(set (match_operand:SI 0 "s_register_operand" "")
2419 (match_operator:SI 1 "shiftable_operator"
2420 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2421 (match_operand:SI 3 "const_int_operand" "")
2422 (match_operand:SI 4 "const_int_operand" ""))
2423 (match_operand:SI 5 "s_register_operand" "")]))
2424 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2426 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2429 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2432 HOST_WIDE_INT temp = INTVAL (operands[3]);
2434 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2435 operands[4] = GEN_INT (32 - temp);
2440 [(set (match_operand:SI 0 "s_register_operand" "")
2441 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2442 (match_operand:SI 2 "const_int_operand" "")
2443 (match_operand:SI 3 "const_int_operand" "")))]
2445 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2446 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2448 HOST_WIDE_INT temp = INTVAL (operands[2]);
2450 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2451 operands[3] = GEN_INT (32 - temp);
2456 [(set (match_operand:SI 0 "s_register_operand" "")
2457 (match_operator:SI 1 "shiftable_operator"
2458 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2459 (match_operand:SI 3 "const_int_operand" "")
2460 (match_operand:SI 4 "const_int_operand" ""))
2461 (match_operand:SI 5 "s_register_operand" "")]))
2462 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2464 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2467 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2470 HOST_WIDE_INT temp = INTVAL (operands[3]);
2472 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2473 operands[4] = GEN_INT (32 - temp);
2477 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2478 ;;; represented by the bitfield, then this will produce incorrect results.
2479 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2480 ;;; which have a real bit-field insert instruction, the truncation happens
2481 ;;; in the bit-field insert instruction itself. Since arm does not have a
2482 ;;; bit-field insert instruction, we would have to emit code here to truncate
2483 ;;; the value before we insert. This loses some of the advantage of having
2484 ;;; this insv pattern, so this pattern needs to be reevalutated.
2486 (define_expand "insv"
2487 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2488 (match_operand 1 "general_operand" "")
2489 (match_operand 2 "general_operand" ""))
2490 (match_operand 3 "reg_or_int_operand" ""))]
2491 "TARGET_ARM || arm_arch_thumb2"
2494 int start_bit = INTVAL (operands[2]);
2495 int width = INTVAL (operands[1]);
2496 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2497 rtx target, subtarget;
2499 if (arm_arch_thumb2)
2501 if (unaligned_access && MEM_P (operands[0])
2502 && s_register_operand (operands[3], GET_MODE (operands[3]))
2503 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2507 if (BYTES_BIG_ENDIAN)
2508 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2513 base_addr = adjust_address (operands[0], SImode,
2514 start_bit / BITS_PER_UNIT);
2515 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2519 rtx tmp = gen_reg_rtx (HImode);
2521 base_addr = adjust_address (operands[0], HImode,
2522 start_bit / BITS_PER_UNIT);
2523 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2524 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2528 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2530 bool use_bfi = TRUE;
2532 if (GET_CODE (operands[3]) == CONST_INT)
2534 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2538 emit_insn (gen_insv_zero (operands[0], operands[1],
2543 /* See if the set can be done with a single orr instruction. */
2544 if (val == mask && const_ok_for_arm (val << start_bit))
2550 if (GET_CODE (operands[3]) != REG)
2551 operands[3] = force_reg (SImode, operands[3]);
2553 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2562 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2565 target = copy_rtx (operands[0]);
2566 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2567 subreg as the final target. */
2568 if (GET_CODE (target) == SUBREG)
2570 subtarget = gen_reg_rtx (SImode);
2571 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2572 < GET_MODE_SIZE (SImode))
2573 target = SUBREG_REG (target);
2578 if (GET_CODE (operands[3]) == CONST_INT)
2580 /* Since we are inserting a known constant, we may be able to
2581 reduce the number of bits that we have to clear so that
2582 the mask becomes simple. */
2583 /* ??? This code does not check to see if the new mask is actually
2584 simpler. It may not be. */
2585 rtx op1 = gen_reg_rtx (SImode);
2586 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2587 start of this pattern. */
2588 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2589 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2591 emit_insn (gen_andsi3 (op1, operands[0],
2592 gen_int_mode (~mask2, SImode)));
2593 emit_insn (gen_iorsi3 (subtarget, op1,
2594 gen_int_mode (op3_value << start_bit, SImode)));
2596 else if (start_bit == 0
2597 && !(const_ok_for_arm (mask)
2598 || const_ok_for_arm (~mask)))
2600 /* A Trick, since we are setting the bottom bits in the word,
2601 we can shift operand[3] up, operand[0] down, OR them together
2602 and rotate the result back again. This takes 3 insns, and
2603 the third might be mergeable into another op. */
2604 /* The shift up copes with the possibility that operand[3] is
2605 wider than the bitfield. */
2606 rtx op0 = gen_reg_rtx (SImode);
2607 rtx op1 = gen_reg_rtx (SImode);
2609 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2610 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2611 emit_insn (gen_iorsi3 (op1, op1, op0));
2612 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2614 else if ((width + start_bit == 32)
2615 && !(const_ok_for_arm (mask)
2616 || const_ok_for_arm (~mask)))
2618 /* Similar trick, but slightly less efficient. */
2620 rtx op0 = gen_reg_rtx (SImode);
2621 rtx op1 = gen_reg_rtx (SImode);
2623 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2624 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2625 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2626 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2630 rtx op0 = gen_int_mode (mask, SImode);
2631 rtx op1 = gen_reg_rtx (SImode);
2632 rtx op2 = gen_reg_rtx (SImode);
2634 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2636 rtx tmp = gen_reg_rtx (SImode);
2638 emit_insn (gen_movsi (tmp, op0));
2642 /* Mask out any bits in operand[3] that are not needed. */
2643 emit_insn (gen_andsi3 (op1, operands[3], op0));
2645 if (GET_CODE (op0) == CONST_INT
2646 && (const_ok_for_arm (mask << start_bit)
2647 || const_ok_for_arm (~(mask << start_bit))))
2649 op0 = gen_int_mode (~(mask << start_bit), SImode);
2650 emit_insn (gen_andsi3 (op2, operands[0], op0));
2654 if (GET_CODE (op0) == CONST_INT)
2656 rtx tmp = gen_reg_rtx (SImode);
2658 emit_insn (gen_movsi (tmp, op0));
2663 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2665 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2669 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2671 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2674 if (subtarget != target)
2676 /* If TARGET is still a SUBREG, then it must be wider than a word,
2677 so we must be careful only to set the subword we were asked to. */
2678 if (GET_CODE (target) == SUBREG)
2679 emit_move_insn (target, subtarget);
2681 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2688 (define_insn "insv_zero"
2689 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2690 (match_operand:SI 1 "const_int_operand" "M")
2691 (match_operand:SI 2 "const_int_operand" "M"))
2695 [(set_attr "length" "4")
2696 (set_attr "predicable" "yes")]
2699 (define_insn "insv_t2"
2700 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2701 (match_operand:SI 1 "const_int_operand" "M")
2702 (match_operand:SI 2 "const_int_operand" "M"))
2703 (match_operand:SI 3 "s_register_operand" "r"))]
2705 "bfi%?\t%0, %3, %2, %1"
2706 [(set_attr "length" "4")
2707 (set_attr "predicable" "yes")]
2710 ; constants for op 2 will never be given to these patterns.
2711 (define_insn_and_split "*anddi_notdi_di"
2712 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2713 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2714 (match_operand:DI 2 "s_register_operand" "r,0")))]
2717 "TARGET_32BIT && reload_completed
2718 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2719 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2720 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2721 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2724 operands[3] = gen_highpart (SImode, operands[0]);
2725 operands[0] = gen_lowpart (SImode, operands[0]);
2726 operands[4] = gen_highpart (SImode, operands[1]);
2727 operands[1] = gen_lowpart (SImode, operands[1]);
2728 operands[5] = gen_highpart (SImode, operands[2]);
2729 operands[2] = gen_lowpart (SImode, operands[2]);
2731 [(set_attr "length" "8")
2732 (set_attr "predicable" "yes")]
2735 (define_insn_and_split "*anddi_notzesidi_di"
2736 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2737 (and:DI (not:DI (zero_extend:DI
2738 (match_operand:SI 2 "s_register_operand" "r,r")))
2739 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2742 bic%?\\t%Q0, %Q1, %2
2744 ; (not (zero_extend ...)) allows us to just copy the high word from
2745 ; operand1 to operand0.
2748 && operands[0] != operands[1]"
2749 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2750 (set (match_dup 3) (match_dup 4))]
2753 operands[3] = gen_highpart (SImode, operands[0]);
2754 operands[0] = gen_lowpart (SImode, operands[0]);
2755 operands[4] = gen_highpart (SImode, operands[1]);
2756 operands[1] = gen_lowpart (SImode, operands[1]);
2758 [(set_attr "length" "4,8")
2759 (set_attr "predicable" "yes")]
2762 (define_insn_and_split "*anddi_notsesidi_di"
2763 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2764 (and:DI (not:DI (sign_extend:DI
2765 (match_operand:SI 2 "s_register_operand" "r,r")))
2766 (match_operand:DI 1 "s_register_operand" "0,r")))]
2769 "TARGET_32BIT && reload_completed"
2770 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2771 (set (match_dup 3) (and:SI (not:SI
2772 (ashiftrt:SI (match_dup 2) (const_int 31)))
2776 operands[3] = gen_highpart (SImode, operands[0]);
2777 operands[0] = gen_lowpart (SImode, operands[0]);
2778 operands[4] = gen_highpart (SImode, operands[1]);
2779 operands[1] = gen_lowpart (SImode, operands[1]);
2781 [(set_attr "length" "8")
2782 (set_attr "predicable" "yes")]
2785 (define_insn "andsi_notsi_si"
2786 [(set (match_operand:SI 0 "s_register_operand" "=r")
2787 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2788 (match_operand:SI 1 "s_register_operand" "r")))]
2790 "bic%?\\t%0, %1, %2"
2791 [(set_attr "predicable" "yes")]
2794 (define_insn "thumb1_bicsi3"
2795 [(set (match_operand:SI 0 "register_operand" "=l")
2796 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2797 (match_operand:SI 2 "register_operand" "0")))]
2800 [(set_attr "length" "2")
2801 (set_attr "conds" "set")])
2803 (define_insn "andsi_not_shiftsi_si"
2804 [(set (match_operand:SI 0 "s_register_operand" "=r")
2805 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2806 [(match_operand:SI 2 "s_register_operand" "r")
2807 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2808 (match_operand:SI 1 "s_register_operand" "r")))]
2810 "bic%?\\t%0, %1, %2%S4"
2811 [(set_attr "predicable" "yes")
2812 (set_attr "shift" "2")
2813 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2814 (const_string "alu_shift")
2815 (const_string "alu_shift_reg")))]
2818 (define_insn "*andsi_notsi_si_compare0"
2819 [(set (reg:CC_NOOV CC_REGNUM)
2821 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2822 (match_operand:SI 1 "s_register_operand" "r"))
2824 (set (match_operand:SI 0 "s_register_operand" "=r")
2825 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2827 "bic%.\\t%0, %1, %2"
2828 [(set_attr "conds" "set")]
2831 (define_insn "*andsi_notsi_si_compare0_scratch"
2832 [(set (reg:CC_NOOV CC_REGNUM)
2834 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2835 (match_operand:SI 1 "s_register_operand" "r"))
2837 (clobber (match_scratch:SI 0 "=r"))]
2839 "bic%.\\t%0, %1, %2"
2840 [(set_attr "conds" "set")]
2843 (define_expand "iordi3"
2844 [(set (match_operand:DI 0 "s_register_operand" "")
2845 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2846 (match_operand:DI 2 "neon_logic_op2" "")))]
2851 (define_insn "*iordi3_insn"
2852 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2853 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2854 (match_operand:DI 2 "s_register_operand" "r,r")))]
2855 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2857 [(set_attr "length" "8")
2858 (set_attr "predicable" "yes")]
2861 (define_insn "*iordi_zesidi_di"
2862 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2863 (ior:DI (zero_extend:DI
2864 (match_operand:SI 2 "s_register_operand" "r,r"))
2865 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2868 orr%?\\t%Q0, %Q1, %2
2870 [(set_attr "length" "4,8")
2871 (set_attr "predicable" "yes")]
2874 (define_insn "*iordi_sesidi_di"
2875 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2876 (ior:DI (sign_extend:DI
2877 (match_operand:SI 2 "s_register_operand" "r,r"))
2878 (match_operand:DI 1 "s_register_operand" "0,r")))]
2881 [(set_attr "length" "8")
2882 (set_attr "predicable" "yes")]
2885 (define_expand "iorsi3"
2886 [(set (match_operand:SI 0 "s_register_operand" "")
2887 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2888 (match_operand:SI 2 "reg_or_int_operand" "")))]
2891 if (GET_CODE (operands[2]) == CONST_INT)
2895 arm_split_constant (IOR, SImode, NULL_RTX,
2896 INTVAL (operands[2]), operands[0], operands[1],
2897 optimize && can_create_pseudo_p ());
2900 else /* TARGET_THUMB1 */
2902 rtx tmp = force_reg (SImode, operands[2]);
2903 if (rtx_equal_p (operands[0], operands[1]))
2907 operands[2] = operands[1];
2915 (define_insn_and_split "*iorsi3_insn"
2916 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2917 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2918 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2922 orn%?\\t%0, %1, #%B2
2925 && GET_CODE (operands[2]) == CONST_INT
2926 && !(const_ok_for_arm (INTVAL (operands[2]))
2927 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2928 [(clobber (const_int 0))]
2930 arm_split_constant (IOR, SImode, curr_insn,
2931 INTVAL (operands[2]), operands[0], operands[1], 0);
2934 [(set_attr "length" "4,4,16")
2935 (set_attr "arch" "32,t2,32")
2936 (set_attr "predicable" "yes")])
2938 (define_insn "*thumb1_iorsi3_insn"
2939 [(set (match_operand:SI 0 "register_operand" "=l")
2940 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2941 (match_operand:SI 2 "register_operand" "l")))]
2944 [(set_attr "length" "2")
2945 (set_attr "conds" "set")])
2948 [(match_scratch:SI 3 "r")
2949 (set (match_operand:SI 0 "arm_general_register_operand" "")
2950 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2951 (match_operand:SI 2 "const_int_operand" "")))]
2953 && !const_ok_for_arm (INTVAL (operands[2]))
2954 && const_ok_for_arm (~INTVAL (operands[2]))"
2955 [(set (match_dup 3) (match_dup 2))
2956 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2960 (define_insn "*iorsi3_compare0"
2961 [(set (reg:CC_NOOV CC_REGNUM)
2962 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2963 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2965 (set (match_operand:SI 0 "s_register_operand" "=r")
2966 (ior:SI (match_dup 1) (match_dup 2)))]
2968 "orr%.\\t%0, %1, %2"
2969 [(set_attr "conds" "set")]
2972 (define_insn "*iorsi3_compare0_scratch"
2973 [(set (reg:CC_NOOV CC_REGNUM)
2974 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2975 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2977 (clobber (match_scratch:SI 0 "=r"))]
2979 "orr%.\\t%0, %1, %2"
2980 [(set_attr "conds" "set")]
2983 (define_expand "xordi3"
2984 [(set (match_operand:DI 0 "s_register_operand" "")
2985 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2986 (match_operand:DI 2 "s_register_operand" "")))]
2991 (define_insn "*xordi3_insn"
2992 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2993 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2994 (match_operand:DI 2 "s_register_operand" "r,r")))]
2995 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2997 [(set_attr "length" "8")
2998 (set_attr "predicable" "yes")]
3001 (define_insn "*xordi_zesidi_di"
3002 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3003 (xor:DI (zero_extend:DI
3004 (match_operand:SI 2 "s_register_operand" "r,r"))
3005 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3008 eor%?\\t%Q0, %Q1, %2
3010 [(set_attr "length" "4,8")
3011 (set_attr "predicable" "yes")]
3014 (define_insn "*xordi_sesidi_di"
3015 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3016 (xor:DI (sign_extend:DI
3017 (match_operand:SI 2 "s_register_operand" "r,r"))
3018 (match_operand:DI 1 "s_register_operand" "0,r")))]
3021 [(set_attr "length" "8")
3022 (set_attr "predicable" "yes")]
3025 (define_expand "xorsi3"
3026 [(set (match_operand:SI 0 "s_register_operand" "")
3027 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3028 (match_operand:SI 2 "reg_or_int_operand" "")))]
3030 "if (GET_CODE (operands[2]) == CONST_INT)
3034 arm_split_constant (XOR, SImode, NULL_RTX,
3035 INTVAL (operands[2]), operands[0], operands[1],
3036 optimize && can_create_pseudo_p ());
3039 else /* TARGET_THUMB1 */
3041 rtx tmp = force_reg (SImode, operands[2]);
3042 if (rtx_equal_p (operands[0], operands[1]))
3046 operands[2] = operands[1];
3053 (define_insn_and_split "*arm_xorsi3"
3054 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3055 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3056 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
3062 && GET_CODE (operands[2]) == CONST_INT
3063 && !const_ok_for_arm (INTVAL (operands[2]))"
3064 [(clobber (const_int 0))]
3066 arm_split_constant (XOR, SImode, curr_insn,
3067 INTVAL (operands[2]), operands[0], operands[1], 0);
3070 [(set_attr "length" "4,16")
3071 (set_attr "predicable" "yes")]
3074 (define_insn "*thumb1_xorsi3_insn"
3075 [(set (match_operand:SI 0 "register_operand" "=l")
3076 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3077 (match_operand:SI 2 "register_operand" "l")))]
3080 [(set_attr "length" "2")
3081 (set_attr "conds" "set")])
3083 (define_insn "*xorsi3_compare0"
3084 [(set (reg:CC_NOOV CC_REGNUM)
3085 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3086 (match_operand:SI 2 "arm_rhs_operand" "rI"))
3088 (set (match_operand:SI 0 "s_register_operand" "=r")
3089 (xor:SI (match_dup 1) (match_dup 2)))]
3091 "eor%.\\t%0, %1, %2"
3092 [(set_attr "conds" "set")]
3095 (define_insn "*xorsi3_compare0_scratch"
3096 [(set (reg:CC_NOOV CC_REGNUM)
3097 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
3098 (match_operand:SI 1 "arm_rhs_operand" "rI"))
3102 [(set_attr "conds" "set")]
3105 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3106 ; (NOT D) we can sometimes merge the final NOT into one of the following
3110 [(set (match_operand:SI 0 "s_register_operand" "")
3111 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3112 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3113 (match_operand:SI 3 "arm_rhs_operand" "")))
3114 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3116 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3117 (not:SI (match_dup 3))))
3118 (set (match_dup 0) (not:SI (match_dup 4)))]
3122 (define_insn "*andsi_iorsi3_notsi"
3123 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3124 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3125 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3126 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3128 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3129 [(set_attr "length" "8")
3130 (set_attr "ce_count" "2")
3131 (set_attr "predicable" "yes")]
3134 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3135 ; insns are available?
3137 [(set (match_operand:SI 0 "s_register_operand" "")
3138 (match_operator:SI 1 "logical_binary_operator"
3139 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3140 (match_operand:SI 3 "const_int_operand" "")
3141 (match_operand:SI 4 "const_int_operand" ""))
3142 (match_operator:SI 9 "logical_binary_operator"
3143 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3144 (match_operand:SI 6 "const_int_operand" ""))
3145 (match_operand:SI 7 "s_register_operand" "")])]))
3146 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3148 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3149 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3152 [(ashift:SI (match_dup 2) (match_dup 4))
3156 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3159 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3163 [(set (match_operand:SI 0 "s_register_operand" "")
3164 (match_operator:SI 1 "logical_binary_operator"
3165 [(match_operator:SI 9 "logical_binary_operator"
3166 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3167 (match_operand:SI 6 "const_int_operand" ""))
3168 (match_operand:SI 7 "s_register_operand" "")])
3169 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3170 (match_operand:SI 3 "const_int_operand" "")
3171 (match_operand:SI 4 "const_int_operand" ""))]))
3172 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3174 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3175 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3178 [(ashift:SI (match_dup 2) (match_dup 4))
3182 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3185 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3189 [(set (match_operand:SI 0 "s_register_operand" "")
3190 (match_operator:SI 1 "logical_binary_operator"
3191 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3192 (match_operand:SI 3 "const_int_operand" "")
3193 (match_operand:SI 4 "const_int_operand" ""))
3194 (match_operator:SI 9 "logical_binary_operator"
3195 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3196 (match_operand:SI 6 "const_int_operand" ""))
3197 (match_operand:SI 7 "s_register_operand" "")])]))
3198 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3200 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3201 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3204 [(ashift:SI (match_dup 2) (match_dup 4))
3208 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3211 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3215 [(set (match_operand:SI 0 "s_register_operand" "")
3216 (match_operator:SI 1 "logical_binary_operator"
3217 [(match_operator:SI 9 "logical_binary_operator"
3218 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3219 (match_operand:SI 6 "const_int_operand" ""))
3220 (match_operand:SI 7 "s_register_operand" "")])
3221 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3222 (match_operand:SI 3 "const_int_operand" "")
3223 (match_operand:SI 4 "const_int_operand" ""))]))
3224 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3226 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3227 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3230 [(ashift:SI (match_dup 2) (match_dup 4))
3234 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3237 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3241 ;; Minimum and maximum insns
3243 (define_expand "smaxsi3"
3245 (set (match_operand:SI 0 "s_register_operand" "")
3246 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3247 (match_operand:SI 2 "arm_rhs_operand" "")))
3248 (clobber (reg:CC CC_REGNUM))])]
3251 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3253 /* No need for a clobber of the condition code register here. */
3254 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3255 gen_rtx_SMAX (SImode, operands[1],
3261 (define_insn "*smax_0"
3262 [(set (match_operand:SI 0 "s_register_operand" "=r")
3263 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3266 "bic%?\\t%0, %1, %1, asr #31"
3267 [(set_attr "predicable" "yes")]
3270 (define_insn "*smax_m1"
3271 [(set (match_operand:SI 0 "s_register_operand" "=r")
3272 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3275 "orr%?\\t%0, %1, %1, asr #31"
3276 [(set_attr "predicable" "yes")]
3279 (define_insn "*arm_smax_insn"
3280 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3281 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3282 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3283 (clobber (reg:CC CC_REGNUM))]
3286 cmp\\t%1, %2\;movlt\\t%0, %2
3287 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3288 [(set_attr "conds" "clob")
3289 (set_attr "length" "8,12")]
3292 (define_expand "sminsi3"
3294 (set (match_operand:SI 0 "s_register_operand" "")
3295 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3296 (match_operand:SI 2 "arm_rhs_operand" "")))
3297 (clobber (reg:CC CC_REGNUM))])]
3300 if (operands[2] == const0_rtx)
3302 /* No need for a clobber of the condition code register here. */
3303 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3304 gen_rtx_SMIN (SImode, operands[1],
3310 (define_insn "*smin_0"
3311 [(set (match_operand:SI 0 "s_register_operand" "=r")
3312 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3315 "and%?\\t%0, %1, %1, asr #31"
3316 [(set_attr "predicable" "yes")]
3319 (define_insn "*arm_smin_insn"
3320 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3321 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3322 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3323 (clobber (reg:CC CC_REGNUM))]
3326 cmp\\t%1, %2\;movge\\t%0, %2
3327 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3328 [(set_attr "conds" "clob")
3329 (set_attr "length" "8,12")]
3332 (define_expand "umaxsi3"
3334 (set (match_operand:SI 0 "s_register_operand" "")
3335 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3336 (match_operand:SI 2 "arm_rhs_operand" "")))
3337 (clobber (reg:CC CC_REGNUM))])]
3342 (define_insn "*arm_umaxsi3"
3343 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3344 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3345 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3346 (clobber (reg:CC CC_REGNUM))]
3349 cmp\\t%1, %2\;movcc\\t%0, %2
3350 cmp\\t%1, %2\;movcs\\t%0, %1
3351 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3352 [(set_attr "conds" "clob")
3353 (set_attr "length" "8,8,12")]
3356 (define_expand "uminsi3"
3358 (set (match_operand:SI 0 "s_register_operand" "")
3359 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3360 (match_operand:SI 2 "arm_rhs_operand" "")))
3361 (clobber (reg:CC CC_REGNUM))])]
3366 (define_insn "*arm_uminsi3"
3367 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3368 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3369 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3370 (clobber (reg:CC CC_REGNUM))]
3373 cmp\\t%1, %2\;movcs\\t%0, %2
3374 cmp\\t%1, %2\;movcc\\t%0, %1
3375 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3376 [(set_attr "conds" "clob")
3377 (set_attr "length" "8,8,12")]
3380 (define_insn "*store_minmaxsi"
3381 [(set (match_operand:SI 0 "memory_operand" "=m")
3382 (match_operator:SI 3 "minmax_operator"
3383 [(match_operand:SI 1 "s_register_operand" "r")
3384 (match_operand:SI 2 "s_register_operand" "r")]))
3385 (clobber (reg:CC CC_REGNUM))]
3388 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3389 operands[1], operands[2]);
3390 output_asm_insn (\"cmp\\t%1, %2\", operands);
3392 output_asm_insn (\"ite\t%d3\", operands);
3393 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3394 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3397 [(set_attr "conds" "clob")
3398 (set (attr "length")
3399 (if_then_else (eq_attr "is_thumb" "yes")
3402 (set_attr "type" "store1")]
3405 ; Reject the frame pointer in operand[1], since reloading this after
3406 ; it has been eliminated can cause carnage.
3407 (define_insn "*minmax_arithsi"
3408 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3409 (match_operator:SI 4 "shiftable_operator"
3410 [(match_operator:SI 5 "minmax_operator"
3411 [(match_operand:SI 2 "s_register_operand" "r,r")
3412 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3413 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3414 (clobber (reg:CC CC_REGNUM))]
3415 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3418 enum rtx_code code = GET_CODE (operands[4]);
3421 if (which_alternative != 0 || operands[3] != const0_rtx
3422 || (code != PLUS && code != IOR && code != XOR))
3427 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3428 operands[2], operands[3]);
3429 output_asm_insn (\"cmp\\t%2, %3\", operands);
3433 output_asm_insn (\"ite\\t%d5\", operands);
3435 output_asm_insn (\"it\\t%d5\", operands);
3437 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3439 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3442 [(set_attr "conds" "clob")
3443 (set (attr "length")
3444 (if_then_else (eq_attr "is_thumb" "yes")
3450 ;; Shift and rotation insns
3452 (define_expand "ashldi3"
3453 [(set (match_operand:DI 0 "s_register_operand" "")
3454 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3455 (match_operand:SI 2 "reg_or_int_operand" "")))]
3458 if (GET_CODE (operands[2]) == CONST_INT)
3460 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3462 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3465 /* Ideally we shouldn't fail here if we could know that operands[1]
3466 ends up already living in an iwmmxt register. Otherwise it's
3467 cheaper to have the alternate code being generated than moving
3468 values to iwmmxt regs and back. */
3471 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3476 (define_insn "arm_ashldi3_1bit"
3477 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3478 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3480 (clobber (reg:CC CC_REGNUM))]
3482 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3483 [(set_attr "conds" "clob")
3484 (set_attr "length" "8")]
3487 (define_expand "ashlsi3"
3488 [(set (match_operand:SI 0 "s_register_operand" "")
3489 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3490 (match_operand:SI 2 "arm_rhs_operand" "")))]
3493 if (GET_CODE (operands[2]) == CONST_INT
3494 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3496 emit_insn (gen_movsi (operands[0], const0_rtx));
3502 (define_insn "*thumb1_ashlsi3"
3503 [(set (match_operand:SI 0 "register_operand" "=l,l")
3504 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3505 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3508 [(set_attr "length" "2")
3509 (set_attr "conds" "set")])
3511 (define_expand "ashrdi3"
3512 [(set (match_operand:DI 0 "s_register_operand" "")
3513 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3514 (match_operand:SI 2 "reg_or_int_operand" "")))]
3517 if (GET_CODE (operands[2]) == CONST_INT)
3519 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3521 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3524 /* Ideally we shouldn't fail here if we could know that operands[1]
3525 ends up already living in an iwmmxt register. Otherwise it's
3526 cheaper to have the alternate code being generated than moving
3527 values to iwmmxt regs and back. */
3530 else if (!TARGET_REALLY_IWMMXT)
3535 (define_insn "arm_ashrdi3_1bit"
3536 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3537 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3539 (clobber (reg:CC CC_REGNUM))]
3541 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3542 [(set_attr "conds" "clob")
3543 (set_attr "insn" "mov")
3544 (set_attr "length" "8")]
3547 (define_expand "ashrsi3"
3548 [(set (match_operand:SI 0 "s_register_operand" "")
3549 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3550 (match_operand:SI 2 "arm_rhs_operand" "")))]
3553 if (GET_CODE (operands[2]) == CONST_INT
3554 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3555 operands[2] = GEN_INT (31);
3559 (define_insn "*thumb1_ashrsi3"
3560 [(set (match_operand:SI 0 "register_operand" "=l,l")
3561 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3562 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3565 [(set_attr "length" "2")
3566 (set_attr "conds" "set")])
3568 (define_expand "lshrdi3"
3569 [(set (match_operand:DI 0 "s_register_operand" "")
3570 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3571 (match_operand:SI 2 "reg_or_int_operand" "")))]
3574 if (GET_CODE (operands[2]) == CONST_INT)
3576 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3578 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3581 /* Ideally we shouldn't fail here if we could know that operands[1]
3582 ends up already living in an iwmmxt register. Otherwise it's
3583 cheaper to have the alternate code being generated than moving
3584 values to iwmmxt regs and back. */
3587 else if (!TARGET_REALLY_IWMMXT)
3592 (define_insn "arm_lshrdi3_1bit"
3593 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3594 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3596 (clobber (reg:CC CC_REGNUM))]
3598 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3599 [(set_attr "conds" "clob")
3600 (set_attr "insn" "mov")
3601 (set_attr "length" "8")]
3604 (define_expand "lshrsi3"
3605 [(set (match_operand:SI 0 "s_register_operand" "")
3606 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3607 (match_operand:SI 2 "arm_rhs_operand" "")))]
3610 if (GET_CODE (operands[2]) == CONST_INT
3611 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3613 emit_insn (gen_movsi (operands[0], const0_rtx));
3619 (define_insn "*thumb1_lshrsi3"
3620 [(set (match_operand:SI 0 "register_operand" "=l,l")
3621 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3622 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3625 [(set_attr "length" "2")
3626 (set_attr "conds" "set")])
3628 (define_expand "rotlsi3"
3629 [(set (match_operand:SI 0 "s_register_operand" "")
3630 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3631 (match_operand:SI 2 "reg_or_int_operand" "")))]
3634 if (GET_CODE (operands[2]) == CONST_INT)
3635 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3638 rtx reg = gen_reg_rtx (SImode);
3639 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3645 (define_expand "rotrsi3"
3646 [(set (match_operand:SI 0 "s_register_operand" "")
3647 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3648 (match_operand:SI 2 "arm_rhs_operand" "")))]
3653 if (GET_CODE (operands[2]) == CONST_INT
3654 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3655 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3657 else /* TARGET_THUMB1 */
3659 if (GET_CODE (operands [2]) == CONST_INT)
3660 operands [2] = force_reg (SImode, operands[2]);
3665 (define_insn "*thumb1_rotrsi3"
3666 [(set (match_operand:SI 0 "register_operand" "=l")
3667 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3668 (match_operand:SI 2 "register_operand" "l")))]
3671 [(set_attr "length" "2")]
3674 (define_insn "*arm_shiftsi3"
3675 [(set (match_operand:SI 0 "s_register_operand" "=r")
3676 (match_operator:SI 3 "shift_operator"
3677 [(match_operand:SI 1 "s_register_operand" "r")
3678 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3680 "* return arm_output_shift(operands, 0);"
3681 [(set_attr "predicable" "yes")
3682 (set_attr "shift" "1")
3683 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3684 (const_string "alu_shift")
3685 (const_string "alu_shift_reg")))]
3688 (define_insn "*shiftsi3_compare0"
3689 [(set (reg:CC_NOOV CC_REGNUM)
3690 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3691 [(match_operand:SI 1 "s_register_operand" "r")
3692 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3694 (set (match_operand:SI 0 "s_register_operand" "=r")
3695 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3697 "* return arm_output_shift(operands, 1);"
3698 [(set_attr "conds" "set")
3699 (set_attr "shift" "1")
3700 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3701 (const_string "alu_shift")
3702 (const_string "alu_shift_reg")))]
3705 (define_insn "*shiftsi3_compare0_scratch"
3706 [(set (reg:CC_NOOV CC_REGNUM)
3707 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3708 [(match_operand:SI 1 "s_register_operand" "r")
3709 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3711 (clobber (match_scratch:SI 0 "=r"))]
3713 "* return arm_output_shift(operands, 1);"
3714 [(set_attr "conds" "set")
3715 (set_attr "shift" "1")]
3718 (define_insn "*not_shiftsi"
3719 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3720 (not:SI (match_operator:SI 3 "shift_operator"
3721 [(match_operand:SI 1 "s_register_operand" "r,r")
3722 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3725 [(set_attr "predicable" "yes")
3726 (set_attr "shift" "1")
3727 (set_attr "insn" "mvn")
3728 (set_attr "arch" "32,a")
3729 (set_attr "type" "alu_shift,alu_shift_reg")])
3731 (define_insn "*not_shiftsi_compare0"
3732 [(set (reg:CC_NOOV CC_REGNUM)
3734 (not:SI (match_operator:SI 3 "shift_operator"
3735 [(match_operand:SI 1 "s_register_operand" "r,r")
3736 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3738 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3739 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3742 [(set_attr "conds" "set")
3743 (set_attr "shift" "1")
3744 (set_attr "insn" "mvn")
3745 (set_attr "arch" "32,a")
3746 (set_attr "type" "alu_shift,alu_shift_reg")])
3748 (define_insn "*not_shiftsi_compare0_scratch"
3749 [(set (reg:CC_NOOV CC_REGNUM)
3751 (not:SI (match_operator:SI 3 "shift_operator"
3752 [(match_operand:SI 1 "s_register_operand" "r,r")
3753 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3755 (clobber (match_scratch:SI 0 "=r,r"))]
3758 [(set_attr "conds" "set")
3759 (set_attr "shift" "1")
3760 (set_attr "insn" "mvn")
3761 (set_attr "arch" "32,a")
3762 (set_attr "type" "alu_shift,alu_shift_reg")])
3764 ;; We don't really have extzv, but defining this using shifts helps
3765 ;; to reduce register pressure later on.
3767 (define_expand "extzv"
3768 [(set (match_operand 0 "s_register_operand" "")
3769 (zero_extract (match_operand 1 "nonimmediate_operand" "")
3770 (match_operand 2 "const_int_operand" "")
3771 (match_operand 3 "const_int_operand" "")))]
3772 "TARGET_THUMB1 || arm_arch_thumb2"
3775 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3776 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3778 if (arm_arch_thumb2)
3780 HOST_WIDE_INT width = INTVAL (operands[2]);
3781 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3783 if (unaligned_access && MEM_P (operands[1])
3784 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3788 if (BYTES_BIG_ENDIAN)
3789 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3794 base_addr = adjust_address (operands[1], SImode,
3795 bitpos / BITS_PER_UNIT);
3796 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3800 rtx dest = operands[0];
3801 rtx tmp = gen_reg_rtx (SImode);
3803 /* We may get a paradoxical subreg here. Strip it off. */
3804 if (GET_CODE (dest) == SUBREG
3805 && GET_MODE (dest) == SImode
3806 && GET_MODE (SUBREG_REG (dest)) == HImode)
3807 dest = SUBREG_REG (dest);
3809 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3812 base_addr = adjust_address (operands[1], HImode,
3813 bitpos / BITS_PER_UNIT);
3814 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3815 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3819 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3821 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3829 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3832 operands[3] = GEN_INT (rshift);
3836 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3840 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3841 operands[3], gen_reg_rtx (SImode)));
3846 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3848 (define_expand "extzv_t1"
3849 [(set (match_operand:SI 4 "s_register_operand" "")
3850 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
3851 (match_operand:SI 2 "const_int_operand" "")))
3852 (set (match_operand:SI 0 "s_register_operand" "")
3853 (lshiftrt:SI (match_dup 4)
3854 (match_operand:SI 3 "const_int_operand" "")))]
3858 (define_expand "extv"
3859 [(set (match_operand 0 "s_register_operand" "")
3860 (sign_extract (match_operand 1 "nonimmediate_operand" "")
3861 (match_operand 2 "const_int_operand" "")
3862 (match_operand 3 "const_int_operand" "")))]
3865 HOST_WIDE_INT width = INTVAL (operands[2]);
3866 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3868 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3869 && (bitpos % BITS_PER_UNIT) == 0)
3873 if (BYTES_BIG_ENDIAN)
3874 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3878 base_addr = adjust_address (operands[1], SImode,
3879 bitpos / BITS_PER_UNIT);
3880 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3884 rtx dest = operands[0];
3885 rtx tmp = gen_reg_rtx (SImode);
3887 /* We may get a paradoxical subreg here. Strip it off. */
3888 if (GET_CODE (dest) == SUBREG
3889 && GET_MODE (dest) == SImode
3890 && GET_MODE (SUBREG_REG (dest)) == HImode)
3891 dest = SUBREG_REG (dest);
3893 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3896 base_addr = adjust_address (operands[1], HImode,
3897 bitpos / BITS_PER_UNIT);
3898 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3899 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3904 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3906 else if (GET_MODE (operands[0]) == SImode
3907 && GET_MODE (operands[1]) == SImode)
3909 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3917 ; Helper to expand register forms of extv with the proper modes.
3919 (define_expand "extv_regsi"
3920 [(set (match_operand:SI 0 "s_register_operand" "")
3921 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
3922 (match_operand 2 "const_int_operand" "")
3923 (match_operand 3 "const_int_operand" "")))]
3928 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3930 (define_insn "unaligned_loadsi"
3931 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3932 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
3933 UNSPEC_UNALIGNED_LOAD))]
3934 "unaligned_access && TARGET_32BIT"
3935 "ldr%?\t%0, %1\t@ unaligned"
3936 [(set_attr "arch" "t2,any")
3937 (set_attr "length" "2,4")
3938 (set_attr "predicable" "yes")
3939 (set_attr "type" "load1")])
3941 (define_insn "unaligned_loadhis"
3942 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3944 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
3945 UNSPEC_UNALIGNED_LOAD)))]
3946 "unaligned_access && TARGET_32BIT"
3947 "ldr%(sh%)\t%0, %1\t@ unaligned"
3948 [(set_attr "arch" "t2,any")
3949 (set_attr "length" "2,4")
3950 (set_attr "predicable" "yes")
3951 (set_attr "type" "load_byte")])
3953 (define_insn "unaligned_loadhiu"
3954 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3956 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
3957 UNSPEC_UNALIGNED_LOAD)))]
3958 "unaligned_access && TARGET_32BIT"
3959 "ldr%(h%)\t%0, %1\t@ unaligned"
3960 [(set_attr "arch" "t2,any")
3961 (set_attr "length" "2,4")
3962 (set_attr "predicable" "yes")
3963 (set_attr "type" "load_byte")])
3965 (define_insn "unaligned_storesi"
3966 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
3967 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
3968 UNSPEC_UNALIGNED_STORE))]
3969 "unaligned_access && TARGET_32BIT"
3970 "str%?\t%1, %0\t@ unaligned"
3971 [(set_attr "arch" "t2,any")
3972 (set_attr "length" "2,4")
3973 (set_attr "predicable" "yes")
3974 (set_attr "type" "store1")])
3976 (define_insn "unaligned_storehi"
3977 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
3978 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
3979 UNSPEC_UNALIGNED_STORE))]
3980 "unaligned_access && TARGET_32BIT"
3981 "str%(h%)\t%1, %0\t@ unaligned"
3982 [(set_attr "arch" "t2,any")
3983 (set_attr "length" "2,4")
3984 (set_attr "predicable" "yes")
3985 (set_attr "type" "store1")])
3987 (define_insn "*extv_reg"
3988 [(set (match_operand:SI 0 "s_register_operand" "=r")
3989 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3990 (match_operand:SI 2 "const_int_operand" "M")
3991 (match_operand:SI 3 "const_int_operand" "M")))]
3993 "sbfx%?\t%0, %1, %3, %2"
3994 [(set_attr "length" "4")
3995 (set_attr "predicable" "yes")]
3998 (define_insn "extzv_t2"
3999 [(set (match_operand:SI 0 "s_register_operand" "=r")
4000 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4001 (match_operand:SI 2 "const_int_operand" "M")
4002 (match_operand:SI 3 "const_int_operand" "M")))]
4004 "ubfx%?\t%0, %1, %3, %2"
4005 [(set_attr "length" "4")
4006 (set_attr "predicable" "yes")]
4010 ;; Division instructions
4011 (define_insn "divsi3"
4012 [(set (match_operand:SI 0 "s_register_operand" "=r")
4013 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4014 (match_operand:SI 2 "s_register_operand" "r")))]
4016 "sdiv%?\t%0, %1, %2"
4017 [(set_attr "predicable" "yes")
4018 (set_attr "insn" "sdiv")]
4021 (define_insn "udivsi3"
4022 [(set (match_operand:SI 0 "s_register_operand" "=r")
4023 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4024 (match_operand:SI 2 "s_register_operand" "r")))]
4026 "udiv%?\t%0, %1, %2"
4027 [(set_attr "predicable" "yes")
4028 (set_attr "insn" "udiv")]
4032 ;; Unary arithmetic insns
4034 (define_expand "negdi2"
4036 [(set (match_operand:DI 0 "s_register_operand" "")
4037 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4038 (clobber (reg:CC CC_REGNUM))])]
4043 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4044 ;; The first alternative allows the common case of a *full* overlap.
4045 (define_insn "*arm_negdi2"
4046 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4047 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4048 (clobber (reg:CC CC_REGNUM))]
4050 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4051 [(set_attr "conds" "clob")
4052 (set_attr "length" "8")]
4055 (define_insn "*thumb1_negdi2"
4056 [(set (match_operand:DI 0 "register_operand" "=&l")
4057 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4058 (clobber (reg:CC CC_REGNUM))]
4060 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4061 [(set_attr "length" "6")]
4064 (define_expand "negsi2"
4065 [(set (match_operand:SI 0 "s_register_operand" "")
4066 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4071 (define_insn "*arm_negsi2"
4072 [(set (match_operand:SI 0 "s_register_operand" "=r")
4073 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4075 "rsb%?\\t%0, %1, #0"
4076 [(set_attr "predicable" "yes")]
4079 (define_insn "*thumb1_negsi2"
4080 [(set (match_operand:SI 0 "register_operand" "=l")
4081 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4084 [(set_attr "length" "2")]
4087 (define_expand "negsf2"
4088 [(set (match_operand:SF 0 "s_register_operand" "")
4089 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4090 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4094 (define_expand "negdf2"
4095 [(set (match_operand:DF 0 "s_register_operand" "")
4096 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4097 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
4100 ;; abssi2 doesn't really clobber the condition codes if a different register
4101 ;; is being set. To keep things simple, assume during rtl manipulations that
4102 ;; it does, but tell the final scan operator the truth. Similarly for
4105 (define_expand "abssi2"
4107 [(set (match_operand:SI 0 "s_register_operand" "")
4108 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4109 (clobber (match_dup 2))])]
4113 operands[2] = gen_rtx_SCRATCH (SImode);
4115 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4118 (define_insn "*arm_abssi2"
4119 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4120 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4121 (clobber (reg:CC CC_REGNUM))]
4124 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4125 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
4126 [(set_attr "conds" "clob,*")
4127 (set_attr "shift" "1")
4128 ;; predicable can't be set based on the variant, so left as no
4129 (set_attr "length" "8")]
4132 (define_insn_and_split "*thumb1_abssi2"
4133 [(set (match_operand:SI 0 "s_register_operand" "=l")
4134 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4135 (clobber (match_scratch:SI 2 "=&l"))]
4138 "TARGET_THUMB1 && reload_completed"
4139 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4140 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4141 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4143 [(set_attr "length" "6")]
4146 (define_insn "*arm_neg_abssi2"
4147 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4148 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4149 (clobber (reg:CC CC_REGNUM))]
4152 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4153 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
4154 [(set_attr "conds" "clob,*")
4155 (set_attr "shift" "1")
4156 ;; predicable can't be set based on the variant, so left as no
4157 (set_attr "length" "8")]
4160 (define_insn_and_split "*thumb1_neg_abssi2"
4161 [(set (match_operand:SI 0 "s_register_operand" "=l")
4162 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4163 (clobber (match_scratch:SI 2 "=&l"))]
4166 "TARGET_THUMB1 && reload_completed"
4167 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4168 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4169 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4171 [(set_attr "length" "6")]
4174 (define_expand "abssf2"
4175 [(set (match_operand:SF 0 "s_register_operand" "")
4176 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4177 "TARGET_32BIT && TARGET_HARD_FLOAT"
4180 (define_expand "absdf2"
4181 [(set (match_operand:DF 0 "s_register_operand" "")
4182 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4183 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4186 (define_expand "sqrtsf2"
4187 [(set (match_operand:SF 0 "s_register_operand" "")
4188 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4189 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
4192 (define_expand "sqrtdf2"
4193 [(set (match_operand:DF 0 "s_register_operand" "")
4194 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4195 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
4198 (define_insn_and_split "one_cmpldi2"
4199 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
4200 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
4203 "TARGET_32BIT && reload_completed"
4204 [(set (match_dup 0) (not:SI (match_dup 1)))
4205 (set (match_dup 2) (not:SI (match_dup 3)))]
4208 operands[2] = gen_highpart (SImode, operands[0]);
4209 operands[0] = gen_lowpart (SImode, operands[0]);
4210 operands[3] = gen_highpart (SImode, operands[1]);
4211 operands[1] = gen_lowpart (SImode, operands[1]);
4213 [(set_attr "length" "8")
4214 (set_attr "predicable" "yes")]
4217 (define_expand "one_cmplsi2"
4218 [(set (match_operand:SI 0 "s_register_operand" "")
4219 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
4224 (define_insn "*arm_one_cmplsi2"
4225 [(set (match_operand:SI 0 "s_register_operand" "=r")
4226 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
4229 [(set_attr "predicable" "yes")
4230 (set_attr "insn" "mvn")]
4233 (define_insn "*thumb1_one_cmplsi2"
4234 [(set (match_operand:SI 0 "register_operand" "=l")
4235 (not:SI (match_operand:SI 1 "register_operand" "l")))]
4238 [(set_attr "length" "2")
4239 (set_attr "insn" "mvn")]
4242 (define_insn "*notsi_compare0"
4243 [(set (reg:CC_NOOV CC_REGNUM)
4244 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4246 (set (match_operand:SI 0 "s_register_operand" "=r")
4247 (not:SI (match_dup 1)))]
4250 [(set_attr "conds" "set")
4251 (set_attr "insn" "mvn")]
4254 (define_insn "*notsi_compare0_scratch"
4255 [(set (reg:CC_NOOV CC_REGNUM)
4256 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4258 (clobber (match_scratch:SI 0 "=r"))]
4261 [(set_attr "conds" "set")
4262 (set_attr "insn" "mvn")]
4265 ;; Fixed <--> Floating conversion insns
4267 (define_expand "floatsihf2"
4268 [(set (match_operand:HF 0 "general_operand" "")
4269 (float:HF (match_operand:SI 1 "general_operand" "")))]
4273 rtx op1 = gen_reg_rtx (SFmode);
4274 expand_float (op1, operands[1], 0);
4275 op1 = convert_to_mode (HFmode, op1, 0);
4276 emit_move_insn (operands[0], op1);
4281 (define_expand "floatdihf2"
4282 [(set (match_operand:HF 0 "general_operand" "")
4283 (float:HF (match_operand:DI 1 "general_operand" "")))]
4287 rtx op1 = gen_reg_rtx (SFmode);
4288 expand_float (op1, operands[1], 0);
4289 op1 = convert_to_mode (HFmode, op1, 0);
4290 emit_move_insn (operands[0], op1);
4295 (define_expand "floatsisf2"
4296 [(set (match_operand:SF 0 "s_register_operand" "")
4297 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
4298 "TARGET_32BIT && TARGET_HARD_FLOAT"
4300 if (TARGET_MAVERICK)
4302 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
4307 (define_expand "floatsidf2"
4308 [(set (match_operand:DF 0 "s_register_operand" "")
4309 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
4310 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4312 if (TARGET_MAVERICK)
4314 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
4319 (define_expand "fix_trunchfsi2"
4320 [(set (match_operand:SI 0 "general_operand" "")
4321 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4325 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4326 expand_fix (operands[0], op1, 0);
4331 (define_expand "fix_trunchfdi2"
4332 [(set (match_operand:DI 0 "general_operand" "")
4333 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4337 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4338 expand_fix (operands[0], op1, 0);
4343 (define_expand "fix_truncsfsi2"
4344 [(set (match_operand:SI 0 "s_register_operand" "")
4345 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
4346 "TARGET_32BIT && TARGET_HARD_FLOAT"
4348 if (TARGET_MAVERICK)
4350 if (!cirrus_fp_register (operands[0], SImode))
4351 operands[0] = force_reg (SImode, operands[0]);
4352 if (!cirrus_fp_register (operands[1], SFmode))
4353 operands[1] = force_reg (SFmode, operands[0]);
4354 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
4359 (define_expand "fix_truncdfsi2"
4360 [(set (match_operand:SI 0 "s_register_operand" "")
4361 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4362 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4364 if (TARGET_MAVERICK)
4366 if (!cirrus_fp_register (operands[1], DFmode))
4367 operands[1] = force_reg (DFmode, operands[0]);
4368 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
4375 (define_expand "truncdfsf2"
4376 [(set (match_operand:SF 0 "s_register_operand" "")
4378 (match_operand:DF 1 "s_register_operand" "")))]
4379 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4383 /* DFmode -> HFmode conversions have to go through SFmode. */
4384 (define_expand "truncdfhf2"
4385 [(set (match_operand:HF 0 "general_operand" "")
4387 (match_operand:DF 1 "general_operand" "")))]
4392 op1 = convert_to_mode (SFmode, operands[1], 0);
4393 op1 = convert_to_mode (HFmode, op1, 0);
4394 emit_move_insn (operands[0], op1);
4399 ;; Zero and sign extension instructions.
4401 (define_insn "zero_extend<mode>di2"
4402 [(set (match_operand:DI 0 "s_register_operand" "=r")
4403 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
4404 "<qhs_zextenddi_cstr>")))]
4405 "TARGET_32BIT <qhs_zextenddi_cond>"
4407 [(set_attr "length" "8")
4408 (set_attr "ce_count" "2")
4409 (set_attr "predicable" "yes")]
4412 (define_insn "extend<mode>di2"
4413 [(set (match_operand:DI 0 "s_register_operand" "=r")
4414 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4415 "<qhs_extenddi_cstr>")))]
4416 "TARGET_32BIT <qhs_sextenddi_cond>"
4418 [(set_attr "length" "8")
4419 (set_attr "ce_count" "2")
4420 (set_attr "shift" "1")
4421 (set_attr "predicable" "yes")]
4424 ;; Splits for all extensions to DImode
4426 [(set (match_operand:DI 0 "s_register_operand" "")
4427 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4429 [(set (match_dup 0) (match_dup 1))]
4431 rtx lo_part = gen_lowpart (SImode, operands[0]);
4432 enum machine_mode src_mode = GET_MODE (operands[1]);
4434 if (REG_P (operands[0])
4435 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4436 emit_clobber (operands[0]);
4437 if (!REG_P (lo_part) || src_mode != SImode
4438 || !rtx_equal_p (lo_part, operands[1]))
4440 if (src_mode == SImode)
4441 emit_move_insn (lo_part, operands[1]);
4443 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4444 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4445 operands[1] = lo_part;
4447 operands[0] = gen_highpart (SImode, operands[0]);
4448 operands[1] = const0_rtx;
4452 [(set (match_operand:DI 0 "s_register_operand" "")
4453 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4455 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4457 rtx lo_part = gen_lowpart (SImode, operands[0]);
4458 enum machine_mode src_mode = GET_MODE (operands[1]);
4460 if (REG_P (operands[0])
4461 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4462 emit_clobber (operands[0]);
4464 if (!REG_P (lo_part) || src_mode != SImode
4465 || !rtx_equal_p (lo_part, operands[1]))
4467 if (src_mode == SImode)
4468 emit_move_insn (lo_part, operands[1]);
4470 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4471 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4472 operands[1] = lo_part;
4474 operands[0] = gen_highpart (SImode, operands[0]);
4477 (define_expand "zero_extendhisi2"
4478 [(set (match_operand:SI 0 "s_register_operand" "")
4479 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4482 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4484 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4487 if (!arm_arch6 && !MEM_P (operands[1]))
4489 rtx t = gen_lowpart (SImode, operands[1]);
4490 rtx tmp = gen_reg_rtx (SImode);
4491 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4492 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4498 [(set (match_operand:SI 0 "s_register_operand" "")
4499 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4500 "!TARGET_THUMB2 && !arm_arch6"
4501 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4502 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4504 operands[2] = gen_lowpart (SImode, operands[1]);
4507 (define_insn "*thumb1_zero_extendhisi2"
4508 [(set (match_operand:SI 0 "register_operand" "=l,l")
4509 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4514 if (which_alternative == 0 && arm_arch6)
4515 return "uxth\t%0, %1";
4516 if (which_alternative == 0)
4519 mem = XEXP (operands[1], 0);
4521 if (GET_CODE (mem) == CONST)
4522 mem = XEXP (mem, 0);
4524 if (GET_CODE (mem) == PLUS)
4526 rtx a = XEXP (mem, 0);
4528 /* This can happen due to bugs in reload. */
4529 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4532 ops[0] = operands[0];
4535 output_asm_insn ("mov\t%0, %1", ops);
4537 XEXP (mem, 0) = operands[0];
4541 return "ldrh\t%0, %1";
4543 [(set_attr_alternative "length"
4544 [(if_then_else (eq_attr "is_arch6" "yes")
4545 (const_int 2) (const_int 4))
4547 (set_attr "type" "alu_shift,load_byte")]
4550 (define_insn "*arm_zero_extendhisi2"
4551 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4552 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4553 "TARGET_ARM && arm_arch4 && !arm_arch6"
4557 [(set_attr "type" "alu_shift,load_byte")
4558 (set_attr "predicable" "yes")]
4561 (define_insn "*arm_zero_extendhisi2_v6"
4562 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4563 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4564 "TARGET_ARM && arm_arch6"
4568 [(set_attr "type" "alu_shift,load_byte")
4569 (set_attr "predicable" "yes")]
4572 (define_insn "*arm_zero_extendhisi2addsi"
4573 [(set (match_operand:SI 0 "s_register_operand" "=r")
4574 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4575 (match_operand:SI 2 "s_register_operand" "r")))]
4577 "uxtah%?\\t%0, %2, %1"
4578 [(set_attr "type" "alu_shift")
4579 (set_attr "predicable" "yes")]
4582 (define_expand "zero_extendqisi2"
4583 [(set (match_operand:SI 0 "s_register_operand" "")
4584 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4587 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4589 emit_insn (gen_andsi3 (operands[0],
4590 gen_lowpart (SImode, operands[1]),
4594 if (!arm_arch6 && !MEM_P (operands[1]))
4596 rtx t = gen_lowpart (SImode, operands[1]);
4597 rtx tmp = gen_reg_rtx (SImode);
4598 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4599 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4605 [(set (match_operand:SI 0 "s_register_operand" "")
4606 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4608 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4609 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4611 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4614 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4619 (define_insn "*thumb1_zero_extendqisi2"
4620 [(set (match_operand:SI 0 "register_operand" "=l,l")
4621 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4622 "TARGET_THUMB1 && !arm_arch6"
4626 [(set_attr "length" "4,2")
4627 (set_attr "type" "alu_shift,load_byte")
4628 (set_attr "pool_range" "*,32")]
4631 (define_insn "*thumb1_zero_extendqisi2_v6"
4632 [(set (match_operand:SI 0 "register_operand" "=l,l")
4633 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4634 "TARGET_THUMB1 && arm_arch6"
4638 [(set_attr "length" "2")
4639 (set_attr "type" "alu_shift,load_byte")]
4642 (define_insn "*arm_zero_extendqisi2"
4643 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4644 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4645 "TARGET_ARM && !arm_arch6"
4648 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4649 [(set_attr "length" "8,4")
4650 (set_attr "type" "alu_shift,load_byte")
4651 (set_attr "predicable" "yes")]
4654 (define_insn "*arm_zero_extendqisi2_v6"
4655 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4656 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4657 "TARGET_ARM && arm_arch6"
4660 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4661 [(set_attr "type" "alu_shift,load_byte")
4662 (set_attr "predicable" "yes")]
4665 (define_insn "*arm_zero_extendqisi2addsi"
4666 [(set (match_operand:SI 0 "s_register_operand" "=r")
4667 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4668 (match_operand:SI 2 "s_register_operand" "r")))]
4670 "uxtab%?\\t%0, %2, %1"
4671 [(set_attr "predicable" "yes")
4672 (set_attr "insn" "xtab")
4673 (set_attr "type" "alu_shift")]
4677 [(set (match_operand:SI 0 "s_register_operand" "")
4678 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4679 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4680 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4681 [(set (match_dup 2) (match_dup 1))
4682 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4687 [(set (match_operand:SI 0 "s_register_operand" "")
4688 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4689 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4690 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4691 [(set (match_dup 2) (match_dup 1))
4692 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4698 [(set (match_operand:SI 0 "s_register_operand" "")
4699 (ior_xor:SI (and:SI (ashift:SI
4700 (match_operand:SI 1 "s_register_operand" "")
4701 (match_operand:SI 2 "const_int_operand" ""))
4702 (match_operand:SI 3 "const_int_operand" ""))
4704 (match_operator 5 "subreg_lowpart_operator"
4705 [(match_operand:SI 4 "s_register_operand" "")]))))]
4707 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4708 == (GET_MODE_MASK (GET_MODE (operands[5]))
4709 & (GET_MODE_MASK (GET_MODE (operands[5]))
4710 << (INTVAL (operands[2])))))"
4711 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4713 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4714 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4717 (define_insn "*compareqi_eq0"
4718 [(set (reg:CC_Z CC_REGNUM)
4719 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4723 [(set_attr "conds" "set")
4724 (set_attr "predicable" "yes")]
4727 (define_expand "extendhisi2"
4728 [(set (match_operand:SI 0 "s_register_operand" "")
4729 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4734 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4737 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4739 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4743 if (!arm_arch6 && !MEM_P (operands[1]))
4745 rtx t = gen_lowpart (SImode, operands[1]);
4746 rtx tmp = gen_reg_rtx (SImode);
4747 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4748 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4755 [(set (match_operand:SI 0 "register_operand" "")
4756 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4757 (clobber (match_scratch:SI 2 ""))])]
4759 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4760 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4762 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4765 ;; We used to have an early-clobber on the scratch register here.
4766 ;; However, there's a bug somewhere in reload which means that this
4767 ;; can be partially ignored during spill allocation if the memory
4768 ;; address also needs reloading; this causes us to die later on when
4769 ;; we try to verify the operands. Fortunately, we don't really need
4770 ;; the early-clobber: we can always use operand 0 if operand 2
4771 ;; overlaps the address.
4772 (define_insn "thumb1_extendhisi2"
4773 [(set (match_operand:SI 0 "register_operand" "=l,l")
4774 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4775 (clobber (match_scratch:SI 2 "=X,l"))]
4782 if (which_alternative == 0 && !arm_arch6)
4784 if (which_alternative == 0)
4785 return \"sxth\\t%0, %1\";
4787 mem = XEXP (operands[1], 0);
4789 /* This code used to try to use 'V', and fix the address only if it was
4790 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4791 range of QImode offsets, and offsettable_address_p does a QImode
4794 if (GET_CODE (mem) == CONST)
4795 mem = XEXP (mem, 0);
4797 if (GET_CODE (mem) == LABEL_REF)
4798 return \"ldr\\t%0, %1\";
4800 if (GET_CODE (mem) == PLUS)
4802 rtx a = XEXP (mem, 0);
4803 rtx b = XEXP (mem, 1);
4805 if (GET_CODE (a) == LABEL_REF
4806 && GET_CODE (b) == CONST_INT)
4807 return \"ldr\\t%0, %1\";
4809 if (GET_CODE (b) == REG)
4810 return \"ldrsh\\t%0, %1\";
4818 ops[2] = const0_rtx;
4821 gcc_assert (GET_CODE (ops[1]) == REG);
4823 ops[0] = operands[0];
4824 if (reg_mentioned_p (operands[2], ops[1]))
4827 ops[3] = operands[2];
4828 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4831 [(set_attr_alternative "length"
4832 [(if_then_else (eq_attr "is_arch6" "yes")
4833 (const_int 2) (const_int 4))
4835 (set_attr "type" "alu_shift,load_byte")
4836 (set_attr "pool_range" "*,1020")]
4839 ;; This pattern will only be used when ldsh is not available
4840 (define_expand "extendhisi2_mem"
4841 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4843 (zero_extend:SI (match_dup 7)))
4844 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4845 (set (match_operand:SI 0 "" "")
4846 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4851 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4853 mem1 = change_address (operands[1], QImode, addr);
4854 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4855 operands[0] = gen_lowpart (SImode, operands[0]);
4857 operands[2] = gen_reg_rtx (SImode);
4858 operands[3] = gen_reg_rtx (SImode);
4859 operands[6] = gen_reg_rtx (SImode);
4862 if (BYTES_BIG_ENDIAN)
4864 operands[4] = operands[2];
4865 operands[5] = operands[3];
4869 operands[4] = operands[3];
4870 operands[5] = operands[2];
4876 [(set (match_operand:SI 0 "register_operand" "")
4877 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4879 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4880 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4882 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4885 (define_insn "*arm_extendhisi2"
4886 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4887 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4888 "TARGET_ARM && arm_arch4 && !arm_arch6"
4892 [(set_attr "length" "8,4")
4893 (set_attr "type" "alu_shift,load_byte")
4894 (set_attr "predicable" "yes")
4895 (set_attr "pool_range" "*,256")
4896 (set_attr "neg_pool_range" "*,244")]
4899 ;; ??? Check Thumb-2 pool range
4900 (define_insn "*arm_extendhisi2_v6"
4901 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4902 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4903 "TARGET_32BIT && arm_arch6"
4907 [(set_attr "type" "alu_shift,load_byte")
4908 (set_attr "predicable" "yes")
4909 (set_attr "pool_range" "*,256")
4910 (set_attr "neg_pool_range" "*,244")]
4913 (define_insn "*arm_extendhisi2addsi"
4914 [(set (match_operand:SI 0 "s_register_operand" "=r")
4915 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4916 (match_operand:SI 2 "s_register_operand" "r")))]
4918 "sxtah%?\\t%0, %2, %1"
4921 (define_expand "extendqihi2"
4923 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4925 (set (match_operand:HI 0 "s_register_operand" "")
4926 (ashiftrt:SI (match_dup 2)
4931 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4933 emit_insn (gen_rtx_SET (VOIDmode,
4935 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4938 if (!s_register_operand (operands[1], QImode))
4939 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4940 operands[0] = gen_lowpart (SImode, operands[0]);
4941 operands[1] = gen_lowpart (SImode, operands[1]);
4942 operands[2] = gen_reg_rtx (SImode);
4946 (define_insn "*arm_extendqihi_insn"
4947 [(set (match_operand:HI 0 "s_register_operand" "=r")
4948 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4949 "TARGET_ARM && arm_arch4"
4950 "ldr%(sb%)\\t%0, %1"
4951 [(set_attr "type" "load_byte")
4952 (set_attr "predicable" "yes")
4953 (set_attr "pool_range" "256")
4954 (set_attr "neg_pool_range" "244")]
4957 (define_expand "extendqisi2"
4958 [(set (match_operand:SI 0 "s_register_operand" "")
4959 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4962 if (!arm_arch4 && MEM_P (operands[1]))
4963 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4965 if (!arm_arch6 && !MEM_P (operands[1]))
4967 rtx t = gen_lowpart (SImode, operands[1]);
4968 rtx tmp = gen_reg_rtx (SImode);
4969 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4970 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4976 [(set (match_operand:SI 0 "register_operand" "")
4977 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4979 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4980 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4982 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4985 (define_insn "*arm_extendqisi"
4986 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4987 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4988 "TARGET_ARM && arm_arch4 && !arm_arch6"
4992 [(set_attr "length" "8,4")
4993 (set_attr "type" "alu_shift,load_byte")
4994 (set_attr "predicable" "yes")
4995 (set_attr "pool_range" "*,256")
4996 (set_attr "neg_pool_range" "*,244")]
4999 (define_insn "*arm_extendqisi_v6"
5000 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5002 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5003 "TARGET_ARM && arm_arch6"
5007 [(set_attr "type" "alu_shift,load_byte")
5008 (set_attr "predicable" "yes")
5009 (set_attr "pool_range" "*,256")
5010 (set_attr "neg_pool_range" "*,244")]
5013 (define_insn "*arm_extendqisi2addsi"
5014 [(set (match_operand:SI 0 "s_register_operand" "=r")
5015 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5016 (match_operand:SI 2 "s_register_operand" "r")))]
5018 "sxtab%?\\t%0, %2, %1"
5019 [(set_attr "type" "alu_shift")
5020 (set_attr "insn" "xtab")
5021 (set_attr "predicable" "yes")]
5025 [(set (match_operand:SI 0 "register_operand" "")
5026 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5027 "TARGET_THUMB1 && reload_completed"
5028 [(set (match_dup 0) (match_dup 2))
5029 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5031 rtx addr = XEXP (operands[1], 0);
5033 if (GET_CODE (addr) == CONST)
5034 addr = XEXP (addr, 0);
5036 if (GET_CODE (addr) == PLUS
5037 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5038 /* No split necessary. */
5041 if (GET_CODE (addr) == PLUS
5042 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5045 if (reg_overlap_mentioned_p (operands[0], addr))
5047 rtx t = gen_lowpart (QImode, operands[0]);
5048 emit_move_insn (t, operands[1]);
5049 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5055 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5056 operands[2] = const0_rtx;
5058 else if (GET_CODE (addr) != PLUS)
5060 else if (REG_P (XEXP (addr, 0)))
5062 operands[2] = XEXP (addr, 1);
5063 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5067 operands[2] = XEXP (addr, 0);
5068 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5071 operands[3] = change_address (operands[1], QImode, addr);
5075 [(set (match_operand:SI 0 "register_operand" "")
5076 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5077 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5078 (set (match_operand:SI 3 "register_operand" "")
5079 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5081 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5082 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5083 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5084 && (peep2_reg_dead_p (3, operands[0])
5085 || rtx_equal_p (operands[0], operands[3]))
5086 && (peep2_reg_dead_p (3, operands[2])
5087 || rtx_equal_p (operands[2], operands[3]))"
5088 [(set (match_dup 2) (match_dup 1))
5089 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5091 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5092 operands[4] = change_address (operands[4], QImode, addr);
5095 (define_insn "thumb1_extendqisi2"
5096 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5097 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5102 if (which_alternative == 0 && arm_arch6)
5103 return "sxtb\\t%0, %1";
5104 if (which_alternative == 0)
5107 addr = XEXP (operands[1], 0);
5108 if (GET_CODE (addr) == PLUS
5109 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5110 return "ldrsb\\t%0, %1";
5114 [(set_attr_alternative "length"
5115 [(if_then_else (eq_attr "is_arch6" "yes")
5116 (const_int 2) (const_int 4))
5118 (if_then_else (eq_attr "is_arch6" "yes")
5119 (const_int 4) (const_int 6))])
5120 (set_attr "type" "alu_shift,load_byte,load_byte")]
5123 (define_expand "extendsfdf2"
5124 [(set (match_operand:DF 0 "s_register_operand" "")
5125 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5126 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5130 /* HFmode -> DFmode conversions have to go through SFmode. */
5131 (define_expand "extendhfdf2"
5132 [(set (match_operand:DF 0 "general_operand" "")
5133 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5138 op1 = convert_to_mode (SFmode, operands[1], 0);
5139 op1 = convert_to_mode (DFmode, op1, 0);
5140 emit_insn (gen_movdf (operands[0], op1));
5145 ;; Move insns (including loads and stores)
5147 ;; XXX Just some ideas about movti.
5148 ;; I don't think these are a good idea on the arm, there just aren't enough
5150 ;;(define_expand "loadti"
5151 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5152 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5155 ;;(define_expand "storeti"
5156 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5157 ;; (match_operand:TI 1 "s_register_operand" ""))]
5160 ;;(define_expand "movti"
5161 ;; [(set (match_operand:TI 0 "general_operand" "")
5162 ;; (match_operand:TI 1 "general_operand" ""))]
5168 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
5169 ;; operands[1] = copy_to_reg (operands[1]);
5170 ;; if (GET_CODE (operands[0]) == MEM)
5171 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5172 ;; else if (GET_CODE (operands[1]) == MEM)
5173 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5177 ;; emit_insn (insn);
5181 ;; Recognize garbage generated above.
5184 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5185 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5189 ;; register mem = (which_alternative < 3);
5190 ;; register const char *template;
5192 ;; operands[mem] = XEXP (operands[mem], 0);
5193 ;; switch (which_alternative)
5195 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5196 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5197 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5198 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5199 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5200 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5202 ;; output_asm_insn (template, operands);
5206 (define_expand "movdi"
5207 [(set (match_operand:DI 0 "general_operand" "")
5208 (match_operand:DI 1 "general_operand" ""))]
5211 if (can_create_pseudo_p ())
5213 if (GET_CODE (operands[0]) != REG)
5214 operands[1] = force_reg (DImode, operands[1]);
5219 (define_insn "*arm_movdi"
5220 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5221 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5223 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
5225 && ( register_operand (operands[0], DImode)
5226 || register_operand (operands[1], DImode))"
5228 switch (which_alternative)
5235 return output_move_double (operands, true, NULL);
5238 [(set_attr "length" "8,12,16,8,8")
5239 (set_attr "type" "*,*,*,load2,store2")
5240 (set_attr "arm_pool_range" "*,*,*,1020,*")
5241 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5242 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
5243 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5247 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5248 (match_operand:ANY64 1 "const_double_operand" ""))]
5251 && (arm_const_double_inline_cost (operands[1])
5252 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5255 arm_split_constant (SET, SImode, curr_insn,
5256 INTVAL (gen_lowpart (SImode, operands[1])),
5257 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5258 arm_split_constant (SET, SImode, curr_insn,
5259 INTVAL (gen_highpart_mode (SImode,
5260 GET_MODE (operands[0]),
5262 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5267 ; If optimizing for size, or if we have load delay slots, then
5268 ; we want to split the constant into two separate operations.
5269 ; In both cases this may split a trivial part into a single data op
5270 ; leaving a single complex constant to load. We can also get longer
5271 ; offsets in a LDR which means we get better chances of sharing the pool
5272 ; entries. Finally, we can normally do a better job of scheduling
5273 ; LDR instructions than we can with LDM.
5274 ; This pattern will only match if the one above did not.
5276 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5277 (match_operand:ANY64 1 "const_double_operand" ""))]
5278 "TARGET_ARM && reload_completed
5279 && arm_const_double_by_parts (operands[1])"
5280 [(set (match_dup 0) (match_dup 1))
5281 (set (match_dup 2) (match_dup 3))]
5283 operands[2] = gen_highpart (SImode, operands[0]);
5284 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5286 operands[0] = gen_lowpart (SImode, operands[0]);
5287 operands[1] = gen_lowpart (SImode, operands[1]);
5292 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5293 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5294 "TARGET_EITHER && reload_completed"
5295 [(set (match_dup 0) (match_dup 1))
5296 (set (match_dup 2) (match_dup 3))]
5298 operands[2] = gen_highpart (SImode, operands[0]);
5299 operands[3] = gen_highpart (SImode, operands[1]);
5300 operands[0] = gen_lowpart (SImode, operands[0]);
5301 operands[1] = gen_lowpart (SImode, operands[1]);
5303 /* Handle a partial overlap. */
5304 if (rtx_equal_p (operands[0], operands[3]))
5306 rtx tmp0 = operands[0];
5307 rtx tmp1 = operands[1];
5309 operands[0] = operands[2];
5310 operands[1] = operands[3];
5317 ;; We can't actually do base+index doubleword loads if the index and
5318 ;; destination overlap. Split here so that we at least have chance to
5321 [(set (match_operand:DI 0 "s_register_operand" "")
5322 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5323 (match_operand:SI 2 "s_register_operand" ""))))]
5325 && reg_overlap_mentioned_p (operands[0], operands[1])
5326 && reg_overlap_mentioned_p (operands[0], operands[2])"
5328 (plus:SI (match_dup 1)
5331 (mem:DI (match_dup 4)))]
5333 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5337 ;;; ??? This should have alternatives for constants.
5338 ;;; ??? This was originally identical to the movdf_insn pattern.
5339 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5340 ;;; thumb_reorg with a memory reference.
5341 (define_insn "*thumb1_movdi_insn"
5342 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5343 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5345 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5346 && ( register_operand (operands[0], DImode)
5347 || register_operand (operands[1], DImode))"
5350 switch (which_alternative)
5354 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5355 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5356 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5358 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5360 operands[1] = GEN_INT (- INTVAL (operands[1]));
5361 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5363 return \"ldmia\\t%1, {%0, %H0}\";
5365 return \"stmia\\t%0, {%1, %H1}\";
5367 return thumb_load_double_from_address (operands);
5369 operands[2] = gen_rtx_MEM (SImode,
5370 plus_constant (XEXP (operands[0], 0), 4));
5371 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5374 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5375 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5376 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5379 [(set_attr "length" "4,4,6,2,2,6,4,4")
5380 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5381 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5382 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5385 (define_expand "movsi"
5386 [(set (match_operand:SI 0 "general_operand" "")
5387 (match_operand:SI 1 "general_operand" ""))]
5391 rtx base, offset, tmp;
5395 /* Everything except mem = const or mem = mem can be done easily. */
5396 if (GET_CODE (operands[0]) == MEM)
5397 operands[1] = force_reg (SImode, operands[1]);
5398 if (arm_general_register_operand (operands[0], SImode)
5399 && GET_CODE (operands[1]) == CONST_INT
5400 && !(const_ok_for_arm (INTVAL (operands[1]))
5401 || const_ok_for_arm (~INTVAL (operands[1]))))
5403 arm_split_constant (SET, SImode, NULL_RTX,
5404 INTVAL (operands[1]), operands[0], NULL_RTX,
5405 optimize && can_create_pseudo_p ());
5409 if (TARGET_USE_MOVT && !target_word_relocations
5410 && GET_CODE (operands[1]) == SYMBOL_REF
5411 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5413 arm_emit_movpair (operands[0], operands[1]);
5417 else /* TARGET_THUMB1... */
5419 if (can_create_pseudo_p ())
5421 if (GET_CODE (operands[0]) != REG)
5422 operands[1] = force_reg (SImode, operands[1]);
5426 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5428 split_const (operands[1], &base, &offset);
5429 if (GET_CODE (base) == SYMBOL_REF
5430 && !offset_within_block_p (base, INTVAL (offset)))
5432 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5433 emit_move_insn (tmp, base);
5434 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5439 /* Recognize the case where operand[1] is a reference to thread-local
5440 data and load its address to a register. */
5441 if (arm_tls_referenced_p (operands[1]))
5443 rtx tmp = operands[1];
5446 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5448 addend = XEXP (XEXP (tmp, 0), 1);
5449 tmp = XEXP (XEXP (tmp, 0), 0);
5452 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5453 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5455 tmp = legitimize_tls_address (tmp,
5456 !can_create_pseudo_p () ? operands[0] : 0);
5459 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5460 tmp = force_operand (tmp, operands[0]);
5465 && (CONSTANT_P (operands[1])
5466 || symbol_mentioned_p (operands[1])
5467 || label_mentioned_p (operands[1])))
5468 operands[1] = legitimize_pic_address (operands[1], SImode,
5469 (!can_create_pseudo_p ()
5476 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5477 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5478 ;; so this does not matter.
5479 (define_insn "*arm_movt"
5480 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5481 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5482 (match_operand:SI 2 "general_operand" "i")))]
5484 "movt%?\t%0, #:upper16:%c2"
5485 [(set_attr "predicable" "yes")
5486 (set_attr "length" "4")]
5489 (define_insn "*arm_movsi_insn"
5490 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5491 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5492 "TARGET_ARM && ! TARGET_IWMMXT
5493 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5494 && ( register_operand (operands[0], SImode)
5495 || register_operand (operands[1], SImode))"
5503 [(set_attr "type" "*,*,*,*,load1,store1")
5504 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5505 (set_attr "predicable" "yes")
5506 (set_attr "pool_range" "*,*,*,*,4096,*")
5507 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5511 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5512 (match_operand:SI 1 "const_int_operand" ""))]
5514 && (!(const_ok_for_arm (INTVAL (operands[1]))
5515 || const_ok_for_arm (~INTVAL (operands[1]))))"
5516 [(clobber (const_int 0))]
5518 arm_split_constant (SET, SImode, NULL_RTX,
5519 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5524 (define_insn "*thumb1_movsi_insn"
5525 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5526 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5528 && ( register_operand (operands[0], SImode)
5529 || register_operand (operands[1], SImode))"
5540 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5541 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5542 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5543 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5546 [(set (match_operand:SI 0 "register_operand" "")
5547 (match_operand:SI 1 "const_int_operand" ""))]
5548 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5549 [(set (match_dup 2) (match_dup 1))
5550 (set (match_dup 0) (neg:SI (match_dup 2)))]
5553 operands[1] = GEN_INT (- INTVAL (operands[1]));
5554 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5559 [(set (match_operand:SI 0 "register_operand" "")
5560 (match_operand:SI 1 "const_int_operand" ""))]
5561 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5562 [(set (match_dup 2) (match_dup 1))
5563 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5566 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5567 unsigned HOST_WIDE_INT mask = 0xff;
5570 for (i = 0; i < 25; i++)
5571 if ((val & (mask << i)) == val)
5574 /* Don't split if the shift is zero. */
5578 operands[1] = GEN_INT (val >> i);
5579 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5580 operands[3] = GEN_INT (i);
5584 ;; When generating pic, we need to load the symbol offset into a register.
5585 ;; So that the optimizer does not confuse this with a normal symbol load
5586 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5587 ;; since that is the only type of relocation we can use.
5589 ;; Wrap calculation of the whole PIC address in a single pattern for the
5590 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5591 ;; a PIC address involves two loads from memory, so we want to CSE it
5592 ;; as often as possible.
5593 ;; This pattern will be split into one of the pic_load_addr_* patterns
5594 ;; and a move after GCSE optimizations.
5596 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5597 (define_expand "calculate_pic_address"
5598 [(set (match_operand:SI 0 "register_operand" "")
5599 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5600 (unspec:SI [(match_operand:SI 2 "" "")]
5605 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5607 [(set (match_operand:SI 0 "register_operand" "")
5608 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5609 (unspec:SI [(match_operand:SI 2 "" "")]
5612 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5613 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5614 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5617 ;; operand1 is the memory address to go into
5618 ;; pic_load_addr_32bit.
5619 ;; operand2 is the PIC label to be emitted
5620 ;; from pic_add_dot_plus_eight.
5621 ;; We do this to allow hoisting of the entire insn.
5622 (define_insn_and_split "pic_load_addr_unified"
5623 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5624 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5625 (match_operand:SI 2 "" "")]
5626 UNSPEC_PIC_UNIFIED))]
5629 "&& reload_completed"
5630 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5631 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5632 (match_dup 2)] UNSPEC_PIC_BASE))]
5633 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5634 [(set_attr "type" "load1,load1,load1")
5635 (set_attr "pool_range" "4096,4096,1024")
5636 (set_attr "neg_pool_range" "4084,0,0")
5637 (set_attr "arch" "a,t2,t1")
5638 (set_attr "length" "8,6,4")]
5641 ;; The rather odd constraints on the following are to force reload to leave
5642 ;; the insn alone, and to force the minipool generation pass to then move
5643 ;; the GOT symbol to memory.
5645 (define_insn "pic_load_addr_32bit"
5646 [(set (match_operand:SI 0 "s_register_operand" "=r")
5647 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5648 "TARGET_32BIT && flag_pic"
5650 [(set_attr "type" "load1")
5651 (set_attr "pool_range" "4096")
5652 (set (attr "neg_pool_range")
5653 (if_then_else (eq_attr "is_thumb" "no")
5658 (define_insn "pic_load_addr_thumb1"
5659 [(set (match_operand:SI 0 "s_register_operand" "=l")
5660 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5661 "TARGET_THUMB1 && flag_pic"
5663 [(set_attr "type" "load1")
5664 (set (attr "pool_range") (const_int 1024))]
5667 (define_insn "pic_add_dot_plus_four"
5668 [(set (match_operand:SI 0 "register_operand" "=r")
5669 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5671 (match_operand 2 "" "")]
5675 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5676 INTVAL (operands[2]));
5677 return \"add\\t%0, %|pc\";
5679 [(set_attr "length" "2")]
5682 (define_insn "pic_add_dot_plus_eight"
5683 [(set (match_operand:SI 0 "register_operand" "=r")
5684 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5686 (match_operand 2 "" "")]
5690 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5691 INTVAL (operands[2]));
5692 return \"add%?\\t%0, %|pc, %1\";
5694 [(set_attr "predicable" "yes")]
5697 (define_insn "tls_load_dot_plus_eight"
5698 [(set (match_operand:SI 0 "register_operand" "=r")
5699 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5701 (match_operand 2 "" "")]
5705 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5706 INTVAL (operands[2]));
5707 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5709 [(set_attr "predicable" "yes")]
5712 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5713 ;; followed by a load. These sequences can be crunched down to
5714 ;; tls_load_dot_plus_eight by a peephole.
5717 [(set (match_operand:SI 0 "register_operand" "")
5718 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5720 (match_operand 1 "" "")]
5722 (set (match_operand:SI 2 "arm_general_register_operand" "")
5723 (mem:SI (match_dup 0)))]
5724 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5726 (mem:SI (unspec:SI [(match_dup 3)
5733 (define_insn "pic_offset_arm"
5734 [(set (match_operand:SI 0 "register_operand" "=r")
5735 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5736 (unspec:SI [(match_operand:SI 2 "" "X")]
5737 UNSPEC_PIC_OFFSET))))]
5738 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5739 "ldr%?\\t%0, [%1,%2]"
5740 [(set_attr "type" "load1")]
5743 (define_expand "builtin_setjmp_receiver"
5744 [(label_ref (match_operand 0 "" ""))]
5748 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5750 if (arm_pic_register != INVALID_REGNUM)
5751 arm_load_pic_register (1UL << 3);
5755 ;; If copying one reg to another we can set the condition codes according to
5756 ;; its value. Such a move is common after a return from subroutine and the
5757 ;; result is being tested against zero.
5759 (define_insn "*movsi_compare0"
5760 [(set (reg:CC CC_REGNUM)
5761 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5763 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5769 [(set_attr "conds" "set")]
5772 ;; Subroutine to store a half word from a register into memory.
5773 ;; Operand 0 is the source register (HImode)
5774 ;; Operand 1 is the destination address in a register (SImode)
5776 ;; In both this routine and the next, we must be careful not to spill
5777 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5778 ;; can generate unrecognizable rtl.
5780 (define_expand "storehi"
5781 [;; store the low byte
5782 (set (match_operand 1 "" "") (match_dup 3))
5783 ;; extract the high byte
5785 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5786 ;; store the high byte
5787 (set (match_dup 4) (match_dup 5))]
5791 rtx op1 = operands[1];
5792 rtx addr = XEXP (op1, 0);
5793 enum rtx_code code = GET_CODE (addr);
5795 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5797 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5799 operands[4] = adjust_address (op1, QImode, 1);
5800 operands[1] = adjust_address (operands[1], QImode, 0);
5801 operands[3] = gen_lowpart (QImode, operands[0]);
5802 operands[0] = gen_lowpart (SImode, operands[0]);
5803 operands[2] = gen_reg_rtx (SImode);
5804 operands[5] = gen_lowpart (QImode, operands[2]);
5808 (define_expand "storehi_bigend"
5809 [(set (match_dup 4) (match_dup 3))
5811 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5812 (set (match_operand 1 "" "") (match_dup 5))]
5816 rtx op1 = operands[1];
5817 rtx addr = XEXP (op1, 0);
5818 enum rtx_code code = GET_CODE (addr);
5820 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5822 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5824 operands[4] = adjust_address (op1, QImode, 1);
5825 operands[1] = adjust_address (operands[1], QImode, 0);
5826 operands[3] = gen_lowpart (QImode, operands[0]);
5827 operands[0] = gen_lowpart (SImode, operands[0]);
5828 operands[2] = gen_reg_rtx (SImode);
5829 operands[5] = gen_lowpart (QImode, operands[2]);
5833 ;; Subroutine to store a half word integer constant into memory.
5834 (define_expand "storeinthi"
5835 [(set (match_operand 0 "" "")
5836 (match_operand 1 "" ""))
5837 (set (match_dup 3) (match_dup 2))]
5841 HOST_WIDE_INT value = INTVAL (operands[1]);
5842 rtx addr = XEXP (operands[0], 0);
5843 rtx op0 = operands[0];
5844 enum rtx_code code = GET_CODE (addr);
5846 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5848 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5850 operands[1] = gen_reg_rtx (SImode);
5851 if (BYTES_BIG_ENDIAN)
5853 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5854 if ((value & 255) == ((value >> 8) & 255))
5855 operands[2] = operands[1];
5858 operands[2] = gen_reg_rtx (SImode);
5859 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5864 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5865 if ((value & 255) == ((value >> 8) & 255))
5866 operands[2] = operands[1];
5869 operands[2] = gen_reg_rtx (SImode);
5870 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5874 operands[3] = adjust_address (op0, QImode, 1);
5875 operands[0] = adjust_address (operands[0], QImode, 0);
5876 operands[2] = gen_lowpart (QImode, operands[2]);
5877 operands[1] = gen_lowpart (QImode, operands[1]);
5881 (define_expand "storehi_single_op"
5882 [(set (match_operand:HI 0 "memory_operand" "")
5883 (match_operand:HI 1 "general_operand" ""))]
5884 "TARGET_32BIT && arm_arch4"
5886 if (!s_register_operand (operands[1], HImode))
5887 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5891 (define_expand "movhi"
5892 [(set (match_operand:HI 0 "general_operand" "")
5893 (match_operand:HI 1 "general_operand" ""))]
5898 if (can_create_pseudo_p ())
5900 if (GET_CODE (operands[0]) == MEM)
5904 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5907 if (GET_CODE (operands[1]) == CONST_INT)
5908 emit_insn (gen_storeinthi (operands[0], operands[1]));
5911 if (GET_CODE (operands[1]) == MEM)
5912 operands[1] = force_reg (HImode, operands[1]);
5913 if (BYTES_BIG_ENDIAN)
5914 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5916 emit_insn (gen_storehi (operands[1], operands[0]));
5920 /* Sign extend a constant, and keep it in an SImode reg. */
5921 else if (GET_CODE (operands[1]) == CONST_INT)
5923 rtx reg = gen_reg_rtx (SImode);
5924 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5926 /* If the constant is already valid, leave it alone. */
5927 if (!const_ok_for_arm (val))
5929 /* If setting all the top bits will make the constant
5930 loadable in a single instruction, then set them.
5931 Otherwise, sign extend the number. */
5933 if (const_ok_for_arm (~(val | ~0xffff)))
5935 else if (val & 0x8000)
5939 emit_insn (gen_movsi (reg, GEN_INT (val)));
5940 operands[1] = gen_lowpart (HImode, reg);
5942 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5943 && GET_CODE (operands[1]) == MEM)
5945 rtx reg = gen_reg_rtx (SImode);
5947 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5948 operands[1] = gen_lowpart (HImode, reg);
5950 else if (!arm_arch4)
5952 if (GET_CODE (operands[1]) == MEM)
5955 rtx offset = const0_rtx;
5956 rtx reg = gen_reg_rtx (SImode);
5958 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5959 || (GET_CODE (base) == PLUS
5960 && (GET_CODE (offset = XEXP (base, 1))
5962 && ((INTVAL(offset) & 1) != 1)
5963 && GET_CODE (base = XEXP (base, 0)) == REG))
5964 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5968 new_rtx = widen_memory_access (operands[1], SImode,
5969 ((INTVAL (offset) & ~3)
5970 - INTVAL (offset)));
5971 emit_insn (gen_movsi (reg, new_rtx));
5972 if (((INTVAL (offset) & 2) != 0)
5973 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5975 rtx reg2 = gen_reg_rtx (SImode);
5977 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5982 emit_insn (gen_movhi_bytes (reg, operands[1]));
5984 operands[1] = gen_lowpart (HImode, reg);
5988 /* Handle loading a large integer during reload. */
5989 else if (GET_CODE (operands[1]) == CONST_INT
5990 && !const_ok_for_arm (INTVAL (operands[1]))
5991 && !const_ok_for_arm (~INTVAL (operands[1])))
5993 /* Writing a constant to memory needs a scratch, which should
5994 be handled with SECONDARY_RELOADs. */
5995 gcc_assert (GET_CODE (operands[0]) == REG);
5997 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5998 emit_insn (gen_movsi (operands[0], operands[1]));
6002 else if (TARGET_THUMB2)
6004 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6005 if (can_create_pseudo_p ())
6007 if (GET_CODE (operands[0]) != REG)
6008 operands[1] = force_reg (HImode, operands[1]);
6009 /* Zero extend a constant, and keep it in an SImode reg. */
6010 else if (GET_CODE (operands[1]) == CONST_INT)
6012 rtx reg = gen_reg_rtx (SImode);
6013 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6015 emit_insn (gen_movsi (reg, GEN_INT (val)));
6016 operands[1] = gen_lowpart (HImode, reg);
6020 else /* TARGET_THUMB1 */
6022 if (can_create_pseudo_p ())
6024 if (GET_CODE (operands[1]) == CONST_INT)
6026 rtx reg = gen_reg_rtx (SImode);
6028 emit_insn (gen_movsi (reg, operands[1]));
6029 operands[1] = gen_lowpart (HImode, reg);
6032 /* ??? We shouldn't really get invalid addresses here, but this can
6033 happen if we are passed a SP (never OK for HImode/QImode) or
6034 virtual register (also rejected as illegitimate for HImode/QImode)
6035 relative address. */
6036 /* ??? This should perhaps be fixed elsewhere, for instance, in
6037 fixup_stack_1, by checking for other kinds of invalid addresses,
6038 e.g. a bare reference to a virtual register. This may confuse the
6039 alpha though, which must handle this case differently. */
6040 if (GET_CODE (operands[0]) == MEM
6041 && !memory_address_p (GET_MODE (operands[0]),
6042 XEXP (operands[0], 0)))
6044 = replace_equiv_address (operands[0],
6045 copy_to_reg (XEXP (operands[0], 0)));
6047 if (GET_CODE (operands[1]) == MEM
6048 && !memory_address_p (GET_MODE (operands[1]),
6049 XEXP (operands[1], 0)))
6051 = replace_equiv_address (operands[1],
6052 copy_to_reg (XEXP (operands[1], 0)));
6054 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6056 rtx reg = gen_reg_rtx (SImode);
6058 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6059 operands[1] = gen_lowpart (HImode, reg);
6062 if (GET_CODE (operands[0]) == MEM)
6063 operands[1] = force_reg (HImode, operands[1]);
6065 else if (GET_CODE (operands[1]) == CONST_INT
6066 && !satisfies_constraint_I (operands[1]))
6068 /* Handle loading a large integer during reload. */
6070 /* Writing a constant to memory needs a scratch, which should
6071 be handled with SECONDARY_RELOADs. */
6072 gcc_assert (GET_CODE (operands[0]) == REG);
6074 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6075 emit_insn (gen_movsi (operands[0], operands[1]));
6082 (define_insn "*thumb1_movhi_insn"
6083 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6084 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6086 && ( register_operand (operands[0], HImode)
6087 || register_operand (operands[1], HImode))"
6089 switch (which_alternative)
6091 case 0: return \"add %0, %1, #0\";
6092 case 2: return \"strh %1, %0\";
6093 case 3: return \"mov %0, %1\";
6094 case 4: return \"mov %0, %1\";
6095 case 5: return \"mov %0, %1\";
6096 default: gcc_unreachable ();
6098 /* The stack pointer can end up being taken as an index register.
6099 Catch this case here and deal with it. */
6100 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6101 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
6102 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6105 ops[0] = operands[0];
6106 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6108 output_asm_insn (\"mov %0, %1\", ops);
6110 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6113 return \"ldrh %0, %1\";
6115 [(set_attr "length" "2,4,2,2,2,2")
6116 (set_attr "type" "*,load1,store1,*,*,*")
6117 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6120 (define_expand "movhi_bytes"
6121 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6123 (zero_extend:SI (match_dup 6)))
6124 (set (match_operand:SI 0 "" "")
6125 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6130 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6132 mem1 = change_address (operands[1], QImode, addr);
6133 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
6134 operands[0] = gen_lowpart (SImode, operands[0]);
6136 operands[2] = gen_reg_rtx (SImode);
6137 operands[3] = gen_reg_rtx (SImode);
6140 if (BYTES_BIG_ENDIAN)
6142 operands[4] = operands[2];
6143 operands[5] = operands[3];
6147 operands[4] = operands[3];
6148 operands[5] = operands[2];
6153 (define_expand "movhi_bigend"
6155 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6158 (ashiftrt:SI (match_dup 2) (const_int 16)))
6159 (set (match_operand:HI 0 "s_register_operand" "")
6163 operands[2] = gen_reg_rtx (SImode);
6164 operands[3] = gen_reg_rtx (SImode);
6165 operands[4] = gen_lowpart (HImode, operands[3]);
6169 ;; Pattern to recognize insn generated default case above
6170 (define_insn "*movhi_insn_arch4"
6171 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6172 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6175 && (register_operand (operands[0], HImode)
6176 || register_operand (operands[1], HImode))"
6178 mov%?\\t%0, %1\\t%@ movhi
6179 mvn%?\\t%0, #%B1\\t%@ movhi
6180 str%(h%)\\t%1, %0\\t%@ movhi
6181 ldr%(h%)\\t%0, %1\\t%@ movhi"
6182 [(set_attr "type" "*,*,store1,load1")
6183 (set_attr "predicable" "yes")
6184 (set_attr "insn" "mov,mvn,*,*")
6185 (set_attr "pool_range" "*,*,*,256")
6186 (set_attr "neg_pool_range" "*,*,*,244")]
6189 (define_insn "*movhi_bytes"
6190 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
6191 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
6194 mov%?\\t%0, %1\\t%@ movhi
6195 mvn%?\\t%0, #%B1\\t%@ movhi"
6196 [(set_attr "predicable" "yes")
6197 (set_attr "insn" "mov,mvn")]
6200 (define_expand "thumb_movhi_clobber"
6201 [(set (match_operand:HI 0 "memory_operand" "")
6202 (match_operand:HI 1 "register_operand" ""))
6203 (clobber (match_operand:DI 2 "register_operand" ""))]
6206 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6207 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6209 emit_insn (gen_movhi (operands[0], operands[1]));
6212 /* XXX Fixme, need to handle other cases here as well. */
6217 ;; We use a DImode scratch because we may occasionally need an additional
6218 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6219 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6220 (define_expand "reload_outhi"
6221 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6222 (match_operand:HI 1 "s_register_operand" "r")
6223 (match_operand:DI 2 "s_register_operand" "=&l")])]
6226 arm_reload_out_hi (operands);
6228 thumb_reload_out_hi (operands);
6233 (define_expand "reload_inhi"
6234 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6235 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6236 (match_operand:DI 2 "s_register_operand" "=&r")])]
6240 arm_reload_in_hi (operands);
6242 thumb_reload_out_hi (operands);
6246 (define_expand "movqi"
6247 [(set (match_operand:QI 0 "general_operand" "")
6248 (match_operand:QI 1 "general_operand" ""))]
6251 /* Everything except mem = const or mem = mem can be done easily */
6253 if (can_create_pseudo_p ())
6255 if (GET_CODE (operands[1]) == CONST_INT)
6257 rtx reg = gen_reg_rtx (SImode);
6259 /* For thumb we want an unsigned immediate, then we are more likely
6260 to be able to use a movs insn. */
6262 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6264 emit_insn (gen_movsi (reg, operands[1]));
6265 operands[1] = gen_lowpart (QImode, reg);
6270 /* ??? We shouldn't really get invalid addresses here, but this can
6271 happen if we are passed a SP (never OK for HImode/QImode) or
6272 virtual register (also rejected as illegitimate for HImode/QImode)
6273 relative address. */
6274 /* ??? This should perhaps be fixed elsewhere, for instance, in
6275 fixup_stack_1, by checking for other kinds of invalid addresses,
6276 e.g. a bare reference to a virtual register. This may confuse the
6277 alpha though, which must handle this case differently. */
6278 if (GET_CODE (operands[0]) == MEM
6279 && !memory_address_p (GET_MODE (operands[0]),
6280 XEXP (operands[0], 0)))
6282 = replace_equiv_address (operands[0],
6283 copy_to_reg (XEXP (operands[0], 0)));
6284 if (GET_CODE (operands[1]) == MEM
6285 && !memory_address_p (GET_MODE (operands[1]),
6286 XEXP (operands[1], 0)))
6288 = replace_equiv_address (operands[1],
6289 copy_to_reg (XEXP (operands[1], 0)));
6292 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6294 rtx reg = gen_reg_rtx (SImode);
6296 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6297 operands[1] = gen_lowpart (QImode, reg);
6300 if (GET_CODE (operands[0]) == MEM)
6301 operands[1] = force_reg (QImode, operands[1]);
6303 else if (TARGET_THUMB
6304 && GET_CODE (operands[1]) == CONST_INT
6305 && !satisfies_constraint_I (operands[1]))
6307 /* Handle loading a large integer during reload. */
6309 /* Writing a constant to memory needs a scratch, which should
6310 be handled with SECONDARY_RELOADs. */
6311 gcc_assert (GET_CODE (operands[0]) == REG);
6313 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6314 emit_insn (gen_movsi (operands[0], operands[1]));
6321 (define_insn "*arm_movqi_insn"
6322 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
6323 (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
6325 && ( register_operand (operands[0], QImode)
6326 || register_operand (operands[1], QImode))"
6334 [(set_attr "type" "*,*,load1,store1,load1,store1")
6335 (set_attr "insn" "mov,mvn,*,*,*,*")
6336 (set_attr "predicable" "yes")
6337 (set_attr "arch" "any,any,t2,t2,any,any")
6338 (set_attr "length" "4,4,2,2,4,4")]
6341 (define_insn "*thumb1_movqi_insn"
6342 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6343 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6345 && ( register_operand (operands[0], QImode)
6346 || register_operand (operands[1], QImode))"
6354 [(set_attr "length" "2")
6355 (set_attr "type" "*,load1,store1,*,*,*")
6356 (set_attr "insn" "*,*,*,mov,mov,mov")
6357 (set_attr "pool_range" "*,32,*,*,*,*")
6358 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6361 (define_expand "movhf"
6362 [(set (match_operand:HF 0 "general_operand" "")
6363 (match_operand:HF 1 "general_operand" ""))]
6368 if (GET_CODE (operands[0]) == MEM)
6369 operands[1] = force_reg (HFmode, operands[1]);
6371 else /* TARGET_THUMB1 */
6373 if (can_create_pseudo_p ())
6375 if (GET_CODE (operands[0]) != REG)
6376 operands[1] = force_reg (HFmode, operands[1]);
6382 (define_insn "*arm32_movhf"
6383 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6384 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6385 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6386 && ( s_register_operand (operands[0], HFmode)
6387 || s_register_operand (operands[1], HFmode))"
6389 switch (which_alternative)
6391 case 0: /* ARM register from memory */
6392 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6393 case 1: /* memory from ARM register */
6394 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6395 case 2: /* ARM register from ARM register */
6396 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6397 case 3: /* ARM register from constant */
6403 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6404 bits = real_to_target (NULL, &r, HFmode);
6405 ops[0] = operands[0];
6406 ops[1] = GEN_INT (bits);
6407 ops[2] = GEN_INT (bits & 0xff00);
6408 ops[3] = GEN_INT (bits & 0x00ff);
6410 if (arm_arch_thumb2)
6411 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6413 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6420 [(set_attr "conds" "unconditional")
6421 (set_attr "type" "load1,store1,*,*")
6422 (set_attr "insn" "*,*,mov,mov")
6423 (set_attr "length" "4,4,4,8")
6424 (set_attr "predicable" "yes")]
6427 (define_insn "*thumb1_movhf"
6428 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6429 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6431 && ( s_register_operand (operands[0], HFmode)
6432 || s_register_operand (operands[1], HFmode))"
6434 switch (which_alternative)
6439 gcc_assert (GET_CODE(operands[1]) == MEM);
6440 addr = XEXP (operands[1], 0);
6441 if (GET_CODE (addr) == LABEL_REF
6442 || (GET_CODE (addr) == CONST
6443 && GET_CODE (XEXP (addr, 0)) == PLUS
6444 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6445 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6447 /* Constant pool entry. */
6448 return \"ldr\\t%0, %1\";
6450 return \"ldrh\\t%0, %1\";
6452 case 2: return \"strh\\t%1, %0\";
6453 default: return \"mov\\t%0, %1\";
6456 [(set_attr "length" "2")
6457 (set_attr "type" "*,load1,store1,*,*")
6458 (set_attr "insn" "mov,*,*,mov,mov")
6459 (set_attr "pool_range" "*,1020,*,*,*")
6460 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6462 (define_expand "movsf"
6463 [(set (match_operand:SF 0 "general_operand" "")
6464 (match_operand:SF 1 "general_operand" ""))]
6469 if (GET_CODE (operands[0]) == MEM)
6470 operands[1] = force_reg (SFmode, operands[1]);
6472 else /* TARGET_THUMB1 */
6474 if (can_create_pseudo_p ())
6476 if (GET_CODE (operands[0]) != REG)
6477 operands[1] = force_reg (SFmode, operands[1]);
6483 ;; Transform a floating-point move of a constant into a core register into
6484 ;; an SImode operation.
6486 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6487 (match_operand:SF 1 "immediate_operand" ""))]
6490 && GET_CODE (operands[1]) == CONST_DOUBLE"
6491 [(set (match_dup 2) (match_dup 3))]
6493 operands[2] = gen_lowpart (SImode, operands[0]);
6494 operands[3] = gen_lowpart (SImode, operands[1]);
6495 if (operands[2] == 0 || operands[3] == 0)
6500 (define_insn "*arm_movsf_soft_insn"
6501 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6502 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6504 && TARGET_SOFT_FLOAT
6505 && (GET_CODE (operands[0]) != MEM
6506 || register_operand (operands[1], SFmode))"
6509 ldr%?\\t%0, %1\\t%@ float
6510 str%?\\t%1, %0\\t%@ float"
6511 [(set_attr "predicable" "yes")
6512 (set_attr "type" "*,load1,store1")
6513 (set_attr "insn" "mov,*,*")
6514 (set_attr "pool_range" "*,4096,*")
6515 (set_attr "arm_neg_pool_range" "*,4084,*")
6516 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6519 ;;; ??? This should have alternatives for constants.
6520 (define_insn "*thumb1_movsf_insn"
6521 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6522 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6524 && ( register_operand (operands[0], SFmode)
6525 || register_operand (operands[1], SFmode))"
6534 [(set_attr "length" "2")
6535 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6536 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6537 (set_attr "insn" "*,*,*,*,*,mov,mov")
6538 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6541 (define_expand "movdf"
6542 [(set (match_operand:DF 0 "general_operand" "")
6543 (match_operand:DF 1 "general_operand" ""))]
6548 if (GET_CODE (operands[0]) == MEM)
6549 operands[1] = force_reg (DFmode, operands[1]);
6551 else /* TARGET_THUMB */
6553 if (can_create_pseudo_p ())
6555 if (GET_CODE (operands[0]) != REG)
6556 operands[1] = force_reg (DFmode, operands[1]);
6562 ;; Reloading a df mode value stored in integer regs to memory can require a
6564 (define_expand "reload_outdf"
6565 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6566 (match_operand:DF 1 "s_register_operand" "r")
6567 (match_operand:SI 2 "s_register_operand" "=&r")]
6571 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6574 operands[2] = XEXP (operands[0], 0);
6575 else if (code == POST_INC || code == PRE_DEC)
6577 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6578 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6579 emit_insn (gen_movdi (operands[0], operands[1]));
6582 else if (code == PRE_INC)
6584 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6586 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6589 else if (code == POST_DEC)
6590 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6592 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6593 XEXP (XEXP (operands[0], 0), 1)));
6595 emit_insn (gen_rtx_SET (VOIDmode,
6596 replace_equiv_address (operands[0], operands[2]),
6599 if (code == POST_DEC)
6600 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6606 (define_insn "*movdf_soft_insn"
6607 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6608 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6609 "TARGET_32BIT && TARGET_SOFT_FLOAT
6610 && ( register_operand (operands[0], DFmode)
6611 || register_operand (operands[1], DFmode))"
6613 switch (which_alternative)
6620 return output_move_double (operands, true, NULL);
6623 [(set_attr "length" "8,12,16,8,8")
6624 (set_attr "type" "*,*,*,load2,store2")
6625 (set_attr "pool_range" "*,*,*,1020,*")
6626 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6627 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6630 ;;; ??? This should have alternatives for constants.
6631 ;;; ??? This was originally identical to the movdi_insn pattern.
6632 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6633 ;;; thumb_reorg with a memory reference.
6634 (define_insn "*thumb_movdf_insn"
6635 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6636 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6638 && ( register_operand (operands[0], DFmode)
6639 || register_operand (operands[1], DFmode))"
6641 switch (which_alternative)
6645 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6646 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6647 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6649 return \"ldmia\\t%1, {%0, %H0}\";
6651 return \"stmia\\t%0, {%1, %H1}\";
6653 return thumb_load_double_from_address (operands);
6655 operands[2] = gen_rtx_MEM (SImode,
6656 plus_constant (XEXP (operands[0], 0), 4));
6657 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6660 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6661 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6662 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6665 [(set_attr "length" "4,2,2,6,4,4")
6666 (set_attr "type" "*,load2,store2,load2,store2,*")
6667 (set_attr "insn" "*,*,*,*,*,mov")
6668 (set_attr "pool_range" "*,*,*,1020,*,*")]
6671 (define_expand "movxf"
6672 [(set (match_operand:XF 0 "general_operand" "")
6673 (match_operand:XF 1 "general_operand" ""))]
6674 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6676 if (GET_CODE (operands[0]) == MEM)
6677 operands[1] = force_reg (XFmode, operands[1]);
6683 ;; load- and store-multiple insns
6684 ;; The arm can load/store any set of registers, provided that they are in
6685 ;; ascending order, but these expanders assume a contiguous set.
6687 (define_expand "load_multiple"
6688 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6689 (match_operand:SI 1 "" ""))
6690 (use (match_operand:SI 2 "" ""))])]
6693 HOST_WIDE_INT offset = 0;
6695 /* Support only fixed point registers. */
6696 if (GET_CODE (operands[2]) != CONST_INT
6697 || INTVAL (operands[2]) > 14
6698 || INTVAL (operands[2]) < 2
6699 || GET_CODE (operands[1]) != MEM
6700 || GET_CODE (operands[0]) != REG
6701 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6702 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6706 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6707 INTVAL (operands[2]),
6708 force_reg (SImode, XEXP (operands[1], 0)),
6709 FALSE, operands[1], &offset);
6712 (define_expand "store_multiple"
6713 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6714 (match_operand:SI 1 "" ""))
6715 (use (match_operand:SI 2 "" ""))])]
6718 HOST_WIDE_INT offset = 0;
6720 /* Support only fixed point registers. */
6721 if (GET_CODE (operands[2]) != CONST_INT
6722 || INTVAL (operands[2]) > 14
6723 || INTVAL (operands[2]) < 2
6724 || GET_CODE (operands[1]) != REG
6725 || GET_CODE (operands[0]) != MEM
6726 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6727 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6731 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6732 INTVAL (operands[2]),
6733 force_reg (SImode, XEXP (operands[0], 0)),
6734 FALSE, operands[0], &offset);
6738 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6739 ;; We could let this apply for blocks of less than this, but it clobbers so
6740 ;; many registers that there is then probably a better way.
6742 (define_expand "movmemqi"
6743 [(match_operand:BLK 0 "general_operand" "")
6744 (match_operand:BLK 1 "general_operand" "")
6745 (match_operand:SI 2 "const_int_operand" "")
6746 (match_operand:SI 3 "const_int_operand" "")]
6751 if (arm_gen_movmemqi (operands))
6755 else /* TARGET_THUMB1 */
6757 if ( INTVAL (operands[3]) != 4
6758 || INTVAL (operands[2]) > 48)
6761 thumb_expand_movmemqi (operands);
6767 ;; Thumb block-move insns
6769 (define_insn "movmem12b"
6770 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6771 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6772 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6773 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6774 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6775 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6776 (set (match_operand:SI 0 "register_operand" "=l")
6777 (plus:SI (match_dup 2) (const_int 12)))
6778 (set (match_operand:SI 1 "register_operand" "=l")
6779 (plus:SI (match_dup 3) (const_int 12)))
6780 (clobber (match_scratch:SI 4 "=&l"))
6781 (clobber (match_scratch:SI 5 "=&l"))
6782 (clobber (match_scratch:SI 6 "=&l"))]
6784 "* return thumb_output_move_mem_multiple (3, operands);"
6785 [(set_attr "length" "4")
6786 ; This isn't entirely accurate... It loads as well, but in terms of
6787 ; scheduling the following insn it is better to consider it as a store
6788 (set_attr "type" "store3")]
6791 (define_insn "movmem8b"
6792 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6793 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6794 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6795 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6796 (set (match_operand:SI 0 "register_operand" "=l")
6797 (plus:SI (match_dup 2) (const_int 8)))
6798 (set (match_operand:SI 1 "register_operand" "=l")
6799 (plus:SI (match_dup 3) (const_int 8)))
6800 (clobber (match_scratch:SI 4 "=&l"))
6801 (clobber (match_scratch:SI 5 "=&l"))]
6803 "* return thumb_output_move_mem_multiple (2, operands);"
6804 [(set_attr "length" "4")
6805 ; This isn't entirely accurate... It loads as well, but in terms of
6806 ; scheduling the following insn it is better to consider it as a store
6807 (set_attr "type" "store2")]
6812 ;; Compare & branch insns
6813 ;; The range calculations are based as follows:
6814 ;; For forward branches, the address calculation returns the address of
6815 ;; the next instruction. This is 2 beyond the branch instruction.
6816 ;; For backward branches, the address calculation returns the address of
6817 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6818 ;; instruction for the shortest sequence, and 4 before the branch instruction
6819 ;; if we have to jump around an unconditional branch.
6820 ;; To the basic branch range the PC offset must be added (this is +4).
6821 ;; So for forward branches we have
6822 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6823 ;; And for backward branches we have
6824 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6826 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6827 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6829 (define_expand "cbranchsi4"
6830 [(set (pc) (if_then_else
6831 (match_operator 0 "expandable_comparison_operator"
6832 [(match_operand:SI 1 "s_register_operand" "")
6833 (match_operand:SI 2 "nonmemory_operand" "")])
6834 (label_ref (match_operand 3 "" ""))
6836 "TARGET_THUMB1 || TARGET_32BIT"
6840 if (!arm_add_operand (operands[2], SImode))
6841 operands[2] = force_reg (SImode, operands[2]);
6842 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6846 if (thumb1_cmpneg_operand (operands[2], SImode))
6848 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6849 operands[3], operands[0]));
6852 if (!thumb1_cmp_operand (operands[2], SImode))
6853 operands[2] = force_reg (SImode, operands[2]);
6856 ;; A pattern to recognize a special situation and optimize for it.
6857 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
6858 ;; due to the available addressing modes. Hence, convert a signed comparison
6859 ;; with zero into an unsigned comparison with 127 if possible.
6860 (define_expand "cbranchqi4"
6861 [(set (pc) (if_then_else
6862 (match_operator 0 "lt_ge_comparison_operator"
6863 [(match_operand:QI 1 "memory_operand" "")
6864 (match_operand:QI 2 "const0_operand" "")])
6865 (label_ref (match_operand 3 "" ""))
6870 xops[1] = gen_reg_rtx (SImode);
6871 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6872 xops[2] = GEN_INT (127);
6873 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6874 VOIDmode, xops[1], xops[2]);
6875 xops[3] = operands[3];
6876 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6880 (define_expand "cbranchsf4"
6881 [(set (pc) (if_then_else
6882 (match_operator 0 "expandable_comparison_operator"
6883 [(match_operand:SF 1 "s_register_operand" "")
6884 (match_operand:SF 2 "arm_float_compare_operand" "")])
6885 (label_ref (match_operand 3 "" ""))
6887 "TARGET_32BIT && TARGET_HARD_FLOAT"
6888 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6889 operands[3])); DONE;"
6892 (define_expand "cbranchdf4"
6893 [(set (pc) (if_then_else
6894 (match_operator 0 "expandable_comparison_operator"
6895 [(match_operand:DF 1 "s_register_operand" "")
6896 (match_operand:DF 2 "arm_float_compare_operand" "")])
6897 (label_ref (match_operand 3 "" ""))
6899 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6900 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6901 operands[3])); DONE;"
6904 (define_expand "cbranchdi4"
6905 [(set (pc) (if_then_else
6906 (match_operator 0 "expandable_comparison_operator"
6907 [(match_operand:DI 1 "cmpdi_operand" "")
6908 (match_operand:DI 2 "cmpdi_operand" "")])
6909 (label_ref (match_operand 3 "" ""))
6913 rtx swap = NULL_RTX;
6914 enum rtx_code code = GET_CODE (operands[0]);
6916 /* We should not have two constants. */
6917 gcc_assert (GET_MODE (operands[1]) == DImode
6918 || GET_MODE (operands[2]) == DImode);
6920 /* Flip unimplemented DImode comparisons to a form that
6921 arm_gen_compare_reg can handle. */
6925 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6927 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6929 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6931 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6936 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6939 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6945 (define_insn "cbranchsi4_insn"
6946 [(set (pc) (if_then_else
6947 (match_operator 0 "arm_comparison_operator"
6948 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6949 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6950 (label_ref (match_operand 3 "" ""))
6954 rtx t = cfun->machine->thumb1_cc_insn;
6957 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6958 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6960 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6962 if (!noov_comparison_operator (operands[0], VOIDmode))
6965 else if (cfun->machine->thumb1_cc_mode != CCmode)
6970 output_asm_insn ("cmp\t%1, %2", operands);
6971 cfun->machine->thumb1_cc_insn = insn;
6972 cfun->machine->thumb1_cc_op0 = operands[1];
6973 cfun->machine->thumb1_cc_op1 = operands[2];
6974 cfun->machine->thumb1_cc_mode = CCmode;
6977 /* Ensure we emit the right type of condition code on the jump. */
6978 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6981 switch (get_attr_length (insn))
6983 case 4: return \"b%d0\\t%l3\";
6984 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6985 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6988 [(set (attr "far_jump")
6990 (eq_attr "length" "8")
6991 (const_string "yes")
6992 (const_string "no")))
6993 (set (attr "length")
6995 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6996 (le (minus (match_dup 3) (pc)) (const_int 256)))
6999 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7000 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7005 (define_insn "cbranchsi4_scratch"
7006 [(set (pc) (if_then_else
7007 (match_operator 4 "arm_comparison_operator"
7008 [(match_operand:SI 1 "s_register_operand" "l,0")
7009 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7010 (label_ref (match_operand 3 "" ""))
7012 (clobber (match_scratch:SI 0 "=l,l"))]
7015 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7017 switch (get_attr_length (insn))
7019 case 4: return \"b%d4\\t%l3\";
7020 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7021 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7024 [(set (attr "far_jump")
7026 (eq_attr "length" "8")
7027 (const_string "yes")
7028 (const_string "no")))
7029 (set (attr "length")
7031 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7032 (le (minus (match_dup 3) (pc)) (const_int 256)))
7035 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7036 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7041 ;; Two peepholes to generate subtract of 0 instead of a move if the
7042 ;; condition codes will be useful.
7044 [(set (match_operand:SI 0 "low_register_operand" "")
7045 (match_operand:SI 1 "low_register_operand" ""))
7047 (if_then_else (match_operator 2 "arm_comparison_operator"
7048 [(match_dup 1) (const_int 0)])
7049 (label_ref (match_operand 3 "" ""))
7052 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7054 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7055 (label_ref (match_dup 3))
7059 ;; Sigh! This variant shouldn't be needed, but combine often fails to
7060 ;; merge cases like this because the op1 is a hard register in
7061 ;; arm_class_likely_spilled_p.
7063 [(set (match_operand:SI 0 "low_register_operand" "")
7064 (match_operand:SI 1 "low_register_operand" ""))
7066 (if_then_else (match_operator 2 "arm_comparison_operator"
7067 [(match_dup 0) (const_int 0)])
7068 (label_ref (match_operand 3 "" ""))
7071 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7073 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7074 (label_ref (match_dup 3))
7078 (define_insn "*negated_cbranchsi4"
7081 (match_operator 0 "equality_operator"
7082 [(match_operand:SI 1 "s_register_operand" "l")
7083 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7084 (label_ref (match_operand 3 "" ""))
7088 output_asm_insn (\"cmn\\t%1, %2\", operands);
7089 switch (get_attr_length (insn))
7091 case 4: return \"b%d0\\t%l3\";
7092 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7093 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7096 [(set (attr "far_jump")
7098 (eq_attr "length" "8")
7099 (const_string "yes")
7100 (const_string "no")))
7101 (set (attr "length")
7103 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7104 (le (minus (match_dup 3) (pc)) (const_int 256)))
7107 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7108 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7113 (define_insn "*tbit_cbranch"
7116 (match_operator 0 "equality_operator"
7117 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7119 (match_operand:SI 2 "const_int_operand" "i"))
7121 (label_ref (match_operand 3 "" ""))
7123 (clobber (match_scratch:SI 4 "=l"))]
7128 op[0] = operands[4];
7129 op[1] = operands[1];
7130 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7132 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7133 switch (get_attr_length (insn))
7135 case 4: return \"b%d0\\t%l3\";
7136 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7137 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7140 [(set (attr "far_jump")
7142 (eq_attr "length" "8")
7143 (const_string "yes")
7144 (const_string "no")))
7145 (set (attr "length")
7147 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7148 (le (minus (match_dup 3) (pc)) (const_int 256)))
7151 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7152 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7157 (define_insn "*tlobits_cbranch"
7160 (match_operator 0 "equality_operator"
7161 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7162 (match_operand:SI 2 "const_int_operand" "i")
7165 (label_ref (match_operand 3 "" ""))
7167 (clobber (match_scratch:SI 4 "=l"))]
7172 op[0] = operands[4];
7173 op[1] = operands[1];
7174 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7176 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7177 switch (get_attr_length (insn))
7179 case 4: return \"b%d0\\t%l3\";
7180 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7181 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7184 [(set (attr "far_jump")
7186 (eq_attr "length" "8")
7187 (const_string "yes")
7188 (const_string "no")))
7189 (set (attr "length")
7191 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7192 (le (minus (match_dup 3) (pc)) (const_int 256)))
7195 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7196 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7201 (define_insn "*tstsi3_cbranch"
7204 (match_operator 3 "equality_operator"
7205 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7206 (match_operand:SI 1 "s_register_operand" "l"))
7208 (label_ref (match_operand 2 "" ""))
7213 output_asm_insn (\"tst\\t%0, %1\", operands);
7214 switch (get_attr_length (insn))
7216 case 4: return \"b%d3\\t%l2\";
7217 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7218 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7221 [(set (attr "far_jump")
7223 (eq_attr "length" "8")
7224 (const_string "yes")
7225 (const_string "no")))
7226 (set (attr "length")
7228 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7229 (le (minus (match_dup 2) (pc)) (const_int 256)))
7232 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7233 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7238 (define_insn "*cbranchne_decr1"
7240 (if_then_else (match_operator 3 "equality_operator"
7241 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7243 (label_ref (match_operand 4 "" ""))
7245 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7246 (plus:SI (match_dup 2) (const_int -1)))
7247 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7252 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7254 VOIDmode, operands[2], const1_rtx);
7255 cond[1] = operands[4];
7257 if (which_alternative == 0)
7258 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7259 else if (which_alternative == 1)
7261 /* We must provide an alternative for a hi reg because reload
7262 cannot handle output reloads on a jump instruction, but we
7263 can't subtract into that. Fortunately a mov from lo to hi
7264 does not clobber the condition codes. */
7265 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7266 output_asm_insn (\"mov\\t%0, %1\", operands);
7270 /* Similarly, but the target is memory. */
7271 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7272 output_asm_insn (\"str\\t%1, %0\", operands);
7275 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7278 output_asm_insn (\"b%d0\\t%l1\", cond);
7281 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7282 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7284 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7285 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7289 [(set (attr "far_jump")
7291 (ior (and (eq (symbol_ref ("which_alternative"))
7293 (eq_attr "length" "8"))
7294 (eq_attr "length" "10"))
7295 (const_string "yes")
7296 (const_string "no")))
7297 (set_attr_alternative "length"
7301 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7302 (le (minus (match_dup 4) (pc)) (const_int 256)))
7305 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7306 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7311 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7312 (le (minus (match_dup 4) (pc)) (const_int 256)))
7315 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7316 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7321 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7322 (le (minus (match_dup 4) (pc)) (const_int 256)))
7325 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7326 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7331 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7332 (le (minus (match_dup 4) (pc)) (const_int 256)))
7335 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7336 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7341 (define_insn "*addsi3_cbranch"
7344 (match_operator 4 "arm_comparison_operator"
7346 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7347 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7349 (label_ref (match_operand 5 "" ""))
7352 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7353 (plus:SI (match_dup 2) (match_dup 3)))
7354 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7356 && (GET_CODE (operands[4]) == EQ
7357 || GET_CODE (operands[4]) == NE
7358 || GET_CODE (operands[4]) == GE
7359 || GET_CODE (operands[4]) == LT)"
7364 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7365 cond[1] = operands[2];
7366 cond[2] = operands[3];
7368 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7369 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7371 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7373 if (which_alternative >= 2
7374 && which_alternative < 4)
7375 output_asm_insn (\"mov\\t%0, %1\", operands);
7376 else if (which_alternative >= 4)
7377 output_asm_insn (\"str\\t%1, %0\", operands);
7379 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7382 return \"b%d4\\t%l5\";
7384 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7386 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7390 [(set (attr "far_jump")
7392 (ior (and (lt (symbol_ref ("which_alternative"))
7394 (eq_attr "length" "8"))
7395 (eq_attr "length" "10"))
7396 (const_string "yes")
7397 (const_string "no")))
7398 (set (attr "length")
7400 (lt (symbol_ref ("which_alternative"))
7403 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7404 (le (minus (match_dup 5) (pc)) (const_int 256)))
7407 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7408 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7412 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7413 (le (minus (match_dup 5) (pc)) (const_int 256)))
7416 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7417 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7422 (define_insn "*addsi3_cbranch_scratch"
7425 (match_operator 3 "arm_comparison_operator"
7427 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7428 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7430 (label_ref (match_operand 4 "" ""))
7432 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7434 && (GET_CODE (operands[3]) == EQ
7435 || GET_CODE (operands[3]) == NE
7436 || GET_CODE (operands[3]) == GE
7437 || GET_CODE (operands[3]) == LT)"
7440 switch (which_alternative)
7443 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7446 output_asm_insn (\"cmn\t%1, %2\", operands);
7449 if (INTVAL (operands[2]) < 0)
7450 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7452 output_asm_insn (\"add\t%0, %1, %2\", operands);
7455 if (INTVAL (operands[2]) < 0)
7456 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7458 output_asm_insn (\"add\t%0, %0, %2\", operands);
7462 switch (get_attr_length (insn))
7465 return \"b%d3\\t%l4\";
7467 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7469 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7473 [(set (attr "far_jump")
7475 (eq_attr "length" "8")
7476 (const_string "yes")
7477 (const_string "no")))
7478 (set (attr "length")
7480 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7481 (le (minus (match_dup 4) (pc)) (const_int 256)))
7484 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7485 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7491 ;; Comparison and test insns
7493 (define_insn "*arm_cmpsi_insn"
7494 [(set (reg:CC CC_REGNUM)
7495 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7496 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7503 [(set_attr "conds" "set")
7504 (set_attr "arch" "t2,t2,any,any")
7505 (set_attr "length" "2,2,4,4")
7506 (set_attr "predicable" "yes")]
7509 (define_insn "*cmpsi_shiftsi"
7510 [(set (reg:CC CC_REGNUM)
7511 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7512 (match_operator:SI 3 "shift_operator"
7513 [(match_operand:SI 1 "s_register_operand" "r,r")
7514 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7517 [(set_attr "conds" "set")
7518 (set_attr "shift" "1")
7519 (set_attr "arch" "32,a")
7520 (set_attr "type" "alu_shift,alu_shift_reg")])
7522 (define_insn "*cmpsi_shiftsi_swp"
7523 [(set (reg:CC_SWP CC_REGNUM)
7524 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7525 [(match_operand:SI 1 "s_register_operand" "r,r")
7526 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7527 (match_operand:SI 0 "s_register_operand" "r,r")))]
7530 [(set_attr "conds" "set")
7531 (set_attr "shift" "1")
7532 (set_attr "arch" "32,a")
7533 (set_attr "type" "alu_shift,alu_shift_reg")])
7535 (define_insn "*arm_cmpsi_negshiftsi_si"
7536 [(set (reg:CC_Z CC_REGNUM)
7538 (neg:SI (match_operator:SI 1 "shift_operator"
7539 [(match_operand:SI 2 "s_register_operand" "r")
7540 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7541 (match_operand:SI 0 "s_register_operand" "r")))]
7544 [(set_attr "conds" "set")
7545 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7546 (const_string "alu_shift")
7547 (const_string "alu_shift_reg")))
7548 (set_attr "predicable" "yes")]
7551 ;; DImode comparisons. The generic code generates branches that
7552 ;; if-conversion can not reduce to a conditional compare, so we do
7555 (define_insn "*arm_cmpdi_insn"
7556 [(set (reg:CC_NCV CC_REGNUM)
7557 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7558 (match_operand:DI 1 "arm_di_operand" "rDi")))
7559 (clobber (match_scratch:SI 2 "=r"))]
7560 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7561 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7562 [(set_attr "conds" "set")
7563 (set_attr "length" "8")]
7566 (define_insn "*arm_cmpdi_unsigned"
7567 [(set (reg:CC_CZ CC_REGNUM)
7568 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7569 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7571 "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
7572 [(set_attr "conds" "set")
7573 (set_attr "length" "8")]
7576 (define_insn "*arm_cmpdi_zero"
7577 [(set (reg:CC_Z CC_REGNUM)
7578 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7580 (clobber (match_scratch:SI 1 "=r"))]
7582 "orr%.\\t%1, %Q0, %R0"
7583 [(set_attr "conds" "set")]
7586 (define_insn "*thumb_cmpdi_zero"
7587 [(set (reg:CC_Z CC_REGNUM)
7588 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7590 (clobber (match_scratch:SI 1 "=l"))]
7592 "orr\\t%1, %Q0, %R0"
7593 [(set_attr "conds" "set")
7594 (set_attr "length" "2")]
7597 ;; Cirrus SF compare instruction
7598 (define_insn "*cirrus_cmpsf"
7599 [(set (reg:CCFP CC_REGNUM)
7600 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7601 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7602 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7603 "cfcmps%?\\tr15, %V0, %V1"
7604 [(set_attr "type" "mav_farith")
7605 (set_attr "cirrus" "compare")]
7608 ;; Cirrus DF compare instruction
7609 (define_insn "*cirrus_cmpdf"
7610 [(set (reg:CCFP CC_REGNUM)
7611 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7612 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7613 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7614 "cfcmpd%?\\tr15, %V0, %V1"
7615 [(set_attr "type" "mav_farith")
7616 (set_attr "cirrus" "compare")]
7619 (define_insn "*cirrus_cmpdi"
7620 [(set (reg:CC CC_REGNUM)
7621 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7622 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7623 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7624 "cfcmp64%?\\tr15, %V0, %V1"
7625 [(set_attr "type" "mav_farith")
7626 (set_attr "cirrus" "compare")]
7629 ; This insn allows redundant compares to be removed by cse, nothing should
7630 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7631 ; is deleted later on. The match_dup will match the mode here, so that
7632 ; mode changes of the condition codes aren't lost by this even though we don't
7633 ; specify what they are.
7635 (define_insn "*deleted_compare"
7636 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7638 "\\t%@ deleted compare"
7639 [(set_attr "conds" "set")
7640 (set_attr "length" "0")]
7644 ;; Conditional branch insns
7646 (define_expand "cbranch_cc"
7648 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7649 (match_operand 2 "" "")])
7650 (label_ref (match_operand 3 "" ""))
7653 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7654 operands[1], operands[2], NULL_RTX);
7655 operands[2] = const0_rtx;"
7659 ;; Patterns to match conditional branch insns.
7662 (define_insn "*arm_cond_branch"
7664 (if_then_else (match_operator 1 "arm_comparison_operator"
7665 [(match_operand 2 "cc_register" "") (const_int 0)])
7666 (label_ref (match_operand 0 "" ""))
7670 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7672 arm_ccfsm_state += 2;
7675 return \"b%d1\\t%l0\";
7677 [(set_attr "conds" "use")
7678 (set_attr "type" "branch")
7679 (set (attr "length")
7681 (and (match_test "TARGET_THUMB2")
7682 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7683 (le (minus (match_dup 0) (pc)) (const_int 256))))
7688 (define_insn "*arm_cond_branch_reversed"
7690 (if_then_else (match_operator 1 "arm_comparison_operator"
7691 [(match_operand 2 "cc_register" "") (const_int 0)])
7693 (label_ref (match_operand 0 "" ""))))]
7696 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7698 arm_ccfsm_state += 2;
7701 return \"b%D1\\t%l0\";
7703 [(set_attr "conds" "use")
7704 (set_attr "type" "branch")
7705 (set (attr "length")
7707 (and (match_test "TARGET_THUMB2")
7708 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7709 (le (minus (match_dup 0) (pc)) (const_int 256))))
7718 (define_expand "cstore_cc"
7719 [(set (match_operand:SI 0 "s_register_operand" "")
7720 (match_operator:SI 1 "" [(match_operand 2 "" "")
7721 (match_operand 3 "" "")]))]
7723 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7724 operands[2], operands[3], NULL_RTX);
7725 operands[3] = const0_rtx;"
7728 (define_insn "*mov_scc"
7729 [(set (match_operand:SI 0 "s_register_operand" "=r")
7730 (match_operator:SI 1 "arm_comparison_operator"
7731 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7733 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7734 [(set_attr "conds" "use")
7735 (set_attr "insn" "mov")
7736 (set_attr "length" "8")]
7739 (define_insn "*mov_negscc"
7740 [(set (match_operand:SI 0 "s_register_operand" "=r")
7741 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7742 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7744 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7745 [(set_attr "conds" "use")
7746 (set_attr "insn" "mov")
7747 (set_attr "length" "8")]
7750 (define_insn "*mov_notscc"
7751 [(set (match_operand:SI 0 "s_register_operand" "=r")
7752 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7753 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7755 "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7756 [(set_attr "conds" "use")
7757 (set_attr "insn" "mov")
7758 (set_attr "length" "8")]
7761 (define_expand "cstoresi4"
7762 [(set (match_operand:SI 0 "s_register_operand" "")
7763 (match_operator:SI 1 "expandable_comparison_operator"
7764 [(match_operand:SI 2 "s_register_operand" "")
7765 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7766 "TARGET_32BIT || TARGET_THUMB1"
7768 rtx op3, scratch, scratch2;
7772 if (!arm_add_operand (operands[3], SImode))
7773 operands[3] = force_reg (SImode, operands[3]);
7774 emit_insn (gen_cstore_cc (operands[0], operands[1],
7775 operands[2], operands[3]));
7779 if (operands[3] == const0_rtx)
7781 switch (GET_CODE (operands[1]))
7784 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7788 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7792 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7793 NULL_RTX, 0, OPTAB_WIDEN);
7794 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7795 NULL_RTX, 0, OPTAB_WIDEN);
7796 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7797 operands[0], 1, OPTAB_WIDEN);
7801 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7803 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7804 NULL_RTX, 1, OPTAB_WIDEN);
7808 scratch = expand_binop (SImode, ashr_optab, operands[2],
7809 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7810 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7811 NULL_RTX, 0, OPTAB_WIDEN);
7812 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7816 /* LT is handled by generic code. No need for unsigned with 0. */
7823 switch (GET_CODE (operands[1]))
7826 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7827 NULL_RTX, 0, OPTAB_WIDEN);
7828 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7832 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7833 NULL_RTX, 0, OPTAB_WIDEN);
7834 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7838 op3 = force_reg (SImode, operands[3]);
7840 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7841 NULL_RTX, 1, OPTAB_WIDEN);
7842 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7843 NULL_RTX, 0, OPTAB_WIDEN);
7844 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7850 if (!thumb1_cmp_operand (op3, SImode))
7851 op3 = force_reg (SImode, op3);
7852 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7853 NULL_RTX, 0, OPTAB_WIDEN);
7854 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7855 NULL_RTX, 1, OPTAB_WIDEN);
7856 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7861 op3 = force_reg (SImode, operands[3]);
7862 scratch = force_reg (SImode, const0_rtx);
7863 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7869 if (!thumb1_cmp_operand (op3, SImode))
7870 op3 = force_reg (SImode, op3);
7871 scratch = force_reg (SImode, const0_rtx);
7872 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7878 if (!thumb1_cmp_operand (op3, SImode))
7879 op3 = force_reg (SImode, op3);
7880 scratch = gen_reg_rtx (SImode);
7881 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7885 op3 = force_reg (SImode, operands[3]);
7886 scratch = gen_reg_rtx (SImode);
7887 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7890 /* No good sequences for GT, LT. */
7897 (define_expand "cstoresf4"
7898 [(set (match_operand:SI 0 "s_register_operand" "")
7899 (match_operator:SI 1 "expandable_comparison_operator"
7900 [(match_operand:SF 2 "s_register_operand" "")
7901 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7902 "TARGET_32BIT && TARGET_HARD_FLOAT"
7903 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7904 operands[2], operands[3])); DONE;"
7907 (define_expand "cstoredf4"
7908 [(set (match_operand:SI 0 "s_register_operand" "")
7909 (match_operator:SI 1 "expandable_comparison_operator"
7910 [(match_operand:DF 2 "s_register_operand" "")
7911 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7912 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7913 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7914 operands[2], operands[3])); DONE;"
7917 (define_expand "cstoredi4"
7918 [(set (match_operand:SI 0 "s_register_operand" "")
7919 (match_operator:SI 1 "expandable_comparison_operator"
7920 [(match_operand:DI 2 "cmpdi_operand" "")
7921 (match_operand:DI 3 "cmpdi_operand" "")]))]
7924 rtx swap = NULL_RTX;
7925 enum rtx_code code = GET_CODE (operands[1]);
7927 /* We should not have two constants. */
7928 gcc_assert (GET_MODE (operands[2]) == DImode
7929 || GET_MODE (operands[3]) == DImode);
7931 /* Flip unimplemented DImode comparisons to a form that
7932 arm_gen_compare_reg can handle. */
7936 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7938 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7940 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7942 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7947 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7950 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7956 (define_expand "cstoresi_eq0_thumb1"
7958 [(set (match_operand:SI 0 "s_register_operand" "")
7959 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7961 (clobber (match_dup:SI 2))])]
7963 "operands[2] = gen_reg_rtx (SImode);"
7966 (define_expand "cstoresi_ne0_thumb1"
7968 [(set (match_operand:SI 0 "s_register_operand" "")
7969 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7971 (clobber (match_dup:SI 2))])]
7973 "operands[2] = gen_reg_rtx (SImode);"
7976 (define_insn "*cstoresi_eq0_thumb1_insn"
7977 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7978 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7980 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7983 neg\\t%0, %1\;adc\\t%0, %0, %1
7984 neg\\t%2, %1\;adc\\t%0, %1, %2"
7985 [(set_attr "length" "4")]
7988 (define_insn "*cstoresi_ne0_thumb1_insn"
7989 [(set (match_operand:SI 0 "s_register_operand" "=l")
7990 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7992 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7994 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7995 [(set_attr "length" "4")]
7998 ;; Used as part of the expansion of thumb ltu and gtu sequences
7999 (define_insn "cstoresi_nltu_thumb1"
8000 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8001 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8002 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8004 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8005 [(set_attr "length" "4")]
8008 (define_insn_and_split "cstoresi_ltu_thumb1"
8009 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8010 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8011 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8016 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8017 (set (match_dup 0) (neg:SI (match_dup 3)))]
8018 "operands[3] = gen_reg_rtx (SImode);"
8019 [(set_attr "length" "4")]
8022 ;; Used as part of the expansion of thumb les sequence.
8023 (define_insn "thumb1_addsi3_addgeu"
8024 [(set (match_operand:SI 0 "s_register_operand" "=l")
8025 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8026 (match_operand:SI 2 "s_register_operand" "l"))
8027 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8028 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8030 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8031 [(set_attr "length" "4")]
8035 ;; Conditional move insns
8037 (define_expand "movsicc"
8038 [(set (match_operand:SI 0 "s_register_operand" "")
8039 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8040 (match_operand:SI 2 "arm_not_operand" "")
8041 (match_operand:SI 3 "arm_not_operand" "")))]
8045 enum rtx_code code = GET_CODE (operands[1]);
8048 if (code == UNEQ || code == LTGT)
8051 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8052 XEXP (operands[1], 1), NULL_RTX);
8053 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8057 (define_expand "movsfcc"
8058 [(set (match_operand:SF 0 "s_register_operand" "")
8059 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8060 (match_operand:SF 2 "s_register_operand" "")
8061 (match_operand:SF 3 "nonmemory_operand" "")))]
8062 "TARGET_32BIT && TARGET_HARD_FLOAT"
8065 enum rtx_code code = GET_CODE (operands[1]);
8068 if (code == UNEQ || code == LTGT)
8071 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8072 Otherwise, ensure it is a valid FP add operand */
8073 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8074 || (!arm_float_add_operand (operands[3], SFmode)))
8075 operands[3] = force_reg (SFmode, operands[3]);
8077 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8078 XEXP (operands[1], 1), NULL_RTX);
8079 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8083 (define_expand "movdfcc"
8084 [(set (match_operand:DF 0 "s_register_operand" "")
8085 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8086 (match_operand:DF 2 "s_register_operand" "")
8087 (match_operand:DF 3 "arm_float_add_operand" "")))]
8088 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8091 enum rtx_code code = GET_CODE (operands[1]);
8094 if (code == UNEQ || code == LTGT)
8097 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8098 XEXP (operands[1], 1), NULL_RTX);
8099 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8103 (define_insn "*movsicc_insn"
8104 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8106 (match_operator 3 "arm_comparison_operator"
8107 [(match_operand 4 "cc_register" "") (const_int 0)])
8108 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8109 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8116 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8117 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8118 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8119 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8120 [(set_attr "length" "4,4,4,4,8,8,8,8")
8121 (set_attr "conds" "use")
8122 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
8125 (define_insn "*movsfcc_soft_insn"
8126 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8127 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8128 [(match_operand 4 "cc_register" "") (const_int 0)])
8129 (match_operand:SF 1 "s_register_operand" "0,r")
8130 (match_operand:SF 2 "s_register_operand" "r,0")))]
8131 "TARGET_ARM && TARGET_SOFT_FLOAT"
8135 [(set_attr "conds" "use")
8136 (set_attr "insn" "mov")]
8140 ;; Jump and linkage insns
8142 (define_expand "jump"
8144 (label_ref (match_operand 0 "" "")))]
8149 (define_insn "*arm_jump"
8151 (label_ref (match_operand 0 "" "")))]
8155 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8157 arm_ccfsm_state += 2;
8160 return \"b%?\\t%l0\";
8163 [(set_attr "predicable" "yes")
8164 (set (attr "length")
8166 (and (match_test "TARGET_THUMB2")
8167 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8168 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8173 (define_insn "*thumb_jump"
8175 (label_ref (match_operand 0 "" "")))]
8178 if (get_attr_length (insn) == 2)
8180 return \"bl\\t%l0\\t%@ far jump\";
8182 [(set (attr "far_jump")
8184 (eq_attr "length" "4")
8185 (const_string "yes")
8186 (const_string "no")))
8187 (set (attr "length")
8189 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8190 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8195 (define_expand "call"
8196 [(parallel [(call (match_operand 0 "memory_operand" "")
8197 (match_operand 1 "general_operand" ""))
8198 (use (match_operand 2 "" ""))
8199 (clobber (reg:SI LR_REGNUM))])]
8205 /* In an untyped call, we can get NULL for operand 2. */
8206 if (operands[2] == NULL_RTX)
8207 operands[2] = const0_rtx;
8209 /* Decide if we should generate indirect calls by loading the
8210 32-bit address of the callee into a register before performing the
8212 callee = XEXP (operands[0], 0);
8213 if (GET_CODE (callee) == SYMBOL_REF
8214 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8216 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8218 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8219 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8224 (define_expand "call_internal"
8225 [(parallel [(call (match_operand 0 "memory_operand" "")
8226 (match_operand 1 "general_operand" ""))
8227 (use (match_operand 2 "" ""))
8228 (clobber (reg:SI LR_REGNUM))])])
8230 (define_insn "*call_reg_armv5"
8231 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8232 (match_operand 1 "" ""))
8233 (use (match_operand 2 "" ""))
8234 (clobber (reg:SI LR_REGNUM))]
8235 "TARGET_ARM && arm_arch5"
8237 [(set_attr "type" "call")]
8240 (define_insn "*call_reg_arm"
8241 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8242 (match_operand 1 "" ""))
8243 (use (match_operand 2 "" ""))
8244 (clobber (reg:SI LR_REGNUM))]
8245 "TARGET_ARM && !arm_arch5"
8247 return output_call (operands);
8249 ;; length is worst case, normally it is only two
8250 [(set_attr "length" "12")
8251 (set_attr "type" "call")]
8255 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8256 ;; considered a function call by the branch predictor of some cores (PR40887).
8257 ;; Falls back to blx rN (*call_reg_armv5).
8259 (define_insn "*call_mem"
8260 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8261 (match_operand 1 "" ""))
8262 (use (match_operand 2 "" ""))
8263 (clobber (reg:SI LR_REGNUM))]
8264 "TARGET_ARM && !arm_arch5"
8266 return output_call_mem (operands);
8268 [(set_attr "length" "12")
8269 (set_attr "type" "call")]
8272 (define_insn "*call_reg_thumb1_v5"
8273 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8274 (match_operand 1 "" ""))
8275 (use (match_operand 2 "" ""))
8276 (clobber (reg:SI LR_REGNUM))]
8277 "TARGET_THUMB1 && arm_arch5"
8279 [(set_attr "length" "2")
8280 (set_attr "type" "call")]
8283 (define_insn "*call_reg_thumb1"
8284 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8285 (match_operand 1 "" ""))
8286 (use (match_operand 2 "" ""))
8287 (clobber (reg:SI LR_REGNUM))]
8288 "TARGET_THUMB1 && !arm_arch5"
8291 if (!TARGET_CALLER_INTERWORKING)
8292 return thumb_call_via_reg (operands[0]);
8293 else if (operands[1] == const0_rtx)
8294 return \"bl\\t%__interwork_call_via_%0\";
8295 else if (frame_pointer_needed)
8296 return \"bl\\t%__interwork_r7_call_via_%0\";
8298 return \"bl\\t%__interwork_r11_call_via_%0\";
8300 [(set_attr "type" "call")]
8303 (define_expand "call_value"
8304 [(parallel [(set (match_operand 0 "" "")
8305 (call (match_operand 1 "memory_operand" "")
8306 (match_operand 2 "general_operand" "")))
8307 (use (match_operand 3 "" ""))
8308 (clobber (reg:SI LR_REGNUM))])]
8314 /* In an untyped call, we can get NULL for operand 2. */
8315 if (operands[3] == 0)
8316 operands[3] = const0_rtx;
8318 /* Decide if we should generate indirect calls by loading the
8319 32-bit address of the callee into a register before performing the
8321 callee = XEXP (operands[1], 0);
8322 if (GET_CODE (callee) == SYMBOL_REF
8323 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8325 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8327 pat = gen_call_value_internal (operands[0], operands[1],
8328 operands[2], operands[3]);
8329 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8334 (define_expand "call_value_internal"
8335 [(parallel [(set (match_operand 0 "" "")
8336 (call (match_operand 1 "memory_operand" "")
8337 (match_operand 2 "general_operand" "")))
8338 (use (match_operand 3 "" ""))
8339 (clobber (reg:SI LR_REGNUM))])])
8341 (define_insn "*call_value_reg_armv5"
8342 [(set (match_operand 0 "" "")
8343 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8344 (match_operand 2 "" "")))
8345 (use (match_operand 3 "" ""))
8346 (clobber (reg:SI LR_REGNUM))]
8347 "TARGET_ARM && arm_arch5"
8349 [(set_attr "type" "call")]
8352 (define_insn "*call_value_reg_arm"
8353 [(set (match_operand 0 "" "")
8354 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8355 (match_operand 2 "" "")))
8356 (use (match_operand 3 "" ""))
8357 (clobber (reg:SI LR_REGNUM))]
8358 "TARGET_ARM && !arm_arch5"
8360 return output_call (&operands[1]);
8362 [(set_attr "length" "12")
8363 (set_attr "type" "call")]
8366 ;; Note: see *call_mem
8368 (define_insn "*call_value_mem"
8369 [(set (match_operand 0 "" "")
8370 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8371 (match_operand 2 "" "")))
8372 (use (match_operand 3 "" ""))
8373 (clobber (reg:SI LR_REGNUM))]
8374 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8376 return output_call_mem (&operands[1]);
8378 [(set_attr "length" "12")
8379 (set_attr "type" "call")]
8382 (define_insn "*call_value_reg_thumb1_v5"
8383 [(set (match_operand 0 "" "")
8384 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8385 (match_operand 2 "" "")))
8386 (use (match_operand 3 "" ""))
8387 (clobber (reg:SI LR_REGNUM))]
8388 "TARGET_THUMB1 && arm_arch5"
8390 [(set_attr "length" "2")
8391 (set_attr "type" "call")]
8394 (define_insn "*call_value_reg_thumb1"
8395 [(set (match_operand 0 "" "")
8396 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8397 (match_operand 2 "" "")))
8398 (use (match_operand 3 "" ""))
8399 (clobber (reg:SI LR_REGNUM))]
8400 "TARGET_THUMB1 && !arm_arch5"
8403 if (!TARGET_CALLER_INTERWORKING)
8404 return thumb_call_via_reg (operands[1]);
8405 else if (operands[2] == const0_rtx)
8406 return \"bl\\t%__interwork_call_via_%1\";
8407 else if (frame_pointer_needed)
8408 return \"bl\\t%__interwork_r7_call_via_%1\";
8410 return \"bl\\t%__interwork_r11_call_via_%1\";
8412 [(set_attr "type" "call")]
8415 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8416 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8418 (define_insn "*call_symbol"
8419 [(call (mem:SI (match_operand:SI 0 "" ""))
8420 (match_operand 1 "" ""))
8421 (use (match_operand 2 "" ""))
8422 (clobber (reg:SI LR_REGNUM))]
8424 && (GET_CODE (operands[0]) == SYMBOL_REF)
8425 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8428 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8430 [(set_attr "type" "call")]
8433 (define_insn "*call_value_symbol"
8434 [(set (match_operand 0 "" "")
8435 (call (mem:SI (match_operand:SI 1 "" ""))
8436 (match_operand:SI 2 "" "")))
8437 (use (match_operand 3 "" ""))
8438 (clobber (reg:SI LR_REGNUM))]
8440 && (GET_CODE (operands[1]) == SYMBOL_REF)
8441 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8444 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8446 [(set_attr "type" "call")]
8449 (define_insn "*call_insn"
8450 [(call (mem:SI (match_operand:SI 0 "" ""))
8451 (match_operand:SI 1 "" ""))
8452 (use (match_operand 2 "" ""))
8453 (clobber (reg:SI LR_REGNUM))]
8455 && GET_CODE (operands[0]) == SYMBOL_REF
8456 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8458 [(set_attr "length" "4")
8459 (set_attr "type" "call")]
8462 (define_insn "*call_value_insn"
8463 [(set (match_operand 0 "" "")
8464 (call (mem:SI (match_operand 1 "" ""))
8465 (match_operand 2 "" "")))
8466 (use (match_operand 3 "" ""))
8467 (clobber (reg:SI LR_REGNUM))]
8469 && GET_CODE (operands[1]) == SYMBOL_REF
8470 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8472 [(set_attr "length" "4")
8473 (set_attr "type" "call")]
8476 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8477 (define_expand "sibcall"
8478 [(parallel [(call (match_operand 0 "memory_operand" "")
8479 (match_operand 1 "general_operand" ""))
8481 (use (match_operand 2 "" ""))])]
8485 if (operands[2] == NULL_RTX)
8486 operands[2] = const0_rtx;
8490 (define_expand "sibcall_value"
8491 [(parallel [(set (match_operand 0 "" "")
8492 (call (match_operand 1 "memory_operand" "")
8493 (match_operand 2 "general_operand" "")))
8495 (use (match_operand 3 "" ""))])]
8499 if (operands[3] == NULL_RTX)
8500 operands[3] = const0_rtx;
8504 (define_insn "*sibcall_insn"
8505 [(call (mem:SI (match_operand:SI 0 "" "X"))
8506 (match_operand 1 "" ""))
8508 (use (match_operand 2 "" ""))]
8509 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8511 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8513 [(set_attr "type" "call")]
8516 (define_insn "*sibcall_value_insn"
8517 [(set (match_operand 0 "" "")
8518 (call (mem:SI (match_operand:SI 1 "" "X"))
8519 (match_operand 2 "" "")))
8521 (use (match_operand 3 "" ""))]
8522 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8524 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8526 [(set_attr "type" "call")]
8529 (define_expand "return"
8531 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8534 ;; Often the return insn will be the same as loading from memory, so set attr
8535 (define_insn "*arm_return"
8537 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8540 if (arm_ccfsm_state == 2)
8542 arm_ccfsm_state += 2;
8545 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8547 [(set_attr "type" "load1")
8548 (set_attr "length" "12")
8549 (set_attr "predicable" "yes")]
8552 (define_insn "*cond_return"
8554 (if_then_else (match_operator 0 "arm_comparison_operator"
8555 [(match_operand 1 "cc_register" "") (const_int 0)])
8558 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8561 if (arm_ccfsm_state == 2)
8563 arm_ccfsm_state += 2;
8566 return output_return_instruction (operands[0], TRUE, FALSE);
8568 [(set_attr "conds" "use")
8569 (set_attr "length" "12")
8570 (set_attr "type" "load1")]
8573 (define_insn "*cond_return_inverted"
8575 (if_then_else (match_operator 0 "arm_comparison_operator"
8576 [(match_operand 1 "cc_register" "") (const_int 0)])
8579 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8582 if (arm_ccfsm_state == 2)
8584 arm_ccfsm_state += 2;
8587 return output_return_instruction (operands[0], TRUE, TRUE);
8589 [(set_attr "conds" "use")
8590 (set_attr "length" "12")
8591 (set_attr "type" "load1")]
8594 ;; Generate a sequence of instructions to determine if the processor is
8595 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8598 (define_expand "return_addr_mask"
8600 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8602 (set (match_operand:SI 0 "s_register_operand" "")
8603 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8605 (const_int 67108860)))] ; 0x03fffffc
8608 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8611 (define_insn "*check_arch2"
8612 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8613 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8616 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8617 [(set_attr "length" "8")
8618 (set_attr "conds" "set")]
8621 ;; Call subroutine returning any type.
8623 (define_expand "untyped_call"
8624 [(parallel [(call (match_operand 0 "" "")
8626 (match_operand 1 "" "")
8627 (match_operand 2 "" "")])]
8632 rtx par = gen_rtx_PARALLEL (VOIDmode,
8633 rtvec_alloc (XVECLEN (operands[2], 0)));
8634 rtx addr = gen_reg_rtx (Pmode);
8638 emit_move_insn (addr, XEXP (operands[1], 0));
8639 mem = change_address (operands[1], BLKmode, addr);
8641 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8643 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8645 /* Default code only uses r0 as a return value, but we could
8646 be using anything up to 4 registers. */
8647 if (REGNO (src) == R0_REGNUM)
8648 src = gen_rtx_REG (TImode, R0_REGNUM);
8650 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8652 size += GET_MODE_SIZE (GET_MODE (src));
8655 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8660 for (i = 0; i < XVECLEN (par, 0); i++)
8662 HOST_WIDE_INT offset = 0;
8663 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8666 emit_move_insn (addr, plus_constant (addr, size));
8668 mem = change_address (mem, GET_MODE (reg), NULL);
8669 if (REGNO (reg) == R0_REGNUM)
8671 /* On thumb we have to use a write-back instruction. */
8672 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8673 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8674 size = TARGET_ARM ? 16 : 0;
8678 emit_move_insn (mem, reg);
8679 size = GET_MODE_SIZE (GET_MODE (reg));
8683 /* The optimizer does not know that the call sets the function value
8684 registers we stored in the result block. We avoid problems by
8685 claiming that all hard registers are used and clobbered at this
8687 emit_insn (gen_blockage ());
8693 (define_expand "untyped_return"
8694 [(match_operand:BLK 0 "memory_operand" "")
8695 (match_operand 1 "" "")]
8700 rtx addr = gen_reg_rtx (Pmode);
8704 emit_move_insn (addr, XEXP (operands[0], 0));
8705 mem = change_address (operands[0], BLKmode, addr);
8707 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8709 HOST_WIDE_INT offset = 0;
8710 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8713 emit_move_insn (addr, plus_constant (addr, size));
8715 mem = change_address (mem, GET_MODE (reg), NULL);
8716 if (REGNO (reg) == R0_REGNUM)
8718 /* On thumb we have to use a write-back instruction. */
8719 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8720 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8721 size = TARGET_ARM ? 16 : 0;
8725 emit_move_insn (reg, mem);
8726 size = GET_MODE_SIZE (GET_MODE (reg));
8730 /* Emit USE insns before the return. */
8731 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8732 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8734 /* Construct the return. */
8735 expand_naked_return ();
8741 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8742 ;; all of memory. This blocks insns from being moved across this point.
8744 (define_insn "blockage"
8745 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8748 [(set_attr "length" "0")
8749 (set_attr "type" "block")]
8752 (define_expand "casesi"
8753 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8754 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8755 (match_operand:SI 2 "const_int_operand" "") ; total range
8756 (match_operand:SI 3 "" "") ; table label
8757 (match_operand:SI 4 "" "")] ; Out of range label
8758 "TARGET_32BIT || optimize_size || flag_pic"
8761 enum insn_code code;
8762 if (operands[1] != const0_rtx)
8764 rtx reg = gen_reg_rtx (SImode);
8766 emit_insn (gen_addsi3 (reg, operands[0],
8767 gen_int_mode (-INTVAL (operands[1]),
8773 code = CODE_FOR_arm_casesi_internal;
8774 else if (TARGET_THUMB1)
8775 code = CODE_FOR_thumb1_casesi_internal_pic;
8777 code = CODE_FOR_thumb2_casesi_internal_pic;
8779 code = CODE_FOR_thumb2_casesi_internal;
8781 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8782 operands[2] = force_reg (SImode, operands[2]);
8784 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8785 operands[3], operands[4]));
8790 ;; The USE in this pattern is needed to tell flow analysis that this is
8791 ;; a CASESI insn. It has no other purpose.
8792 (define_insn "arm_casesi_internal"
8793 [(parallel [(set (pc)
8795 (leu (match_operand:SI 0 "s_register_operand" "r")
8796 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8797 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8798 (label_ref (match_operand 2 "" ""))))
8799 (label_ref (match_operand 3 "" ""))))
8800 (clobber (reg:CC CC_REGNUM))
8801 (use (label_ref (match_dup 2)))])]
8805 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8806 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8808 [(set_attr "conds" "clob")
8809 (set_attr "length" "12")]
8812 (define_expand "thumb1_casesi_internal_pic"
8813 [(match_operand:SI 0 "s_register_operand" "")
8814 (match_operand:SI 1 "thumb1_cmp_operand" "")
8815 (match_operand 2 "" "")
8816 (match_operand 3 "" "")]
8820 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8821 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8823 reg0 = gen_rtx_REG (SImode, 0);
8824 emit_move_insn (reg0, operands[0]);
8825 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8830 (define_insn "thumb1_casesi_dispatch"
8831 [(parallel [(set (pc) (unspec [(reg:SI 0)
8832 (label_ref (match_operand 0 "" ""))
8833 ;; (label_ref (match_operand 1 "" ""))
8835 UNSPEC_THUMB1_CASESI))
8836 (clobber (reg:SI IP_REGNUM))
8837 (clobber (reg:SI LR_REGNUM))])]
8839 "* return thumb1_output_casesi(operands);"
8840 [(set_attr "length" "4")]
8843 (define_expand "indirect_jump"
8845 (match_operand:SI 0 "s_register_operand" ""))]
8848 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8849 address and use bx. */
8853 tmp = gen_reg_rtx (SImode);
8854 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8860 ;; NB Never uses BX.
8861 (define_insn "*arm_indirect_jump"
8863 (match_operand:SI 0 "s_register_operand" "r"))]
8865 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8866 [(set_attr "predicable" "yes")]
8869 (define_insn "*load_indirect_jump"
8871 (match_operand:SI 0 "memory_operand" "m"))]
8873 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8874 [(set_attr "type" "load1")
8875 (set_attr "pool_range" "4096")
8876 (set_attr "neg_pool_range" "4084")
8877 (set_attr "predicable" "yes")]
8880 ;; NB Never uses BX.
8881 (define_insn "*thumb1_indirect_jump"
8883 (match_operand:SI 0 "register_operand" "l*r"))]
8886 [(set_attr "conds" "clob")
8887 (set_attr "length" "2")]
8897 if (TARGET_UNIFIED_ASM)
8900 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8901 return \"mov\\tr8, r8\";
8903 [(set (attr "length")
8904 (if_then_else (eq_attr "is_thumb" "yes")
8910 ;; Patterns to allow combination of arithmetic, cond code and shifts
8912 (define_insn "*arith_shiftsi"
8913 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8914 (match_operator:SI 1 "shiftable_operator"
8915 [(match_operator:SI 3 "shift_operator"
8916 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8917 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8918 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8920 "%i1%?\\t%0, %2, %4%S3"
8921 [(set_attr "predicable" "yes")
8922 (set_attr "shift" "4")
8923 (set_attr "arch" "a,t2,t2,a")
8924 ;; Thumb2 doesn't allow the stack pointer to be used for
8925 ;; operand1 for all operations other than add and sub. In this case
8926 ;; the minus operation is a candidate for an rsub and hence needs
8928 ;; We have to make sure to disable the fourth alternative if
8929 ;; the shift_operator is MULT, since otherwise the insn will
8930 ;; also match a multiply_accumulate pattern and validate_change
8931 ;; will allow a replacement of the constant with a register
8932 ;; despite the checks done in shift_operator.
8933 (set_attr_alternative "insn_enabled"
8934 [(const_string "yes")
8936 (match_operand:SI 1 "add_operator" "")
8937 (const_string "yes") (const_string "no"))
8938 (const_string "yes")
8940 (match_operand:SI 3 "mult_operator" "")
8941 (const_string "no") (const_string "yes"))])
8942 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
8945 [(set (match_operand:SI 0 "s_register_operand" "")
8946 (match_operator:SI 1 "shiftable_operator"
8947 [(match_operator:SI 2 "shiftable_operator"
8948 [(match_operator:SI 3 "shift_operator"
8949 [(match_operand:SI 4 "s_register_operand" "")
8950 (match_operand:SI 5 "reg_or_int_operand" "")])
8951 (match_operand:SI 6 "s_register_operand" "")])
8952 (match_operand:SI 7 "arm_rhs_operand" "")]))
8953 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8956 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8959 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8962 (define_insn "*arith_shiftsi_compare0"
8963 [(set (reg:CC_NOOV CC_REGNUM)
8965 (match_operator:SI 1 "shiftable_operator"
8966 [(match_operator:SI 3 "shift_operator"
8967 [(match_operand:SI 4 "s_register_operand" "r,r")
8968 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8969 (match_operand:SI 2 "s_register_operand" "r,r")])
8971 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8972 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8975 "%i1%.\\t%0, %2, %4%S3"
8976 [(set_attr "conds" "set")
8977 (set_attr "shift" "4")
8978 (set_attr "arch" "32,a")
8979 (set_attr "type" "alu_shift,alu_shift_reg")])
8981 (define_insn "*arith_shiftsi_compare0_scratch"
8982 [(set (reg:CC_NOOV CC_REGNUM)
8984 (match_operator:SI 1 "shiftable_operator"
8985 [(match_operator:SI 3 "shift_operator"
8986 [(match_operand:SI 4 "s_register_operand" "r,r")
8987 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8988 (match_operand:SI 2 "s_register_operand" "r,r")])
8990 (clobber (match_scratch:SI 0 "=r,r"))]
8992 "%i1%.\\t%0, %2, %4%S3"
8993 [(set_attr "conds" "set")
8994 (set_attr "shift" "4")
8995 (set_attr "arch" "32,a")
8996 (set_attr "type" "alu_shift,alu_shift_reg")])
8998 (define_insn "*sub_shiftsi"
8999 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9000 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9001 (match_operator:SI 2 "shift_operator"
9002 [(match_operand:SI 3 "s_register_operand" "r,r")
9003 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9005 "sub%?\\t%0, %1, %3%S2"
9006 [(set_attr "predicable" "yes")
9007 (set_attr "shift" "3")
9008 (set_attr "arch" "32,a")
9009 (set_attr "type" "alu_shift,alu_shift_reg")])
9011 (define_insn "*sub_shiftsi_compare0"
9012 [(set (reg:CC_NOOV CC_REGNUM)
9014 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9015 (match_operator:SI 2 "shift_operator"
9016 [(match_operand:SI 3 "s_register_operand" "r,r")
9017 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9019 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9020 (minus:SI (match_dup 1)
9021 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9023 "sub%.\\t%0, %1, %3%S2"
9024 [(set_attr "conds" "set")
9025 (set_attr "shift" "3")
9026 (set_attr "arch" "32,a")
9027 (set_attr "type" "alu_shift,alu_shift_reg")])
9029 (define_insn "*sub_shiftsi_compare0_scratch"
9030 [(set (reg:CC_NOOV CC_REGNUM)
9032 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9033 (match_operator:SI 2 "shift_operator"
9034 [(match_operand:SI 3 "s_register_operand" "r,r")
9035 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9037 (clobber (match_scratch:SI 0 "=r,r"))]
9039 "sub%.\\t%0, %1, %3%S2"
9040 [(set_attr "conds" "set")
9041 (set_attr "shift" "3")
9042 (set_attr "arch" "32,a")
9043 (set_attr "type" "alu_shift,alu_shift_reg")])
9046 (define_insn "*and_scc"
9047 [(set (match_operand:SI 0 "s_register_operand" "=r")
9048 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9049 [(match_operand 3 "cc_register" "") (const_int 0)])
9050 (match_operand:SI 2 "s_register_operand" "r")))]
9052 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9053 [(set_attr "conds" "use")
9054 (set_attr "insn" "mov")
9055 (set_attr "length" "8")]
9058 (define_insn "*ior_scc"
9059 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9060 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9061 [(match_operand 3 "cc_register" "") (const_int 0)])
9062 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9066 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9067 [(set_attr "conds" "use")
9068 (set_attr "length" "4,8")]
9071 ; A series of splitters for the compare_scc pattern below. Note that
9072 ; order is important.
9074 [(set (match_operand:SI 0 "s_register_operand" "")
9075 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9077 (clobber (reg:CC CC_REGNUM))]
9078 "TARGET_32BIT && reload_completed"
9079 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9082 [(set (match_operand:SI 0 "s_register_operand" "")
9083 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9085 (clobber (reg:CC CC_REGNUM))]
9086 "TARGET_32BIT && reload_completed"
9087 [(set (match_dup 0) (not:SI (match_dup 1)))
9088 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9091 [(set (match_operand:SI 0 "s_register_operand" "")
9092 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9094 (clobber (reg:CC CC_REGNUM))]
9095 "TARGET_32BIT && reload_completed"
9097 [(set (reg:CC CC_REGNUM)
9098 (compare:CC (const_int 1) (match_dup 1)))
9100 (minus:SI (const_int 1) (match_dup 1)))])
9101 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9102 (set (match_dup 0) (const_int 0)))])
9105 [(set (match_operand:SI 0 "s_register_operand" "")
9106 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9107 (match_operand:SI 2 "const_int_operand" "")))
9108 (clobber (reg:CC CC_REGNUM))]
9109 "TARGET_32BIT && reload_completed"
9111 [(set (reg:CC CC_REGNUM)
9112 (compare:CC (match_dup 1) (match_dup 2)))
9113 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9114 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9115 (set (match_dup 0) (const_int 1)))]
9117 operands[3] = GEN_INT (-INTVAL (operands[2]));
9121 [(set (match_operand:SI 0 "s_register_operand" "")
9122 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9123 (match_operand:SI 2 "arm_add_operand" "")))
9124 (clobber (reg:CC CC_REGNUM))]
9125 "TARGET_32BIT && reload_completed"
9127 [(set (reg:CC_NOOV CC_REGNUM)
9128 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9130 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9131 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9132 (set (match_dup 0) (const_int 1)))])
9134 (define_insn_and_split "*compare_scc"
9135 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9136 (match_operator:SI 1 "arm_comparison_operator"
9137 [(match_operand:SI 2 "s_register_operand" "r,r")
9138 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9139 (clobber (reg:CC CC_REGNUM))]
9142 "&& reload_completed"
9143 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9144 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9145 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9148 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9149 operands[2], operands[3]);
9150 enum rtx_code rc = GET_CODE (operands[1]);
9152 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9154 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9155 if (mode == CCFPmode || mode == CCFPEmode)
9156 rc = reverse_condition_maybe_unordered (rc);
9158 rc = reverse_condition (rc);
9159 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9162 ;; Attempt to improve the sequence generated by the compare_scc splitters
9163 ;; not to use conditional execution.
9165 [(set (reg:CC CC_REGNUM)
9166 (compare:CC (match_operand:SI 1 "register_operand" "")
9167 (match_operand:SI 2 "arm_rhs_operand" "")))
9168 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9169 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9170 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9171 (set (match_dup 0) (const_int 1)))
9172 (match_scratch:SI 3 "r")]
9175 [(set (reg:CC CC_REGNUM)
9176 (compare:CC (match_dup 1) (match_dup 2)))
9177 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
9179 [(set (reg:CC CC_REGNUM)
9180 (compare:CC (const_int 0) (match_dup 3)))
9181 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9184 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9185 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
9186 (clobber (reg:CC CC_REGNUM))])])
9188 (define_insn "*cond_move"
9189 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9190 (if_then_else:SI (match_operator 3 "equality_operator"
9191 [(match_operator 4 "arm_comparison_operator"
9192 [(match_operand 5 "cc_register" "") (const_int 0)])
9194 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9195 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9198 if (GET_CODE (operands[3]) == NE)
9200 if (which_alternative != 1)
9201 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9202 if (which_alternative != 0)
9203 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9206 if (which_alternative != 0)
9207 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9208 if (which_alternative != 1)
9209 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9212 [(set_attr "conds" "use")
9213 (set_attr "insn" "mov")
9214 (set_attr "length" "4,4,8")]
9217 (define_insn "*cond_arith"
9218 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9219 (match_operator:SI 5 "shiftable_operator"
9220 [(match_operator:SI 4 "arm_comparison_operator"
9221 [(match_operand:SI 2 "s_register_operand" "r,r")
9222 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9223 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9224 (clobber (reg:CC CC_REGNUM))]
9227 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9228 return \"%i5\\t%0, %1, %2, lsr #31\";
9230 output_asm_insn (\"cmp\\t%2, %3\", operands);
9231 if (GET_CODE (operands[5]) == AND)
9232 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9233 else if (GET_CODE (operands[5]) == MINUS)
9234 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9235 else if (which_alternative != 0)
9236 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9237 return \"%i5%d4\\t%0, %1, #1\";
9239 [(set_attr "conds" "clob")
9240 (set_attr "length" "12")]
9243 (define_insn "*cond_sub"
9244 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9245 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9246 (match_operator:SI 4 "arm_comparison_operator"
9247 [(match_operand:SI 2 "s_register_operand" "r,r")
9248 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9249 (clobber (reg:CC CC_REGNUM))]
9252 output_asm_insn (\"cmp\\t%2, %3\", operands);
9253 if (which_alternative != 0)
9254 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9255 return \"sub%d4\\t%0, %1, #1\";
9257 [(set_attr "conds" "clob")
9258 (set_attr "length" "8,12")]
9261 (define_insn "*cmp_ite0"
9262 [(set (match_operand 6 "dominant_cc_register" "")
9265 (match_operator 4 "arm_comparison_operator"
9266 [(match_operand:SI 0 "s_register_operand"
9267 "l,l,l,r,r,r,r,r,r")
9268 (match_operand:SI 1 "arm_add_operand"
9269 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9270 (match_operator:SI 5 "arm_comparison_operator"
9271 [(match_operand:SI 2 "s_register_operand"
9272 "l,r,r,l,l,r,r,r,r")
9273 (match_operand:SI 3 "arm_add_operand"
9274 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9280 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9282 {\"cmp%d5\\t%0, %1\",
9283 \"cmp%d4\\t%2, %3\"},
9284 {\"cmn%d5\\t%0, #%n1\",
9285 \"cmp%d4\\t%2, %3\"},
9286 {\"cmp%d5\\t%0, %1\",
9287 \"cmn%d4\\t%2, #%n3\"},
9288 {\"cmn%d5\\t%0, #%n1\",
9289 \"cmn%d4\\t%2, #%n3\"}
9291 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9296 \"cmn\\t%0, #%n1\"},
9297 {\"cmn\\t%2, #%n3\",
9299 {\"cmn\\t%2, #%n3\",
9302 static const char * const ite[2] =
9307 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9308 CMP_CMP, CMN_CMP, CMP_CMP,
9309 CMN_CMP, CMP_CMN, CMN_CMN};
9311 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9313 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9314 if (TARGET_THUMB2) {
9315 output_asm_insn (ite[swap], operands);
9317 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9320 [(set_attr "conds" "set")
9321 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9322 (set_attr_alternative "length"
9328 (if_then_else (eq_attr "is_thumb" "no")
9331 (if_then_else (eq_attr "is_thumb" "no")
9334 (if_then_else (eq_attr "is_thumb" "no")
9337 (if_then_else (eq_attr "is_thumb" "no")
9342 (define_insn "*cmp_ite1"
9343 [(set (match_operand 6 "dominant_cc_register" "")
9346 (match_operator 4 "arm_comparison_operator"
9347 [(match_operand:SI 0 "s_register_operand"
9348 "l,l,l,r,r,r,r,r,r")
9349 (match_operand:SI 1 "arm_add_operand"
9350 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9351 (match_operator:SI 5 "arm_comparison_operator"
9352 [(match_operand:SI 2 "s_register_operand"
9353 "l,r,r,l,l,r,r,r,r")
9354 (match_operand:SI 3 "arm_add_operand"
9355 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9361 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9365 {\"cmn\\t%0, #%n1\",
9368 \"cmn\\t%2, #%n3\"},
9369 {\"cmn\\t%0, #%n1\",
9372 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9374 {\"cmp%d4\\t%2, %3\",
9375 \"cmp%D5\\t%0, %1\"},
9376 {\"cmp%d4\\t%2, %3\",
9377 \"cmn%D5\\t%0, #%n1\"},
9378 {\"cmn%d4\\t%2, #%n3\",
9379 \"cmp%D5\\t%0, %1\"},
9380 {\"cmn%d4\\t%2, #%n3\",
9381 \"cmn%D5\\t%0, #%n1\"}
9383 static const char * const ite[2] =
9388 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9389 CMP_CMP, CMN_CMP, CMP_CMP,
9390 CMN_CMP, CMP_CMN, CMN_CMN};
9392 comparison_dominates_p (GET_CODE (operands[5]),
9393 reverse_condition (GET_CODE (operands[4])));
9395 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9396 if (TARGET_THUMB2) {
9397 output_asm_insn (ite[swap], operands);
9399 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9402 [(set_attr "conds" "set")
9403 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9404 (set_attr_alternative "length"
9410 (if_then_else (eq_attr "is_thumb" "no")
9413 (if_then_else (eq_attr "is_thumb" "no")
9416 (if_then_else (eq_attr "is_thumb" "no")
9419 (if_then_else (eq_attr "is_thumb" "no")
9424 (define_insn "*cmp_and"
9425 [(set (match_operand 6 "dominant_cc_register" "")
9428 (match_operator 4 "arm_comparison_operator"
9429 [(match_operand:SI 0 "s_register_operand"
9430 "l,l,l,r,r,r,r,r,r")
9431 (match_operand:SI 1 "arm_add_operand"
9432 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9433 (match_operator:SI 5 "arm_comparison_operator"
9434 [(match_operand:SI 2 "s_register_operand"
9435 "l,r,r,l,l,r,r,r,r")
9436 (match_operand:SI 3 "arm_add_operand"
9437 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9442 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9444 {\"cmp%d5\\t%0, %1\",
9445 \"cmp%d4\\t%2, %3\"},
9446 {\"cmn%d5\\t%0, #%n1\",
9447 \"cmp%d4\\t%2, %3\"},
9448 {\"cmp%d5\\t%0, %1\",
9449 \"cmn%d4\\t%2, #%n3\"},
9450 {\"cmn%d5\\t%0, #%n1\",
9451 \"cmn%d4\\t%2, #%n3\"}
9453 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9458 \"cmn\\t%0, #%n1\"},
9459 {\"cmn\\t%2, #%n3\",
9461 {\"cmn\\t%2, #%n3\",
9464 static const char *const ite[2] =
9469 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9470 CMP_CMP, CMN_CMP, CMP_CMP,
9471 CMN_CMP, CMP_CMN, CMN_CMN};
9473 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9475 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9476 if (TARGET_THUMB2) {
9477 output_asm_insn (ite[swap], operands);
9479 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9482 [(set_attr "conds" "set")
9483 (set_attr "predicable" "no")
9484 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9485 (set_attr_alternative "length"
9491 (if_then_else (eq_attr "is_thumb" "no")
9494 (if_then_else (eq_attr "is_thumb" "no")
9497 (if_then_else (eq_attr "is_thumb" "no")
9500 (if_then_else (eq_attr "is_thumb" "no")
9505 (define_insn "*cmp_ior"
9506 [(set (match_operand 6 "dominant_cc_register" "")
9509 (match_operator 4 "arm_comparison_operator"
9510 [(match_operand:SI 0 "s_register_operand"
9511 "l,l,l,r,r,r,r,r,r")
9512 (match_operand:SI 1 "arm_add_operand"
9513 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9514 (match_operator:SI 5 "arm_comparison_operator"
9515 [(match_operand:SI 2 "s_register_operand"
9516 "l,r,r,l,l,r,r,r,r")
9517 (match_operand:SI 3 "arm_add_operand"
9518 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9523 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9527 {\"cmn\\t%0, #%n1\",
9530 \"cmn\\t%2, #%n3\"},
9531 {\"cmn\\t%0, #%n1\",
9534 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9536 {\"cmp%D4\\t%2, %3\",
9537 \"cmp%D5\\t%0, %1\"},
9538 {\"cmp%D4\\t%2, %3\",
9539 \"cmn%D5\\t%0, #%n1\"},
9540 {\"cmn%D4\\t%2, #%n3\",
9541 \"cmp%D5\\t%0, %1\"},
9542 {\"cmn%D4\\t%2, #%n3\",
9543 \"cmn%D5\\t%0, #%n1\"}
9545 static const char *const ite[2] =
9550 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9551 CMP_CMP, CMN_CMP, CMP_CMP,
9552 CMN_CMP, CMP_CMN, CMN_CMN};
9554 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9556 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9557 if (TARGET_THUMB2) {
9558 output_asm_insn (ite[swap], operands);
9560 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9564 [(set_attr "conds" "set")
9565 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9566 (set_attr_alternative "length"
9572 (if_then_else (eq_attr "is_thumb" "no")
9575 (if_then_else (eq_attr "is_thumb" "no")
9578 (if_then_else (eq_attr "is_thumb" "no")
9581 (if_then_else (eq_attr "is_thumb" "no")
9586 (define_insn_and_split "*ior_scc_scc"
9587 [(set (match_operand:SI 0 "s_register_operand" "=r")
9588 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9589 [(match_operand:SI 1 "s_register_operand" "r")
9590 (match_operand:SI 2 "arm_add_operand" "rIL")])
9591 (match_operator:SI 6 "arm_comparison_operator"
9592 [(match_operand:SI 4 "s_register_operand" "r")
9593 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9594 (clobber (reg:CC CC_REGNUM))]
9596 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9599 "TARGET_32BIT && reload_completed"
9603 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9604 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9606 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9608 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9611 [(set_attr "conds" "clob")
9612 (set_attr "length" "16")])
9614 ; If the above pattern is followed by a CMP insn, then the compare is
9615 ; redundant, since we can rework the conditional instruction that follows.
9616 (define_insn_and_split "*ior_scc_scc_cmp"
9617 [(set (match_operand 0 "dominant_cc_register" "")
9618 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9619 [(match_operand:SI 1 "s_register_operand" "r")
9620 (match_operand:SI 2 "arm_add_operand" "rIL")])
9621 (match_operator:SI 6 "arm_comparison_operator"
9622 [(match_operand:SI 4 "s_register_operand" "r")
9623 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9625 (set (match_operand:SI 7 "s_register_operand" "=r")
9626 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9627 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9630 "TARGET_32BIT && reload_completed"
9634 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9635 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9637 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9639 [(set_attr "conds" "set")
9640 (set_attr "length" "16")])
9642 (define_insn_and_split "*and_scc_scc"
9643 [(set (match_operand:SI 0 "s_register_operand" "=r")
9644 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9645 [(match_operand:SI 1 "s_register_operand" "r")
9646 (match_operand:SI 2 "arm_add_operand" "rIL")])
9647 (match_operator:SI 6 "arm_comparison_operator"
9648 [(match_operand:SI 4 "s_register_operand" "r")
9649 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9650 (clobber (reg:CC CC_REGNUM))]
9652 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9655 "TARGET_32BIT && reload_completed
9656 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9661 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9662 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9664 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9666 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9669 [(set_attr "conds" "clob")
9670 (set_attr "length" "16")])
9672 ; If the above pattern is followed by a CMP insn, then the compare is
9673 ; redundant, since we can rework the conditional instruction that follows.
9674 (define_insn_and_split "*and_scc_scc_cmp"
9675 [(set (match_operand 0 "dominant_cc_register" "")
9676 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9677 [(match_operand:SI 1 "s_register_operand" "r")
9678 (match_operand:SI 2 "arm_add_operand" "rIL")])
9679 (match_operator:SI 6 "arm_comparison_operator"
9680 [(match_operand:SI 4 "s_register_operand" "r")
9681 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9683 (set (match_operand:SI 7 "s_register_operand" "=r")
9684 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9685 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9688 "TARGET_32BIT && reload_completed"
9692 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9693 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9695 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9697 [(set_attr "conds" "set")
9698 (set_attr "length" "16")])
9700 ;; If there is no dominance in the comparison, then we can still save an
9701 ;; instruction in the AND case, since we can know that the second compare
9702 ;; need only zero the value if false (if true, then the value is already
9704 (define_insn_and_split "*and_scc_scc_nodom"
9705 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9706 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9707 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9708 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9709 (match_operator:SI 6 "arm_comparison_operator"
9710 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9711 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9712 (clobber (reg:CC CC_REGNUM))]
9714 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9717 "TARGET_32BIT && reload_completed"
9718 [(parallel [(set (match_dup 0)
9719 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9720 (clobber (reg:CC CC_REGNUM))])
9721 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9723 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9726 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9727 operands[4], operands[5]),
9729 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9731 [(set_attr "conds" "clob")
9732 (set_attr "length" "20")])
9735 [(set (reg:CC_NOOV CC_REGNUM)
9736 (compare:CC_NOOV (ior:SI
9737 (and:SI (match_operand:SI 0 "s_register_operand" "")
9739 (match_operator:SI 1 "arm_comparison_operator"
9740 [(match_operand:SI 2 "s_register_operand" "")
9741 (match_operand:SI 3 "arm_add_operand" "")]))
9743 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9746 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9748 (set (reg:CC_NOOV CC_REGNUM)
9749 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9754 [(set (reg:CC_NOOV CC_REGNUM)
9755 (compare:CC_NOOV (ior:SI
9756 (match_operator:SI 1 "arm_comparison_operator"
9757 [(match_operand:SI 2 "s_register_operand" "")
9758 (match_operand:SI 3 "arm_add_operand" "")])
9759 (and:SI (match_operand:SI 0 "s_register_operand" "")
9762 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9765 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9767 (set (reg:CC_NOOV CC_REGNUM)
9768 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9771 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9773 (define_insn "*negscc"
9774 [(set (match_operand:SI 0 "s_register_operand" "=r")
9775 (neg:SI (match_operator 3 "arm_comparison_operator"
9776 [(match_operand:SI 1 "s_register_operand" "r")
9777 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9778 (clobber (reg:CC CC_REGNUM))]
9781 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9782 return \"mov\\t%0, %1, asr #31\";
9784 if (GET_CODE (operands[3]) == NE)
9785 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9787 output_asm_insn (\"cmp\\t%1, %2\", operands);
9788 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9789 return \"mvn%d3\\t%0, #0\";
9791 [(set_attr "conds" "clob")
9792 (set_attr "length" "12")]
9795 (define_insn "movcond"
9796 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9798 (match_operator 5 "arm_comparison_operator"
9799 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9800 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9801 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9802 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9803 (clobber (reg:CC CC_REGNUM))]
9806 if (GET_CODE (operands[5]) == LT
9807 && (operands[4] == const0_rtx))
9809 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9811 if (operands[2] == const0_rtx)
9812 return \"and\\t%0, %1, %3, asr #31\";
9813 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9815 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9817 if (operands[1] == const0_rtx)
9818 return \"bic\\t%0, %2, %3, asr #31\";
9819 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9821 /* The only case that falls through to here is when both ops 1 & 2
9825 if (GET_CODE (operands[5]) == GE
9826 && (operands[4] == const0_rtx))
9828 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9830 if (operands[2] == const0_rtx)
9831 return \"bic\\t%0, %1, %3, asr #31\";
9832 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9834 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9836 if (operands[1] == const0_rtx)
9837 return \"and\\t%0, %2, %3, asr #31\";
9838 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9840 /* The only case that falls through to here is when both ops 1 & 2
9843 if (GET_CODE (operands[4]) == CONST_INT
9844 && !const_ok_for_arm (INTVAL (operands[4])))
9845 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9847 output_asm_insn (\"cmp\\t%3, %4\", operands);
9848 if (which_alternative != 0)
9849 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9850 if (which_alternative != 1)
9851 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9854 [(set_attr "conds" "clob")
9855 (set_attr "length" "8,8,12")]
9858 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9860 (define_insn "*ifcompare_plus_move"
9861 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9862 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9863 [(match_operand:SI 4 "s_register_operand" "r,r")
9864 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9866 (match_operand:SI 2 "s_register_operand" "r,r")
9867 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9868 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9869 (clobber (reg:CC CC_REGNUM))]
9872 [(set_attr "conds" "clob")
9873 (set_attr "length" "8,12")]
9876 (define_insn "*if_plus_move"
9877 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9879 (match_operator 4 "arm_comparison_operator"
9880 [(match_operand 5 "cc_register" "") (const_int 0)])
9882 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9883 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9884 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9888 sub%d4\\t%0, %2, #%n3
9889 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9890 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9891 [(set_attr "conds" "use")
9892 (set_attr "length" "4,4,8,8")
9893 (set_attr "type" "*,*,*,*")]
9896 (define_insn "*ifcompare_move_plus"
9897 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9898 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9899 [(match_operand:SI 4 "s_register_operand" "r,r")
9900 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9901 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9903 (match_operand:SI 2 "s_register_operand" "r,r")
9904 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9905 (clobber (reg:CC CC_REGNUM))]
9908 [(set_attr "conds" "clob")
9909 (set_attr "length" "8,12")]
9912 (define_insn "*if_move_plus"
9913 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9915 (match_operator 4 "arm_comparison_operator"
9916 [(match_operand 5 "cc_register" "") (const_int 0)])
9917 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9919 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9920 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9924 sub%D4\\t%0, %2, #%n3
9925 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9926 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9927 [(set_attr "conds" "use")
9928 (set_attr "length" "4,4,8,8")
9929 (set_attr "type" "*,*,*,*")]
9932 (define_insn "*ifcompare_arith_arith"
9933 [(set (match_operand:SI 0 "s_register_operand" "=r")
9934 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9935 [(match_operand:SI 5 "s_register_operand" "r")
9936 (match_operand:SI 6 "arm_add_operand" "rIL")])
9937 (match_operator:SI 8 "shiftable_operator"
9938 [(match_operand:SI 1 "s_register_operand" "r")
9939 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9940 (match_operator:SI 7 "shiftable_operator"
9941 [(match_operand:SI 3 "s_register_operand" "r")
9942 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9943 (clobber (reg:CC CC_REGNUM))]
9946 [(set_attr "conds" "clob")
9947 (set_attr "length" "12")]
9950 (define_insn "*if_arith_arith"
9951 [(set (match_operand:SI 0 "s_register_operand" "=r")
9952 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9953 [(match_operand 8 "cc_register" "") (const_int 0)])
9954 (match_operator:SI 6 "shiftable_operator"
9955 [(match_operand:SI 1 "s_register_operand" "r")
9956 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9957 (match_operator:SI 7 "shiftable_operator"
9958 [(match_operand:SI 3 "s_register_operand" "r")
9959 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9961 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9962 [(set_attr "conds" "use")
9963 (set_attr "length" "8")]
9966 (define_insn "*ifcompare_arith_move"
9967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9968 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9969 [(match_operand:SI 2 "s_register_operand" "r,r")
9970 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9971 (match_operator:SI 7 "shiftable_operator"
9972 [(match_operand:SI 4 "s_register_operand" "r,r")
9973 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9974 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9975 (clobber (reg:CC CC_REGNUM))]
9978 /* If we have an operation where (op x 0) is the identity operation and
9979 the conditional operator is LT or GE and we are comparing against zero and
9980 everything is in registers then we can do this in two instructions. */
9981 if (operands[3] == const0_rtx
9982 && GET_CODE (operands[7]) != AND
9983 && GET_CODE (operands[5]) == REG
9984 && GET_CODE (operands[1]) == REG
9985 && REGNO (operands[1]) == REGNO (operands[4])
9986 && REGNO (operands[4]) != REGNO (operands[0]))
9988 if (GET_CODE (operands[6]) == LT)
9989 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9990 else if (GET_CODE (operands[6]) == GE)
9991 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9993 if (GET_CODE (operands[3]) == CONST_INT
9994 && !const_ok_for_arm (INTVAL (operands[3])))
9995 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9997 output_asm_insn (\"cmp\\t%2, %3\", operands);
9998 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9999 if (which_alternative != 0)
10000 return \"mov%D6\\t%0, %1\";
10003 [(set_attr "conds" "clob")
10004 (set_attr "length" "8,12")]
10007 (define_insn "*if_arith_move"
10008 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10009 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10010 [(match_operand 6 "cc_register" "") (const_int 0)])
10011 (match_operator:SI 5 "shiftable_operator"
10012 [(match_operand:SI 2 "s_register_operand" "r,r")
10013 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10014 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10017 %I5%d4\\t%0, %2, %3
10018 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10019 [(set_attr "conds" "use")
10020 (set_attr "length" "4,8")
10021 (set_attr "type" "*,*")]
10024 (define_insn "*ifcompare_move_arith"
10025 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10026 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10027 [(match_operand:SI 4 "s_register_operand" "r,r")
10028 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10029 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10030 (match_operator:SI 7 "shiftable_operator"
10031 [(match_operand:SI 2 "s_register_operand" "r,r")
10032 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10033 (clobber (reg:CC CC_REGNUM))]
10036 /* If we have an operation where (op x 0) is the identity operation and
10037 the conditional operator is LT or GE and we are comparing against zero and
10038 everything is in registers then we can do this in two instructions */
10039 if (operands[5] == const0_rtx
10040 && GET_CODE (operands[7]) != AND
10041 && GET_CODE (operands[3]) == REG
10042 && GET_CODE (operands[1]) == REG
10043 && REGNO (operands[1]) == REGNO (operands[2])
10044 && REGNO (operands[2]) != REGNO (operands[0]))
10046 if (GET_CODE (operands[6]) == GE)
10047 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10048 else if (GET_CODE (operands[6]) == LT)
10049 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10052 if (GET_CODE (operands[5]) == CONST_INT
10053 && !const_ok_for_arm (INTVAL (operands[5])))
10054 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10056 output_asm_insn (\"cmp\\t%4, %5\", operands);
10058 if (which_alternative != 0)
10059 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10060 return \"%I7%D6\\t%0, %2, %3\";
10062 [(set_attr "conds" "clob")
10063 (set_attr "length" "8,12")]
10066 (define_insn "*if_move_arith"
10067 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10069 (match_operator 4 "arm_comparison_operator"
10070 [(match_operand 6 "cc_register" "") (const_int 0)])
10071 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10072 (match_operator:SI 5 "shiftable_operator"
10073 [(match_operand:SI 2 "s_register_operand" "r,r")
10074 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10077 %I5%D4\\t%0, %2, %3
10078 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10079 [(set_attr "conds" "use")
10080 (set_attr "length" "4,8")
10081 (set_attr "type" "*,*")]
10084 (define_insn "*ifcompare_move_not"
10085 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10087 (match_operator 5 "arm_comparison_operator"
10088 [(match_operand:SI 3 "s_register_operand" "r,r")
10089 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10090 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10092 (match_operand:SI 2 "s_register_operand" "r,r"))))
10093 (clobber (reg:CC CC_REGNUM))]
10096 [(set_attr "conds" "clob")
10097 (set_attr "length" "8,12")]
10100 (define_insn "*if_move_not"
10101 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10103 (match_operator 4 "arm_comparison_operator"
10104 [(match_operand 3 "cc_register" "") (const_int 0)])
10105 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10106 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10110 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10111 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10112 [(set_attr "conds" "use")
10113 (set_attr "insn" "mvn")
10114 (set_attr "length" "4,8,8")]
10117 (define_insn "*ifcompare_not_move"
10118 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10120 (match_operator 5 "arm_comparison_operator"
10121 [(match_operand:SI 3 "s_register_operand" "r,r")
10122 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10124 (match_operand:SI 2 "s_register_operand" "r,r"))
10125 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10126 (clobber (reg:CC CC_REGNUM))]
10129 [(set_attr "conds" "clob")
10130 (set_attr "length" "8,12")]
10133 (define_insn "*if_not_move"
10134 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10136 (match_operator 4 "arm_comparison_operator"
10137 [(match_operand 3 "cc_register" "") (const_int 0)])
10138 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10139 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10143 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10144 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10145 [(set_attr "conds" "use")
10146 (set_attr "insn" "mvn")
10147 (set_attr "length" "4,8,8")]
10150 (define_insn "*ifcompare_shift_move"
10151 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10153 (match_operator 6 "arm_comparison_operator"
10154 [(match_operand:SI 4 "s_register_operand" "r,r")
10155 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10156 (match_operator:SI 7 "shift_operator"
10157 [(match_operand:SI 2 "s_register_operand" "r,r")
10158 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10159 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10160 (clobber (reg:CC CC_REGNUM))]
10163 [(set_attr "conds" "clob")
10164 (set_attr "length" "8,12")]
10167 (define_insn "*if_shift_move"
10168 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10170 (match_operator 5 "arm_comparison_operator"
10171 [(match_operand 6 "cc_register" "") (const_int 0)])
10172 (match_operator:SI 4 "shift_operator"
10173 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10174 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10175 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10179 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10180 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10181 [(set_attr "conds" "use")
10182 (set_attr "shift" "2")
10183 (set_attr "length" "4,8,8")
10184 (set_attr "insn" "mov")
10185 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10186 (const_string "alu_shift")
10187 (const_string "alu_shift_reg")))]
10190 (define_insn "*ifcompare_move_shift"
10191 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10193 (match_operator 6 "arm_comparison_operator"
10194 [(match_operand:SI 4 "s_register_operand" "r,r")
10195 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10196 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10197 (match_operator:SI 7 "shift_operator"
10198 [(match_operand:SI 2 "s_register_operand" "r,r")
10199 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10200 (clobber (reg:CC CC_REGNUM))]
10203 [(set_attr "conds" "clob")
10204 (set_attr "length" "8,12")]
10207 (define_insn "*if_move_shift"
10208 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10210 (match_operator 5 "arm_comparison_operator"
10211 [(match_operand 6 "cc_register" "") (const_int 0)])
10212 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10213 (match_operator:SI 4 "shift_operator"
10214 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10215 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10219 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10220 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10221 [(set_attr "conds" "use")
10222 (set_attr "shift" "2")
10223 (set_attr "length" "4,8,8")
10224 (set_attr "insn" "mov")
10225 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10226 (const_string "alu_shift")
10227 (const_string "alu_shift_reg")))]
10230 (define_insn "*ifcompare_shift_shift"
10231 [(set (match_operand:SI 0 "s_register_operand" "=r")
10233 (match_operator 7 "arm_comparison_operator"
10234 [(match_operand:SI 5 "s_register_operand" "r")
10235 (match_operand:SI 6 "arm_add_operand" "rIL")])
10236 (match_operator:SI 8 "shift_operator"
10237 [(match_operand:SI 1 "s_register_operand" "r")
10238 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10239 (match_operator:SI 9 "shift_operator"
10240 [(match_operand:SI 3 "s_register_operand" "r")
10241 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10242 (clobber (reg:CC CC_REGNUM))]
10245 [(set_attr "conds" "clob")
10246 (set_attr "length" "12")]
10249 (define_insn "*if_shift_shift"
10250 [(set (match_operand:SI 0 "s_register_operand" "=r")
10252 (match_operator 5 "arm_comparison_operator"
10253 [(match_operand 8 "cc_register" "") (const_int 0)])
10254 (match_operator:SI 6 "shift_operator"
10255 [(match_operand:SI 1 "s_register_operand" "r")
10256 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10257 (match_operator:SI 7 "shift_operator"
10258 [(match_operand:SI 3 "s_register_operand" "r")
10259 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10261 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10262 [(set_attr "conds" "use")
10263 (set_attr "shift" "1")
10264 (set_attr "length" "8")
10265 (set_attr "insn" "mov")
10266 (set (attr "type") (if_then_else
10267 (and (match_operand 2 "const_int_operand" "")
10268 (match_operand 4 "const_int_operand" ""))
10269 (const_string "alu_shift")
10270 (const_string "alu_shift_reg")))]
10273 (define_insn "*ifcompare_not_arith"
10274 [(set (match_operand:SI 0 "s_register_operand" "=r")
10276 (match_operator 6 "arm_comparison_operator"
10277 [(match_operand:SI 4 "s_register_operand" "r")
10278 (match_operand:SI 5 "arm_add_operand" "rIL")])
10279 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10280 (match_operator:SI 7 "shiftable_operator"
10281 [(match_operand:SI 2 "s_register_operand" "r")
10282 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10283 (clobber (reg:CC CC_REGNUM))]
10286 [(set_attr "conds" "clob")
10287 (set_attr "length" "12")]
10290 (define_insn "*if_not_arith"
10291 [(set (match_operand:SI 0 "s_register_operand" "=r")
10293 (match_operator 5 "arm_comparison_operator"
10294 [(match_operand 4 "cc_register" "") (const_int 0)])
10295 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10296 (match_operator:SI 6 "shiftable_operator"
10297 [(match_operand:SI 2 "s_register_operand" "r")
10298 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10300 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10301 [(set_attr "conds" "use")
10302 (set_attr "insn" "mvn")
10303 (set_attr "length" "8")]
10306 (define_insn "*ifcompare_arith_not"
10307 [(set (match_operand:SI 0 "s_register_operand" "=r")
10309 (match_operator 6 "arm_comparison_operator"
10310 [(match_operand:SI 4 "s_register_operand" "r")
10311 (match_operand:SI 5 "arm_add_operand" "rIL")])
10312 (match_operator:SI 7 "shiftable_operator"
10313 [(match_operand:SI 2 "s_register_operand" "r")
10314 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10315 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10316 (clobber (reg:CC CC_REGNUM))]
10319 [(set_attr "conds" "clob")
10320 (set_attr "length" "12")]
10323 (define_insn "*if_arith_not"
10324 [(set (match_operand:SI 0 "s_register_operand" "=r")
10326 (match_operator 5 "arm_comparison_operator"
10327 [(match_operand 4 "cc_register" "") (const_int 0)])
10328 (match_operator:SI 6 "shiftable_operator"
10329 [(match_operand:SI 2 "s_register_operand" "r")
10330 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10331 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10333 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10334 [(set_attr "conds" "use")
10335 (set_attr "insn" "mvn")
10336 (set_attr "length" "8")]
10339 (define_insn "*ifcompare_neg_move"
10340 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10342 (match_operator 5 "arm_comparison_operator"
10343 [(match_operand:SI 3 "s_register_operand" "r,r")
10344 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10345 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10346 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10347 (clobber (reg:CC CC_REGNUM))]
10350 [(set_attr "conds" "clob")
10351 (set_attr "length" "8,12")]
10354 (define_insn "*if_neg_move"
10355 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10357 (match_operator 4 "arm_comparison_operator"
10358 [(match_operand 3 "cc_register" "") (const_int 0)])
10359 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10360 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10363 rsb%d4\\t%0, %2, #0
10364 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10365 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10366 [(set_attr "conds" "use")
10367 (set_attr "length" "4,8,8")]
10370 (define_insn "*ifcompare_move_neg"
10371 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10373 (match_operator 5 "arm_comparison_operator"
10374 [(match_operand:SI 3 "s_register_operand" "r,r")
10375 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10376 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10377 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10378 (clobber (reg:CC CC_REGNUM))]
10381 [(set_attr "conds" "clob")
10382 (set_attr "length" "8,12")]
10385 (define_insn "*if_move_neg"
10386 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10388 (match_operator 4 "arm_comparison_operator"
10389 [(match_operand 3 "cc_register" "") (const_int 0)])
10390 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10391 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10394 rsb%D4\\t%0, %2, #0
10395 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10396 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10397 [(set_attr "conds" "use")
10398 (set_attr "length" "4,8,8")]
10401 (define_insn "*arith_adjacentmem"
10402 [(set (match_operand:SI 0 "s_register_operand" "=r")
10403 (match_operator:SI 1 "shiftable_operator"
10404 [(match_operand:SI 2 "memory_operand" "m")
10405 (match_operand:SI 3 "memory_operand" "m")]))
10406 (clobber (match_scratch:SI 4 "=r"))]
10407 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10413 HOST_WIDE_INT val1 = 0, val2 = 0;
10415 if (REGNO (operands[0]) > REGNO (operands[4]))
10417 ldm[1] = operands[4];
10418 ldm[2] = operands[0];
10422 ldm[1] = operands[0];
10423 ldm[2] = operands[4];
10426 base_reg = XEXP (operands[2], 0);
10428 if (!REG_P (base_reg))
10430 val1 = INTVAL (XEXP (base_reg, 1));
10431 base_reg = XEXP (base_reg, 0);
10434 if (!REG_P (XEXP (operands[3], 0)))
10435 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10437 arith[0] = operands[0];
10438 arith[3] = operands[1];
10452 if (val1 !=0 && val2 != 0)
10456 if (val1 == 4 || val2 == 4)
10457 /* Other val must be 8, since we know they are adjacent and neither
10459 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10460 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10462 ldm[0] = ops[0] = operands[4];
10464 ops[2] = GEN_INT (val1);
10465 output_add_immediate (ops);
10467 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10469 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10473 /* Offset is out of range for a single add, so use two ldr. */
10476 ops[2] = GEN_INT (val1);
10477 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10479 ops[2] = GEN_INT (val2);
10480 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10483 else if (val1 != 0)
10486 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10488 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10493 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10495 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10497 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10500 [(set_attr "length" "12")
10501 (set_attr "predicable" "yes")
10502 (set_attr "type" "load1")]
10505 ; This pattern is never tried by combine, so do it as a peephole
10508 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10509 (match_operand:SI 1 "arm_general_register_operand" ""))
10510 (set (reg:CC CC_REGNUM)
10511 (compare:CC (match_dup 1) (const_int 0)))]
10513 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10514 (set (match_dup 0) (match_dup 1))])]
10519 [(set (match_operand:SI 0 "s_register_operand" "")
10520 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10522 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10523 [(match_operand:SI 3 "s_register_operand" "")
10524 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10525 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10527 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10528 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10533 ;; This split can be used because CC_Z mode implies that the following
10534 ;; branch will be an equality, or an unsigned inequality, so the sign
10535 ;; extension is not needed.
10538 [(set (reg:CC_Z CC_REGNUM)
10540 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10542 (match_operand 1 "const_int_operand" "")))
10543 (clobber (match_scratch:SI 2 ""))]
10545 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10546 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10547 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10548 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10550 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10553 ;; ??? Check the patterns above for Thumb-2 usefulness
10555 (define_expand "prologue"
10556 [(clobber (const_int 0))]
10559 arm_expand_prologue ();
10561 thumb1_expand_prologue ();
10566 (define_expand "epilogue"
10567 [(clobber (const_int 0))]
10570 if (crtl->calls_eh_return)
10571 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10573 thumb1_expand_epilogue ();
10574 else if (USE_RETURN_INSN (FALSE))
10576 emit_jump_insn (gen_return ());
10579 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10580 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10585 (define_insn "prologue_thumb1_interwork"
10586 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10588 "* return thumb1_output_interwork ();"
10589 [(set_attr "length" "8")]
10592 ;; Note - although unspec_volatile's USE all hard registers,
10593 ;; USEs are ignored after relaod has completed. Thus we need
10594 ;; to add an unspec of the link register to ensure that flow
10595 ;; does not think that it is unused by the sibcall branch that
10596 ;; will replace the standard function epilogue.
10597 (define_insn "sibcall_epilogue"
10598 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10599 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10602 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10603 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10604 return arm_output_epilogue (next_nonnote_insn (insn));
10606 ;; Length is absolute worst case
10607 [(set_attr "length" "44")
10608 (set_attr "type" "block")
10609 ;; We don't clobber the conditions, but the potential length of this
10610 ;; operation is sufficient to make conditionalizing the sequence
10611 ;; unlikely to be profitable.
10612 (set_attr "conds" "clob")]
10615 (define_insn "*epilogue_insns"
10616 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10620 return arm_output_epilogue (NULL);
10621 else /* TARGET_THUMB1 */
10622 return thumb_unexpanded_epilogue ();
10624 ; Length is absolute worst case
10625 [(set_attr "length" "44")
10626 (set_attr "type" "block")
10627 ;; We don't clobber the conditions, but the potential length of this
10628 ;; operation is sufficient to make conditionalizing the sequence
10629 ;; unlikely to be profitable.
10630 (set_attr "conds" "clob")]
10633 (define_expand "eh_epilogue"
10634 [(use (match_operand:SI 0 "register_operand" ""))
10635 (use (match_operand:SI 1 "register_operand" ""))
10636 (use (match_operand:SI 2 "register_operand" ""))]
10640 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10641 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10643 rtx ra = gen_rtx_REG (Pmode, 2);
10645 emit_move_insn (ra, operands[2]);
10648 /* This is a hack -- we may have crystalized the function type too
10650 cfun->machine->func_type = 0;
10654 ;; This split is only used during output to reduce the number of patterns
10655 ;; that need assembler instructions adding to them. We allowed the setting
10656 ;; of the conditions to be implicit during rtl generation so that
10657 ;; the conditional compare patterns would work. However this conflicts to
10658 ;; some extent with the conditional data operations, so we have to split them
10661 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10662 ;; conditional execution sufficient?
10665 [(set (match_operand:SI 0 "s_register_operand" "")
10666 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10667 [(match_operand 2 "" "") (match_operand 3 "" "")])
10669 (match_operand 4 "" "")))
10670 (clobber (reg:CC CC_REGNUM))]
10671 "TARGET_ARM && reload_completed"
10672 [(set (match_dup 5) (match_dup 6))
10673 (cond_exec (match_dup 7)
10674 (set (match_dup 0) (match_dup 4)))]
10677 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10678 operands[2], operands[3]);
10679 enum rtx_code rc = GET_CODE (operands[1]);
10681 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10682 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10683 if (mode == CCFPmode || mode == CCFPEmode)
10684 rc = reverse_condition_maybe_unordered (rc);
10686 rc = reverse_condition (rc);
10688 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10693 [(set (match_operand:SI 0 "s_register_operand" "")
10694 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10695 [(match_operand 2 "" "") (match_operand 3 "" "")])
10696 (match_operand 4 "" "")
10698 (clobber (reg:CC CC_REGNUM))]
10699 "TARGET_ARM && reload_completed"
10700 [(set (match_dup 5) (match_dup 6))
10701 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10702 (set (match_dup 0) (match_dup 4)))]
10705 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10706 operands[2], operands[3]);
10708 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10709 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10714 [(set (match_operand:SI 0 "s_register_operand" "")
10715 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10716 [(match_operand 2 "" "") (match_operand 3 "" "")])
10717 (match_operand 4 "" "")
10718 (match_operand 5 "" "")))
10719 (clobber (reg:CC CC_REGNUM))]
10720 "TARGET_ARM && reload_completed"
10721 [(set (match_dup 6) (match_dup 7))
10722 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10723 (set (match_dup 0) (match_dup 4)))
10724 (cond_exec (match_dup 8)
10725 (set (match_dup 0) (match_dup 5)))]
10728 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10729 operands[2], operands[3]);
10730 enum rtx_code rc = GET_CODE (operands[1]);
10732 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10733 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10734 if (mode == CCFPmode || mode == CCFPEmode)
10735 rc = reverse_condition_maybe_unordered (rc);
10737 rc = reverse_condition (rc);
10739 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10744 [(set (match_operand:SI 0 "s_register_operand" "")
10745 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10746 [(match_operand:SI 2 "s_register_operand" "")
10747 (match_operand:SI 3 "arm_add_operand" "")])
10748 (match_operand:SI 4 "arm_rhs_operand" "")
10750 (match_operand:SI 5 "s_register_operand" ""))))
10751 (clobber (reg:CC CC_REGNUM))]
10752 "TARGET_ARM && reload_completed"
10753 [(set (match_dup 6) (match_dup 7))
10754 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10755 (set (match_dup 0) (match_dup 4)))
10756 (cond_exec (match_dup 8)
10757 (set (match_dup 0) (not:SI (match_dup 5))))]
10760 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10761 operands[2], operands[3]);
10762 enum rtx_code rc = GET_CODE (operands[1]);
10764 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10765 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10766 if (mode == CCFPmode || mode == CCFPEmode)
10767 rc = reverse_condition_maybe_unordered (rc);
10769 rc = reverse_condition (rc);
10771 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10775 (define_insn "*cond_move_not"
10776 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10777 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10778 [(match_operand 3 "cc_register" "") (const_int 0)])
10779 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10781 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10785 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10786 [(set_attr "conds" "use")
10787 (set_attr "insn" "mvn")
10788 (set_attr "length" "4,8")]
10791 ;; The next two patterns occur when an AND operation is followed by a
10792 ;; scc insn sequence
10794 (define_insn "*sign_extract_onebit"
10795 [(set (match_operand:SI 0 "s_register_operand" "=r")
10796 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10798 (match_operand:SI 2 "const_int_operand" "n")))
10799 (clobber (reg:CC CC_REGNUM))]
10802 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10803 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10804 return \"mvnne\\t%0, #0\";
10806 [(set_attr "conds" "clob")
10807 (set_attr "length" "8")]
10810 (define_insn "*not_signextract_onebit"
10811 [(set (match_operand:SI 0 "s_register_operand" "=r")
10813 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10815 (match_operand:SI 2 "const_int_operand" "n"))))
10816 (clobber (reg:CC CC_REGNUM))]
10819 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10820 output_asm_insn (\"tst\\t%1, %2\", operands);
10821 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10822 return \"movne\\t%0, #0\";
10824 [(set_attr "conds" "clob")
10825 (set_attr "length" "12")]
10827 ;; ??? The above patterns need auditing for Thumb-2
10829 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10830 ;; expressions. For simplicity, the first register is also in the unspec
10832 ;; To avoid the usage of GNU extension, the length attribute is computed
10833 ;; in a C function arm_attr_length_push_multi.
10834 (define_insn "*push_multi"
10835 [(match_parallel 2 "multi_register_push"
10836 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10837 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10838 UNSPEC_PUSH_MULT))])]
10842 int num_saves = XVECLEN (operands[2], 0);
10844 /* For the StrongARM at least it is faster to
10845 use STR to store only a single register.
10846 In Thumb mode always use push, and the assembler will pick
10847 something appropriate. */
10848 if (num_saves == 1 && TARGET_ARM)
10849 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10856 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
10857 else if (TARGET_THUMB2)
10858 strcpy (pattern, \"push%?\\t{%1\");
10860 strcpy (pattern, \"push\\t{%1\");
10862 for (i = 1; i < num_saves; i++)
10864 strcat (pattern, \", %|\");
10866 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10869 strcat (pattern, \"}\");
10870 output_asm_insn (pattern, operands);
10875 [(set_attr "type" "store4")
10876 (set (attr "length")
10877 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10880 (define_insn "stack_tie"
10881 [(set (mem:BLK (scratch))
10882 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10883 (match_operand:SI 1 "s_register_operand" "rk")]
10887 [(set_attr "length" "0")]
10890 ;; Similarly for the floating point registers
10891 (define_insn "*push_fp_multi"
10892 [(match_parallel 2 "multi_register_push"
10893 [(set (match_operand:BLK 0 "memory_operand" "=m")
10894 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10895 UNSPEC_PUSH_MULT))])]
10896 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10901 sprintf (pattern, \"sfm%%(fd%%)\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10902 output_asm_insn (pattern, operands);
10905 [(set_attr "type" "f_fpa_store")]
10908 ;; Special patterns for dealing with the constant pool
10910 (define_insn "align_4"
10911 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10914 assemble_align (32);
10919 (define_insn "align_8"
10920 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10923 assemble_align (64);
10928 (define_insn "consttable_end"
10929 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10932 making_const_table = FALSE;
10937 (define_insn "consttable_1"
10938 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10941 making_const_table = TRUE;
10942 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10943 assemble_zeros (3);
10946 [(set_attr "length" "4")]
10949 (define_insn "consttable_2"
10950 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10953 making_const_table = TRUE;
10954 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10955 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10956 assemble_zeros (2);
10959 [(set_attr "length" "4")]
10962 (define_insn "consttable_4"
10963 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10967 rtx x = operands[0];
10968 making_const_table = TRUE;
10969 switch (GET_MODE_CLASS (GET_MODE (x)))
10972 if (GET_MODE (x) == HFmode)
10973 arm_emit_fp16_const (x);
10977 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10978 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10982 /* XXX: Sometimes gcc does something really dumb and ends up with
10983 a HIGH in a constant pool entry, usually because it's trying to
10984 load into a VFP register. We know this will always be used in
10985 combination with a LO_SUM which ignores the high bits, so just
10986 strip off the HIGH. */
10987 if (GET_CODE (x) == HIGH)
10989 assemble_integer (x, 4, BITS_PER_WORD, 1);
10990 mark_symbol_refs_as_used (x);
10995 [(set_attr "length" "4")]
10998 (define_insn "consttable_8"
10999 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11003 making_const_table = TRUE;
11004 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11009 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11010 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11014 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11019 [(set_attr "length" "8")]
11022 (define_insn "consttable_16"
11023 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11027 making_const_table = TRUE;
11028 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11033 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11034 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11038 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11043 [(set_attr "length" "16")]
11046 ;; Miscellaneous Thumb patterns
11048 (define_expand "tablejump"
11049 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11050 (use (label_ref (match_operand 1 "" "")))])]
11055 /* Hopefully, CSE will eliminate this copy. */
11056 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11057 rtx reg2 = gen_reg_rtx (SImode);
11059 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11060 operands[0] = reg2;
11065 ;; NB never uses BX.
11066 (define_insn "*thumb1_tablejump"
11067 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11068 (use (label_ref (match_operand 1 "" "")))]
11071 [(set_attr "length" "2")]
11074 ;; V5 Instructions,
11076 (define_insn "clzsi2"
11077 [(set (match_operand:SI 0 "s_register_operand" "=r")
11078 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11079 "TARGET_32BIT && arm_arch5"
11081 [(set_attr "predicable" "yes")
11082 (set_attr "insn" "clz")])
11084 (define_insn "rbitsi2"
11085 [(set (match_operand:SI 0 "s_register_operand" "=r")
11086 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11087 "TARGET_32BIT && arm_arch_thumb2"
11089 [(set_attr "predicable" "yes")
11090 (set_attr "insn" "clz")])
11092 (define_expand "ctzsi2"
11093 [(set (match_operand:SI 0 "s_register_operand" "")
11094 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11095 "TARGET_32BIT && arm_arch_thumb2"
11098 rtx tmp = gen_reg_rtx (SImode);
11099 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11100 emit_insn (gen_clzsi2 (operands[0], tmp));
11106 ;; V5E instructions.
11108 (define_insn "prefetch"
11109 [(prefetch (match_operand:SI 0 "address_operand" "p")
11110 (match_operand:SI 1 "" "")
11111 (match_operand:SI 2 "" ""))]
11112 "TARGET_32BIT && arm_arch5e"
11115 ;; General predication pattern
11118 [(match_operator 0 "arm_comparison_operator"
11119 [(match_operand 1 "cc_register" "")
11125 (define_insn "prologue_use"
11126 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11128 "%@ %0 needed for prologue"
11129 [(set_attr "length" "0")]
11133 ;; Patterns for exception handling
11135 (define_expand "eh_return"
11136 [(use (match_operand 0 "general_operand" ""))]
11141 emit_insn (gen_arm_eh_return (operands[0]));
11143 emit_insn (gen_thumb_eh_return (operands[0]));
11148 ;; We can't expand this before we know where the link register is stored.
11149 (define_insn_and_split "arm_eh_return"
11150 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11152 (clobber (match_scratch:SI 1 "=&r"))]
11155 "&& reload_completed"
11159 arm_set_return_address (operands[0], operands[1]);
11164 (define_insn_and_split "thumb_eh_return"
11165 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11167 (clobber (match_scratch:SI 1 "=&l"))]
11170 "&& reload_completed"
11174 thumb_set_return_address (operands[0], operands[1]);
11182 (define_insn "load_tp_hard"
11183 [(set (match_operand:SI 0 "register_operand" "=r")
11184 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11186 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11187 [(set_attr "predicable" "yes")]
11190 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11191 (define_insn "load_tp_soft"
11192 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11193 (clobber (reg:SI LR_REGNUM))
11194 (clobber (reg:SI IP_REGNUM))
11195 (clobber (reg:CC CC_REGNUM))]
11197 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11198 [(set_attr "conds" "clob")]
11201 ;; tls descriptor call
11202 (define_insn "tlscall"
11203 [(set (reg:SI R0_REGNUM)
11204 (unspec:SI [(reg:SI R0_REGNUM)
11205 (match_operand:SI 0 "" "X")
11206 (match_operand 1 "" "")] UNSPEC_TLS))
11207 (clobber (reg:SI R1_REGNUM))
11208 (clobber (reg:SI LR_REGNUM))
11209 (clobber (reg:SI CC_REGNUM))]
11212 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11213 INTVAL (operands[1]));
11214 return "bl\\t%c0(tlscall)";
11216 [(set_attr "conds" "clob")
11217 (set_attr "length" "4")]
11222 ;; We only care about the lower 16 bits of the constant
11223 ;; being inserted into the upper 16 bits of the register.
11224 (define_insn "*arm_movtas_ze"
11225 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11228 (match_operand:SI 1 "const_int_operand" ""))]
11231 [(set_attr "predicable" "yes")
11232 (set_attr "length" "4")]
11235 (define_insn "*arm_rev"
11236 [(set (match_operand:SI 0 "s_register_operand" "=r")
11237 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11238 "TARGET_32BIT && arm_arch6"
11240 [(set_attr "predicable" "yes")
11241 (set_attr "length" "4")]
11244 (define_insn "*thumb1_rev"
11245 [(set (match_operand:SI 0 "s_register_operand" "=l")
11246 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
11247 "TARGET_THUMB1 && arm_arch6"
11249 [(set_attr "length" "2")]
11252 (define_expand "arm_legacy_rev"
11253 [(set (match_operand:SI 2 "s_register_operand" "")
11254 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11258 (lshiftrt:SI (match_dup 2)
11260 (set (match_operand:SI 3 "s_register_operand" "")
11261 (rotatert:SI (match_dup 1)
11264 (and:SI (match_dup 2)
11265 (const_int -65281)))
11266 (set (match_operand:SI 0 "s_register_operand" "")
11267 (xor:SI (match_dup 3)
11273 ;; Reuse temporaries to keep register pressure down.
11274 (define_expand "thumb_legacy_rev"
11275 [(set (match_operand:SI 2 "s_register_operand" "")
11276 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11278 (set (match_operand:SI 3 "s_register_operand" "")
11279 (lshiftrt:SI (match_dup 1)
11282 (ior:SI (match_dup 3)
11284 (set (match_operand:SI 4 "s_register_operand" "")
11286 (set (match_operand:SI 5 "s_register_operand" "")
11287 (rotatert:SI (match_dup 1)
11290 (ashift:SI (match_dup 5)
11293 (lshiftrt:SI (match_dup 5)
11296 (ior:SI (match_dup 5)
11299 (rotatert:SI (match_dup 5)
11301 (set (match_operand:SI 0 "s_register_operand" "")
11302 (ior:SI (match_dup 5)
11308 (define_expand "bswapsi2"
11309 [(set (match_operand:SI 0 "s_register_operand" "=r")
11310 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11311 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11315 rtx op2 = gen_reg_rtx (SImode);
11316 rtx op3 = gen_reg_rtx (SImode);
11320 rtx op4 = gen_reg_rtx (SImode);
11321 rtx op5 = gen_reg_rtx (SImode);
11323 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11324 op2, op3, op4, op5));
11328 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11337 ;; Load the load/store multiple patterns
11338 (include "ldmstm.md")
11339 ;; Load the FPA co-processor patterns
11341 ;; Load the Maverick co-processor patterns
11342 (include "cirrus.md")
11343 ;; Vector bits common to IWMMXT and Neon
11344 (include "vec-common.md")
11345 ;; Load the Intel Wireless Multimedia Extension patterns
11346 (include "iwmmxt.md")
11347 ;; Load the VFP co-processor patterns
11349 ;; Thumb-2 patterns
11350 (include "thumb2.md")
11352 (include "neon.md")
11353 ;; Synchronization Primitives
11354 (include "sync.md")
11355 ;; Fixed-point patterns
11356 (include "arm-fixed.md")