1 ;; Machine description for ARM processor synchronization primitives.
2 ;; Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 ;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com)
4 ;; 64bit Atomics by Dave Gilbert (david.gilbert@linaro.org)
6 ;; This file is part of GCC.
8 ;; GCC is free software; you can redistribute it and/or modify it
9 ;; under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 3, or (at your option)
13 ;; GCC is distributed in the hope that it will be useful, but
14 ;; WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 ;; General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GCC; see the file COPYING3. If not see
20 ;; <http://www.gnu.org/licenses/>.
;; <sync_predtab>: per-access-size enabling condition for the atomic
;; patterns.  Each size needs the matching exclusive-load instruction
;; (LDREXB/LDREXH for QI/HI, LDREX for SI, LDREXD for DI) plus a memory
;; barrier; DImode additionally requires doubleword alignment so the
;; LDREXD/STREXD register pair is usable.  NOTE(review): the insn
;; conditions that reference this attribute are elided from this excerpt.
22 (define_mode_attr sync_predtab
23 [(QI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
24 (HI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
25 (SI "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER")
26 (DI "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN
27 && TARGET_HAVE_MEMORY_BARRIER")])
;; RTL codes for the binary atomic read-modify-write operations; expanded
;; by the atomic_<sync_optab><mode> pattern families below.
29 (define_code_iterator syncop [plus minus ior xor and])
;; Maps each code in "syncop" to the operation name substituted into the
;; pattern names (e.g. atomic_add<mode>, atomic_fetch_or<mode>).
31 (define_code_attr sync_optab
32 [(ior "or") (xor "xor") (and "and") (plus "add") (minus "sub")])
;; Size suffix for the exclusive-access mnemonics: ldrexb/strexb (QI),
;; ldrexh/strexh (HI), ldrex/strex (SI), ldrexd/strexd (DI).
34 (define_mode_attr sync_sfx
35 [(QI "b") (HI "h") (SI "") (DI "d")])
;; Full memory barrier expander.  The preparation code fabricates
;; operand 0 as a volatile BLKmode MEM over a SCRATCH address, i.e. a
;; stand-in for "all of memory", wrapped in UNSPEC_MEMORY_BARRIER so the
;; *memory_barrier insn below matches it.
;; NOTE(review): interior lines of this expander (the pattern's (set ...)
;; opener and the brace lines) are missing from this excerpt — see the
;; gap in the embedded numbering (37 -> 39, 40 -> 42).
37 (define_expand "memory_barrier"
39 (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
40 "TARGET_HAVE_MEMORY_BARRIER"
42 operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
43 MEM_VOLATILE_P (operands[0]) = 1;
;; Emits the actual barrier instruction.  When only the legacy CP15
;; encoding is available (TARGET_HAVE_DMB_MCR) this is
;; "mcr p15, 0, r0, c7, c10, 5"; the branch emitting the DMB form is on
;; lines elided from this excerpt (gap 60 -> 64).  Length 4, never
;; predicated — a conditional barrier would weaken its ordering effect.
46 (define_insn "*memory_barrier"
47 [(set (match_operand:BLK 0 "" "")
48 (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
49 "TARGET_HAVE_MEMORY_BARRIER"
53 /* Note we issue a system level barrier. We should consider issuing
54 an inner shareability zone barrier here instead, ie. "DMB ISH". */
55 /* ??? Differentiate based on SEQ_CST vs less strict? */
59 if (TARGET_HAVE_DMB_MCR)
60 return "mcr\tp15, 0, r0, c7, c10, 5";
64 [(set_attr "length" "4")
65 (set_attr "conds" "unconditional")
66 (set_attr "predicable" "no")])
68 ;; Note that ldrd and vldr are *not* guaranteed to be single-copy atomic,
69 ;; even for a 64-bit aligned address. Instead we use a ldrexd unpaired
;; Atomic 64-bit load.  Operand 0 = destination register, operand 1 =
;; offsetless memory, operand 2 = memory model as a const_int.  Emits a
;; leading thread fence, then the single-copy-atomic LDREXD load
;; (atomic_loaddi_1); SEQ_CST additionally gets a trailing fence.
71 (define_expand "atomic_loaddi"
72 [(match_operand:DI 0 "s_register_operand") ;; val out
73 (match_operand:DI 1 "mem_noofs_operand") ;; memory
74 (match_operand:SI 2 "const_int_operand")] ;; model
75 "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN"
77 enum memmodel model = (enum memmodel) INTVAL (operands[2]);
78 expand_mem_thread_fence (model);
79 emit_insn (gen_atomic_loaddi_1 (operands[0], operands[1]));
80 if (model == MEMMODEL_SEQ_CST)
81 expand_mem_thread_fence (model);
;; LDREXD used purely as a single-copy-atomic 64-bit load (no paired
;; STREXD).  %H0 presumably prints the partner register of the
;; destination pair and %C1 the bare memory address — defined by the ARM
;; backend's output modifiers; confirm in arm.c.
85 (define_insn "atomic_loaddi_1"
86 [(set (match_operand:DI 0 "s_register_operand" "=r")
87 (unspec:DI [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
89 "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN"
90 "ldrexd%?\t%0, %H0, %C1"
91 [(set_attr "predicable" "yes")])
;; Compare-and-swap expander for QI/HI/SI/DI.  Operands: 0 = bool
;; success output, 1 = old-value output, 2 = memory, 3 = expected value,
;; 4 = desired value, 5 = is_weak flag, 6 = success memory model,
;; 7 = failure memory model.  All work is delegated to
;; arm_expand_compare_and_swap in the backend.
93 (define_expand "atomic_compare_and_swap<mode>"
94 [(match_operand:SI 0 "s_register_operand" "") ;; bool out
95 (match_operand:QHSD 1 "s_register_operand" "") ;; val out
96 (match_operand:QHSD 2 "mem_noofs_operand" "") ;; memory
97 (match_operand:QHSD 3 "general_operand" "") ;; expected
98 (match_operand:QHSD 4 "s_register_operand" "") ;; desired
99 (match_operand:SI 5 "const_int_operand") ;; is_weak
100 (match_operand:SI 6 "const_int_operand") ;; mod_s
101 (match_operand:SI 7 "const_int_operand")] ;; mod_f
104 arm_expand_compare_and_swap (operands);
;; CAS for the narrow modes (QI/HI).  Success is reported through the
;; condition-code register (CC_Z); the old value is produced in a full
;; SImode register (operand 0) while memory is operand 1.  Kept as one
;; insn until after reload, then arm_split_compare_and_swap expands it —
;; presumably into the ldrex/strex retry loop; see arm.c.  Operand 7 is
;; a scratch for that expansion.
108 (define_insn_and_split "atomic_compare_and_swap<mode>_1"
109 [(set (reg:CC_Z CC_REGNUM) ;; bool out
110 (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
111 (set (match_operand:SI 0 "s_register_operand" "=&r") ;; val out
113 (match_operand:NARROW 1 "mem_noofs_operand" "+Ua"))) ;; memory
115 (unspec_volatile:NARROW
116 [(match_operand:SI 2 "arm_add_operand" "rIL") ;; expected
117 (match_operand:NARROW 3 "s_register_operand" "r") ;; desired
118 (match_operand:SI 4 "const_int_operand") ;; is_weak
119 (match_operand:SI 5 "const_int_operand") ;; mod_s
120 (match_operand:SI 6 "const_int_operand")] ;; mod_f
122 (clobber (match_scratch:SI 7 "=&r"))]
125 "&& reload_completed"
128 arm_split_compare_and_swap (operands);
;; Predicate for the "expected" operand of the SI/DI CAS pattern below:
;; SImode accepts what an add/cmp can take, DImode uses cmpdi_operand.
132 (define_mode_attr cas_cmp_operand
133 [(SI "arm_add_operand") (DI "cmpdi_operand")])
;; Constraint strings matching <cas_cmp_operand> above.
134 (define_mode_attr cas_cmp_str
135 [(SI "rIL") (DI "rDi")])
;; CAS for the full-width modes (SI/DI); parallel to the narrow variant
;; above but the expected operand's predicate/constraint vary by mode via
;; <cas_cmp_operand>/<cas_cmp_str>.  Success goes to CC_Z; split after
;; reload by arm_split_compare_and_swap with operand 7 as scratch.
137 (define_insn_and_split "atomic_compare_and_swap<mode>_1"
138 [(set (reg:CC_Z CC_REGNUM) ;; bool out
139 (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
140 (set (match_operand:SIDI 0 "s_register_operand" "=&r") ;; val out
141 (match_operand:SIDI 1 "mem_noofs_operand" "+Ua")) ;; memory
143 (unspec_volatile:SIDI
144 [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
145 (match_operand:SIDI 3 "s_register_operand" "r") ;; desired
146 (match_operand:SI 4 "const_int_operand") ;; is_weak
147 (match_operand:SI 5 "const_int_operand") ;; mod_s
148 (match_operand:SI 6 "const_int_operand")] ;; mod_f
150 (clobber (match_scratch:SI 7 "=&r"))]
153 "&& reload_completed"
156 arm_split_compare_and_swap (operands);
;; Atomic exchange: operand 0 receives the old memory contents, operand 1
;; is the memory, operand 2 the new value, operand 3 the memory model.
;; Split after reload via arm_split_atomic_op with code SET (plain store
;; of operand 2); the NULL argument and operand 4 scratch follow the
;; (code, old_out, new_out, mem, value, model, scratch) order used
;; consistently by the patterns in this file — confirm against arm.c.
160 (define_insn_and_split "atomic_exchange<mode>"
161 [(set (match_operand:QHSD 0 "s_register_operand" "=&r") ;; output
162 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")) ;; memory
164 (unspec_volatile:QHSD
165 [(match_operand:QHSD 2 "s_register_operand" "r") ;; input
166 (match_operand:SI 3 "const_int_operand" "")] ;; model
167 VUNSPEC_ATOMIC_XCHG))
168 (clobber (reg:CC CC_REGNUM))
169 (clobber (match_scratch:SI 4 "=&r"))]
172 "&& reload_completed"
175 arm_split_atomic_op (SET, operands[0], NULL, operands[1],
176 operands[2], operands[3], operands[4]);
;; Predicate for the value operand of the read-modify-write patterns:
;; QI/HI/SI accept a register or integer constant, DImode is
;; register-only.
180 (define_mode_attr atomic_op_operand
181 [(QI "reg_or_int_operand")
182 (HI "reg_or_int_operand")
183 (SI "reg_or_int_operand")
184 (DI "s_register_operand")])
;; Constraint strings matching <atomic_op_operand> above ("rn" = register
;; or numeric constant, "r" = register only for DImode).
186 (define_mode_attr atomic_op_str
187 [(QI "rn") (HI "rn") (SI "rn") (DI "r")])
;; atomic_{add,sub,or,xor,and}<mode> with no value output: memory
;; (operand 0) is updated with <op>(mem, operand 1) under memory model
;; operand 2.  Split after reload: arm_split_atomic_op is passed NULL for
;; the old-value output and operand 3 as scratch for the computed new
;; value; operand 4 is the strex status scratch (argument roles inferred
;; from the consistent call pattern in this file — confirm in arm.c).
189 (define_insn_and_split "atomic_<sync_optab><mode>"
190 [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
191 (unspec_volatile:QHSD
192 [(syncop:QHSD (match_dup 0)
193 (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>"))
194 (match_operand:SI 2 "const_int_operand")] ;; model
196 (clobber (reg:CC CC_REGNUM))
197 (clobber (match_scratch:QHSD 3 "=&r"))
198 (clobber (match_scratch:SI 4 "=&r"))]
201 "&& reload_completed"
204 arm_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
205 operands[1], operands[2], operands[4]);
;; atomic_nand<mode> with no value output: mem = ~(mem & operand 1).
;; NAND has no RTL code, so the pattern carries the AND and the split is
;; invoked with code NOT; otherwise identical in shape to
;; atomic_<sync_optab><mode> above.
209 (define_insn_and_split "atomic_nand<mode>"
210 [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
211 (unspec_volatile:QHSD
213 (and:QHSD (match_dup 0)
214 (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>")))
215 (match_operand:SI 2 "const_int_operand")] ;; model
217 (clobber (reg:CC CC_REGNUM))
218 (clobber (match_scratch:QHSD 3 "=&r"))
219 (clobber (match_scratch:SI 4 "=&r"))]
222 "&& reload_completed"
225 arm_split_atomic_op (NOT, NULL, operands[3], operands[0],
226 operands[1], operands[2], operands[4]);
;; Fetch-and-<op>: operand 0 receives the OLD memory value, memory
;; (operand 1) is updated with <op>(mem, operand 2), operand 3 is the
;; memory model.  arm_split_atomic_op gets operand 0 as the old-value
;; output, operand 4 as the new-value scratch and operand 5 as the strex
;; status scratch.
230 (define_insn_and_split "atomic_fetch_<sync_optab><mode>"
231 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
232 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
234 (unspec_volatile:QHSD
235 [(syncop:QHSD (match_dup 1)
236 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))
237 (match_operand:SI 3 "const_int_operand")] ;; model
239 (clobber (reg:CC CC_REGNUM))
240 (clobber (match_scratch:QHSD 4 "=&r"))
241 (clobber (match_scratch:SI 5 "=&r"))]
244 "&& reload_completed"
247 arm_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
248 operands[2], operands[3], operands[5]);
;; Fetch-and-nand: operand 0 receives the OLD memory value, memory
;; becomes ~(mem & operand 2).  Same shape as
;; atomic_fetch_<sync_optab><mode> but split with code NOT (NAND has no
;; RTL code of its own).
252 (define_insn_and_split "atomic_fetch_nand<mode>"
253 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
254 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
256 (unspec_volatile:QHSD
258 (and:QHSD (match_dup 1)
259 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
260 (match_operand:SI 3 "const_int_operand")] ;; model
262 (clobber (reg:CC CC_REGNUM))
263 (clobber (match_scratch:QHSD 4 "=&r"))
264 (clobber (match_scratch:SI 5 "=&r"))]
267 "&& reload_completed"
270 arm_split_atomic_op (NOT, operands[0], operands[4], operands[1],
271 operands[2], operands[3], operands[5]);
;; <op>-and-fetch: operand 0 receives the NEW value <op>(mem, operand 2)
;; — note arm_split_atomic_op is passed NULL for the old-value output and
;; operand 0 in the new-value position, the mirror image of the
;; atomic_fetch_* patterns above.  No separate QHSD scratch is needed
;; since operand 0 itself holds the computed value.
275 (define_insn_and_split "atomic_<sync_optab>_fetch<mode>"
276 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
278 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
279 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
281 (unspec_volatile:QHSD
282 [(match_dup 1) (match_dup 2)
283 (match_operand:SI 3 "const_int_operand")] ;; model
285 (clobber (reg:CC CC_REGNUM))
286 (clobber (match_scratch:SI 4 "=&r"))]
289 "&& reload_completed"
292 arm_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
293 operands[2], operands[3], operands[4]);
;; Nand-and-fetch: operand 0 receives the NEW value ~(mem & operand 2).
;; Split with code NOT, operand 0 in the new-value output position (NULL
;; old value), matching atomic_<sync_optab>_fetch<mode> above.
297 (define_insn_and_split "atomic_nand_fetch<mode>"
298 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
301 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
302 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))))
304 (unspec_volatile:QHSD
305 [(match_dup 1) (match_dup 2)
306 (match_operand:SI 3 "const_int_operand")] ;; model
308 (clobber (reg:CC CC_REGNUM))
309 (clobber (match_scratch:SI 4 "=&r"))]
312 "&& reload_completed"
315 arm_split_atomic_op (NOT, NULL, operands[0], operands[1],
316 operands[2], operands[3], operands[4]);
;; Exclusive load for the narrow modes: LDREXB/LDREXH (suffix from
;; <sync_sfx>) into a full SImode destination.  NOTE(review): the
;; extension wrapper and unspec name lines are elided from this excerpt
;; (gaps 321 -> 323, 324 -> 326).
320 (define_insn "arm_load_exclusive<mode>"
321 [(set (match_operand:SI 0 "s_register_operand" "=r")
323 (unspec_volatile:NARROW
324 [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")]
326 "TARGET_HAVE_LDREXBH"
327 "ldrex<sync_sfx>%?\t%0, %C1"
328 [(set_attr "predicable" "yes")])
;; Exclusive load, SImode (LDREX).  NOTE(review): the unspec wrapper,
;; insn condition and output-template lines are elided from this excerpt
;; (gaps 331 -> 333, 333 -> 337).
330 (define_insn "arm_load_exclusivesi"
331 [(set (match_operand:SI 0 "s_register_operand" "=r")
333 [(match_operand:SI 1 "mem_noofs_operand" "Ua")]
337 [(set_attr "predicable" "yes")])
;; Exclusive doubleword load (LDREXD) into a register pair; %H0
;; presumably prints the partner register of the destination pair, as in
;; atomic_loaddi_1 above — confirm against arm.c output modifiers.
;; NOTE(review): the unspec wrapper and insn condition lines are elided
;; from this excerpt.
339 (define_insn "arm_load_exclusivedi"
340 [(set (match_operand:DI 0 "s_register_operand" "=r")
342 [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
345 "ldrexd%?\t%0, %H0, %C1"
346 [(set_attr "predicable" "yes")])
;; Exclusive store: operand 0 receives the STREX status result, operand 1
;; is the memory, operand 2 the value to store.  For DImode the output
;; code synthesizes operand 3 as the odd partner register of the value
;; pair (STREXD needs both named explicitly) and asserts the ARM-mode
;; even-first-register restriction; other sizes emit a plain
;; strex<sync_sfx>.  NOTE(review): some interior lines (condition string,
;; braces) are elided from this excerpt.
348 (define_insn "arm_store_exclusive<mode>"
349 [(set (match_operand:SI 0 "s_register_operand" "=&r")
350 (unspec_volatile:SI [(const_int 0)] VUNSPEC_SC))
351 (set (match_operand:QHSD 1 "mem_noofs_operand" "=Ua")
352 (unspec_volatile:QHSD
353 [(match_operand:QHSD 2 "s_register_operand" "r")]
357 if (<MODE>mode == DImode)
359 rtx value = operands[2];
360 /* The restrictions on target registers in ARM mode are that the two
361 registers are consecutive and the first one is even; Thumb is
362 actually more flexible, but DI should give us this anyway.
363 Note that the 1st register always gets the lowest word in memory. */
364 gcc_assert ((REGNO (value) & 1) == 0 || TARGET_THUMB2);
365 operands[3] = gen_rtx_REG (SImode, REGNO (value) + 1);
366 return "strexd%?\t%0, %2, %3, %C1";
368 return "strex<sync_sfx>%?\t%0, %2, %C1";
370 [(set_attr "predicable" "yes")])