;; Machine description for ARM processor synchronization primitives.
;; Copyright (C) 2010-2014 Free Software Foundation, Inc.
;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com)
;; 64bit Atomics by Dave Gilbert (david.gilbert@linaro.org)
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

(define_mode_attr sync_predtab
  [(QI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (HI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (SI "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER")
   (DI "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN
	&& TARGET_HAVE_MEMORY_BARRIER")])

(define_code_iterator syncop [plus minus ior xor and])

(define_code_attr sync_optab
  [(ior "or") (xor "xor") (and "and") (plus "add") (minus "sub")])

(define_mode_attr sync_sfx
  [(QI "b") (HI "h") (SI "") (DI "d")])

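;; For instance, a template string such as "ldrex<sync_sfx>" expands to
;; "ldrexb" for QImode, "ldrexh" for HImode and plain "ldrex" for SImode,
;; and the pattern name "atomic_<sync_optab><mode>" instantiated with the
;; plus code at SImode becomes the standard name "atomic_addsi".
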
(define_expand "memory_barrier"
  [(set (match_dup 0)
	(unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*memory_barrier"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
  {
    if (TARGET_HAVE_DMB)
      {
	/* Note that we issue a system-level barrier.  We should consider
	   issuing an inner shareability zone barrier here instead,
	   i.e. "DMB ISH".  */
	/* ??? Differentiate based on SEQ_CST vs less strict?  */
	return "dmb\tsy";
      }

    if (TARGET_HAVE_DMB_MCR)
      return "mcr\tp15, 0, r0, c7, c10, 5";

    gcc_unreachable ();
  }
  [(set_attr "length" "4")
   (set_attr "conds" "unconditional")
   (set_attr "predicable" "no")])

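;; Illustrative example: a C11 fence such as
;;   __atomic_thread_fence (__ATOMIC_SEQ_CST);
;; is expected to expand through "memory_barrier" above and emit "dmb sy"
;; on cores with DMB, falling back to the CP15 MCR encoding on older
;; cores that only have the coprocessor barrier.
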
(define_insn "atomic_load<mode>"
  [(set (match_operand:QHSI 0 "register_operand" "=r")
    (unspec_volatile:QHSI
      [(match_operand:QHSI 1 "arm_sync_memory_operand" "Q")
       (match_operand:SI 2 "const_int_operand")]	;; model
      VUNSPEC_LDA))]
  "TARGET_HAVE_LDACQ"
  {
    enum memmodel model = (enum memmodel) INTVAL (operands[2]);
    if (model == MEMMODEL_RELAXED
	|| model == MEMMODEL_CONSUME
	|| model == MEMMODEL_RELEASE)
      return \"ldr<sync_sfx>\\t%0, %1\";
    else
      return \"lda<sync_sfx>\\t%0, %1\";
  }
)

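;; Illustrative example: on an ARMv8 target an acquire load such as
;;   int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);
;; should use the "lda" form above, while __ATOMIC_RELAXED only needs
;; the plain "ldr".
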
(define_insn "atomic_store<mode>"
  [(set (match_operand:QHSI 0 "memory_operand" "=Q")
    (unspec_volatile:QHSI
      [(match_operand:QHSI 1 "general_operand" "r")
       (match_operand:SI 2 "const_int_operand")]	;; model
      VUNSPEC_STL))]
  "TARGET_HAVE_LDACQ"
  {
    enum memmodel model = (enum memmodel) INTVAL (operands[2]);
    if (model == MEMMODEL_RELAXED
	|| model == MEMMODEL_CONSUME
	|| model == MEMMODEL_ACQUIRE)
      return \"str<sync_sfx>\t%1, %0\";
    else
      return \"stl<sync_sfx>\t%1, %0\";
  }
)

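;; Illustrative example: a release store such as
;;   __atomic_store_n (&x, 1, __ATOMIC_RELEASE);
;; should pick the "stl" form above, while a relaxed store keeps the
;; plain "str".
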
;; Note that ldrd and vldr are *not* guaranteed to be single-copy atomic,
;; even for a 64-bit aligned address.  Instead we use an ldrexd unpaired
;; with a store.
(define_expand "atomic_loaddi"
  [(match_operand:DI 0 "s_register_operand")		;; val out
   (match_operand:DI 1 "mem_noofs_operand")		;; memory
   (match_operand:SI 2 "const_int_operand")]		;; model
  "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN"
{
  enum memmodel model = (enum memmodel) INTVAL (operands[2]);
  expand_mem_thread_fence (model);
  emit_insn (gen_atomic_loaddi_1 (operands[0], operands[1]));
  if (model == MEMMODEL_SEQ_CST)
    expand_mem_thread_fence (model);
  DONE;
})

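;; Illustrative example: a sequentially consistent 64-bit load such as
;;   long long v = __atomic_load_n (&x64, __ATOMIC_SEQ_CST);
;; is expected to become a leading fence, a single (single-copy atomic)
;; "ldrexd", and a trailing fence.
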
(define_insn "atomic_loaddi_1"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec:DI [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
		   UNSPEC_LL))]
  "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN"
  "ldrexd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "s_register_operand" "")		;; bool out
   (match_operand:QHSD 1 "s_register_operand" "")	;; val out
   (match_operand:QHSD 2 "mem_noofs_operand" "")	;; memory
   (match_operand:QHSD 3 "general_operand" "")		;; expected
   (match_operand:QHSD 4 "s_register_operand" "")	;; desired
   (match_operand:SI 5 "const_int_operand")		;; is_weak
   (match_operand:SI 6 "const_int_operand")		;; mod_s
   (match_operand:SI 7 "const_int_operand")]		;; mod_f
  "<sync_predtab>"
{
  arm_expand_compare_and_swap (operands);
  DONE;
})

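;; Illustrative example: this expander serves C11 calls such as
;;   long expected = 0;
;;   __atomic_compare_exchange_n (&x, &expected, 1, /*weak=*/0,
;;				  __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
;; arm_expand_compare_and_swap routes them to one of the
;; atomic_compare_and_swap<mode>_1 patterns below, which split after
;; reload into an ldrex/strex retry loop.
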
(define_insn_and_split "atomic_compare_and_swap<mode>_1"
  [(set (reg:CC_Z CC_REGNUM)					;; bool out
	(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SI 0 "s_register_operand" "=&r")	;; val out
	(zero_extend:SI
	  (match_operand:NARROW 1 "mem_noofs_operand" "+Ua")))	;; memory
   (set (match_dup 1)
	(unspec_volatile:NARROW
	  [(match_operand:SI 2 "arm_add_operand" "rIL")		;; expected
	   (match_operand:NARROW 3 "s_register_operand" "r")	;; desired
	   (match_operand:SI 4 "const_int_operand")		;; is_weak
	   (match_operand:SI 5 "const_int_operand")		;; mod_s
	   (match_operand:SI 6 "const_int_operand")]		;; mod_f
	  VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 7 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  })

(define_mode_attr cas_cmp_operand
  [(SI "arm_add_operand") (DI "cmpdi_operand")])
(define_mode_attr cas_cmp_str
  [(SI "rIL") (DI "rDi")])

(define_insn_and_split "atomic_compare_and_swap<mode>_1"
  [(set (reg:CC_Z CC_REGNUM)					;; bool out
	(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SIDI 0 "s_register_operand" "=&r")	;; val out
	(match_operand:SIDI 1 "mem_noofs_operand" "+Ua"))	;; memory
   (set (match_dup 1)
	(unspec_volatile:SIDI
	  [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
	   (match_operand:SIDI 3 "s_register_operand" "r")	;; desired
	   (match_operand:SI 4 "const_int_operand")		;; is_weak
	   (match_operand:SI 5 "const_int_operand")		;; mod_s
	   (match_operand:SI 6 "const_int_operand")]		;; mod_f
	  VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 7 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  })

(define_insn_and_split "atomic_exchange<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")	;; output
	(match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))	;; memory
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(match_operand:QHSD 2 "s_register_operand" "r")	;; input
	   (match_operand:SI 3 "const_int_operand" "")]		;; model
	  VUNSPEC_ATOMIC_XCHG))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (SET, operands[0], NULL, operands[1],
			 operands[2], operands[3], operands[4]);
    DONE;
  })

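;; Illustrative example: an exchange such as
;;   int old = __atomic_exchange_n (&x, 1, __ATOMIC_ACQ_REL);
;; splits after reload into roughly
;;   1:	ldrex	r0, [r3]
;;	strex	r2, r1, [r3]
;;	cmp	r2, #0
;;	bne	1b
;; bracketed by whatever barriers the memory model requires.
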
(define_mode_attr atomic_op_operand
  [(QI "reg_or_int_operand")
   (HI "reg_or_int_operand")
   (SI "reg_or_int_operand")
   (DI "s_register_operand")])

(define_mode_attr atomic_op_str
  [(QI "rn") (HI "rn") (SI "rn") (DI "r")])

(define_insn_and_split "atomic_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
	(unspec_volatile:QHSD
	  [(syncop:QHSD (match_dup 0)
	     (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>"))
	   (match_operand:SI 2 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
			 operands[1], operands[2], operands[4]);
    DONE;
  })

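;; Illustrative example: this no-result form is used when the old value
;; is dead, e.g.
;;   __atomic_fetch_add (&counter, 1, __ATOMIC_RELAXED);
;; which becomes an ldrex/add/strex loop that never keeps a live copy of
;; the value loaded from memory.
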
(define_insn_and_split "atomic_nand<mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
	(unspec_volatile:QHSD
	  [(not:QHSD
	     (and:QHSD (match_dup 0)
	       (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>")))
	   (match_operand:SI 2 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[3], operands[0],
			 operands[1], operands[2], operands[4]);
    DONE;
  })

(define_insn_and_split "atomic_fetch_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(syncop:QHSD (match_dup 1)
	     (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
			 operands[2], operands[3], operands[5]);
    DONE;
  })

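;; Illustrative example: the fetch-and-op form returns the old value, e.g.
;;   int old = __atomic_fetch_add (&counter, 1, __ATOMIC_SEQ_CST);
;; where operand 0 receives the value read by ldrex before the add and
;; the strex write-back.
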
(define_insn_and_split "atomic_fetch_nand<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(not:QHSD
	     (and:QHSD (match_dup 1)
	       (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, operands[0], operands[4], operands[1],
			 operands[2], operands[3], operands[5]);
    DONE;
  })

(define_insn_and_split "atomic_<sync_optab>_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(syncop:QHSD
	  (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
	  (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(match_dup 1) (match_dup 2)
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
			 operands[2], operands[3], operands[4]);
    DONE;
  })

(define_insn_and_split "atomic_nand_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(not:QHSD
	  (and:QHSD
	    (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
	    (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(match_dup 1) (match_dup 2)
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[0], operands[1],
			 operands[2], operands[3], operands[4]);
    DONE;
  })

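;; Illustrative note: GCC defines atomic nand as ~(a & b), so
;;   int r = __atomic_nand_fetch (&x, m, __ATOMIC_SEQ_CST);
;; leaves r equal to ~(old & m), matching the (not (and ...)) RTL above.
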
(define_insn "arm_load_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(zero_extend:SI
	  (unspec_volatile:NARROW
	    [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")]
	    VUNSPEC_LL)))]
  "TARGET_HAVE_LDREXBH"
  "ldrex<sync_sfx>%?\t%0, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_insn "arm_load_acquire_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(zero_extend:SI
	  (unspec_volatile:NARROW
	    [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")]
	    VUNSPEC_LAX)))]
  "TARGET_HAVE_LDACQ"
  "ldaex<sync_sfx>%?\\t%0, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_insn "arm_load_exclusivesi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec_volatile:SI
	  [(match_operand:SI 1 "mem_noofs_operand" "Ua")]
	  VUNSPEC_LL))]
  "TARGET_HAVE_LDREX"
  "ldrex%?\t%0, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_insn "arm_load_acquire_exclusivesi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec_volatile:SI
	  [(match_operand:SI 1 "mem_noofs_operand" "Ua")]
	  VUNSPEC_LAX))]
  "TARGET_HAVE_LDACQ"
  "ldaex%?\t%0, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_insn "arm_load_exclusivedi"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec_volatile:DI
	  [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
	  VUNSPEC_LL))]
  "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN"
  "ldrexd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_insn "arm_load_acquire_exclusivedi"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec_volatile:DI
	  [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
	  VUNSPEC_LAX))]
  "TARGET_HAVE_LDACQ && ARM_DOUBLEWORD_ALIGN"
  "ldaexd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_insn "arm_store_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=&r")
	(unspec_volatile:SI [(const_int 0)] VUNSPEC_SC))
   (set (match_operand:QHSD 1 "mem_noofs_operand" "=Ua")
	(unspec_volatile:QHSD
	  [(match_operand:QHSD 2 "s_register_operand" "r")]
	  VUNSPEC_SC))]
  "<sync_predtab>"
  {
    if (<MODE>mode == DImode)
      {
	rtx value = operands[2];
	/* The restrictions on target registers in ARM mode are that the two
	   registers are consecutive and the first one is even; Thumb is
	   actually more flexible, but DI should give us this anyway.
	   Note that the 1st register always gets the lowest word in memory.  */
	gcc_assert ((REGNO (value) & 1) == 0 || TARGET_THUMB2);
	operands[3] = gen_rtx_REG (SImode, REGNO (value) + 1);
	return "strexd%?\t%0, %2, %3, %C1";
      }
    return "strex<sync_sfx>%?\t%0, %2, %C1";
  }
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

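;; Illustrative note: operand 0 of the store-exclusive is the success
;; flag (0 on success), so a complete retry loop built from the exclusive
;; pairs above looks roughly like
;;   1:	ldrex	r0, [r2]
;;	...update r1 from r0...
;;	strex	r3, r1, [r2]
;;	cmp	r3, #0
;;	bne	1b
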
(define_insn "arm_store_release_exclusivedi"
  [(set (match_operand:SI 0 "s_register_operand" "=&r")
	(unspec_volatile:SI [(const_int 0)] VUNSPEC_SLX))
   (set (match_operand:DI 1 "mem_noofs_operand" "=Ua")
	(unspec_volatile:DI
	  [(match_operand:DI 2 "s_register_operand" "r")]
	  VUNSPEC_SLX))]
  "TARGET_HAVE_LDACQ && ARM_DOUBLEWORD_ALIGN"
  {
    rtx value = operands[2];
    /* See comment in arm_store_exclusive<mode> above.  */
    gcc_assert ((REGNO (value) & 1) == 0 || TARGET_THUMB2);
    operands[3] = gen_rtx_REG (SImode, REGNO (value) + 1);
    return "stlexd%?\t%0, %2, %3, %C1";
  }
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])

(define_insn "arm_store_release_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=&r")
	(unspec_volatile:SI [(const_int 0)] VUNSPEC_SLX))
   (set (match_operand:QHSI 1 "mem_noofs_operand" "=Ua")
	(unspec_volatile:QHSI
	  [(match_operand:QHSI 2 "s_register_operand" "r")]
	  VUNSPEC_SLX))]
  "TARGET_HAVE_LDACQ"
  "stlex<sync_sfx>%?\t%0, %2, %C1"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")])