1 ;; Machine description for ARM processor synchronization primitives.
2 ;; Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 ;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com)
4 ;; 64bit Atomics by Dave Gilbert (david.gilbert@linaro.org)
6 ;; This file is part of GCC.
8 ;; GCC is free software; you can redistribute it and/or modify it
9 ;; under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 3, or (at your option)
13 ;; GCC is distributed in the hope that it will be useful, but
14 ;; WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 ;; General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GCC; see the file COPYING3. If not see
20 ;; <http://www.gnu.org/licenses/>. */
;; <sync_predtab> maps each access mode to the enabling condition for the
;; synchronization patterns of that size: byte/halfword need LDREXB/LDREXH,
;; word needs LDREX, doubleword needs LDREXD plus doubleword alignment.
;; Every size additionally requires a memory barrier instruction.
22 (define_mode_attr sync_predtab
23 [(QI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
24 (HI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
25 (SI "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER")
26 (DI "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN
27 && TARGET_HAVE_MEMORY_BARRIER")])
;; The binary operations implemented by the atomic read-modify-write
;; patterns below.  NAND is not a code here; it has dedicated patterns.
29 (define_code_iterator syncop [plus minus ior xor and])
;; Printable operation name for each syncop code, used to build pattern
;; names such as atomic_<sync_optab><mode> (e.g. atomic_add, atomic_or).
31 (define_code_attr sync_optab
32 [(ior "or") (xor "xor") (and "and") (plus "add") (minus "sub")])
;; Mnemonic size suffix for the exclusive/atomic instructions:
;; "b" = byte, "h" = halfword, none = word, "d" = doubleword.
34 (define_mode_attr sync_sfx
35 [(QI "b") (HI "h") (SI "") (DI "d")])
;; Expand a full memory barrier.  The barrier is modelled as a volatile
;; BLKmode scratch MEM wrapped in UNSPEC_MEMORY_BARRIER so that no memory
;; access can be moved across it.
;; NOTE(review): some lines of this expander are not visible in this view
;; of the file -- confirm against the full source before editing.
37 (define_expand "memory_barrier"
39 (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
40 "TARGET_HAVE_MEMORY_BARRIER"
42 operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
43 MEM_VOLATILE_P (operands[0]) = 1;
;; Emit the barrier instruction itself.  On targets where the barrier is
;; only available through the coprocessor interface, use the legacy CP15
;; form (MCR p15, 0, r0, c7, c10, 5); presumably a DMB alternative is in
;; the lines omitted from this view -- confirm.  The barrier is 4 bytes
;; and must never be conditionally executed.
46 (define_insn "*memory_barrier"
47 [(set (match_operand:BLK 0 "" "")
48 (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
49 "TARGET_HAVE_MEMORY_BARRIER"
56 if (TARGET_HAVE_DMB_MCR)
57 return "mcr\\tp15, 0, r0, c7, c10, 5";
61 [(set_attr "length" "4")
62 (set_attr "conds" "unconditional")
63 (set_attr "predicable" "no")])
;; Atomic load of a byte, halfword or word (QHSI).  Models that impose no
;; acquire ordering on a load (relaxed, consume, release) use a plain
;; LDR<size>; all stronger models use the load-acquire LDA<size> form.
65 (define_insn "atomic_load<mode>"
66 [(set (match_operand:QHSI 0 "register_operand" "=r")
68 [(match_operand:QHSI 1 "arm_sync_memory_operand" "Q")
69 (match_operand:SI 2 "const_int_operand")] ;; model
73 enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
;; No acquire semantics needed for these models: a plain load is enough.
74 if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_release (model))
75 return \"ldr<sync_sfx>%?\\t%0, %1\";
77 return \"lda<sync_sfx>%?\\t%0, %1\";
79 [(set_attr "predicable" "yes")
80 (set_attr "predicable_short_it" "no")])
;; Atomic store of a byte, halfword or word (QHSI).  Models that impose no
;; release ordering on a store (relaxed, consume, acquire) use a plain
;; STR<size>; all stronger models use the store-release STL<size> form.
82 (define_insn "atomic_store<mode>"
83 [(set (match_operand:QHSI 0 "memory_operand" "=Q")
85 [(match_operand:QHSI 1 "general_operand" "r")
86 (match_operand:SI 2 "const_int_operand")] ;; model
90 enum memmodel model = memmodel_from_int (INTVAL (operands[2]));
;; No release semantics needed for these models: a plain store is enough.
91 if (is_mm_relaxed (model) || is_mm_consume (model) || is_mm_acquire (model))
92 return \"str<sync_sfx>%?\t%1, %0\";
94 return \"stl<sync_sfx>%?\t%1, %0\";
96 [(set_attr "predicable" "yes")
97 (set_attr "predicable_short_it" "no")])
99 ;; An LDRD instruction usable by the atomic_loaddi expander on LPAE targets
;; On LPAE, LDRD from a 64-bit aligned address is single-copy atomic (see
;; the comment in atomic_loaddi below), so no exclusive pair is required.
;; %H0 is the high register of the DI pair; %C1 prints the memory address.
101 (define_insn "arm_atomic_loaddi2_ldrd"
102 [(set (match_operand:DI 0 "register_operand" "=r")
104 [(match_operand:DI 1 "arm_sync_memory_operand" "Q")]
105 VUNSPEC_LDRD_ATOMIC))]
106 "ARM_DOUBLEWORD_ALIGN && TARGET_HAVE_LPAE"
107 "ldrd%?\t%0, %H0, %C1"
108 [(set_attr "predicable" "yes")
109 (set_attr "predicable_short_it" "no")])
111 ;; There are three ways to expand this depending on the architecture
112 ;; features available.  As for the barriers, a load needs a barrier
113 ;; after it on all non-relaxed memory models except when the load
114 ;; has acquire semantics (for ARMv8-A).
;; Dispatch order: ARMv8-A LDAEXD (acquire built in), then LPAE LDRD
;; (atomic but unordered), then the LDREXD fallback; a trailing barrier
;; supplies ordering for the non-acquire forms.
;; NOTE(review): the else-branches and returns of this expander are not
;; fully visible in this view -- confirm against the full source.
116 (define_expand "atomic_loaddi"
117 [(match_operand:DI 0 "s_register_operand") ;; val out
118 (match_operand:DI 1 "mem_noofs_operand") ;; memory
119 (match_operand:SI 2 "const_int_operand")] ;; model
120 "(TARGET_HAVE_LDREXD || TARGET_HAVE_LPAE || TARGET_HAVE_LDACQEXD)
121 && ARM_DOUBLEWORD_ALIGN"
123 memmodel model = memmodel_from_int (INTVAL (operands[2]));
125 /* For ARMv8-A we can use an LDAEXD to atomically load two 32-bit registers
126 when acquire or stronger semantics are needed.  When the relaxed model is
127 used this can be relaxed to a normal LDRD.  */
128 if (TARGET_HAVE_LDACQEXD)
130 if (is_mm_relaxed (model))
131 emit_insn (gen_arm_atomic_loaddi2_ldrd (operands[0], operands[1]));
133 emit_insn (gen_arm_load_acquire_exclusivedi (operands[0], operands[1]));
138 /* On LPAE targets LDRD and STRD accesses to 64-bit aligned
139 locations are 64-bit single-copy atomic.  We still need barriers in the
140 appropriate places to implement the ordering constraints.  */
141 if (TARGET_HAVE_LPAE)
142 emit_insn (gen_arm_atomic_loaddi2_ldrd (operands[0], operands[1]));
;; Fallback: an LDREXD gives an atomic 64-bit read even without a
;; matching STREXD.
144 emit_insn (gen_arm_load_exclusivedi (operands[0], operands[1]));
147 /* All non-relaxed models need a barrier after the load when load-acquire
148 instructions are not available.  */
149 if (!is_mm_relaxed (model))
150 expand_mem_thread_fence (model);
;; Compare-and-swap expander for all sizes (QHSD).  Operands follow the
;; standard __atomic_compare_exchange interface: bool result, loaded
;; value, memory, expected, desired, is_weak, success model, failure
;; model.  All the work is done in arm_expand_compare_and_swap, which
;; emits one of the atomic_compare_and_swap<mode>_1 patterns below.
155 (define_expand "atomic_compare_and_swap<mode>"
156 [(match_operand:SI 0 "s_register_operand" "")	;; bool out
157 (match_operand:QHSD 1 "s_register_operand" "")	;; val out
158 (match_operand:QHSD 2 "mem_noofs_operand" "")	;; memory
159 (match_operand:QHSD 3 "general_operand" "")	;; expected
160 (match_operand:QHSD 4 "s_register_operand" "")	;; desired
161 (match_operand:SI 5 "const_int_operand")		;; is_weak
162 (match_operand:SI 6 "const_int_operand")		;; mod_s
163 (match_operand:SI 7 "const_int_operand")]	;; mod_f
166 arm_expand_compare_and_swap (operands);
;; Compare-and-swap on narrow (QI/HI) values.  The boolean result lives in
;; the Z flag of CC_REGNUM; the loaded value is widened into an SI
;; register (operand 0).  Kept as a single insn until after reload, then
;; split into the ldrex/strex loop by arm_split_compare_and_swap.
170 (define_insn_and_split "atomic_compare_and_swap<mode>_1"
171 [(set (reg:CC_Z CC_REGNUM)					;; bool out
172 (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
173 (set (match_operand:SI 0 "s_register_operand" "=&r")	;; val out
175 (match_operand:NARROW 1 "mem_noofs_operand" "+Ua"))) ;; memory
177 (unspec_volatile:NARROW
178 [(match_operand:SI 2 "arm_add_operand" "rIL")	;; expected
179 (match_operand:NARROW 3 "s_register_operand" "r")	;; desired
180 (match_operand:SI 4 "const_int_operand")		;; is_weak
181 (match_operand:SI 5 "const_int_operand")		;; mod_s
182 (match_operand:SI 6 "const_int_operand")]	;; mod_f
;; Scratch register for the strex success/failure flag.
184 (clobber (match_scratch:SI 7 "=&r"))]
187 "&& reload_completed"
190 arm_split_compare_and_swap (operands);
;; Predicate and constraint for the "expected" operand of the word and
;; doubleword CAS pattern: SI allows add-immediate forms (rIL), DI uses
;; the cmpdi operand/constraint pair (rDi).
194 (define_mode_attr cas_cmp_operand
195 [(SI "arm_add_operand") (DI "cmpdi_operand")])
196 (define_mode_attr cas_cmp_str
197 [(SI "rIL") (DI "rDi")])
;; Compare-and-swap on word/doubleword (SIDI) values.  Same shape as the
;; narrow variant above: boolean result in the Z flag, loaded value in
;; operand 0; split after reload by arm_split_compare_and_swap.
199 (define_insn_and_split "atomic_compare_and_swap<mode>_1"
200 [(set (reg:CC_Z CC_REGNUM)					;; bool out
201 (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
202 (set (match_operand:SIDI 0 "s_register_operand" "=&r")	;; val out
203 (match_operand:SIDI 1 "mem_noofs_operand" "+Ua"))	;; memory
205 (unspec_volatile:SIDI
206 [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
207 (match_operand:SIDI 3 "s_register_operand" "r")	;; desired
208 (match_operand:SI 4 "const_int_operand")		;; is_weak
209 (match_operand:SI 5 "const_int_operand")		;; mod_s
210 (match_operand:SI 6 "const_int_operand")]	;; mod_f
;; Scratch register for the strex success/failure flag.
212 (clobber (match_scratch:SI 7 "=&r"))]
215 "&& reload_completed"
218 arm_split_compare_and_swap (operands);
;; Atomic exchange for all sizes: operand 0 receives the old memory
;; value, operand 2 is stored.  Split after reload into a ldrex/strex
;; loop by arm_split_atomic_op (SET means "store the new value as-is").
222 (define_insn_and_split "atomic_exchange<mode>"
223 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")	;; output
224 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))	;; memory
226 (unspec_volatile:QHSD
227 [(match_operand:QHSD 2 "s_register_operand" "r")	;; input
228 (match_operand:SI 3 "const_int_operand" "")]	;; model
229 VUNSPEC_ATOMIC_XCHG))
230 (clobber (reg:CC CC_REGNUM))
;; Scratch register for the strex success/failure flag.
231 (clobber (match_scratch:SI 4 "=&r"))]
234 "&& reload_completed"
237 arm_split_atomic_op (SET, operands[0], NULL, operands[1],
238 operands[2], operands[3], operands[4]);
;; Predicate and constraint for the second operand of the atomic
;; read-modify-write patterns: narrow and word sizes accept a register or
;; certain integer constants ("rn"); doubleword requires a register pair.
242 (define_mode_attr atomic_op_operand
243 [(QI "reg_or_int_operand")
244 (HI "reg_or_int_operand")
245 (SI "reg_or_int_operand")
246 (DI "s_register_operand")])
248 (define_mode_attr atomic_op_str
249 [(QI "rn") (HI "rn") (SI "rn") (DI "r")])
;; Atomic <op> with no value output (e.g. __atomic_add_fetch result
;; unused): mem = mem <op> operand 1.  Split after reload into a
;; ldrex/strex loop; operand 3 holds the intermediate value, operand 4
;; the strex success flag.
251 (define_insn_and_split "atomic_<sync_optab><mode>"
252 [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
253 (unspec_volatile:QHSD
254 [(syncop:QHSD (match_dup 0)
255 (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>"))
256 (match_operand:SI 2 "const_int_operand")]		;; model
258 (clobber (reg:CC CC_REGNUM))
259 (clobber (match_scratch:QHSD 3 "=&r"))
260 (clobber (match_scratch:SI 4 "=&r"))]
263 "&& reload_completed"
;; NULL old-value / operands[3] new-value: neither result is returned.
266 arm_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
267 operands[1], operands[2], operands[4]);
;; Atomic NAND with no value output: mem = ~(mem & operand 1).  NAND is
;; not a syncop code, so it gets its own pattern; the NOT code passed to
;; arm_split_atomic_op selects the and-then-invert sequence.
271 (define_insn_and_split "atomic_nand<mode>"
272 [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
273 (unspec_volatile:QHSD
275 (and:QHSD (match_dup 0)
276 (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>")))
277 (match_operand:SI 2 "const_int_operand")]	;; model
279 (clobber (reg:CC CC_REGNUM))
280 (clobber (match_scratch:QHSD 3 "=&r"))
281 (clobber (match_scratch:SI 4 "=&r"))]
284 "&& reload_completed"
287 arm_split_atomic_op (NOT, NULL, operands[3], operands[0],
288 operands[1], operands[2], operands[4]);
;; Atomic fetch-and-<op>: operand 0 receives the OLD memory value, then
;; mem = mem <op> operand 2.  Split after reload; operand 4 holds the
;; computed new value, operand 5 the strex success flag.
292 (define_insn_and_split "atomic_fetch_<sync_optab><mode>"
293 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
294 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
296 (unspec_volatile:QHSD
297 [(syncop:QHSD (match_dup 1)
298 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))
299 (match_operand:SI 3 "const_int_operand")]	;; model
301 (clobber (reg:CC CC_REGNUM))
302 (clobber (match_scratch:QHSD 4 "=&r"))
303 (clobber (match_scratch:SI 5 "=&r"))]
306 "&& reload_completed"
;; operands[0] = old value out; operands[4] = scratch for the new value.
309 arm_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
310 operands[2], operands[3], operands[5]);
;; Atomic fetch-and-NAND: operand 0 receives the OLD memory value, then
;; mem = ~(mem & operand 2).  Same splitting scheme as atomic_fetch_<op>.
314 (define_insn_and_split "atomic_fetch_nand<mode>"
315 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
316 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
318 (unspec_volatile:QHSD
320 (and:QHSD (match_dup 1)
321 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
322 (match_operand:SI 3 "const_int_operand")]	;; model
324 (clobber (reg:CC CC_REGNUM))
325 (clobber (match_scratch:QHSD 4 "=&r"))
326 (clobber (match_scratch:SI 5 "=&r"))]
329 "&& reload_completed"
332 arm_split_atomic_op (NOT, operands[0], operands[4], operands[1],
333 operands[2], operands[3], operands[5]);
;; Atomic <op>-and-fetch: operand 0 receives the NEW value
;; (mem <op> operand 2) that is also stored back to memory.  No separate
;; scratch for the result is needed, since operand 0 holds it.
337 (define_insn_and_split "atomic_<sync_optab>_fetch<mode>"
338 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
340 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
341 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
343 (unspec_volatile:QHSD
344 [(match_dup 1) (match_dup 2)
345 (match_operand:SI 3 "const_int_operand")]	;; model
347 (clobber (reg:CC CC_REGNUM))
348 (clobber (match_scratch:SI 4 "=&r"))]
351 "&& reload_completed"
;; NULL old-value slot: only the new value (operands[0]) is returned.
354 arm_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
355 operands[2], operands[3], operands[4]);
;; Atomic NAND-and-fetch: operand 0 receives the NEW value
;; ~(mem & operand 2) that is also stored back to memory.
359 (define_insn_and_split "atomic_nand_fetch<mode>"
360 [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
363 (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
364 (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))))
366 (unspec_volatile:QHSD
367 [(match_dup 1) (match_dup 2)
368 (match_operand:SI 3 "const_int_operand")]	;; model
370 (clobber (reg:CC CC_REGNUM))
371 (clobber (match_scratch:SI 4 "=&r"))]
374 "&& reload_completed"
377 arm_split_atomic_op (NOT, NULL, operands[0], operands[1],
378 operands[2], operands[3], operands[4]);
;; Load-exclusive of a byte or halfword; the result is widened into an SI
;; register (a zero_extend presumably wraps the unspec in the omitted
;; lines -- confirm).  %C1 prints the memory address without an offset.
382 (define_insn "arm_load_exclusive<mode>"
383 [(set (match_operand:SI 0 "s_register_operand" "=r")
385 (unspec_volatile:NARROW
386 [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")]
388 "TARGET_HAVE_LDREXBH"
389 "ldrex<sync_sfx>%?\t%0, %C1"
390 [(set_attr "predicable" "yes")
391 (set_attr "predicable_short_it" "no")])
;; Load-acquire-exclusive of a byte or halfword (ARMv8-A LDAEX{B,H}),
;; widened into an SI register.  The enabling condition line is not
;; visible in this view -- presumably TARGET_HAVE_LDACQ; confirm.
393 (define_insn "arm_load_acquire_exclusive<mode>"
394 [(set (match_operand:SI 0 "s_register_operand" "=r")
396 (unspec_volatile:NARROW
397 [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")]
400 "ldaex<sync_sfx>%?\\t%0, %C1"
401 [(set_attr "predicable" "yes")
402 (set_attr "predicable_short_it" "no")])
;; Word-sized load-exclusive (LDREX).  The condition and output-template
;; lines are not visible in this view of the file.
404 (define_insn "arm_load_exclusivesi"
405 [(set (match_operand:SI 0 "s_register_operand" "=r")
407 [(match_operand:SI 1 "mem_noofs_operand" "Ua")]
411 [(set_attr "predicable" "yes")
412 (set_attr "predicable_short_it" "no")])
;; Word-sized load-acquire-exclusive (ARMv8-A LDAEX).  The condition and
;; output-template lines are not visible in this view of the file.
414 (define_insn "arm_load_acquire_exclusivesi"
415 [(set (match_operand:SI 0 "s_register_operand" "=r")
417 [(match_operand:SI 1 "mem_noofs_operand" "Ua")]
421 [(set_attr "predicable" "yes")
422 (set_attr "predicable_short_it" "no")])
;; Doubleword load-exclusive (LDREXD): loads a consecutive register pair;
;; %H0 is the high register of the DI value, %C1 the memory address.
424 (define_insn "arm_load_exclusivedi"
425 [(set (match_operand:DI 0 "s_register_operand" "=r")
427 [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
430 "ldrexd%?\t%0, %H0, %C1"
431 [(set_attr "predicable" "yes")
432 (set_attr "predicable_short_it" "no")])
;; Doubleword load-acquire-exclusive (ARMv8-A LDAEXD): used by
;; atomic_loaddi and the DI compare-and-swap/exchange sequences when
;; acquire semantics are needed.
434 (define_insn "arm_load_acquire_exclusivedi"
435 [(set (match_operand:DI 0 "s_register_operand" "=r")
437 [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
439 "TARGET_HAVE_LDACQEXD && ARM_DOUBLEWORD_ALIGN"
440 "ldaexd%?\t%0, %H0, %C1"
441 [(set_attr "predicable" "yes")
442 (set_attr "predicable_short_it" "no")])
;; Store-exclusive for all sizes: operand 0 receives the STREX status
;; (0 = store succeeded, 1 = lost the reservation); operand 2 is stored
;; to operand 1.  DImode uses STREXD, which in ARM mode requires an
;; even-numbered first register of a consecutive pair.
444 (define_insn "arm_store_exclusive<mode>"
445 [(set (match_operand:SI 0 "s_register_operand" "=&r")
446 (unspec_volatile:SI [(const_int 0)] VUNSPEC_SC))
447 (set (match_operand:QHSD 1 "mem_noofs_operand" "=Ua")
448 (unspec_volatile:QHSD
449 [(match_operand:QHSD 2 "s_register_operand" "r")]
453 if (<MODE>mode == DImode)
455 /* The restrictions on target registers in ARM mode are that the two
456 registers are consecutive and the first one is even; Thumb is
457 actually more flexible, but DI should give us this anyway.
458 Note that the 1st register always gets the
459 lowest word in memory.  */
460 gcc_assert ((REGNO (operands[2]) & 1) == 0 || TARGET_THUMB2);
461 return "strexd%?\t%0, %2, %H2, %C1";
463 return "strex<sync_sfx>%?\t%0, %2, %C1";
465 [(set_attr "predicable" "yes")
466 (set_attr "predicable_short_it" "no")])
;; Doubleword store-release-exclusive (ARMv8-A STLEXD): status in
;; operand 0, value pair in operand 2.  Same even-register restriction
;; in ARM mode as STREXD (asserted below).
468 (define_insn "arm_store_release_exclusivedi"
469 [(set (match_operand:SI 0 "s_register_operand" "=&r")
470 (unspec_volatile:SI [(const_int 0)] VUNSPEC_SLX))
471 (set (match_operand:DI 1 "mem_noofs_operand" "=Ua")
473 [(match_operand:DI 2 "s_register_operand" "r")]
475 "TARGET_HAVE_LDACQEXD && ARM_DOUBLEWORD_ALIGN"
477 /* See comment in arm_store_exclusive<mode> above.  */
478 gcc_assert ((REGNO (operands[2]) & 1) == 0 || TARGET_THUMB2);
479 return "stlexd%?\t%0, %2, %H2, %C1";
481 [(set_attr "predicable" "yes")
482 (set_attr "predicable_short_it" "no")])
;; Store-release-exclusive for byte/halfword/word (ARMv8-A STLEX{B,H,}):
;; status in operand 0, stored value in operand 2.  The enabling
;; condition line is not visible in this view -- presumably
;; TARGET_HAVE_LDACQ; confirm.
484 (define_insn "arm_store_release_exclusive<mode>"
485 [(set (match_operand:SI 0 "s_register_operand" "=&r")
486 (unspec_volatile:SI [(const_int 0)] VUNSPEC_SLX))
487 (set (match_operand:QHSI 1 "mem_noofs_operand" "=Ua")
488 (unspec_volatile:QHSI
489 [(match_operand:QHSI 2 "s_register_operand" "r")]
492 "stlex<sync_sfx>%?\t%0, %2, %C1"
493 [(set_attr "predicable" "yes")
494 (set_attr "predicable_short_it" "no")])