;; Machine description for LoongArch atomic operations.
;; Copyright (C) 2021-2024 Free Software Foundation, Inc.
;; Contributed by Loongson Ltd.
;; Based on MIPS and RISC-V target for GNU compiler.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.

;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

(define_c_enum "unspec" [
  UNSPEC_COMPARE_AND_SWAP
  UNSPEC_COMPARE_AND_SWAP_ADD
  UNSPEC_COMPARE_AND_SWAP_SUB
  UNSPEC_COMPARE_AND_SWAP_AND
  UNSPEC_COMPARE_AND_SWAP_XOR
  UNSPEC_COMPARE_AND_SWAP_OR
  UNSPEC_COMPARE_AND_SWAP_NAND
  UNSPEC_SYNC_OLD_OP
  UNSPEC_SYNC_EXCHANGE
  UNSPEC_ATOMIC_STORE
  UNSPEC_ATOMIC_LOAD
  UNSPEC_MEMORY_BARRIER
])

(define_code_iterator any_atomic [plus ior xor and])
(define_code_attr atomic_optab
  [(plus "add") (ior "or") (xor "xor") (and "and")])

;; This attribute gives the format suffix for atomic memory operations.
(define_mode_attr amo [(QI "b") (HI "h") (SI "w") (DI "d")])

;; <amop> expands to the name of the atomic operation that implements a
;; particular code.
(define_code_attr amop [(ior "or") (xor "xor") (and "and") (plus "add")])

(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand" "")] ;; model
  ""
{
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (mem) = 1;
  emit_insn (gen_mem_thread_fence_1 (mem, operands[0]));

  DONE;
})

;; DBAR hint encoding for LA664 and later micro-architectures, paraphrased from
;; the Linux patch revealing it [1]:
;;
;; - Bit 4: kind of constraint (0: completion, 1: ordering)
;; - Bit 3: barrier for previous read (0: true, 1: false)
;; - Bit 2: barrier for previous write (0: true, 1: false)
;; - Bit 1: barrier for succeeding read (0: true, 1: false)
;; - Bit 0: barrier for succeeding write (0: true, 1: false)
;;
;; [1]: https://git.kernel.org/torvalds/c/e031a5f3f1ed
;;
;; Implementations without support for the finer-granularity hints simply treat
;; them all as the full barrier (DBAR 0), so we can unconditionally start
;; emitting the more precise hints right away.
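;;
;; As an illustrative aid (not part of the upstream sources), the hints used
;; by the pattern below decode as follows:
;;
;;   dbar 0b10100  - previous reads vs. all succeeding accesses (acquire),
;;                   e.g. for __atomic_thread_fence (__ATOMIC_ACQUIRE)
;;   dbar 0b10010  - all previous accesses vs. succeeding writes (release),
;;                   e.g. for __atomic_thread_fence (__ATOMIC_RELEASE)
;;   dbar 0b10000  - all previous accesses vs. all succeeding accesses
;;                   (full ordering barrier), e.g. for
;;                   __atomic_thread_fence (__ATOMIC_SEQ_CST)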
(define_insn "mem_thread_fence_1"
  [(set (match_operand:BLK 0 "" "")
        (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))
   (match_operand:SI 1 "const_int_operand" "")] ;; model
  ""
{
  enum memmodel model = memmodel_base (INTVAL (operands[1]));

  switch (model)
    {
    case MEMMODEL_ACQUIRE:
      return "dbar\t0b10100";
    case MEMMODEL_RELEASE:
      return "dbar\t0b10010";
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return "dbar\t0b10000";
    default:
      /* GCC internal: "For the '__ATOMIC_RELAXED' model no instructions
         need to be issued and this expansion is not invoked."

         __atomic builtins doc: "Consume is implemented using the
         stronger acquire memory order because of a deficiency in C++11's
         semantics."  See PR 59448 and get_memmodel in builtins.cc.

         Other values should not be returned by memmodel_base.  */
      gcc_unreachable ();
    }
})

;; Atomic memory operations.

(define_insn "atomic_load<mode>"
  [(set (match_operand:QHWD 0 "register_operand" "=r")
        (unspec_volatile:QHWD
          [(match_operand:QHWD 1 "memory_operand" "+m")
           (match_operand:SI 2 "const_int_operand")] ;; model
          UNSPEC_ATOMIC_LOAD))]
  ""
{
  enum memmodel model = memmodel_base (INTVAL (operands[2]));

  switch (model)
    {
    case MEMMODEL_SEQ_CST:
      return "dbar\t0x11\\n\\t"
             "ld.<size>\t%0,%1\\n\\t"
    case MEMMODEL_ACQUIRE:
      return "ld.<size>\t%0,%1\\n\\t"
    case MEMMODEL_RELAXED:
      return ISA_HAS_LD_SEQ_SA ? "ld.<size>\t%0,%1"
                               : "ld.<size>\t%0,%1\\n\\t"

      /* The valid memory order variants are __ATOMIC_RELAXED, __ATOMIC_SEQ_CST,
         __ATOMIC_CONSUME and __ATOMIC_ACQUIRE.
         The expand_builtin_atomic_load function converts all invalid memmodels
         to MEMMODEL_SEQ_CST.

         __atomic builtins doc: "Consume is implemented using the
         stronger acquire memory order because of a deficiency in C++11's
         semantics."  See PR 59448 and get_memmodel in builtins.cc.  */
      gcc_unreachable ();
    }
}
  [(set (attr "length") (const_int 12))])
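
;; Illustration (not from the upstream sources): because consume is promoted
;; to acquire as described above, __atomic_load_n (p, __ATOMIC_CONSUME) is
;; handled exactly like __atomic_load_n (p, __ATOMIC_ACQUIRE) and reaches the
;; MEMMODEL_ACQUIRE arm of this pattern.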

;; Implement atomic stores as plain stores bracketed by fences; atomic loads
;; likewise fall back to fences.
(define_insn "atomic_store<mode>"
  [(set (match_operand:QHWD 0 "memory_operand" "+m")
        (unspec_volatile:QHWD
          [(match_operand:QHWD 1 "reg_or_0_operand" "rJ")
           (match_operand:SI 2 "const_int_operand")] ;; model
          UNSPEC_ATOMIC_STORE))]
  ""
{
  enum memmodel model = memmodel_base (INTVAL (operands[2]));

  switch (model)
    {
    case MEMMODEL_SEQ_CST:
      return "dbar\t0x12\\n\\t"
             "st.<size>\t%z1,%0\\n\\t"
    case MEMMODEL_RELEASE:
      return "dbar\t0x12\\n\\t"
             "st.<size>\t%z1,%0\\n\\t";
    case MEMMODEL_RELAXED:
      return "st.<size>\t%z1,%0";

    default:
      /* The valid memory order variants are __ATOMIC_RELAXED, __ATOMIC_SEQ_CST,
         and __ATOMIC_RELEASE.
         The expand_builtin_atomic_store function converts all invalid memmodels
         to MEMMODEL_SEQ_CST.  */
      gcc_unreachable ();
    }
}
  [(set (attr "length") (const_int 12))])

(define_insn "atomic_<atomic_optab><mode>"
  [(set (match_operand:GPR 0 "memory_operand" "+ZB")
        (unspec_volatile:GPR
          [(any_atomic:GPR (match_dup 0)
             (match_operand:GPR 1 "reg_or_0_operand" "rJ"))
           (match_operand:SI 2 "const_int_operand")] ;; model
         UNSPEC_SYNC_OLD_OP))]
  ""
  "am<amop>%A2.<amo>\t$zero,%z1,%0"
  [(set (attr "length") (const_int 4))])

(define_insn "atomic_add<mode>"
  [(set (match_operand:SHORT 0 "memory_operand" "+ZB")
        (unspec_volatile:SHORT
          [(plus:SHORT (match_dup 0)
             (match_operand:SHORT 1 "reg_or_0_operand" "rJ"))
           (match_operand:SI 2 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  "amadd%A2.<amo>\t$zero,%z1,%0"
  [(set (attr "length") (const_int 4))])

(define_insn "atomic_fetch_<atomic_optab><mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (match_operand:GPR 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:GPR
          [(any_atomic:GPR (match_dup 1)
             (match_operand:GPR 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  ""
  "am<amop>%A3.<amo>\t%0,%z2,%1"
  [(set (attr "length") (const_int 4))])

(define_insn "atomic_exchange<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (unspec_volatile:GPR
          [(match_operand:GPR 1 "memory_operand" "+ZB")
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_EXCHANGE))
   (set (match_dup 1)
        (match_operand:GPR 2 "register_operand" "r"))]
  ""
  "amswap%A3.<amo>\t%0,%z2,%1"
  [(set (attr "length") (const_int 4))])

(define_insn "atomic_exchange<mode>_short"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (unspec_volatile:SHORT
          [(match_operand:SHORT 1 "memory_operand" "+ZB")
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_EXCHANGE))
   (set (match_dup 1)
        (match_operand:SHORT 2 "register_operand" "r"))]
  "amswap%A3.<amo>\t%0,%z2,%1"
  [(set (attr "length") (const_int 4))])

(define_insn "atomic_cas_value_strong<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")
                              (match_operand:SI 4 "const_int_operand")] ;; mod_s
         UNSPEC_COMPARE_AND_SWAP))
   (clobber (match_scratch:GPR 5 "=&r"))]
  ""
{
  output_asm_insn ("1:", operands);
  output_asm_insn ("ll.<amo>\t%0,%1", operands);

  /* As in the test case atomic-cas-int.C, on loongarch64 at -O1 and above the
     return value of val_without_const_folding is not truncated and is passed
     directly to compare_exchange_strong.  However, the 'bne' instruction does
     not distinguish between 32-bit and 64-bit operands, so if the upper 32
     bits of the register are not a sign extension of bit 31, the comparison
     below may wrongly fail and affect the result of the operation.  */
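
  /* Illustrative example (hypothetical values, not part of the original
     sources): suppose the expected SImode value 0x80000000 is passed in a
     64-bit register as 0x0000000080000000, while ll.w sign-extends the same
     memory contents to 0xffffffff80000000.  A bare 64-bit bne would then
     report a mismatch even though the low 32 bits agree; the addi.w below
     re-extends the expected value so both operands are compared in the
     canonical sign-extended SImode form.  */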

  if (TARGET_64BIT && REG_P (operands[2])
      && GET_MODE (operands[2]) == SImode)
    {
      output_asm_insn ("addi.w\t%5,%2,0", operands);
      output_asm_insn ("bne\t%0,%5,2f", operands);
    }
  else
    output_asm_insn ("bne\t%0,%z2,2f", operands);

  output_asm_insn ("or%i3\t%5,$zero,%3", operands);
  output_asm_insn ("sc.<amo>\t%5,%1", operands);
  output_asm_insn ("beqz\t%5,1b", operands);
  output_asm_insn ("b\t3f", operands);
  output_asm_insn ("2:", operands);
  output_asm_insn ("%G4", operands);
  output_asm_insn ("3:", operands);

  return "";
}
  [(set (attr "length")
        (if_then_else
          (and (match_test "GET_MODE (operands[2]) == SImode")
               (match_test "REG_P (operands[2])"))

(define_insn "atomic_cas_value_strong<mode>_amcas"
  [(set (match_operand:QHWD 0 "register_operand" "=&r")
        (match_operand:QHWD 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:QHWD [(match_operand:QHWD 2 "reg_or_0_operand" "rJ")
                               (match_operand:QHWD 3 "reg_or_0_operand" "rJ")
                               (match_operand:SI 4 "const_int_operand")] ;; mod_s
         UNSPEC_COMPARE_AND_SWAP))]
  "ori\t%0,%z2,0\n\tamcas%A4.<amo>\t%0,%z3,%1"
  [(set (attr "length") (const_int 8))])

(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "register_operand" "")   ;; bool output
   (match_operand:GPR 1 "register_operand" "")  ;; val output
   (match_operand:GPR 2 "memory_operand" "")    ;; memory
   (match_operand:GPR 3 "reg_or_0_operand" "")  ;; expected value
   (match_operand:GPR 4 "reg_or_0_operand" "")  ;; desired value
   (match_operand:SI 5 "const_int_operand" "")  ;; is_weak
   (match_operand:SI 6 "const_int_operand" "")  ;; mod_s
   (match_operand:SI 7 "const_int_operand" "")] ;; mod_f
  ""
{
  rtx mod_s = operands[6];
  rtx mod_f = operands[7];

  /* Normally the succ memory model must be stronger than fail, but in the
     unlikely event of fail being ACQUIRE and succ being RELEASE we need to
     promote succ to ACQ_REL so that we don't lose the acquire semantics.  */

  if (is_mm_acquire (memmodel_base (INTVAL (mod_f)))
      && is_mm_release (memmodel_base (INTVAL (mod_s))))
    mod_s = GEN_INT (MEMMODEL_ACQ_REL);
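
  /* For illustration (not from the original sources): a call such as
     __atomic_compare_exchange_n (p, &expected, desired, 0,
                                  __ATOMIC_RELEASE, __ATOMIC_ACQUIRE)
     is therefore treated as if its success memory order were
     __ATOMIC_ACQ_REL.  */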

  emit_insn (gen_atomic_cas_value_strong<mode>_amcas (operands[1], operands[2],
                                                      operands[3], operands[4],

  emit_insn (gen_atomic_cas_value_strong<mode> (operands[1], operands[2],
                                                operands[3], operands[4],

  rtx compare = operands[1];
  if (operands[3] != const0_rtx)
    {
      rtx difference = gen_rtx_MINUS (<MODE>mode, operands[1], operands[3]);
      compare = gen_reg_rtx (<MODE>mode);
      emit_insn (gen_rtx_SET (compare, difference));
    }

  if (word_mode != <MODE>mode)
    {
      rtx reg = gen_reg_rtx (word_mode);
      emit_insn (gen_rtx_SET (reg, gen_rtx_SIGN_EXTEND (word_mode, compare)));
      compare = reg;
    }

  emit_insn (gen_rtx_SET (operands[0],
                          gen_rtx_EQ (SImode, compare, const0_rtx)));
  DONE;
})

(define_expand "atomic_test_and_set"
  [(match_operand:QI 0 "register_operand" "")   ;; bool output
   (match_operand:QI 1 "memory_operand" "+ZB")  ;; memory
   (match_operand:SI 2 "const_int_operand" "")] ;; model
  ""
{
  /* We have no QImode atomics, so use the address LSBs to form a mask,
     then use an aligned SImode atomic.  */
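
  /* Worked example (illustrative only, not from the original sources): for a
     byte at address 0x1006 the word-aligned address is 0x1004 and the
     in-word byte offset is 2, so shmt = 2 * 8 = 16 and word = 1 << 16.  The
     SImode fetch_or sets that byte's low bit, and the old byte value is
     recovered by shifting the returned word right by shmt before taking the
     low part as the QImode result.  */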
  rtx result = operands[0];
  rtx mem = operands[1];
  rtx model = operands[2];
  rtx addr = force_reg (Pmode, XEXP (mem, 0));
  rtx tmp_reg = gen_reg_rtx (Pmode);
  rtx zero_reg = gen_rtx_REG (Pmode, 0);

  rtx aligned_addr = gen_reg_rtx (Pmode);
  emit_move_insn (tmp_reg, gen_rtx_PLUS (Pmode, zero_reg, GEN_INT (-4)));
  emit_move_insn (aligned_addr, gen_rtx_AND (Pmode, addr, tmp_reg));

  rtx aligned_mem = change_address (mem, SImode, aligned_addr);
  set_mem_alias_set (aligned_mem, 0);

  rtx offset = gen_reg_rtx (SImode);
  emit_move_insn (offset, gen_rtx_AND (SImode, gen_lowpart (SImode, addr),
                                       GEN_INT (3)));

  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, GEN_INT (1));

  rtx shmt = gen_reg_rtx (SImode);
  emit_move_insn (shmt, gen_rtx_ASHIFT (SImode, offset, GEN_INT (3)));

  rtx word = gen_reg_rtx (SImode);
  emit_move_insn (word, gen_rtx_ASHIFT (SImode, tmp, shmt));

  tmp = gen_reg_rtx (SImode);
  emit_insn (gen_atomic_fetch_orsi (tmp, aligned_mem, word, model));

  emit_move_insn (gen_lowpart (SImode, result),
                  gen_rtx_LSHIFTRT (SImode, tmp, shmt));
  DONE;
})

(define_insn "atomic_cas_value_cmp_and_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")
                              (match_operand:SI 6 "const_int_operand")] ;; model
         UNSPEC_COMPARE_AND_SWAP))
   (clobber (match_scratch:GPR 7 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%2\\n\\t"
  "bne\\t%7,%z4,2f\\n\\t"
  "and\\t%7,%0,%z3\\n\\t"
  "or%i5\\t%7,%7,%5\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  "beq\\t$zero,%7,1b\\n\\t"
  [(set (attr "length") (const_int 36))])
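
;; Illustrative note (not part of the upstream sources): this is the
;; word-sized loop used, in its SImode form, by loongarch_expand_atomic_qihi
;; for subword compare-and-swap (see the SHORT atomic_compare_and_swap
;; expander below).  Following the operand convention of the patterns further
;; down, operand 2 is the mask selecting the byte/halfword within the word,
;; operand 3 its inverse, and operands 4 and 5 the expected and desired field
;; values already shifted into place; bits outside the field are preserved
;; and the loop retries until the sc.<amo> succeeds.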

(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "register_operand" "")     ;; bool output
   (match_operand:SHORT 1 "register_operand" "")  ;; val output
   (match_operand:SHORT 2 "memory_operand" "")    ;; memory
   (match_operand:SHORT 3 "reg_or_0_operand" "")  ;; expected value
   (match_operand:SHORT 4 "reg_or_0_operand" "")  ;; desired value
   (match_operand:SI 5 "const_int_operand" "")    ;; is_weak
   (match_operand:SI 6 "const_int_operand" "")    ;; mod_s
   (match_operand:SI 7 "const_int_operand" "")]   ;; mod_f
  ""
{
  rtx mod_s = operands[6];
  rtx mod_f = operands[7];

  /* Normally the succ memory model must be stronger than fail, but in the
     unlikely event of fail being ACQUIRE and succ being RELEASE we need to
     promote succ to ACQ_REL so that we don't lose the acquire semantics.  */

  if (is_mm_acquire (memmodel_base (INTVAL (mod_f)))
      && is_mm_release (memmodel_base (INTVAL (mod_s))))
    mod_s = GEN_INT (MEMMODEL_ACQ_REL);

  emit_insn (gen_atomic_cas_value_strong<mode>_amcas (operands[1], operands[2],
                                                      operands[3], operands[4],

  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_cmp_and_7_si;
  loongarch_expand_atomic_qihi (generator, operands[1], operands[2],
                                operands[3], operands[4], operands[6]);

  rtx compare = operands[1];
  if (operands[3] != const0_rtx)
    {
      machine_mode mode = GET_MODE (operands[3]);
      rtx op1 = convert_modes (SImode, mode, operands[1], true);
      rtx op3 = convert_modes (SImode, mode, operands[3], true);
      rtx difference = gen_rtx_MINUS (SImode, op1, op3);
      compare = gen_reg_rtx (SImode);
      emit_insn (gen_rtx_SET (compare, difference));
    }

  if (word_mode != <MODE>mode)
    {
      rtx reg = gen_reg_rtx (word_mode);
      emit_insn (gen_rtx_SET (reg, gen_rtx_SIGN_EXTEND (word_mode, compare)));
      compare = reg;
    }

  emit_insn (gen_rtx_SET (operands[0],
                          gen_rtx_EQ (SImode, compare, const0_rtx)));
  DONE;
})

(define_insn "atomic_cas_value_add_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")                  ;; res
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")  ;; mask
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")  ;; inverted_mask
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")  ;; old val
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")  ;; new val
                              (match_operand:SI 6 "const_int_operand")]      ;; model
         UNSPEC_COMPARE_AND_SWAP_ADD))
   (clobber (match_scratch:GPR 7 "=&r"))
   (clobber (match_scratch:GPR 8 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%3\\n\\t"
  "add.w\\t%8,%0,%z5\\n\\t"
  "and\\t%8,%8,%z2\\n\\t"
  "or%i8\\t%7,%7,%8\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  [(set (attr "length") (const_int 28))])
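
;; Illustrative note (not part of the upstream sources): this loop backs the
;; SHORT atomic_fetch_add expander further down (via
;; loongarch_expand_atomic_qihi).  For a halfword occupying bits 16..31 of
;; its word, the mask would be 0xffff0000 and the inverted mask 0x0000ffff:
;; the add.w is performed on the whole word, the sum is masked back to the
;; halfword's bits, the remaining bits are taken from the old word, and the
;; merged word is stored back with sc.<amo>, retrying until it succeeds.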

(define_insn "atomic_cas_value_sub_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")                  ;; res
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")  ;; mask
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")  ;; inverted_mask
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")  ;; old val
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")  ;; new val
                              (match_operand:SI 6 "const_int_operand")]      ;; model
         UNSPEC_COMPARE_AND_SWAP_SUB))
   (clobber (match_scratch:GPR 7 "=&r"))
   (clobber (match_scratch:GPR 8 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%3\\n\\t"
  "sub.w\\t%8,%0,%z5\\n\\t"
  "and\\t%8,%8,%z2\\n\\t"
  "or%i8\\t%7,%7,%8\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  [(set (attr "length") (const_int 28))])

(define_insn "atomic_cas_value_and_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")                  ;; res
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")  ;; mask
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")  ;; inverted_mask
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")  ;; old val
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")  ;; new val
                              (match_operand:SI 6 "const_int_operand")]      ;; model
         UNSPEC_COMPARE_AND_SWAP_AND))
   (clobber (match_scratch:GPR 7 "=&r"))
   (clobber (match_scratch:GPR 8 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%3\\n\\t"
  "and\\t%8,%0,%z5\\n\\t"
  "and\\t%8,%8,%z2\\n\\t"
  "or%i8\\t%7,%7,%8\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  [(set (attr "length") (const_int 28))])

(define_insn "atomic_cas_value_xor_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")                  ;; res
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")  ;; mask
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")  ;; inverted_mask
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")  ;; old val
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")  ;; new val
                              (match_operand:SI 6 "const_int_operand")]      ;; model
         UNSPEC_COMPARE_AND_SWAP_XOR))
   (clobber (match_scratch:GPR 7 "=&r"))
   (clobber (match_scratch:GPR 8 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%3\\n\\t"
  "xor\\t%8,%0,%z5\\n\\t"
  "and\\t%8,%8,%z2\\n\\t"
  "or%i8\\t%7,%7,%8\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  [(set (attr "length") (const_int 28))])

(define_insn "atomic_cas_value_or_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")                  ;; res
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")  ;; mask
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")  ;; inverted_mask
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")  ;; old val
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")  ;; new val
                              (match_operand:SI 6 "const_int_operand")]      ;; model
         UNSPEC_COMPARE_AND_SWAP_OR))
   (clobber (match_scratch:GPR 7 "=&r"))
   (clobber (match_scratch:GPR 8 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%3\\n\\t"
  "or\\t%8,%0,%z5\\n\\t"
  "and\\t%8,%8,%z2\\n\\t"
  "or%i8\\t%7,%7,%8\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  [(set (attr "length") (const_int 28))])

(define_insn "atomic_cas_value_nand_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")                  ;; res
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")  ;; mask
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")  ;; inverted_mask
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")  ;; old val
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")  ;; new val
                              (match_operand:SI 6 "const_int_operand")]      ;; model
         UNSPEC_COMPARE_AND_SWAP_NAND))
   (clobber (match_scratch:GPR 7 "=&r"))
   (clobber (match_scratch:GPR 8 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%3\\n\\t"
  "and\\t%8,%0,%z5\\n\\t"
  "xor\\t%8,%8,%z2\\n\\t"
  "or%i8\\t%7,%7,%8\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  [(set (attr "length") (const_int 28))])

(define_insn "atomic_cas_value_exchange_7_<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (match_operand:GPR 1 "memory_operand" "+ZC"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 4 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 5 "reg_or_0_operand" "rJ")
                              (match_operand:SI 6 "const_int_operand")] ;; model
         UNSPEC_SYNC_EXCHANGE))
   (clobber (match_scratch:GPR 7 "=&r"))]
  "ll.<amo>\\t%0,%1\\n\\t"
  "and\\t%7,%0,%z3\\n\\t"
  "or%i5\\t%7,%7,%5\\n\\t"
  "sc.<amo>\\t%7,%1\\n\\t"
  "beqz\\t%7,1b\\n\\t";
  [(set (attr "length") (const_int 20))])

(define_expand "atomic_exchange<mode>"
  [(set (match_operand:SHORT 0 "register_operand")
        (unspec_volatile:SHORT
          [(match_operand:SHORT 1 "memory_operand")
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_EXCHANGE))
   (set (match_dup 1)
        (match_operand:SHORT 2 "register_operand"))]

    emit_insn (gen_atomic_exchange<mode>_short (operands[0], operands[1],
                                                operands[2], operands[3]));

  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_exchange_7_si;
  loongarch_expand_atomic_qihi (generator, operands[0], operands[1],
                                const0_rtx, operands[2], operands[3]);

(define_insn "atomic_fetch_add<mode>_short"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(plus:SHORT (match_dup 1)
             (match_operand:SHORT 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  "amadd%A3.<amo>\t%0,%z2,%1"
  [(set (attr "length") (const_int 4))])

(define_expand "atomic_fetch_add<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(plus:SHORT (match_dup 1)
             (match_operand:SHORT 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]

    emit_insn (gen_atomic_fetch_add<mode>_short (operands[0], operands[1],
                                                 operands[2], operands[3]));

  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_add_7_si;
  loongarch_expand_atomic_qihi (generator, operands[0], operands[1],
                                operands[1], operands[2], operands[3]);

(define_expand "atomic_fetch_sub<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(minus:SHORT (match_dup 1)
             (match_operand:SHORT 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  ""
{
  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_sub_7_si;
  loongarch_expand_atomic_qihi (generator, operands[0], operands[1],
                                operands[1], operands[2], operands[3]);
  DONE;
})

(define_expand "atomic_fetch_and<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(and:SHORT (match_dup 1)
             (match_operand:SHORT 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  ""
{
  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_and_7_si;
  loongarch_expand_atomic_qihi (generator, operands[0], operands[1],
                                operands[1], operands[2], operands[3]);
  DONE;
})

(define_expand "atomic_fetch_xor<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(xor:SHORT (match_dup 1)
             (match_operand:SHORT 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  ""
{
  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_xor_7_si;
  loongarch_expand_atomic_qihi (generator, operands[0], operands[1],
                                operands[1], operands[2], operands[3]);
  DONE;
})

(define_expand "atomic_fetch_or<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(ior:SHORT (match_dup 1)
             (match_operand:SHORT 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  ""
{
  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_or_7_si;
  loongarch_expand_atomic_qihi (generator, operands[0], operands[1],
                                operands[1], operands[2], operands[3]);
  DONE;
})

(define_expand "atomic_fetch_nand<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+ZB"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(not:SHORT (and:SHORT (match_dup 1)
                        (match_operand:SHORT 2 "reg_or_0_operand" "rJ")))
           (match_operand:SI 3 "const_int_operand")] ;; model
          UNSPEC_SYNC_OLD_OP))]
  ""
{
  union loongarch_gen_fn_ptrs generator;
  generator.fn_7 = gen_atomic_cas_value_nand_7_si;
  loongarch_expand_atomic_qihi (generator, operands[0], operands[1],
                                operands[1], operands[2], operands[3]);
  DONE;
})