[official-gcc.git] / libatomic / gexch.c
/* Copyright (C) 2012-2024 Free Software Foundation, Inc.
   Contributed by Richard Henderson <rth@redhat.com>.

   This file is part of the GNU Atomic Library (libatomic).

   Libatomic is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   Libatomic is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
   more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */
#define LAT_GEXCH
#include "libatomic_i.h"
/* If we natively support the exchange, and if we're unconcerned with extra
   barriers (e.g. fully in-order cpu for which barriers are a nop), then
   go ahead and expand the operation inline.  */
#if !defined(WANT_SPECIALCASE_RELAXED) && !defined(__OPTIMIZE_SIZE__)
# define EXACT_INLINE(N)                                        \
  if (C2(HAVE_ATOMIC_EXCHANGE_,N))                              \
    {                                                           \
      *PTR(N,rptr) = __atomic_exchange_n                        \
        (PTR(N,mptr), *PTR(N,vptr), __ATOMIC_SEQ_CST);          \
      return;                                                   \
    }
#else
# define EXACT_INLINE(N)
#endif
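
/* For illustration only (not the literal preprocessor output): with N == 4
   on a target whose HAVE_ATOMIC_EXCHANGE_4 is nonzero, EXACT_INLINE(4)
   behaves roughly like

       *(uint32_t *) rptr
           = __atomic_exchange_n ((uint32_t *) mptr, *(uint32_t *) vptr,
                                  __ATOMIC_SEQ_CST);
       return;

   i.e. the whole library call collapses to a single native 4-byte exchange.
   The uint32_t casts here stand in for whatever PTR(4,...) expands to in
   libatomic_i.h.  */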
/* Exchange an object that is exactly N bytes and N-byte aligned: try the
   inline expansion above, otherwise fall back to the library's out-of-line
   size-N exchange helper.  */
#define EXACT(N)                                                \
  do {                                                          \
    if (!C2(HAVE_INT,N)) break;                                 \
    if ((uintptr_t)mptr & (N - 1)) break;                       \
    EXACT_INLINE (N);                                           \
    *PTR(N,rptr) = C3(local_,exchange_,N)                       \
      (PTR(N,mptr), *PTR(N,vptr), smodel);                      \
    return;                                                     \
  } while (0)
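
/* Worked example (illustrative address): for n == 4 with mptr == 0x1002,
   (uintptr_t)mptr & (4 - 1) == 2, so the object is not 4-byte aligned and
   EXACT(4) breaks out; the switch in libat_exchange below then jumps to L8
   and tries LARGER(8).  With mptr == 0x1000 the object is aligned and
   EXACT(4) completes the exchange.  */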
/* Exchange an object of n < N bytes (possibly misaligned) that lies entirely
   within one naturally aligned N-byte word: compare-and-swap the containing
   word, replacing only the n bytes that belong to the object.  */
#define LARGER(N)                                               \
  do {                                                          \
    if (!C2(HAVE_INT,N)) break;                                 \
    if (!C2(MAYBE_HAVE_ATOMIC_CAS_,N)) break;                   \
    r = (uintptr_t)mptr & (N - 1);                              \
    a = (uintptr_t)mptr & -N;                                   \
    if (r + n <= N)                                             \
      {                                                         \
        pre_barrier (smodel);                                   \
        u.C2(i,N) = *PTR(N,a);                                  \
        do {                                                    \
          v = u;                                                \
          memcpy (v.b + r, vptr, n);                            \
        } while (!(C2(HAVE_ATOMIC_CAS_,N)                       \
                   ? __atomic_compare_exchange_n (PTR(N,a),     \
                       &u.C2(i,N), v.C2(i,N), true,             \
                       __ATOMIC_RELAXED, __ATOMIC_RELAXED)      \
                   : C3(local_,compare_exchange_,N) (PTR(N,a),  \
                       &u.C2(i,N), v.C2(i,N),                   \
                       __ATOMIC_RELAXED, __ATOMIC_RELAXED)));   \
        goto Lfinish;                                           \
      }                                                         \
  } while (0)
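
/* Worked example (illustrative addresses): for n == 3 at mptr == 0x1001,
   LARGER(4) computes r == 1 and a == 0x1000; since r + n == 4 <= 4, the CAS
   loop rewrites bytes 1..3 of the word at 0x1000 until it succeeds, then
   jumps to Lfinish.  For n == 3 at mptr == 0x1003, r + n == 6 > 4, so
   LARGER(4) is skipped and the case falls through to LARGER(8).  */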
/* Swap the n bytes at MPTR and VPTR in BUF-sized chunks; used when the
   caller's return buffer aliases the value buffer (vptr == rptr).  */
static void __attribute__((noinline))
libat_exchange_large_inplace (size_t n, void *mptr, void *vptr)
{
#define BUF 1024

  char temp[BUF];
  size_t i = 0;

  for (i = 0; n >= BUF; i += BUF, n -= BUF)
    {
      memcpy (temp, mptr + i, BUF);
      memcpy (mptr + i, vptr + i, BUF);
      memcpy (vptr + i, temp, BUF);
    }
  if (n > 0)
    {
      memcpy (temp, mptr + i, n);
      memcpy (mptr + i, vptr + i, n);
      memcpy (vptr + i, temp, n);
    }

#undef BUF
}
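
/* Illustration (hypothetical caller): if the same buffer both supplies the
   new value and receives the old one, e.g.

       void *obj = ...;            // 100-byte shared object
       char buf[100];              // new value in, old value out
       __atomic_exchange (100, obj, buf, buf, __ATOMIC_SEQ_CST);

   then the plain "copy old out, copy new in" path in libat_exchange below
   would overwrite the new value before storing it, so this chunked in-place
   swap is used instead.  */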
/* Size-generic atomic exchange: atomically replace the n bytes at MPTR with
   the contents of VPTR, copying the previous contents to RPTR, under the
   memory model SMODEL.  */
void
libat_exchange (size_t n, void *mptr, void *vptr, void *rptr, int smodel)
{
  union max_size_u u, v;
  uintptr_t r, a;

  switch (n)
    {
    case 0:                             return;
    case 1:             EXACT(1);       goto L4;
    case 2:             EXACT(2);       goto L4;
    case 4:             EXACT(4);       goto L8;
    case 8:             EXACT(8);       goto L16;
    case 16:            EXACT(16);      break;

    case 3: L4:         LARGER(4);      /* FALLTHRU */
    case 5 ... 7: L8:   LARGER(8);      /* FALLTHRU */
    case 9 ... 15: L16: LARGER(16);     break;

    Lfinish:
      post_barrier (smodel);
      memcpy (rptr, u.b + r, n);
      return;
    }
  pre_seq_barrier (smodel);
  libat_lock_n (mptr, n);

  if (vptr != rptr)
    {
      memcpy (rptr, mptr, n);
      memcpy (mptr, vptr, n);
    }
  else
    libat_exchange_large_inplace (n, mptr, vptr);

  libat_unlock_n (mptr, n);
  post_seq_barrier (smodel);
}
EXPORT_ALIAS (exchange);

#undef LAT_GEXCH
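
/* Usage sketch (illustrative, not part of this file): for an object size
   with no native exchange, GCC's generic built-in is typically lowered to
   the __atomic_exchange library entry point exported above.  The struct
   below is invented for the example.

       struct token { char bytes[3]; };   // 3 bytes: no native exchange size

       struct token obj, val, old;
       __atomic_exchange (&obj, &val, &old, __ATOMIC_SEQ_CST);
       // roughly: __atomic_exchange (3, &obj, &val, &old, __ATOMIC_SEQ_CST)

   With n == 3, libat_exchange lands on "case 3" above and tries LARGER(4),
   then LARGER(8)/LARGER(16) on fallthrough, before falling back to the
   locked memcpy path at the end of the function.  */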