/* gitweb extraction residue (not part of the original source):
   commit subject "Use tree_vector_builder::new_unary_operation for folding";
   path official-gcc.git/libatomic/load_n.c;
   blob 782d7d0ed0a4cf2588a300d179594f8e8de0fa81.  */
/* Copyright (C) 2012-2017 Free Software Foundation, Inc.
   Contributed by Richard Henderson <rth@redhat.com>.

   This file is part of the GNU Atomic Library (libatomic).

   Libatomic is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   Libatomic is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
   more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */
25 #include "libatomic_i.h"
28 /* If we support the builtin, just use it. */
29 #if !DONE && SIZE(HAVE_ATOMIC_LDST)
30 UTYPE
31 SIZE(libat_load) (UTYPE *mptr, int smodel)
33 if (maybe_specialcase_relaxed(smodel))
34 return __atomic_load_n (mptr, __ATOMIC_RELAXED);
35 else if (maybe_specialcase_acqrel(smodel))
36 /* Note that REL and ACQ_REL are not valid for loads. */
37 return __atomic_load_n (mptr, __ATOMIC_ACQUIRE);
38 else
39 return __atomic_load_n (mptr, __ATOMIC_SEQ_CST);
42 #define DONE 1
43 #endif /* HAVE_ATOMIC_LOAD */
/* If we have compare-and-swap, use it to swap 0 with 0 and as a side
   effect load the original value.

   The trick: start with the expected value T == 0.  If *MPTR is 0 the
   CAS succeeds and stores 0 back (no visible change); otherwise the CAS
   fails and writes the current value of *MPTR into T.  Either way T
   ends up holding the loaded value.  */
#if !DONE && defined(atomic_compare_exchange_n)
UTYPE
SIZE(libat_load) (UTYPE *mptr, int smodel)
{
  UTYPE t = 0;

  if (maybe_specialcase_relaxed(smodel))
    atomic_compare_exchange_n (mptr, &t, 0, true,
			       __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  else if (maybe_specialcase_acqrel(smodel))
    atomic_compare_exchange_n (mptr, &t, 0, true,
			       __ATOMIC_ACQ_REL, __ATOMIC_ACQ_REL);
  else
    atomic_compare_exchange_n (mptr, &t, 0, true,
			       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

  return t;
}

#define DONE 1
#endif /* atomic_compare_exchange_n */
/* Similar, but only assume a word-sized compare-and-swap.

   The N-byte object at MPTR lives inside some aligned machine word; CAS
   the containing word with itself to read it atomically, then shift the
   wanted bytes down to bit 0.  SIZE(INVERT_MASK) accounts for
   endianness when converting the byte offset into a bit shift.  */
#if !DONE && N < WORDSIZE && defined(atomic_compare_exchange_w)
UTYPE
SIZE(libat_load) (UTYPE *mptr, int smodel)
{
  UWORD shift, t, *wptr;

  pre_barrier (smodel);

  /* Round MPTR down to the containing aligned word, and compute the
     bit position of the N-byte value within that word.  */
  wptr = (UWORD *)((uintptr_t)mptr & -WORDSIZE);
  shift = (((uintptr_t)mptr % WORDSIZE) * CHAR_BIT) ^ SIZE(INVERT_MASK);

  /* Exchange 0 with 0, placing the old value of *WPTR in T.  */
  t = 0;
  atomic_compare_exchange_w (wptr, &t, 0);

  post_barrier (smodel);
  return t >> shift;
}

#define DONE 1
#endif /* HAVE_ATOMIC_CAS && N < WORDSIZE */
95 /* Otherwise, fall back to some sort of protection mechanism. */
96 #if !DONE
97 UTYPE
98 SIZE(libat_load) (UTYPE *mptr, int smodel)
100 UTYPE ret;
101 UWORD magic;
103 pre_seq_barrier (smodel);
104 magic = protect_start (mptr);
106 ret = *mptr;
108 protect_end (mptr, magic);
109 post_seq_barrier (smodel);
111 return ret;
113 #endif
115 EXPORT_ALIAS (SIZE(load));