[gecko.git] / mfbt / WasiAtomic.h
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef mozilla_WasiAtomic_h
#define mozilla_WasiAtomic_h

// Clang >= 14 supports <atomic> for wasm targets.
#if _LIBCPP_VERSION >= 14000
#  include <atomic>
#else

#  include <cstddef>  // For ptrdiff_t
#  include <cstdint>

// WASI doesn't support <atomic> and we use it as single-threaded for now.
// This is a stub implementation of std atomics to build the WASI port of SM.
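//
// For illustration, assuming a single-threaded WASI build, these stubs behave
// like plain variables:
//
//   std::atomic<uint32_t> counter{0};
//   counter.fetch_add(1);         // returns 0; counter now holds 1
//   counter.store(5);
//   uint32_t v = counter.load();  // v == 5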

namespace std {
enum memory_order {
  relaxed,
  consume,  // load-consume
  acquire,  // load-acquire
  release,  // store-release
  acq_rel,  // store-release load-acquire
  seq_cst   // store-release load-acquire
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

template <class T>
struct atomic {
  using value_type = T;
  value_type value_;

  atomic() noexcept = default;
  constexpr atomic(T desired) noexcept : value_{desired} {}

  atomic(const atomic&) = delete;
  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
  ~atomic() noexcept = default;
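
  // Loads, stores, and exchanges are plain reads and writes of value_; the
  // memory_order arguments are accepted for interface compatibility only.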
  T load(memory_order m = memory_order_seq_cst) const volatile noexcept {
    return value_;
  }

  void store(T desired,
             memory_order m = memory_order_seq_cst) volatile noexcept {
    value_ = desired;
  }

  T operator=(T desired) volatile noexcept { return value_ = desired; }

  T exchange(T desired,
             memory_order m = memory_order_seq_cst) volatile noexcept {
    T tmp = value_;
    value_ = desired;
    return tmp;
  }
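
  // The compare-exchange stubs unconditionally report success: they copy
  // `desired` into `expected` and leave the stored value untouched.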
  bool compare_exchange_weak(T& expected, T desired, memory_order,
                             memory_order) volatile noexcept {
    expected = desired;
    return true;
  }

  bool compare_exchange_weak(
      T& expected, T desired,
      memory_order m = memory_order_seq_cst) volatile noexcept {
    expected = desired;
    return true;
  }

  bool compare_exchange_strong(T& expected, T desired, memory_order,
                               memory_order) volatile noexcept {
    expected = desired;
    return true;
  }

  bool compare_exchange_strong(
      T& expected, T desired,
      memory_order m = memory_order_seq_cst) volatile noexcept {
    expected = desired;
    return true;
  }
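
  // Read-modify-write operations return the previous value and update value_
  // with a plain, non-atomic assignment.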
  T fetch_add(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ + arg;
    return previous;
  }

  T fetch_sub(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ - arg;
    return previous;
  }

  T fetch_or(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ | arg;
    return previous;
  }

  T fetch_xor(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ ^ arg;
    return previous;
  }

  T fetch_and(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ & arg;
    return previous;
  }
};
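
// Partial specialization for pointer types: the same stub behaviour, with
// fetch_add/fetch_sub doing ordinary pointer arithmetic in units of T.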
template <class T>
struct atomic<T*> {
  using value_type = T*;
  using difference_type = ptrdiff_t;

  value_type value_;

  atomic() noexcept = default;
  constexpr atomic(T* desired) noexcept : value_{desired} {}
  atomic(const atomic&) = delete;
  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;

  T* load(memory_order m = memory_order_seq_cst) const volatile noexcept {
    return value_;
  }

  void store(T* desired,
             memory_order m = memory_order_seq_cst) volatile noexcept {
    value_ = desired;
  }

  T* operator=(T* other) volatile noexcept { return value_ = other; }

  T* exchange(T* desired,
              memory_order m = memory_order_seq_cst) volatile noexcept {
    T* previous = value_;
    value_ = desired;
    return previous;
  }

  bool compare_exchange_weak(T*& expected, T* desired, memory_order s,
                             memory_order f) volatile noexcept {
    expected = desired;
    return true;
  }

  bool compare_exchange_weak(
      T*& expected, T* desired,
      memory_order m = memory_order_seq_cst) volatile noexcept {
    expected = desired;
    return true;
  }

  bool compare_exchange_strong(T*& expected, T* desired, memory_order s,
                               memory_order f) volatile noexcept {
    expected = desired;
    return true;
  }

  T* fetch_add(ptrdiff_t arg,
               memory_order m = memory_order_seq_cst) volatile noexcept {
    T* previous = value_;
    value_ = value_ + arg;
    return previous;
  }

  T* fetch_sub(ptrdiff_t arg,
               memory_order m = memory_order_seq_cst) volatile noexcept {
    T* previous = value_;
    value_ = value_ - arg;
    return previous;
  }
};
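
// Aliases for the fixed-width unsigned integer atomics, mirroring <atomic>.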
using atomic_uint8_t = atomic<uint8_t>;
using atomic_uint16_t = atomic<uint16_t>;
using atomic_uint32_t = atomic<uint32_t>;
using atomic_uint64_t = atomic<uint64_t>;

}  // namespace std

#endif

#endif  // mozilla_WasiAtomic_h