//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_SUPPORT_GCC_H
#define _LIBCPP___ATOMIC_SUPPORT_GCC_H

#include <__atomic/memory_order.h>
#include <__atomic/to_gcc_order.h>
#include <__config>
#include <__memory/addressof.h>
#include <__type_traits/enable_if.h>
#include <__type_traits/is_assignable.h>
#include <__type_traits/remove_const.h>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

//
// This file implements support for GCC-style atomics
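//
// Each __cxx_atomic_* operation below forwards to the corresponding GCC/Clang
// __atomic_* builtin, translating std::memory_order into the builtin's
// __ATOMIC_* ordering via __to_gcc_order and __to_gcc_failure_order.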
//

_LIBCPP_BEGIN_NAMESPACE_STD

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
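//
// Illustrative sketch only (not part of the implementation): std::memcpy
// cannot be used for the volatile overload below because its parameters are
// not volatile-qualified, so the copy is done one volatile char at a time.
// For some trivially copyable type T:
//
//   volatile T __dst;   // e.g. the __a_value member of a volatile atomic
//   T __src = T();
//   std::__cxx_atomic_assign_volatile(__dst, __src); // byte-wise volatile copy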
template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to         = reinterpret_cast<volatile char*>(std::addressof(__a_value));
  volatile char* __end        = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(std::addressof(__val));
  while (__to != __end)
    *__to++ = *__from++;
}

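// Plain storage for the atomic value; the __atomic_* builtins below operate
// directly on the address of the __a_value member.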
template <typename _Tp>
struct __cxx_atomic_base_impl {
  _LIBCPP_HIDE_FROM_ABI
#ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {
  }
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT : __a_value(value) {}
  _Tp __a_value;
};

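// Initialization is not an atomic operation ([atomics.types.operations]); it
// simply stores the initial value into the underlying object.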
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

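// __to_gcc_order maps std::memory_order to the corresponding __ATOMIC_*
// constant expected by the GCC/Clang builtins.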
_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}

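// The generic __atomic_load/__atomic_store/__atomic_exchange builtins are
// used below; unlike the _n variants, they accept arbitrary trivially
// copyable types, not just integer and pointer types.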
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
  __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
  __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(const volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __dst, memory_order __order) {
  __atomic_load(std::addressof(__a->__a_value), __dst, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(const __cxx_atomic_base_impl<_Tp>* __a, _Tp* __dst, memory_order __order) {
  __atomic_load(std::addressof(__a->__a_value), __dst, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(
      std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(
      std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
  return __ret;
}

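// For __atomic_compare_exchange, the fourth argument selects weak (true) or
// strong (false) semantics. The failure ordering is mapped through
// __to_gcc_failure_order, which drops any release component, since a failed
// compare-exchange performs no store.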
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a,
    _Tp* __expected,
    _Tp __value,
    memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      false,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      false,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a,
    _Tp* __expected,
    _Tp __value,
    memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      true,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(
      std::addressof(__a->__a_value),
      __expected,
      std::addressof(__value),
      true,
      __to_gcc_order(__success),
      __to_gcc_failure_order(__failure));
}

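// __skip_amt scales the delta passed to __atomic_fetch_add/__atomic_fetch_sub
// so that pointer atomics follow ordinary pointer arithmetic: for _Tp*, a
// delta of 1 advances the stored pointer by sizeof(_Tp) bytes (e.g. adding 2
// to an atomic int* moves it by 2 * sizeof(int) bytes).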
template <typename _Tp>
struct __skip_amt {
  enum { value = 1 };
};

template <typename _Tp>
struct __skip_amt<_Tp*> {
  enum { value = sizeof(_Tp) };
};

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> {};
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> {};

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}

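// The bitwise fetch operations below are only instantiated for integral types
// by the integral specializations of std::atomic, so the operand needs no
// scaling and is passed straight through to the builtins.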
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
__cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}

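// Passing a null pointer as the second argument tells __atomic_is_lock_free
// to assume a typically aligned object of the given size.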
#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_SUPPORT_GCC_H