// Copyright (C) 2008-2022 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /**
   * @addtogroup atomics
   * @{
   */

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603L
#endif

  template<typename _Tp>
    struct atomic;
  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(bool __i) noexcept : _M_base(__i) { }
      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif
      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }
      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
#if __cpp_lib_atomic_wait
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_base.notify_one(); }

      void
      notify_all() noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
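
  // Illustrative usage sketch (not part of this header): atomic<bool> as a
  // release/acquire ready flag between two threads.  The names `ready`,
  // `data` and `use` below are hypothetical.
  //
  //   std::atomic<bool> ready(false);
  //   int data = 0;
  //
  //   // producer thread
  //   data = 42;
  //   ready.store(true, std::memory_order_release);
  //
  //   // consumer thread
  //   while (!ready.load(std::memory_order_acquire))
  //     { }          // the acquire load synchronizes with the release store,
  //   use(data);     // so the write to `data` is visible here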
  /// @cond undocumented
#if __cpp_lib_atomic_value_initialization
# define _GLIBCXX20_INIT(I) = I
#else
# define _GLIBCXX20_INIT(I)
#endif
  /// @endcond
  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
      }
      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
#if __cpp_lib_atomic_wait
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_i, __old,
                                     [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
    };
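
  // Illustrative usage sketch (not part of this header): the primary template
  // accepts any trivially copyable type; read-modify-write updates are done
  // with a compare-exchange loop.  `Point` and `shift_x` are hypothetical.
  //
  //   struct Point { int x, y; };              // trivially copyable
  //   std::atomic<Point> pt{Point{0, 0}};
  //
  //   void shift_x(std::atomic<Point>& ap, int dx)
  //   {
  //     Point old = ap.load();
  //     Point next;
  //     do {
  //       next = old;
  //       next.x += dx;
  //     } while (!ap.compare_exchange_weak(old, next));  // `old` is refreshed
  //   }                                                  // on each failure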
#undef _GLIBCXX20_INIT
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp*                __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type                 _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }
      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }
      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }
#if __cpp_lib_atomic_wait
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
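
  // Illustrative usage sketch (not part of this header): the pointer
  // specialization does pointer arithmetic, so fetch_add advances by whole
  // objects.  `buf`, `cursor` and `claim_slot` are hypothetical names.
  //
  //   int buf[64];
  //   std::atomic<int*> cursor(buf);
  //
  //   int* claim_slot()     // concurrent callers receive distinct slots
  //   { return cursor.fetch_add(1, std::memory_order_relaxed); }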
  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char                __integral_type;
      typedef __atomic_base<char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char                __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char                __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short                __integral_type;
      typedef __atomic_base<short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short                __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int                __integral_type;
      typedef __atomic_base<int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int                __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long                __integral_type;
      typedef __atomic_base<long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long                __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long                __integral_type;
      typedef __atomic_base<long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long                __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t                __integral_type;
      typedef __atomic_base<wchar_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t                __integral_type;
      typedef __atomic_base<char8_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif
  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t                __integral_type;
      typedef __atomic_base<char16_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };
  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t                __integral_type;
      typedef __atomic_base<char32_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
  /// atomic_bool
  typedef atomic<bool> atomic_bool;
  /// atomic_char
  typedef atomic<char> atomic_char;
  /// atomic_schar
  typedef atomic<signed char> atomic_schar;
  /// atomic_uchar
  typedef atomic<unsigned char> atomic_uchar;
  /// atomic_short
  typedef atomic<short> atomic_short;
  /// atomic_ushort
  typedef atomic<unsigned short> atomic_ushort;
  /// atomic_int
  typedef atomic<int> atomic_int;
  /// atomic_uint
  typedef atomic<unsigned int> atomic_uint;
  /// atomic_long
  typedef atomic<long> atomic_long;
  /// atomic_ulong
  typedef atomic<unsigned long> atomic_ulong;
  /// atomic_llong
  typedef atomic<long long> atomic_llong;
  /// atomic_ullong
  typedef atomic<unsigned long long> atomic_ullong;
  /// atomic_wchar_t
  typedef atomic<wchar_t> atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t> atomic_char8_t;
#endif

  /// atomic_char16_t
  typedef atomic<char16_t> atomic_char16_t;
  /// atomic_char32_t
  typedef atomic<char32_t> atomic_char32_t;
#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  /// atomic_int8_t
  typedef atomic<int8_t> atomic_int8_t;
  /// atomic_uint8_t
  typedef atomic<uint8_t> atomic_uint8_t;
  /// atomic_int16_t
  typedef atomic<int16_t> atomic_int16_t;
  /// atomic_uint16_t
  typedef atomic<uint16_t> atomic_uint16_t;
  /// atomic_int32_t
  typedef atomic<int32_t> atomic_int32_t;
  /// atomic_uint32_t
  typedef atomic<uint32_t> atomic_uint32_t;
  /// atomic_int64_t
  typedef atomic<int64_t> atomic_int64_t;
  /// atomic_uint64_t
  typedef atomic<uint64_t> atomic_uint64_t;
  /// atomic_int_least8_t
  typedef atomic<int_least8_t> atomic_int_least8_t;
  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t> atomic_uint_least8_t;
  /// atomic_int_least16_t
  typedef atomic<int_least16_t> atomic_int_least16_t;
  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t> atomic_uint_least16_t;
  /// atomic_int_least32_t
  typedef atomic<int_least32_t> atomic_int_least32_t;
  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t> atomic_uint_least32_t;
  /// atomic_int_least64_t
  typedef atomic<int_least64_t> atomic_int_least64_t;
  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t> atomic_uint_least64_t;

  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t> atomic_int_fast8_t;
  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t> atomic_int_fast16_t;
  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t> atomic_int_fast32_t;
  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t> atomic_int_fast64_t;
  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
#endif

  /// atomic_intptr_t
  typedef atomic<intptr_t> atomic_intptr_t;
  /// atomic_uintptr_t
  typedef atomic<uintptr_t> atomic_uintptr_t;
  /// atomic_size_t
  typedef atomic<size_t> atomic_size_t;
  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t> atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  /// atomic_intmax_t
  typedef atomic<intmax_t> atomic_intmax_t;
  /// atomic_uintmax_t
  typedef atomic<uintmax_t> atomic_uintmax_t;
#endif
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

#if __cpp_lib_atomic_flag_test
  inline bool
  atomic_flag_test(const atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test(const volatile atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test_explicit(const atomic_flag* __a,
                            memory_order __m) noexcept
  { return __a->test(__m); }

  inline bool
  atomic_flag_test_explicit(const volatile atomic_flag* __a,
                            memory_order __m) noexcept
  { return __a->test(__m); }
#endif

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

#if __cpp_lib_atomic_wait
  inline void
  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
  { __a->wait(__old); }

  inline void
  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
                            memory_order __m) noexcept
  { __a->wait(__old, __m); }

  inline void
  atomic_flag_notify_one(atomic_flag* __a) noexcept
  { __a->notify_one(); }

  inline void
  atomic_flag_notify_all(atomic_flag* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
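
  // Illustrative usage sketch (not part of this header): the atomic_flag
  // operations above are sufficient for a minimal spinlock.  `lock_flag`,
  // `lock` and `unlock` are hypothetical names.
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&lock_flag,
  //                                                   std::memory_order_acquire))
  //       { }    // spin until the previous value was clear
  //   }
  //
  //   void unlock()
  //   { std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }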
  /// @cond undocumented
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  /// @endcond
  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }
  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
#if __cpp_lib_atomic_wait
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
                typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
                         typename std::atomic<_Tp>::value_type __old,
                         std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
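
  // Illustrative usage sketch (not part of this header): atomic_wait blocks
  // while the atomic object still holds the given value, and the notify
  // functions wake blocked waiters.  `state` is a hypothetical name.
  //
  //   std::atomic<int> state(0);
  //
  //   // waiting thread: blocks while state == 0
  //   std::atomic_wait(&state, 0);
  //
  //   // signalling thread: change the value, then wake a waiter
  //   state.store(1);
  //   std::atomic_notify_one(&state);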
  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
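
  // Illustrative usage sketch (not part of this header): the non-member fetch
  // functions mirror the member operations and return the previous value; the
  // and/or/xor forms accept any atomic integral via its __atomic_base
  // subobject.  `counter` is a hypothetical name.
  //
  //   std::atomic<unsigned> counter(0);
  //   unsigned prev = std::atomic_fetch_add(&counter, 1u);   // prev == 0
  //   std::atomic_fetch_or(&counter, 0x80u);                 // set a flag bit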
#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };
#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };

#endif // C++2a
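
  // Illustrative usage sketch (not part of this header): atomic_ref applies
  // atomic operations to an ordinary object for as long as every concurrent
  // access goes through an atomic_ref.  `hits` and `count_hit` are
  // hypothetical names.
  //
  //   int hits = 0;                      // plain, non-atomic object
  //
  //   void count_hit()
  //   {
  //     std::atomic_ref<int> r(hits);
  //     r.fetch_add(1, std::memory_order_relaxed);
  //   }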
  /// @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC