3 // Copyright (C) 2008-2025 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
/** @file include/atomic
 *  This is a Standard C++ Library header.
 */
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #ifdef _GLIBCXX_SYSHDR
36 #pragma GCC system_header
39 #if __cplusplus < 201103L
40 # include <bits/c++0x_warning.h>
43 #define __glibcxx_want_atomic_is_always_lock_free
44 #define __glibcxx_want_atomic_flag_test
45 #define __glibcxx_want_atomic_float
46 #define __glibcxx_want_atomic_ref
47 #define __glibcxx_want_atomic_lock_free_type_aliases
48 #define __glibcxx_want_atomic_value_initialization
49 #define __glibcxx_want_atomic_wait
50 #include <bits/version.h>
52 #include <bits/atomic_base.h>
54 #include <type_traits>
56 namespace std _GLIBCXX_VISIBILITY(default)
58 _GLIBCXX_BEGIN_NAMESPACE_VERSION
65 template<typename _Tp>
69 // NB: No operators or fetch-operations for this type.
73 using value_type = bool;
76 __atomic_base<bool> _M_base;
79 atomic() noexcept = default;
80 ~atomic() noexcept = default;
81 atomic(const atomic&) = delete;
82 atomic& operator=(const atomic&) = delete;
83 atomic& operator=(const atomic&) volatile = delete;
85 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
88 operator=(bool __i) noexcept
89 { return _M_base.operator=(__i); }
92 operator=(bool __i) volatile noexcept
93 { return _M_base.operator=(__i); }
95 operator bool() const noexcept
96 { return _M_base.load(); }
98 operator bool() const volatile noexcept
99 { return _M_base.load(); }
102 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
105 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
107 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
108 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
112 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
113 { _M_base.store(__i, __m); }
116 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
117 { _M_base.store(__i, __m); }
120 load(memory_order __m = memory_order_seq_cst) const noexcept
121 { return _M_base.load(__m); }
124 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
125 { return _M_base.load(__m); }
128 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
129 { return _M_base.exchange(__i, __m); }
133 memory_order __m = memory_order_seq_cst) volatile noexcept
134 { return _M_base.exchange(__i, __m); }
137 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
138 memory_order __m2) noexcept
139 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
142 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
143 memory_order __m2) volatile noexcept
144 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
147 compare_exchange_weak(bool& __i1, bool __i2,
148 memory_order __m = memory_order_seq_cst) noexcept
149 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
152 compare_exchange_weak(bool& __i1, bool __i2,
153 memory_order __m = memory_order_seq_cst) volatile noexcept
154 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
157 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
158 memory_order __m2) noexcept
159 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
162 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
163 memory_order __m2) volatile noexcept
164 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
167 compare_exchange_strong(bool& __i1, bool __i2,
168 memory_order __m = memory_order_seq_cst) noexcept
169 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
172 compare_exchange_strong(bool& __i1, bool __i2,
173 memory_order __m = memory_order_seq_cst) volatile noexcept
174 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
176 #if __cpp_lib_atomic_wait
178 wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
179 { _M_base.wait(__old, __m); }
181 // TODO add const volatile overload
184 notify_one() noexcept
185 { _M_base.notify_one(); }
188 notify_all() noexcept
189 { _M_base.notify_all(); }
190 #endif // __cpp_lib_atomic_wait
194 * @brief Generic atomic type, primary class template.
196 * @tparam _Tp Type to be made atomic, must be trivially copyable.
198 template<typename _Tp>
201 using value_type = _Tp;
204 // Align 1/2/4/8/16-byte types to at least their size.
205 static constexpr int _S_min_alignment
206 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
209 static constexpr int _S_alignment
210 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
212 alignas(_S_alignment) _Tp _M_i;
214 static_assert(__is_trivially_copyable(_Tp),
215 "std::atomic requires a trivially copyable type");
217 static_assert(sizeof(_Tp) > 0,
218 "Incomplete or zero-sized types are not supported");
220 #if __cplusplus > 201703L
221 static_assert(is_copy_constructible_v<_Tp>);
222 static_assert(is_move_constructible_v<_Tp>);
223 static_assert(is_copy_assignable_v<_Tp>);
224 static_assert(is_move_assignable_v<_Tp>);
228 #if __cpp_lib_atomic_value_initialization
229 // _GLIBCXX_RESOLVE_LIB_DEFECTS
230 // 4169. std::atomic<T>'s default constructor should be constrained
231 constexpr atomic() noexcept(is_nothrow_default_constructible_v<_Tp>)
232 requires is_default_constructible_v<_Tp>
239 ~atomic() noexcept = default;
240 atomic(const atomic&) = delete;
241 atomic& operator=(const atomic&) = delete;
242 atomic& operator=(const atomic&) volatile = delete;
244 constexpr atomic(_Tp __i) noexcept : _M_i(__i)
246 #if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
247 if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
248 __builtin_clear_padding(std::__addressof(_M_i));
252 operator _Tp() const noexcept
255 operator _Tp() const volatile noexcept
259 operator=(_Tp __i) noexcept
260 { store(__i); return __i; }
263 operator=(_Tp __i) volatile noexcept
264 { store(__i); return __i; }
267 is_lock_free() const noexcept
269 // Produce a fake, minimally aligned pointer.
270 return __atomic_is_lock_free(sizeof(_M_i),
271 reinterpret_cast<void *>(-_S_alignment));
275 is_lock_free() const volatile noexcept
277 // Produce a fake, minimally aligned pointer.
278 return __atomic_is_lock_free(sizeof(_M_i),
279 reinterpret_cast<void *>(-_S_alignment));
282 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
283 static constexpr bool is_always_lock_free
284 = __atomic_always_lock_free(sizeof(_M_i), 0);
288 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
290 __atomic_store(std::__addressof(_M_i),
291 __atomic_impl::__clear_padding(__i),
296 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
298 __atomic_store(std::__addressof(_M_i),
299 __atomic_impl::__clear_padding(__i),
304 load(memory_order __m = memory_order_seq_cst) const noexcept
306 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
307 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
308 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
313 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
315 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
316 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
317 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
322 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
324 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
325 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
326 __atomic_exchange(std::__addressof(_M_i),
327 __atomic_impl::__clear_padding(__i),
334 memory_order __m = memory_order_seq_cst) volatile noexcept
336 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
337 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
338 __atomic_exchange(std::__addressof(_M_i),
339 __atomic_impl::__clear_padding(__i),
345 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
346 memory_order __f) noexcept
348 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
353 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
354 memory_order __f) volatile noexcept
356 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
361 compare_exchange_weak(_Tp& __e, _Tp __i,
362 memory_order __m = memory_order_seq_cst) noexcept
363 { return compare_exchange_weak(__e, __i, __m,
364 __cmpexch_failure_order(__m)); }
367 compare_exchange_weak(_Tp& __e, _Tp __i,
368 memory_order __m = memory_order_seq_cst) volatile noexcept
369 { return compare_exchange_weak(__e, __i, __m,
370 __cmpexch_failure_order(__m)); }
373 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
374 memory_order __f) noexcept
376 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
381 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
382 memory_order __f) volatile noexcept
384 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
389 compare_exchange_strong(_Tp& __e, _Tp __i,
390 memory_order __m = memory_order_seq_cst) noexcept
391 { return compare_exchange_strong(__e, __i, __m,
392 __cmpexch_failure_order(__m)); }
395 compare_exchange_strong(_Tp& __e, _Tp __i,
396 memory_order __m = memory_order_seq_cst) volatile noexcept
397 { return compare_exchange_strong(__e, __i, __m,
398 __cmpexch_failure_order(__m)); }
400 #if __cpp_lib_atomic_wait // C++ >= 20
402 wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
404 std::__atomic_wait_address_v(std::addressof(_M_i), __old,
405 [__m, this] { return this->load(__m); });
408 // TODO add const volatile overload
411 notify_one() noexcept
412 { std::__atomic_notify_address(std::addressof(_M_i), false); }
415 notify_all() noexcept
416 { std::__atomic_notify_address(std::addressof(_M_i), true); }
417 #endif // __cpp_lib_atomic_wait
420 /// Partial specialization for pointer types.
421 template<typename _Tp>
424 using value_type = _Tp*;
425 using difference_type = ptrdiff_t;
427 typedef _Tp* __pointer_type;
428 typedef __atomic_base<_Tp*> __base_type;
431 atomic() noexcept = default;
432 ~atomic() noexcept = default;
433 atomic(const atomic&) = delete;
434 atomic& operator=(const atomic&) = delete;
435 atomic& operator=(const atomic&) volatile = delete;
437 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
439 operator __pointer_type() const noexcept
440 { return __pointer_type(_M_b); }
442 operator __pointer_type() const volatile noexcept
443 { return __pointer_type(_M_b); }
446 operator=(__pointer_type __p) noexcept
447 { return _M_b.operator=(__p); }
450 operator=(__pointer_type __p) volatile noexcept
451 { return _M_b.operator=(__p); }
454 operator++(int) noexcept
456 #if __cplusplus >= 201703L
457 static_assert( is_object_v<_Tp>, "pointer to object type" );
463 operator++(int) volatile noexcept
465 #if __cplusplus >= 201703L
466 static_assert( is_object_v<_Tp>, "pointer to object type" );
472 operator--(int) noexcept
474 #if __cplusplus >= 201703L
475 static_assert( is_object_v<_Tp>, "pointer to object type" );
481 operator--(int) volatile noexcept
483 #if __cplusplus >= 201703L
484 static_assert( is_object_v<_Tp>, "pointer to object type" );
490 operator++() noexcept
492 #if __cplusplus >= 201703L
493 static_assert( is_object_v<_Tp>, "pointer to object type" );
499 operator++() volatile noexcept
501 #if __cplusplus >= 201703L
502 static_assert( is_object_v<_Tp>, "pointer to object type" );
508 operator--() noexcept
510 #if __cplusplus >= 201703L
511 static_assert( is_object_v<_Tp>, "pointer to object type" );
517 operator--() volatile noexcept
519 #if __cplusplus >= 201703L
520 static_assert( is_object_v<_Tp>, "pointer to object type" );
526 operator+=(ptrdiff_t __d) noexcept
528 #if __cplusplus >= 201703L
529 static_assert( is_object_v<_Tp>, "pointer to object type" );
531 return _M_b.operator+=(__d);
535 operator+=(ptrdiff_t __d) volatile noexcept
537 #if __cplusplus >= 201703L
538 static_assert( is_object_v<_Tp>, "pointer to object type" );
540 return _M_b.operator+=(__d);
544 operator-=(ptrdiff_t __d) noexcept
546 #if __cplusplus >= 201703L
547 static_assert( is_object_v<_Tp>, "pointer to object type" );
549 return _M_b.operator-=(__d);
553 operator-=(ptrdiff_t __d) volatile noexcept
555 #if __cplusplus >= 201703L
556 static_assert( is_object_v<_Tp>, "pointer to object type" );
558 return _M_b.operator-=(__d);
562 is_lock_free() const noexcept
563 { return _M_b.is_lock_free(); }
566 is_lock_free() const volatile noexcept
567 { return _M_b.is_lock_free(); }
569 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
570 static constexpr bool is_always_lock_free
571 = ATOMIC_POINTER_LOCK_FREE == 2;
575 store(__pointer_type __p,
576 memory_order __m = memory_order_seq_cst) noexcept
577 { return _M_b.store(__p, __m); }
580 store(__pointer_type __p,
581 memory_order __m = memory_order_seq_cst) volatile noexcept
582 { return _M_b.store(__p, __m); }
585 load(memory_order __m = memory_order_seq_cst) const noexcept
586 { return _M_b.load(__m); }
589 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
590 { return _M_b.load(__m); }
593 exchange(__pointer_type __p,
594 memory_order __m = memory_order_seq_cst) noexcept
595 { return _M_b.exchange(__p, __m); }
598 exchange(__pointer_type __p,
599 memory_order __m = memory_order_seq_cst) volatile noexcept
600 { return _M_b.exchange(__p, __m); }
603 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
604 memory_order __m1, memory_order __m2) noexcept
605 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
608 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
610 memory_order __m2) volatile noexcept
611 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
614 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
615 memory_order __m = memory_order_seq_cst) noexcept
617 return compare_exchange_weak(__p1, __p2, __m,
618 __cmpexch_failure_order(__m));
622 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
623 memory_order __m = memory_order_seq_cst) volatile noexcept
625 return compare_exchange_weak(__p1, __p2, __m,
626 __cmpexch_failure_order(__m));
630 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
631 memory_order __m1, memory_order __m2) noexcept
632 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
635 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
637 memory_order __m2) volatile noexcept
638 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
641 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
642 memory_order __m = memory_order_seq_cst) noexcept
644 return _M_b.compare_exchange_strong(__p1, __p2, __m,
645 __cmpexch_failure_order(__m));
649 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
650 memory_order __m = memory_order_seq_cst) volatile noexcept
652 return _M_b.compare_exchange_strong(__p1, __p2, __m,
653 __cmpexch_failure_order(__m));
656 #if __cpp_lib_atomic_wait
658 wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
659 { _M_b.wait(__old, __m); }
661 // TODO add const volatile overload
664 notify_one() noexcept
665 { _M_b.notify_one(); }
668 notify_all() noexcept
669 { _M_b.notify_all(); }
670 #endif // __cpp_lib_atomic_wait
673 fetch_add(ptrdiff_t __d,
674 memory_order __m = memory_order_seq_cst) noexcept
676 #if __cplusplus >= 201703L
677 static_assert( is_object_v<_Tp>, "pointer to object type" );
679 return _M_b.fetch_add(__d, __m);
683 fetch_add(ptrdiff_t __d,
684 memory_order __m = memory_order_seq_cst) volatile noexcept
686 #if __cplusplus >= 201703L
687 static_assert( is_object_v<_Tp>, "pointer to object type" );
689 return _M_b.fetch_add(__d, __m);
693 fetch_sub(ptrdiff_t __d,
694 memory_order __m = memory_order_seq_cst) noexcept
696 #if __cplusplus >= 201703L
697 static_assert( is_object_v<_Tp>, "pointer to object type" );
699 return _M_b.fetch_sub(__d, __m);
703 fetch_sub(ptrdiff_t __d,
704 memory_order __m = memory_order_seq_cst) volatile noexcept
706 #if __cplusplus >= 201703L
707 static_assert( is_object_v<_Tp>, "pointer to object type" );
709 return _M_b.fetch_sub(__d, __m);
714 /// Explicit specialization for char.
716 struct atomic<char> : __atomic_base<char>
718 typedef char __integral_type;
719 typedef __atomic_base<char> __base_type;
721 atomic() noexcept = default;
722 ~atomic() noexcept = default;
723 atomic(const atomic&) = delete;
724 atomic& operator=(const atomic&) = delete;
725 atomic& operator=(const atomic&) volatile = delete;
727 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
729 using __base_type::operator __integral_type;
730 using __base_type::operator=;
732 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
733 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
737 /// Explicit specialization for signed char.
739 struct atomic<signed char> : __atomic_base<signed char>
741 typedef signed char __integral_type;
742 typedef __atomic_base<signed char> __base_type;
744 atomic() noexcept= default;
745 ~atomic() noexcept = default;
746 atomic(const atomic&) = delete;
747 atomic& operator=(const atomic&) = delete;
748 atomic& operator=(const atomic&) volatile = delete;
750 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
752 using __base_type::operator __integral_type;
753 using __base_type::operator=;
755 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
756 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
760 /// Explicit specialization for unsigned char.
762 struct atomic<unsigned char> : __atomic_base<unsigned char>
764 typedef unsigned char __integral_type;
765 typedef __atomic_base<unsigned char> __base_type;
767 atomic() noexcept= default;
768 ~atomic() noexcept = default;
769 atomic(const atomic&) = delete;
770 atomic& operator=(const atomic&) = delete;
771 atomic& operator=(const atomic&) volatile = delete;
773 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
775 using __base_type::operator __integral_type;
776 using __base_type::operator=;
778 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
779 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
783 /// Explicit specialization for short.
785 struct atomic<short> : __atomic_base<short>
787 typedef short __integral_type;
788 typedef __atomic_base<short> __base_type;
790 atomic() noexcept = default;
791 ~atomic() noexcept = default;
792 atomic(const atomic&) = delete;
793 atomic& operator=(const atomic&) = delete;
794 atomic& operator=(const atomic&) volatile = delete;
796 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
798 using __base_type::operator __integral_type;
799 using __base_type::operator=;
801 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
802 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
806 /// Explicit specialization for unsigned short.
808 struct atomic<unsigned short> : __atomic_base<unsigned short>
810 typedef unsigned short __integral_type;
811 typedef __atomic_base<unsigned short> __base_type;
813 atomic() noexcept = default;
814 ~atomic() noexcept = default;
815 atomic(const atomic&) = delete;
816 atomic& operator=(const atomic&) = delete;
817 atomic& operator=(const atomic&) volatile = delete;
819 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
821 using __base_type::operator __integral_type;
822 using __base_type::operator=;
824 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
825 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
829 /// Explicit specialization for int.
831 struct atomic<int> : __atomic_base<int>
833 typedef int __integral_type;
834 typedef __atomic_base<int> __base_type;
836 atomic() noexcept = default;
837 ~atomic() noexcept = default;
838 atomic(const atomic&) = delete;
839 atomic& operator=(const atomic&) = delete;
840 atomic& operator=(const atomic&) volatile = delete;
842 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
844 using __base_type::operator __integral_type;
845 using __base_type::operator=;
847 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
848 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
852 /// Explicit specialization for unsigned int.
854 struct atomic<unsigned int> : __atomic_base<unsigned int>
856 typedef unsigned int __integral_type;
857 typedef __atomic_base<unsigned int> __base_type;
859 atomic() noexcept = default;
860 ~atomic() noexcept = default;
861 atomic(const atomic&) = delete;
862 atomic& operator=(const atomic&) = delete;
863 atomic& operator=(const atomic&) volatile = delete;
865 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
867 using __base_type::operator __integral_type;
868 using __base_type::operator=;
870 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
871 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
875 /// Explicit specialization for long.
877 struct atomic<long> : __atomic_base<long>
879 typedef long __integral_type;
880 typedef __atomic_base<long> __base_type;
882 atomic() noexcept = default;
883 ~atomic() noexcept = default;
884 atomic(const atomic&) = delete;
885 atomic& operator=(const atomic&) = delete;
886 atomic& operator=(const atomic&) volatile = delete;
888 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
890 using __base_type::operator __integral_type;
891 using __base_type::operator=;
893 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
894 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
898 /// Explicit specialization for unsigned long.
900 struct atomic<unsigned long> : __atomic_base<unsigned long>
902 typedef unsigned long __integral_type;
903 typedef __atomic_base<unsigned long> __base_type;
905 atomic() noexcept = default;
906 ~atomic() noexcept = default;
907 atomic(const atomic&) = delete;
908 atomic& operator=(const atomic&) = delete;
909 atomic& operator=(const atomic&) volatile = delete;
911 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
913 using __base_type::operator __integral_type;
914 using __base_type::operator=;
916 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
917 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
921 /// Explicit specialization for long long.
923 struct atomic<long long> : __atomic_base<long long>
925 typedef long long __integral_type;
926 typedef __atomic_base<long long> __base_type;
928 atomic() noexcept = default;
929 ~atomic() noexcept = default;
930 atomic(const atomic&) = delete;
931 atomic& operator=(const atomic&) = delete;
932 atomic& operator=(const atomic&) volatile = delete;
934 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
936 using __base_type::operator __integral_type;
937 using __base_type::operator=;
939 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
940 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
944 /// Explicit specialization for unsigned long long.
946 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
948 typedef unsigned long long __integral_type;
949 typedef __atomic_base<unsigned long long> __base_type;
951 atomic() noexcept = default;
952 ~atomic() noexcept = default;
953 atomic(const atomic&) = delete;
954 atomic& operator=(const atomic&) = delete;
955 atomic& operator=(const atomic&) volatile = delete;
957 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
959 using __base_type::operator __integral_type;
960 using __base_type::operator=;
962 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
963 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
967 /// Explicit specialization for wchar_t.
969 struct atomic<wchar_t> : __atomic_base<wchar_t>
971 typedef wchar_t __integral_type;
972 typedef __atomic_base<wchar_t> __base_type;
974 atomic() noexcept = default;
975 ~atomic() noexcept = default;
976 atomic(const atomic&) = delete;
977 atomic& operator=(const atomic&) = delete;
978 atomic& operator=(const atomic&) volatile = delete;
980 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
982 using __base_type::operator __integral_type;
983 using __base_type::operator=;
985 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
986 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t			__integral_type;
      typedef __atomic_base<char8_t>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif // _GLIBCXX_USE_CHAR8_T
1016 /// Explicit specialization for char16_t.
1018 struct atomic<char16_t> : __atomic_base<char16_t>
1020 typedef char16_t __integral_type;
1021 typedef __atomic_base<char16_t> __base_type;
1023 atomic() noexcept = default;
1024 ~atomic() noexcept = default;
1025 atomic(const atomic&) = delete;
1026 atomic& operator=(const atomic&) = delete;
1027 atomic& operator=(const atomic&) volatile = delete;
1029 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1031 using __base_type::operator __integral_type;
1032 using __base_type::operator=;
1034 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1035 static constexpr bool is_always_lock_free
1036 = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1040 /// Explicit specialization for char32_t.
1042 struct atomic<char32_t> : __atomic_base<char32_t>
1044 typedef char32_t __integral_type;
1045 typedef __atomic_base<char32_t> __base_type;
1047 atomic() noexcept = default;
1048 ~atomic() noexcept = default;
1049 atomic(const atomic&) = delete;
1050 atomic& operator=(const atomic&) = delete;
1051 atomic& operator=(const atomic&) volatile = delete;
1053 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1055 using __base_type::operator __integral_type;
1056 using __base_type::operator=;
1058 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1059 static constexpr bool is_always_lock_free
1060 = ATOMIC_CHAR32_T_LOCK_FREE == 2;
1066 typedef atomic<bool> atomic_bool;
1069 typedef atomic<char> atomic_char;
1072 typedef atomic<signed char> atomic_schar;
1075 typedef atomic<unsigned char> atomic_uchar;
1078 typedef atomic<short> atomic_short;
1081 typedef atomic<unsigned short> atomic_ushort;
1084 typedef atomic<int> atomic_int;
1087 typedef atomic<unsigned int> atomic_uint;
1090 typedef atomic<long> atomic_long;
1093 typedef atomic<unsigned long> atomic_ulong;
1096 typedef atomic<long long> atomic_llong;
1099 typedef atomic<unsigned long long> atomic_ullong;
1102 typedef atomic<wchar_t> atomic_wchar_t;
1104 #ifdef _GLIBCXX_USE_CHAR8_T
1106 typedef atomic<char8_t> atomic_char8_t;
1110 typedef atomic<char16_t> atomic_char16_t;
1113 typedef atomic<char32_t> atomic_char32_t;
1115 #ifdef _GLIBCXX_USE_C99_STDINT
1116 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1117 // 2441. Exact-width atomic typedefs should be provided
1120 typedef atomic<int8_t> atomic_int8_t;
1123 typedef atomic<uint8_t> atomic_uint8_t;
1126 typedef atomic<int16_t> atomic_int16_t;
1129 typedef atomic<uint16_t> atomic_uint16_t;
1132 typedef atomic<int32_t> atomic_int32_t;
1135 typedef atomic<uint32_t> atomic_uint32_t;
1138 typedef atomic<int64_t> atomic_int64_t;
1141 typedef atomic<uint64_t> atomic_uint64_t;
1144 /// atomic_int_least8_t
1145 typedef atomic<int_least8_t> atomic_int_least8_t;
1147 /// atomic_uint_least8_t
1148 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1150 /// atomic_int_least16_t
1151 typedef atomic<int_least16_t> atomic_int_least16_t;
1153 /// atomic_uint_least16_t
1154 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1156 /// atomic_int_least32_t
1157 typedef atomic<int_least32_t> atomic_int_least32_t;
1159 /// atomic_uint_least32_t
1160 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1162 /// atomic_int_least64_t
1163 typedef atomic<int_least64_t> atomic_int_least64_t;
1165 /// atomic_uint_least64_t
1166 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1169 /// atomic_int_fast8_t
1170 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1172 /// atomic_uint_fast8_t
1173 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1175 /// atomic_int_fast16_t
1176 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1178 /// atomic_uint_fast16_t
1179 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1181 /// atomic_int_fast32_t
1182 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1184 /// atomic_uint_fast32_t
1185 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1187 /// atomic_int_fast64_t
1188 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1190 /// atomic_uint_fast64_t
1191 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
1195 typedef atomic<intptr_t> atomic_intptr_t;
1197 /// atomic_uintptr_t
1198 typedef atomic<uintptr_t> atomic_uintptr_t;
1201 typedef atomic<size_t> atomic_size_t;
1203 /// atomic_ptrdiff_t
1204 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1207 typedef atomic<intmax_t> atomic_intmax_t;
1209 /// atomic_uintmax_t
1210 typedef atomic<uintmax_t> atomic_uintmax_t;
1212 // Function definitions, atomic_flag operations.
1214 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1215 memory_order __m) noexcept
1216 { return __a->test_and_set(__m); }
1219 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1220 memory_order __m) noexcept
1221 { return __a->test_and_set(__m); }
1223 #if __cpp_lib_atomic_flag_test
1225 atomic_flag_test(const atomic_flag* __a) noexcept
1226 { return __a->test(); }
1229 atomic_flag_test(const volatile atomic_flag* __a) noexcept
1230 { return __a->test(); }
1233 atomic_flag_test_explicit(const atomic_flag* __a,
1234 memory_order __m) noexcept
1235 { return __a->test(__m); }
1238 atomic_flag_test_explicit(const volatile atomic_flag* __a,
1239 memory_order __m) noexcept
1240 { return __a->test(__m); }
1244 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1245 { __a->clear(__m); }
1248 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1249 memory_order __m) noexcept
1250 { __a->clear(__m); }
1253 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1254 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1257 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1258 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1261 atomic_flag_clear(atomic_flag* __a) noexcept
1262 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1265 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1266 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
#if __cpp_lib_atomic_wait
  /// Block until the flag's value is observed to differ from __old (C++20).
  inline void
  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
  { __a->wait(__old); }

  inline void
  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
                            memory_order __m) noexcept
  { __a->wait(__old, __m); }

  /// Wake at least one thread blocked in a wait on this flag (C++20).
  inline void
  atomic_flag_notify_one(atomic_flag* __a) noexcept
  { __a->notify_one(); }

  /// Wake all threads blocked in a wait on this flag (C++20).
  inline void
  atomic_flag_notify_all(atomic_flag* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1287 /// @cond undocumented
1288 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1289 // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
1290 template<typename _Tp>
1291 using __atomic_val_t = __type_identity_t<_Tp>;
1292 template<typename _Tp>
1293 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1296 // [atomics.nonmembers] Non-member functions.
1297 // Function templates generally applicable to atomic types.
1298 template<typename _ITp>
1300 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1301 { return __a->is_lock_free(); }
1303 template<typename _ITp>
1305 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1306 { return __a->is_lock_free(); }
1308 template<typename _ITp>
1310 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1311 { __a->store(__i, memory_order_relaxed); }
1313 template<typename _ITp>
1315 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1316 { __a->store(__i, memory_order_relaxed); }
1318 template<typename _ITp>
1320 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1321 memory_order __m) noexcept
1322 { __a->store(__i, __m); }
1324 template<typename _ITp>
1326 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1327 memory_order __m) noexcept
1328 { __a->store(__i, __m); }
1330 template<typename _ITp>
1332 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1333 { return __a->load(__m); }
1335 template<typename _ITp>
1337 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1338 memory_order __m) noexcept
1339 { return __a->load(__m); }
1341 template<typename _ITp>
1343 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1344 memory_order __m) noexcept
1345 { return __a->exchange(__i, __m); }
1347 template<typename _ITp>
1349 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1350 __atomic_val_t<_ITp> __i,
1351 memory_order __m) noexcept
1352 { return __a->exchange(__i, __m); }
1354 template<typename _ITp>
1356 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1357 __atomic_val_t<_ITp>* __i1,
1358 __atomic_val_t<_ITp> __i2,
1360 memory_order __m2) noexcept
1361 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1363 template<typename _ITp>
1365 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1366 __atomic_val_t<_ITp>* __i1,
1367 __atomic_val_t<_ITp> __i2,
1369 memory_order __m2) noexcept
1370 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1372 template<typename _ITp>
1374 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1375 __atomic_val_t<_ITp>* __i1,
1376 __atomic_val_t<_ITp> __i2,
1378 memory_order __m2) noexcept
1379 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1381 template<typename _ITp>
1383 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1384 __atomic_val_t<_ITp>* __i1,
1385 __atomic_val_t<_ITp> __i2,
1387 memory_order __m2) noexcept
1388 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1391 template<typename _ITp>
1393 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1394 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1396 template<typename _ITp>
1398 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1399 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1401 template<typename _ITp>
1403 atomic_load(const atomic<_ITp>* __a) noexcept
1404 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1406 template<typename _ITp>
1408 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1409 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1411 template<typename _ITp>
1413 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1414 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1416 template<typename _ITp>
1418 atomic_exchange(volatile atomic<_ITp>* __a,
1419 __atomic_val_t<_ITp> __i) noexcept
1420 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1422 template<typename _ITp>
1424 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1425 __atomic_val_t<_ITp>* __i1,
1426 __atomic_val_t<_ITp> __i2) noexcept
1428 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1429 memory_order_seq_cst,
1430 memory_order_seq_cst);
1433 template<typename _ITp>
1435 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1436 __atomic_val_t<_ITp>* __i1,
1437 __atomic_val_t<_ITp> __i2) noexcept
1439 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1440 memory_order_seq_cst,
1441 memory_order_seq_cst);
1444 template<typename _ITp>
1446 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1447 __atomic_val_t<_ITp>* __i1,
1448 __atomic_val_t<_ITp> __i2) noexcept
1450 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1451 memory_order_seq_cst,
1452 memory_order_seq_cst);
1455 template<typename _ITp>
1457 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1458 __atomic_val_t<_ITp>* __i1,
1459 __atomic_val_t<_ITp> __i2) noexcept
1461 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1462 memory_order_seq_cst,
1463 memory_order_seq_cst);
#if __cpp_lib_atomic_wait
  /// Block until the value of *__a is observed to differ from __old (C++20).
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
                typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
                         typename std::atomic<_Tp>::value_type __old,
                         std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  /// Wake at least one thread blocked in an atomic wait on *__a (C++20).
  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  /// Wake all threads blocked in an atomic wait on *__a (C++20).
  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1492 // Function templates for atomic_integral and atomic_pointer operations only.
1493 // Some operations (and, or, xor) are only available for atomic integrals,
1494 // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1496 template<typename _ITp>
1498 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1499 __atomic_diff_t<_ITp> __i,
1500 memory_order __m) noexcept
1501 { return __a->fetch_add(__i, __m); }
1503 template<typename _ITp>
1505 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1506 __atomic_diff_t<_ITp> __i,
1507 memory_order __m) noexcept
1508 { return __a->fetch_add(__i, __m); }
1510 template<typename _ITp>
1512 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1513 __atomic_diff_t<_ITp> __i,
1514 memory_order __m) noexcept
1515 { return __a->fetch_sub(__i, __m); }
1517 template<typename _ITp>
1519 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1520 __atomic_diff_t<_ITp> __i,
1521 memory_order __m) noexcept
1522 { return __a->fetch_sub(__i, __m); }
1524 template<typename _ITp>
1526 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1527 __atomic_val_t<_ITp> __i,
1528 memory_order __m) noexcept
1529 { return __a->fetch_and(__i, __m); }
1531 template<typename _ITp>
1533 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1534 __atomic_val_t<_ITp> __i,
1535 memory_order __m) noexcept
1536 { return __a->fetch_and(__i, __m); }
1538 template<typename _ITp>
1540 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1541 __atomic_val_t<_ITp> __i,
1542 memory_order __m) noexcept
1543 { return __a->fetch_or(__i, __m); }
1545 template<typename _ITp>
1547 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1548 __atomic_val_t<_ITp> __i,
1549 memory_order __m) noexcept
1550 { return __a->fetch_or(__i, __m); }
1552 template<typename _ITp>
1554 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1555 __atomic_val_t<_ITp> __i,
1556 memory_order __m) noexcept
1557 { return __a->fetch_xor(__i, __m); }
1559 template<typename _ITp>
1561 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1562 __atomic_val_t<_ITp> __i,
1563 memory_order __m) noexcept
1564 { return __a->fetch_xor(__i, __m); }
1566 template<typename _ITp>
1568 atomic_fetch_add(atomic<_ITp>* __a,
1569 __atomic_diff_t<_ITp> __i) noexcept
1570 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1572 template<typename _ITp>
1574 atomic_fetch_add(volatile atomic<_ITp>* __a,
1575 __atomic_diff_t<_ITp> __i) noexcept
1576 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1578 template<typename _ITp>
1580 atomic_fetch_sub(atomic<_ITp>* __a,
1581 __atomic_diff_t<_ITp> __i) noexcept
1582 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1584 template<typename _ITp>
1586 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1587 __atomic_diff_t<_ITp> __i) noexcept
1588 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1590 template<typename _ITp>
1592 atomic_fetch_and(__atomic_base<_ITp>* __a,
1593 __atomic_val_t<_ITp> __i) noexcept
1594 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1596 template<typename _ITp>
1598 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1599 __atomic_val_t<_ITp> __i) noexcept
1600 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1602 template<typename _ITp>
1604 atomic_fetch_or(__atomic_base<_ITp>* __a,
1605 __atomic_val_t<_ITp> __i) noexcept
1606 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1608 template<typename _ITp>
1610 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1611 __atomic_val_t<_ITp> __i) noexcept
1612 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1614 template<typename _ITp>
1616 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1617 __atomic_val_t<_ITp> __i) noexcept
1618 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1620 template<typename _ITp>
1622 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1623 __atomic_val_t<_ITp> __i) noexcept
1624 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
#ifdef __cpp_lib_atomic_float
  // Explicit specializations of std::atomic for floating-point types
  // (C++20), each implemented in terms of the __atomic_float base.

  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };

#ifdef __STDCPP_FLOAT16_T__
  template<>
    struct atomic<_Float16> : __atomic_float<_Float16>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float16>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT32_T__
  template<>
    struct atomic<_Float32> : __atomic_float<_Float32>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float32>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT64_T__
  template<>
    struct atomic<_Float64> : __atomic_float<_Float64>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float64>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT128_T__
  template<>
    struct atomic<_Float128> : __atomic_float<_Float128>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float128>::operator=;
    };
#endif

#ifdef __STDCPP_BFLOAT16_T__
  template<>
    struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
    {
      atomic() noexcept = default;

      constexpr
      atomic(__gnu_cxx::__bfloat16_t __fp) noexcept
      : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
    };
#endif
#endif // __cpp_lib_atomic_float
#ifdef __cpp_lib_atomic_ref
  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };
#endif // __cpp_lib_atomic_ref
#ifdef __cpp_lib_atomic_lock_free_type_aliases
  // Signed/unsigned integral atomic types that are lock-free and for which
  // waiting/notifying is most efficient (C++20).  Prefer the platform wait
  // type when there is one, otherwise the widest lock-free integer.
# ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
  using atomic_signed_lock_free
    = atomic<make_signed_t<__detail::__platform_wait_t>>;
  using atomic_unsigned_lock_free
    = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
# elif ATOMIC_INT_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed int>;
  using atomic_unsigned_lock_free = atomic<unsigned int>;
# elif ATOMIC_LONG_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed long>;
  using atomic_unsigned_lock_free = atomic<unsigned long>;
# elif ATOMIC_CHAR_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed char>;
  using atomic_unsigned_lock_free = atomic<unsigned char>;
# else
# error "libstdc++ bug: no lock-free atomics but they were emitted in <version>"
# endif
#endif
1795 /// @} group atomics
1797 _GLIBCXX_END_NAMESPACE_VERSION
1802 #endif // _GLIBCXX_ATOMIC