// -*- C++ -*- header.

// Copyright (C) 2008-2025 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#ifdef _GLIBCXX_SYSHDR
#pragma GCC system_header
#endif

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#define __glibcxx_want_atomic_is_always_lock_free
#define __glibcxx_want_atomic_flag_test
#define __glibcxx_want_atomic_float
#define __glibcxx_want_atomic_ref
#define __glibcxx_want_atomic_lock_free_type_aliases
#define __glibcxx_want_atomic_value_initialization
#define __glibcxx_want_atomic_wait
#include <bits/version.h>

#include <bits/atomic_base.h>
#include <cstdint>
#include <type_traits>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_base.notify_one(); }

      void
      notify_all() noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
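
  // Illustrative example (a minimal sketch, not part of the upstream header):
  // atomic<bool> supports loads, stores, exchange and compare-exchange, but
  // no arithmetic or bitwise fetch-operations.  Assuming two threads sharing
  // a flag:
  //
  //   std::atomic<bool> ready{false};
  //   // thread A:  ready.store(true, std::memory_order_release);
  //   // thread B:  while (!ready.load(std::memory_order_acquire)) { /* spin */ }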

  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);
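      // Worked example (explanatory comment only): for sizeof(_Tp) == 6 the
      // size is not a power of two, so _S_min_alignment is 0 and alignof(_Tp)
      // is used below; for sizeof(_Tp) == 8 the result is 8, so an 8-byte
      // type with alignof == 4 is still over-aligned to 8 bytes, which lets
      // the compiler use a single 64-bit atomic access where available.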

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i;

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
#if __cpp_lib_atomic_value_initialization
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 4169. std::atomic<T>'s default constructor should be constrained
      constexpr atomic() noexcept(is_nothrow_default_constructible_v<_Tp>)
        requires is_default_constructible_v<_Tp>
      : _M_i()
      {}
#else
      atomic() = default;
#endif

      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i)
      {
#if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
        if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
          __builtin_clear_padding(std::__addressof(_M_i));
#endif
      }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i),
                       __atomic_impl::__clear_padding(__i),
                       int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i),
                       __atomic_impl::__clear_padding(__i),
                       int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i),
                          __atomic_impl::__clear_padding(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i),
                          __atomic_impl::__clear_padding(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
                                                 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
                                                 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
                                                 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
                                                 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait // C++ >= 20
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(std::addressof(_M_i), __old,
                                     [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { std::__atomic_notify_address(std::addressof(_M_i), false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(std::addressof(_M_i), true); }
#endif // __cpp_lib_atomic_wait
    };
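
  // Illustrative example (a minimal sketch, not part of the upstream header):
  // the primary template works for any trivially copyable type, with
  // compare_exchange as the general read-modify-write primitive.  Assuming a
  // user-defined 8-byte POD:
  //
  //   struct Pos { int x, y; };               // trivially copyable
  //   std::atomic<Pos> p{Pos{0, 0}};
  //   Pos expected = p.load();
  //   while (!p.compare_exchange_weak(expected, Pos{expected.x + 1, expected.y}))
  //     { /* expected now holds the current value; retry */ }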

  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object_v<_Tp>, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
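
  // Illustrative example (a minimal sketch, not part of the upstream header):
  // arithmetic on atomic<_Tp*> is in units of whole objects, mirroring
  // ordinary pointer arithmetic.  Assuming an int array:
  //
  //   int buf[4] = {0, 1, 2, 3};
  //   std::atomic<int*> cursor{buf};
  //   int* prev = cursor.fetch_add(2);   // prev == buf, cursor now buf + 2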


  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char __integral_type;
      typedef __atomic_base<char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short __integral_type;
      typedef __atomic_base<short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int __integral_type;
      typedef __atomic_base<int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
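
  // Illustrative example (a minimal sketch, not part of the upstream header):
  // the integral specializations inherit the arithmetic and bitwise
  // fetch-operations from __atomic_base.  Assuming a shared counter:
  //
  //   std::atomic<int> hits{0};
  //   ++hits;                                         // seq_cst increment
  //   int before = hits.fetch_add(5, std::memory_order_relaxed);
  //   int old    = hits.fetch_and(0x0f);              // bitwise ops too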

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long __integral_type;
      typedef __atomic_base<long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long __integral_type;
      typedef __atomic_base<long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t __integral_type;
      typedef __atomic_base<wchar_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };

#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t __integral_type;
      typedef __atomic_base<char8_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t __integral_type;
      typedef __atomic_base<char16_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t __integral_type;
      typedef __atomic_base<char32_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };


  /// atomic_bool
  typedef atomic<bool> atomic_bool;

  /// atomic_char
  typedef atomic<char> atomic_char;

  /// atomic_schar
  typedef atomic<signed char> atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char> atomic_uchar;

  /// atomic_short
  typedef atomic<short> atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short> atomic_ushort;

  /// atomic_int
  typedef atomic<int> atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int> atomic_uint;

  /// atomic_long
  typedef atomic<long> atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long> atomic_ulong;

  /// atomic_llong
  typedef atomic<long long> atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t> atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t> atomic_char8_t;
#endif

  /// atomic_char16_t
  typedef atomic<char16_t> atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t> atomic_char32_t;

#ifdef _GLIBCXX_USE_C99_STDINT
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  /// atomic_int8_t
  typedef atomic<int8_t> atomic_int8_t;

  /// atomic_uint8_t
  typedef atomic<uint8_t> atomic_uint8_t;

  /// atomic_int16_t
  typedef atomic<int16_t> atomic_int16_t;

  /// atomic_uint16_t
  typedef atomic<uint16_t> atomic_uint16_t;

  /// atomic_int32_t
  typedef atomic<int32_t> atomic_int32_t;

  /// atomic_uint32_t
  typedef atomic<uint32_t> atomic_uint32_t;

  /// atomic_int64_t
  typedef atomic<int64_t> atomic_int64_t;

  /// atomic_uint64_t
  typedef atomic<uint64_t> atomic_uint64_t;
#endif

  /// atomic_int_least8_t
  typedef atomic<int_least8_t> atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t> atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t> atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t> atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t> atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t> atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t> atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t> atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t> atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t> atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t> atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t> atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t> atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t> atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef atomic<intptr_t> atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t> atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t> atomic_size_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t> atomic_ptrdiff_t;

  /// atomic_intmax_t
  typedef atomic<intmax_t> atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t> atomic_uintmax_t;

  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

#if __cpp_lib_atomic_flag_test
  inline bool
  atomic_flag_test(const atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test(const volatile atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test_explicit(const atomic_flag* __a,
                            memory_order __m) noexcept
  { return __a->test(__m); }

  inline bool
  atomic_flag_test_explicit(const volatile atomic_flag* __a,
                            memory_order __m) noexcept
  { return __a->test(__m); }
#endif

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

#if __cpp_lib_atomic_wait
  inline void
  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
  { __a->wait(__old); }

  inline void
  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
                            memory_order __m) noexcept
  { __a->wait(__old, __m); }

  inline void
  atomic_flag_notify_one(atomic_flag* __a) noexcept
  { __a->notify_one(); }

  inline void
  atomic_flag_notify_all(atomic_flag* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait

  /// @cond undocumented
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  /// @endcond
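
  // Illustrative note (a minimal sketch, not part of the upstream header):
  // __atomic_val_t makes the value parameter of the non-member functions a
  // non-deduced context, so only the atomic<_Tp>* argument drives deduction:
  //
  //   std::atomic<long> a{0};
  //   std::atomic_store(&a, 1);   // OK: _Tp is deduced as long from &a,
  //                               // and the int literal converts to long.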

  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }


#if __cpp_lib_atomic_wait
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
                typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
                         typename std::atomic<_Tp>::value_type __old,
                         std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait

  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
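  //
  // Illustrative note (a minimal sketch, not part of the upstream header):
  // because these overloads take __atomic_base<_ITp>*, they accept any
  // integral std::atomic (which derives from __atomic_base) but not
  // atomic<bool> or the pointer specializations:
  //
  //   std::atomic<unsigned> flags{0};
  //   std::atomic_fetch_or(&flags, 0x2u);    // OK, integral specialization
  //   // std::atomic<int*> p{nullptr};
  //   // std::atomic_fetch_or(&p, ...);      // ill-formed: no fetch_or for pointers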

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

#ifdef __cpp_lib_atomic_float
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };
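
  // Illustrative example (a minimal sketch, not part of the upstream header):
  // with __cpp_lib_atomic_float (C++20) the floating-point specializations
  // gain fetch_add/fetch_sub:
  //
  //   std::atomic<double> total{0.0};
  //   total.fetch_add(1.5, std::memory_order_relaxed);
  //   total += 0.5;                // performs a seq_cst fetch_add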

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };

#ifdef __STDCPP_FLOAT16_T__
  template<>
    struct atomic<_Float16> : __atomic_float<_Float16>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float16>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT32_T__
  template<>
    struct atomic<_Float32> : __atomic_float<_Float32>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float32>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT64_T__
  template<>
    struct atomic<_Float64> : __atomic_float<_Float64>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float64>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT128_T__
  template<>
    struct atomic<_Float128> : __atomic_float<_Float128>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float128>::operator=;
    };
#endif

#ifdef __STDCPP_BFLOAT16_T__
  template<>
    struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
    {
      atomic() noexcept = default;

      constexpr
      atomic(__gnu_cxx::__bfloat16_t __fp) noexcept
      : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
    };
#endif
#endif // __cpp_lib_atomic_float

#ifdef __cpp_lib_atomic_ref
  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };
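
  // Illustrative example (a minimal sketch, not part of the upstream header):
  // atomic_ref applies atomic operations to an ordinary object for the
  // lifetime of the reference; the object must be suitably aligned and must
  // not be accessed non-atomically while any atomic_ref to it exists.
  //
  //   int counter = 0;                       // plain, non-atomic object
  //   std::atomic_ref<int> ref{counter};
  //   ref.fetch_add(1, std::memory_order_relaxed);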
#endif // __cpp_lib_atomic_ref

#ifdef __cpp_lib_atomic_lock_free_type_aliases
# ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
  using atomic_signed_lock_free
    = atomic<make_signed_t<__detail::__platform_wait_t>>;
  using atomic_unsigned_lock_free
    = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
# elif ATOMIC_INT_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed int>;
  using atomic_unsigned_lock_free = atomic<unsigned int>;
# elif ATOMIC_LONG_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed long>;
  using atomic_unsigned_lock_free = atomic<unsigned long>;
# elif ATOMIC_CHAR_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed char>;
  using atomic_unsigned_lock_free = atomic<unsigned char>;
# else
#  error "libstdc++ bug: no lock-free atomics but they were emitted in <version>"
# endif
#endif

  /// @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC