libstdc++ — header <atomic>
(Doxygen documentation export of this file; the GCC source tree holds the authoritative text.)
1// -*- C++ -*- header.
2
3// Copyright (C) 2008-2024 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file include/atomic
26 * This is a Standard C++ Library header.
27 */
28
29// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31
32#ifndef _GLIBCXX_ATOMIC
33#define _GLIBCXX_ATOMIC 1
34
35#pragma GCC system_header
36
37#if __cplusplus < 201103L
38# include <bits/c++0x_warning.h>
39#else
40
41#define __glibcxx_want_atomic_is_always_lock_free
42#define __glibcxx_want_atomic_flag_test
43#define __glibcxx_want_atomic_float
44#define __glibcxx_want_atomic_ref
45#define __glibcxx_want_atomic_lock_free_type_aliases
46#define __glibcxx_want_atomic_value_initialization
47#define __glibcxx_want_atomic_wait
48#include <bits/version.h>
49
50#include <bits/atomic_base.h>
51
52namespace std _GLIBCXX_VISIBILITY(default)
53{
54_GLIBCXX_BEGIN_NAMESPACE_VERSION
55
56 /**
57 * @addtogroup atomics
58 * @{
59 */
60
61 template<typename _Tp>
62 struct atomic;
63
64 /// atomic<bool>
65 // NB: No operators or fetch-operations for this type.
66 template<>
67 struct atomic<bool>
68 {
69 using value_type = bool;
70
71 private:
72 __atomic_base<bool> _M_base;
73
74 public:
75 atomic() noexcept = default;
76 ~atomic() noexcept = default;
77 atomic(const atomic&) = delete;
78 atomic& operator=(const atomic&) = delete;
79 atomic& operator=(const atomic&) volatile = delete;
80
81 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
82
83 bool
84 operator=(bool __i) noexcept
85 { return _M_base.operator=(__i); }
86
87 bool
88 operator=(bool __i) volatile noexcept
89 { return _M_base.operator=(__i); }
90
91 operator bool() const noexcept
92 { return _M_base.load(); }
93
94 operator bool() const volatile noexcept
95 { return _M_base.load(); }
96
97 bool
98 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
99
100 bool
101 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
102
103#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
104 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
105#endif
106
107 void
108 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
109 { _M_base.store(__i, __m); }
110
111 void
112 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
113 { _M_base.store(__i, __m); }
114
115 bool
116 load(memory_order __m = memory_order_seq_cst) const noexcept
117 { return _M_base.load(__m); }
118
119 bool
120 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
121 { return _M_base.load(__m); }
122
123 bool
124 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
125 { return _M_base.exchange(__i, __m); }
126
127 bool
128 exchange(bool __i,
129 memory_order __m = memory_order_seq_cst) volatile noexcept
130 { return _M_base.exchange(__i, __m); }
131
132 bool
133 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
134 memory_order __m2) noexcept
135 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
136
137 bool
138 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
139 memory_order __m2) volatile noexcept
140 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
141
142 bool
143 compare_exchange_weak(bool& __i1, bool __i2,
144 memory_order __m = memory_order_seq_cst) noexcept
145 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
146
147 bool
148 compare_exchange_weak(bool& __i1, bool __i2,
149 memory_order __m = memory_order_seq_cst) volatile noexcept
150 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
151
152 bool
153 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
154 memory_order __m2) noexcept
155 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
156
157 bool
158 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
159 memory_order __m2) volatile noexcept
160 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
161
162 bool
163 compare_exchange_strong(bool& __i1, bool __i2,
164 memory_order __m = memory_order_seq_cst) noexcept
165 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
166
167 bool
168 compare_exchange_strong(bool& __i1, bool __i2,
169 memory_order __m = memory_order_seq_cst) volatile noexcept
170 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
171
172#if __cpp_lib_atomic_wait
173 void
174 wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
175 { _M_base.wait(__old, __m); }
176
177 // TODO add const volatile overload
178
179 void
180 notify_one() noexcept
181 { _M_base.notify_one(); }
182
183 void
184 notify_all() noexcept
185 { _M_base.notify_all(); }
186#endif // __cpp_lib_atomic_wait
187 };
188
189/// @cond undocumented
190#if __cpp_lib_atomic_value_initialization
191# define _GLIBCXX20_INIT(I) = I
192#else
193# define _GLIBCXX20_INIT(I)
194#endif
195/// @endcond
196
197 /**
198 * @brief Generic atomic type, primary class template.
199 *
200 * @tparam _Tp Type to be made atomic, must be trivially copyable.
201 */
202 template<typename _Tp>
203 struct atomic
204 {
205 using value_type = _Tp;
206
207 private:
208 // Align 1/2/4/8/16-byte types to at least their size.
209 static constexpr int _S_min_alignment
210 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
211 ? 0 : sizeof(_Tp);
212
213 static constexpr int _S_alignment
214 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
215
216 alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
217
218 static_assert(__is_trivially_copyable(_Tp),
219 "std::atomic requires a trivially copyable type");
220
221 static_assert(sizeof(_Tp) > 0,
222 "Incomplete or zero-sized types are not supported");
223
224#if __cplusplus > 201703L
225 static_assert(is_copy_constructible_v<_Tp>);
226 static_assert(is_move_constructible_v<_Tp>);
227 static_assert(is_copy_assignable_v<_Tp>);
228 static_assert(is_move_assignable_v<_Tp>);
229#endif
230
231 public:
232 atomic() = default;
233 ~atomic() noexcept = default;
234 atomic(const atomic&) = delete;
235 atomic& operator=(const atomic&) = delete;
236 atomic& operator=(const atomic&) volatile = delete;
237
238 constexpr atomic(_Tp __i) noexcept : _M_i(__i)
239 {
240#if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
241 if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
242 __builtin_clear_padding(std::__addressof(_M_i));
243#endif
244 }
245
246 operator _Tp() const noexcept
247 { return load(); }
248
249 operator _Tp() const volatile noexcept
250 { return load(); }
251
252 _Tp
253 operator=(_Tp __i) noexcept
254 { store(__i); return __i; }
255
256 _Tp
257 operator=(_Tp __i) volatile noexcept
258 { store(__i); return __i; }
259
260 bool
261 is_lock_free() const noexcept
262 {
263 // Produce a fake, minimally aligned pointer.
264 return __atomic_is_lock_free(sizeof(_M_i),
265 reinterpret_cast<void *>(-_S_alignment));
266 }
267
268 bool
269 is_lock_free() const volatile noexcept
270 {
271 // Produce a fake, minimally aligned pointer.
272 return __atomic_is_lock_free(sizeof(_M_i),
273 reinterpret_cast<void *>(-_S_alignment));
274 }
275
276#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
277 static constexpr bool is_always_lock_free
278 = __atomic_always_lock_free(sizeof(_M_i), 0);
279#endif
280
281 void
282 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
283 {
284 __atomic_store(std::__addressof(_M_i),
285 __atomic_impl::__clear_padding(__i),
286 int(__m));
287 }
288
289 void
290 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
291 {
292 __atomic_store(std::__addressof(_M_i),
293 __atomic_impl::__clear_padding(__i),
294 int(__m));
295 }
296
297 _Tp
298 load(memory_order __m = memory_order_seq_cst) const noexcept
299 {
300 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
301 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
302 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
303 return *__ptr;
304 }
305
306 _Tp
307 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
308 {
309 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
310 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
311 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
312 return *__ptr;
313 }
314
315 _Tp
316 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
317 {
318 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
319 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
320 __atomic_exchange(std::__addressof(_M_i),
321 __atomic_impl::__clear_padding(__i),
322 __ptr, int(__m));
323 return *__ptr;
324 }
325
326 _Tp
327 exchange(_Tp __i,
328 memory_order __m = memory_order_seq_cst) volatile noexcept
329 {
330 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
331 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
332 __atomic_exchange(std::__addressof(_M_i),
333 __atomic_impl::__clear_padding(__i),
334 __ptr, int(__m));
335 return *__ptr;
336 }
337
338 bool
339 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
340 memory_order __f) noexcept
341 {
342 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
343 __s, __f);
344 }
345
346 bool
347 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
348 memory_order __f) volatile noexcept
349 {
350 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
351 __s, __f);
352 }
353
354 bool
355 compare_exchange_weak(_Tp& __e, _Tp __i,
356 memory_order __m = memory_order_seq_cst) noexcept
357 { return compare_exchange_weak(__e, __i, __m,
358 __cmpexch_failure_order(__m)); }
359
360 bool
361 compare_exchange_weak(_Tp& __e, _Tp __i,
362 memory_order __m = memory_order_seq_cst) volatile noexcept
363 { return compare_exchange_weak(__e, __i, __m,
364 __cmpexch_failure_order(__m)); }
365
366 bool
367 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
368 memory_order __f) noexcept
369 {
370 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
371 __s, __f);
372 }
373
374 bool
375 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
376 memory_order __f) volatile noexcept
377 {
378 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
379 __s, __f);
380 }
381
382 bool
383 compare_exchange_strong(_Tp& __e, _Tp __i,
384 memory_order __m = memory_order_seq_cst) noexcept
385 { return compare_exchange_strong(__e, __i, __m,
386 __cmpexch_failure_order(__m)); }
387
388 bool
389 compare_exchange_strong(_Tp& __e, _Tp __i,
390 memory_order __m = memory_order_seq_cst) volatile noexcept
391 { return compare_exchange_strong(__e, __i, __m,
392 __cmpexch_failure_order(__m)); }
393
394#if __cpp_lib_atomic_wait // C++ >= 20
395 void
396 wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
397 {
398 std::__atomic_wait_address_v(std::addressof(_M_i), __old,
399 [__m, this] { return this->load(__m); });
400 }
401
402 // TODO add const volatile overload
403
404 void
405 notify_one() noexcept
406 { std::__atomic_notify_address(std::addressof(_M_i), false); }
407
408 void
409 notify_all() noexcept
410 { std::__atomic_notify_address(std::addressof(_M_i), true); }
411#endif // __cpp_lib_atomic_wait
412 };
413#undef _GLIBCXX20_INIT
414
415 /// Partial specialization for pointer types.
416 template<typename _Tp>
417 struct atomic<_Tp*>
418 {
419 using value_type = _Tp*;
420 using difference_type = ptrdiff_t;
421
422 typedef _Tp* __pointer_type;
423 typedef __atomic_base<_Tp*> __base_type;
424 __base_type _M_b;
425
426 atomic() noexcept = default;
427 ~atomic() noexcept = default;
428 atomic(const atomic&) = delete;
429 atomic& operator=(const atomic&) = delete;
430 atomic& operator=(const atomic&) volatile = delete;
431
432 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
433
434 operator __pointer_type() const noexcept
435 { return __pointer_type(_M_b); }
436
437 operator __pointer_type() const volatile noexcept
438 { return __pointer_type(_M_b); }
439
440 __pointer_type
441 operator=(__pointer_type __p) noexcept
442 { return _M_b.operator=(__p); }
443
444 __pointer_type
445 operator=(__pointer_type __p) volatile noexcept
446 { return _M_b.operator=(__p); }
447
448 __pointer_type
449 operator++(int) noexcept
450 {
451#if __cplusplus >= 201703L
452 static_assert( is_object_v<_Tp>, "pointer to object type" );
453#endif
454 return _M_b++;
455 }
456
457 __pointer_type
458 operator++(int) volatile noexcept
459 {
460#if __cplusplus >= 201703L
461 static_assert( is_object_v<_Tp>, "pointer to object type" );
462#endif
463 return _M_b++;
464 }
465
466 __pointer_type
467 operator--(int) noexcept
468 {
469#if __cplusplus >= 201703L
470 static_assert( is_object_v<_Tp>, "pointer to object type" );
471#endif
472 return _M_b--;
473 }
474
475 __pointer_type
476 operator--(int) volatile noexcept
477 {
478#if __cplusplus >= 201703L
479 static_assert( is_object_v<_Tp>, "pointer to object type" );
480#endif
481 return _M_b--;
482 }
483
484 __pointer_type
485 operator++() noexcept
486 {
487#if __cplusplus >= 201703L
488 static_assert( is_object_v<_Tp>, "pointer to object type" );
489#endif
490 return ++_M_b;
491 }
492
493 __pointer_type
494 operator++() volatile noexcept
495 {
496#if __cplusplus >= 201703L
497 static_assert( is_object_v<_Tp>, "pointer to object type" );
498#endif
499 return ++_M_b;
500 }
501
502 __pointer_type
503 operator--() noexcept
504 {
505#if __cplusplus >= 201703L
506 static_assert( is_object_v<_Tp>, "pointer to object type" );
507#endif
508 return --_M_b;
509 }
510
511 __pointer_type
512 operator--() volatile noexcept
513 {
514#if __cplusplus >= 201703L
515 static_assert( is_object_v<_Tp>, "pointer to object type" );
516#endif
517 return --_M_b;
518 }
519
520 __pointer_type
521 operator+=(ptrdiff_t __d) noexcept
522 {
523#if __cplusplus >= 201703L
524 static_assert( is_object_v<_Tp>, "pointer to object type" );
525#endif
526 return _M_b.operator+=(__d);
527 }
528
529 __pointer_type
530 operator+=(ptrdiff_t __d) volatile noexcept
531 {
532#if __cplusplus >= 201703L
533 static_assert( is_object_v<_Tp>, "pointer to object type" );
534#endif
535 return _M_b.operator+=(__d);
536 }
537
538 __pointer_type
539 operator-=(ptrdiff_t __d) noexcept
540 {
541#if __cplusplus >= 201703L
542 static_assert( is_object_v<_Tp>, "pointer to object type" );
543#endif
544 return _M_b.operator-=(__d);
545 }
546
547 __pointer_type
548 operator-=(ptrdiff_t __d) volatile noexcept
549 {
550#if __cplusplus >= 201703L
551 static_assert( is_object_v<_Tp>, "pointer to object type" );
552#endif
553 return _M_b.operator-=(__d);
554 }
555
556 bool
557 is_lock_free() const noexcept
558 { return _M_b.is_lock_free(); }
559
560 bool
561 is_lock_free() const volatile noexcept
562 { return _M_b.is_lock_free(); }
563
564#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
565 static constexpr bool is_always_lock_free
566 = ATOMIC_POINTER_LOCK_FREE == 2;
567#endif
568
569 void
570 store(__pointer_type __p,
571 memory_order __m = memory_order_seq_cst) noexcept
572 { return _M_b.store(__p, __m); }
573
574 void
575 store(__pointer_type __p,
576 memory_order __m = memory_order_seq_cst) volatile noexcept
577 { return _M_b.store(__p, __m); }
578
579 __pointer_type
580 load(memory_order __m = memory_order_seq_cst) const noexcept
581 { return _M_b.load(__m); }
582
583 __pointer_type
584 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
585 { return _M_b.load(__m); }
586
587 __pointer_type
588 exchange(__pointer_type __p,
589 memory_order __m = memory_order_seq_cst) noexcept
590 { return _M_b.exchange(__p, __m); }
591
592 __pointer_type
593 exchange(__pointer_type __p,
594 memory_order __m = memory_order_seq_cst) volatile noexcept
595 { return _M_b.exchange(__p, __m); }
596
597 bool
598 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
599 memory_order __m1, memory_order __m2) noexcept
600 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
601
602 bool
603 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
604 memory_order __m1,
605 memory_order __m2) volatile noexcept
606 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
607
608 bool
609 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
610 memory_order __m = memory_order_seq_cst) noexcept
611 {
612 return compare_exchange_weak(__p1, __p2, __m,
613 __cmpexch_failure_order(__m));
614 }
615
616 bool
617 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
618 memory_order __m = memory_order_seq_cst) volatile noexcept
619 {
620 return compare_exchange_weak(__p1, __p2, __m,
621 __cmpexch_failure_order(__m));
622 }
623
624 bool
625 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
626 memory_order __m1, memory_order __m2) noexcept
627 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
628
629 bool
630 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
631 memory_order __m1,
632 memory_order __m2) volatile noexcept
633 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
634
635 bool
636 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
637 memory_order __m = memory_order_seq_cst) noexcept
638 {
639 return _M_b.compare_exchange_strong(__p1, __p2, __m,
640 __cmpexch_failure_order(__m));
641 }
642
643 bool
644 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
645 memory_order __m = memory_order_seq_cst) volatile noexcept
646 {
647 return _M_b.compare_exchange_strong(__p1, __p2, __m,
648 __cmpexch_failure_order(__m));
649 }
650
651#if __cpp_lib_atomic_wait
652 void
653 wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
654 { _M_b.wait(__old, __m); }
655
656 // TODO add const volatile overload
657
658 void
659 notify_one() noexcept
660 { _M_b.notify_one(); }
661
662 void
663 notify_all() noexcept
664 { _M_b.notify_all(); }
665#endif // __cpp_lib_atomic_wait
666
667 __pointer_type
668 fetch_add(ptrdiff_t __d,
669 memory_order __m = memory_order_seq_cst) noexcept
670 {
671#if __cplusplus >= 201703L
672 static_assert( is_object_v<_Tp>, "pointer to object type" );
673#endif
674 return _M_b.fetch_add(__d, __m);
675 }
676
677 __pointer_type
678 fetch_add(ptrdiff_t __d,
679 memory_order __m = memory_order_seq_cst) volatile noexcept
680 {
681#if __cplusplus >= 201703L
682 static_assert( is_object_v<_Tp>, "pointer to object type" );
683#endif
684 return _M_b.fetch_add(__d, __m);
685 }
686
687 __pointer_type
688 fetch_sub(ptrdiff_t __d,
689 memory_order __m = memory_order_seq_cst) noexcept
690 {
691#if __cplusplus >= 201703L
692 static_assert( is_object_v<_Tp>, "pointer to object type" );
693#endif
694 return _M_b.fetch_sub(__d, __m);
695 }
696
697 __pointer_type
698 fetch_sub(ptrdiff_t __d,
699 memory_order __m = memory_order_seq_cst) volatile noexcept
700 {
701#if __cplusplus >= 201703L
702 static_assert( is_object_v<_Tp>, "pointer to object type" );
703#endif
704 return _M_b.fetch_sub(__d, __m);
705 }
706 };
707
708
709 /// Explicit specialization for char.
710 template<>
711 struct atomic<char> : __atomic_base<char>
712 {
713 typedef char __integral_type;
714 typedef __atomic_base<char> __base_type;
715
716 atomic() noexcept = default;
717 ~atomic() noexcept = default;
718 atomic(const atomic&) = delete;
719 atomic& operator=(const atomic&) = delete;
720 atomic& operator=(const atomic&) volatile = delete;
721
722 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
723
724 using __base_type::operator __integral_type;
725 using __base_type::operator=;
726
727#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
728 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
729#endif
730 };
731
732 /// Explicit specialization for signed char.
733 template<>
734 struct atomic<signed char> : __atomic_base<signed char>
735 {
736 typedef signed char __integral_type;
737 typedef __atomic_base<signed char> __base_type;
738
739 atomic() noexcept= default;
740 ~atomic() noexcept = default;
741 atomic(const atomic&) = delete;
742 atomic& operator=(const atomic&) = delete;
743 atomic& operator=(const atomic&) volatile = delete;
744
745 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
746
747 using __base_type::operator __integral_type;
748 using __base_type::operator=;
749
750#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
751 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
752#endif
753 };
754
755 /// Explicit specialization for unsigned char.
756 template<>
757 struct atomic<unsigned char> : __atomic_base<unsigned char>
758 {
759 typedef unsigned char __integral_type;
760 typedef __atomic_base<unsigned char> __base_type;
761
762 atomic() noexcept= default;
763 ~atomic() noexcept = default;
764 atomic(const atomic&) = delete;
765 atomic& operator=(const atomic&) = delete;
766 atomic& operator=(const atomic&) volatile = delete;
767
768 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
769
770 using __base_type::operator __integral_type;
771 using __base_type::operator=;
772
773#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
774 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
775#endif
776 };
777
778 /// Explicit specialization for short.
779 template<>
780 struct atomic<short> : __atomic_base<short>
781 {
782 typedef short __integral_type;
783 typedef __atomic_base<short> __base_type;
784
785 atomic() noexcept = default;
786 ~atomic() noexcept = default;
787 atomic(const atomic&) = delete;
788 atomic& operator=(const atomic&) = delete;
789 atomic& operator=(const atomic&) volatile = delete;
790
791 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
792
793 using __base_type::operator __integral_type;
794 using __base_type::operator=;
795
796#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
797 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
798#endif
799 };
800
801 /// Explicit specialization for unsigned short.
802 template<>
803 struct atomic<unsigned short> : __atomic_base<unsigned short>
804 {
805 typedef unsigned short __integral_type;
806 typedef __atomic_base<unsigned short> __base_type;
807
808 atomic() noexcept = default;
809 ~atomic() noexcept = default;
810 atomic(const atomic&) = delete;
811 atomic& operator=(const atomic&) = delete;
812 atomic& operator=(const atomic&) volatile = delete;
813
814 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
815
816 using __base_type::operator __integral_type;
817 using __base_type::operator=;
818
819#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
820 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
821#endif
822 };
823
824 /// Explicit specialization for int.
825 template<>
826 struct atomic<int> : __atomic_base<int>
827 {
828 typedef int __integral_type;
829 typedef __atomic_base<int> __base_type;
830
831 atomic() noexcept = default;
832 ~atomic() noexcept = default;
833 atomic(const atomic&) = delete;
834 atomic& operator=(const atomic&) = delete;
835 atomic& operator=(const atomic&) volatile = delete;
836
837 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
838
839 using __base_type::operator __integral_type;
840 using __base_type::operator=;
841
842#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
843 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
844#endif
845 };
846
847 /// Explicit specialization for unsigned int.
848 template<>
849 struct atomic<unsigned int> : __atomic_base<unsigned int>
850 {
851 typedef unsigned int __integral_type;
852 typedef __atomic_base<unsigned int> __base_type;
853
854 atomic() noexcept = default;
855 ~atomic() noexcept = default;
856 atomic(const atomic&) = delete;
857 atomic& operator=(const atomic&) = delete;
858 atomic& operator=(const atomic&) volatile = delete;
859
860 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
861
862 using __base_type::operator __integral_type;
863 using __base_type::operator=;
864
865#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
866 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
867#endif
868 };
869
870 /// Explicit specialization for long.
871 template<>
872 struct atomic<long> : __atomic_base<long>
873 {
874 typedef long __integral_type;
875 typedef __atomic_base<long> __base_type;
876
877 atomic() noexcept = default;
878 ~atomic() noexcept = default;
879 atomic(const atomic&) = delete;
880 atomic& operator=(const atomic&) = delete;
881 atomic& operator=(const atomic&) volatile = delete;
882
883 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
884
885 using __base_type::operator __integral_type;
886 using __base_type::operator=;
887
888#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
889 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
890#endif
891 };
892
893 /// Explicit specialization for unsigned long.
894 template<>
895 struct atomic<unsigned long> : __atomic_base<unsigned long>
896 {
897 typedef unsigned long __integral_type;
898 typedef __atomic_base<unsigned long> __base_type;
899
900 atomic() noexcept = default;
901 ~atomic() noexcept = default;
902 atomic(const atomic&) = delete;
903 atomic& operator=(const atomic&) = delete;
904 atomic& operator=(const atomic&) volatile = delete;
905
906 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
907
908 using __base_type::operator __integral_type;
909 using __base_type::operator=;
910
911#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
912 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
913#endif
914 };
915
916 /// Explicit specialization for long long.
917 template<>
918 struct atomic<long long> : __atomic_base<long long>
919 {
920 typedef long long __integral_type;
921 typedef __atomic_base<long long> __base_type;
922
923 atomic() noexcept = default;
924 ~atomic() noexcept = default;
925 atomic(const atomic&) = delete;
926 atomic& operator=(const atomic&) = delete;
927 atomic& operator=(const atomic&) volatile = delete;
928
929 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
930
931 using __base_type::operator __integral_type;
932 using __base_type::operator=;
933
934#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
935 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
936#endif
937 };
938
939 /// Explicit specialization for unsigned long long.
940 template<>
941 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
942 {
943 typedef unsigned long long __integral_type;
944 typedef __atomic_base<unsigned long long> __base_type;
945
946 atomic() noexcept = default;
947 ~atomic() noexcept = default;
948 atomic(const atomic&) = delete;
949 atomic& operator=(const atomic&) = delete;
950 atomic& operator=(const atomic&) volatile = delete;
951
952 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
953
954 using __base_type::operator __integral_type;
955 using __base_type::operator=;
956
957#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
958 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
959#endif
960 };
961
962 /// Explicit specialization for wchar_t.
963 template<>
964 struct atomic<wchar_t> : __atomic_base<wchar_t>
965 {
966 typedef wchar_t __integral_type;
967 typedef __atomic_base<wchar_t> __base_type;
968
969 atomic() noexcept = default;
970 ~atomic() noexcept = default;
971 atomic(const atomic&) = delete;
972 atomic& operator=(const atomic&) = delete;
973 atomic& operator=(const atomic&) volatile = delete;
974
975 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
976
977 using __base_type::operator __integral_type;
978 using __base_type::operator=;
979
980#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
981 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
982#endif
983 };
984
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  // Inherits all operations from __atomic_base<char8_t>.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      using __integral_type = char8_t;
      using __base_type = __atomic_base<char8_t>;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif
1010
1011 /// Explicit specialization for char16_t.
1012 template<>
1013 struct atomic<char16_t> : __atomic_base<char16_t>
1014 {
1015 typedef char16_t __integral_type;
1016 typedef __atomic_base<char16_t> __base_type;
1017
1018 atomic() noexcept = default;
1019 ~atomic() noexcept = default;
1020 atomic(const atomic&) = delete;
1021 atomic& operator=(const atomic&) = delete;
1022 atomic& operator=(const atomic&) volatile = delete;
1023
1024 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1025
1026 using __base_type::operator __integral_type;
1027 using __base_type::operator=;
1028
1029#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1030 static constexpr bool is_always_lock_free
1031 = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1032#endif
1033 };
1034
1035 /// Explicit specialization for char32_t.
1036 template<>
1037 struct atomic<char32_t> : __atomic_base<char32_t>
1038 {
1039 typedef char32_t __integral_type;
1040 typedef __atomic_base<char32_t> __base_type;
1041
1042 atomic() noexcept = default;
1043 ~atomic() noexcept = default;
1044 atomic(const atomic&) = delete;
1045 atomic& operator=(const atomic&) = delete;
1046 atomic& operator=(const atomic&) volatile = delete;
1047
1048 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1049
1050 using __base_type::operator __integral_type;
1051 using __base_type::operator=;
1052
1053#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1054 static constexpr bool is_always_lock_free
1055 = ATOMIC_CHAR32_T_LOCK_FREE == 2;
1056#endif
1057 };
1058
1059
1060 /// atomic_bool
1062
1063 /// atomic_char
1065
1066 /// atomic_schar
1068
1069 /// atomic_uchar
1071
1072 /// atomic_short
1074
1075 /// atomic_ushort
1077
1078 /// atomic_int
1080
1081 /// atomic_uint
1083
1084 /// atomic_long
1086
1087 /// atomic_ulong
1089
1090 /// atomic_llong
1092
1093 /// atomic_ullong
1095
1096 /// atomic_wchar_t
1098
1099#ifdef _GLIBCXX_USE_CHAR8_T
1100 /// atomic_char8_t
1101 typedef atomic<char8_t> atomic_char8_t;
1102#endif
1103
1104 /// atomic_char16_t
1106
1107 /// atomic_char32_t
1109
1110#ifdef _GLIBCXX_USE_C99_STDINT
1111 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1112 // 2441. Exact-width atomic typedefs should be provided
1113
1114 /// atomic_int8_t
1116
1117 /// atomic_uint8_t
1119
1120 /// atomic_int16_t
1122
1123 /// atomic_uint16_t
1125
1126 /// atomic_int32_t
1128
1129 /// atomic_uint32_t
1131
1132 /// atomic_int64_t
1134
1135 /// atomic_uint64_t
1137#endif
1138
1139 /// atomic_int_least8_t
1141
1142 /// atomic_uint_least8_t
1144
1145 /// atomic_int_least16_t
1147
1148 /// atomic_uint_least16_t
1150
1151 /// atomic_int_least32_t
1153
1154 /// atomic_uint_least32_t
1156
1157 /// atomic_int_least64_t
1159
1160 /// atomic_uint_least64_t
1162
1163
1164 /// atomic_int_fast8_t
1166
1167 /// atomic_uint_fast8_t
1169
1170 /// atomic_int_fast16_t
1172
1173 /// atomic_uint_fast16_t
1175
1176 /// atomic_int_fast32_t
1178
1179 /// atomic_uint_fast32_t
1181
1182 /// atomic_int_fast64_t
1184
1185 /// atomic_uint_fast64_t
1187
1188
1189 /// atomic_intptr_t
1191
1192 /// atomic_uintptr_t
1194
1195 /// atomic_size_t
1197
1198 /// atomic_ptrdiff_t
1200
1201 /// atomic_intmax_t
1203
1204 /// atomic_uintmax_t
1206
1207 // Function definitions, atomic_flag operations.
// C-compatible free-function API for atomic_flag ([atomics.flag]); each
// simply forwards to the corresponding member function.
1208 inline bool
1209 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1210 memory_order __m) noexcept
1211 { return __a->test_and_set(__m); }
1212
1213 inline bool
1214 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1215 memory_order __m) noexcept
1216 { return __a->test_and_set(__m); }
1217
1218#if __cpp_lib_atomic_flag_test
// C++20: non-modifying read of the flag's current value.
1219 inline bool
1220 atomic_flag_test(const atomic_flag* __a) noexcept
1221 { return __a->test(); }
1222
1223 inline bool
1224 atomic_flag_test(const volatile atomic_flag* __a) noexcept
1225 { return __a->test(); }
1226
1227 inline bool
1228 atomic_flag_test_explicit(const atomic_flag* __a,
1229 memory_order __m) noexcept
1230 { return __a->test(__m); }
1231
1232 inline bool
1233 atomic_flag_test_explicit(const volatile atomic_flag* __a,
1234 memory_order __m) noexcept
1235 { return __a->test(__m); }
1236#endif
1237
1238 inline void
1239 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1240 { __a->clear(__m); }
1241
1242 inline void
1243 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1244 memory_order __m) noexcept
1245 { __a->clear(__m); }
1246
// Non-explicit forms use sequentially-consistent ordering.
1247 inline bool
1248 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1249 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1250
1251 inline bool
1252 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1253 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1254
1255 inline void
1256 atomic_flag_clear(atomic_flag* __a) noexcept
1257 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1258
1259 inline void
1260 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1261 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1262
1263#if __cpp_lib_atomic_wait
// C++20 waiting/notifying API for atomic_flag: block until the flag's
// value differs from __old; notify wakes blocked waiters.
1264 inline void
1265 atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
1266 { __a->wait(__old); }
1267
1268 inline void
1269 atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
1270 memory_order __m) noexcept
1271 { __a->wait(__old, __m); }
1272
1273 inline void
1274 atomic_flag_notify_one(atomic_flag* __a) noexcept
1275 { __a->notify_one(); }
1276
1277 inline void
1278 atomic_flag_notify_all(atomic_flag* __a) noexcept
1279 { __a->notify_all(); }
1280#endif // __cpp_lib_atomic_wait
1281
1282 /// @cond undocumented
1283 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1284 // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
// __atomic_val_t<_Tp> is a non-deduced alias for _Tp, so that the value
// arguments of the non-member functions below do not participate in
// template argument deduction (only the atomic<_Tp>* parameter does).
1285 template<typename _Tp>
1286 using __atomic_val_t = __type_identity_t<_Tp>;
// __atomic_diff_t selects the arithmetic difference type declared by the
// specialization (e.g. ptrdiff_t for atomic pointers).
1287 template<typename _Tp>
1288 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1289 /// @endcond
1290
1291 // [atomics.nonmembers] Non-member functions.
1292 // Function templates generally applicable to atomic types.
// Each *_explicit function forwards to the equally-named member function
// with the given memory_order; each _ITp is deduced solely from the
// atomic<_ITp>* argument (see __atomic_val_t above).
1293 template<typename _ITp>
1294 inline bool
1295 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1296 { return __a->is_lock_free(); }
1297
1298 template<typename _ITp>
1299 inline bool
1300 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1301 { return __a->is_lock_free(); }
1302
// atomic_init performs a relaxed store; it is not a full initialization
// barrier (deprecated facility retained for C compatibility).
1303 template<typename _ITp>
1304 inline void
1305 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1306 { __a->store(__i, memory_order_relaxed); }
1307
1308 template<typename _ITp>
1309 inline void
1310 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1311 { __a->store(__i, memory_order_relaxed); }
1312
1313 template<typename _ITp>
1314 inline void
1315 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1316 memory_order __m) noexcept
1317 { __a->store(__i, __m); }
1318
1319 template<typename _ITp>
1320 inline void
1321 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1322 memory_order __m) noexcept
1323 { __a->store(__i, __m); }
1324
1325 template<typename _ITp>
1326 inline _ITp
1327 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1328 { return __a->load(__m); }
1329
1330 template<typename _ITp>
1331 inline _ITp
1332 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1333 memory_order __m) noexcept
1334 { return __a->load(__m); }
1335
1336 template<typename _ITp>
1337 inline _ITp
1338 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1339 memory_order __m) noexcept
1340 { return __a->exchange(__i, __m); }
1341
1342 template<typename _ITp>
1343 inline _ITp
1344 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1345 __atomic_val_t<_ITp> __i,
1346 memory_order __m) noexcept
1347 { return __a->exchange(__i, __m); }
1348
// Compare-exchange: on failure, *__i1 is updated with the observed value.
// __m1 is the success ordering, __m2 the failure ordering.
1349 template<typename _ITp>
1350 inline bool
1351 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1352 __atomic_val_t<_ITp>* __i1,
1353 __atomic_val_t<_ITp> __i2,
1354 memory_order __m1,
1355 memory_order __m2) noexcept
1356 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1357
1358 template<typename _ITp>
1359 inline bool
1360 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1361 __atomic_val_t<_ITp>* __i1,
1362 __atomic_val_t<_ITp> __i2,
1363 memory_order __m1,
1364 memory_order __m2) noexcept
1365 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1366
1367 template<typename _ITp>
1368 inline bool
1369 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1370 __atomic_val_t<_ITp>* __i1,
1371 __atomic_val_t<_ITp> __i2,
1372 memory_order __m1,
1373 memory_order __m2) noexcept
1374 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1375
1376 template<typename _ITp>
1377 inline bool
1378 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1379 __atomic_val_t<_ITp>* __i1,
1380 __atomic_val_t<_ITp> __i2,
1381 memory_order __m1,
1382 memory_order __m2) noexcept
1383 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1384
1385
// Non-explicit convenience overloads: all use memory_order_seq_cst.
1386 template<typename _ITp>
1387 inline void
1388 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1389 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1390
1391 template<typename _ITp>
1392 inline void
1393 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1394 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1395
1396 template<typename _ITp>
1397 inline _ITp
1398 atomic_load(const atomic<_ITp>* __a) noexcept
1399 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1400
1401 template<typename _ITp>
1402 inline _ITp
1403 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1404 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1405
1406 template<typename _ITp>
1407 inline _ITp
1408 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1409 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1410
1411 template<typename _ITp>
1412 inline _ITp
1413 atomic_exchange(volatile atomic<_ITp>* __a,
1414 __atomic_val_t<_ITp> __i) noexcept
1415 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1416
1417 template<typename _ITp>
1418 inline bool
1419 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1420 __atomic_val_t<_ITp>* __i1,
1421 __atomic_val_t<_ITp> __i2) noexcept
1422 {
1423 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1424 memory_order_seq_cst,
1425 memory_order_seq_cst);
1426 }
1427
1428 template<typename _ITp>
1429 inline bool
1430 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1431 __atomic_val_t<_ITp>* __i1,
1432 __atomic_val_t<_ITp> __i2) noexcept
1433 {
1434 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1435 memory_order_seq_cst,
1436 memory_order_seq_cst);
1437 }
1438
1439 template<typename _ITp>
1440 inline bool
1441 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1442 __atomic_val_t<_ITp>* __i1,
1443 __atomic_val_t<_ITp> __i2) noexcept
1444 {
1445 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1446 memory_order_seq_cst,
1447 memory_order_seq_cst);
1448 }
1449
1450 template<typename _ITp>
1451 inline bool
1452 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1453 __atomic_val_t<_ITp>* __i1,
1454 __atomic_val_t<_ITp> __i2) noexcept
1455 {
1456 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1457 memory_order_seq_cst,
1458 memory_order_seq_cst);
1459 }
1461
1462#if __cpp_lib_atomic_wait
// C++20 non-member wait/notify for atomic<_Tp>: wait blocks until the
// stored value differs from __old; notify wakes blocked waiters.
1463 template<typename _Tp>
1464 inline void
1465 atomic_wait(const atomic<_Tp>* __a,
1466 typename std::atomic<_Tp>::value_type __old) noexcept
1467 { __a->wait(__old); }
1468
1469 template<typename _Tp>
1470 inline void
1471 atomic_wait_explicit(const atomic<_Tp>* __a,
1472 typename std::atomic<_Tp>::value_type __old,
1473 std::memory_order __m) noexcept
1474 { __a->wait(__old, __m); }
1475
1476 template<typename _Tp>
1477 inline void
1478 atomic_notify_one(atomic<_Tp>* __a) noexcept
1479 { __a->notify_one(); }
1480
1481 template<typename _Tp>
1482 inline void
1483 atomic_notify_all(atomic<_Tp>* __a) noexcept
1484 { __a->notify_all(); }
1485#endif // __cpp_lib_atomic_wait
1486
1487 // Function templates for atomic_integral and atomic_pointer operations only.
1488 // Some operations (and, or, xor) are only available for atomic integrals,
1489 // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1490
// add/sub take __atomic_diff_t so pointer atomics accept ptrdiff_t.
1491 template<typename _ITp>
1492 inline _ITp
1493 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1494 __atomic_diff_t<_ITp> __i,
1495 memory_order __m) noexcept
1496 { return __a->fetch_add(__i, __m); }
1497
1498 template<typename _ITp>
1499 inline _ITp
1500 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1501 __atomic_diff_t<_ITp> __i,
1502 memory_order __m) noexcept
1503 { return __a->fetch_add(__i, __m); }
1504
1505 template<typename _ITp>
1506 inline _ITp
1507 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1508 __atomic_diff_t<_ITp> __i,
1509 memory_order __m) noexcept
1510 { return __a->fetch_sub(__i, __m); }
1511
1512 template<typename _ITp>
1513 inline _ITp
1514 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1515 __atomic_diff_t<_ITp> __i,
1516 memory_order __m) noexcept
1517 { return __a->fetch_sub(__i, __m); }
1518
// Bitwise ops accept __atomic_base<_ITp>* so they only match atomic
// integral specializations (see the comment at the top of this section).
1519 template<typename _ITp>
1520 inline _ITp
1521 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1522 __atomic_val_t<_ITp> __i,
1523 memory_order __m) noexcept
1524 { return __a->fetch_and(__i, __m); }
1525
1526 template<typename _ITp>
1527 inline _ITp
1528 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1529 __atomic_val_t<_ITp> __i,
1530 memory_order __m) noexcept
1531 { return __a->fetch_and(__i, __m); }
1532
1533 template<typename _ITp>
1534 inline _ITp
1535 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1536 __atomic_val_t<_ITp> __i,
1537 memory_order __m) noexcept
1538 { return __a->fetch_or(__i, __m); }
1539
1540 template<typename _ITp>
1541 inline _ITp
1542 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1543 __atomic_val_t<_ITp> __i,
1544 memory_order __m) noexcept
1545 { return __a->fetch_or(__i, __m); }
1546
1547 template<typename _ITp>
1548 inline _ITp
1549 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1550 __atomic_val_t<_ITp> __i,
1551 memory_order __m) noexcept
1552 { return __a->fetch_xor(__i, __m); }
1553
1554 template<typename _ITp>
1555 inline _ITp
1556 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1557 __atomic_val_t<_ITp> __i,
1558 memory_order __m) noexcept
1559 { return __a->fetch_xor(__i, __m); }
1560
// Non-explicit convenience overloads: all use memory_order_seq_cst.
1561 template<typename _ITp>
1562 inline _ITp
1563 atomic_fetch_add(atomic<_ITp>* __a,
1564 __atomic_diff_t<_ITp> __i) noexcept
1565 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1566
1567 template<typename _ITp>
1568 inline _ITp
1569 atomic_fetch_add(volatile atomic<_ITp>* __a,
1570 __atomic_diff_t<_ITp> __i) noexcept
1571 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1572
1573 template<typename _ITp>
1574 inline _ITp
1575 atomic_fetch_sub(atomic<_ITp>* __a,
1576 __atomic_diff_t<_ITp> __i) noexcept
1577 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1578
1579 template<typename _ITp>
1580 inline _ITp
1581 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1582 __atomic_diff_t<_ITp> __i) noexcept
1583 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1584
1585 template<typename _ITp>
1586 inline _ITp
1587 atomic_fetch_and(__atomic_base<_ITp>* __a,
1588 __atomic_val_t<_ITp> __i) noexcept
1589 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1590
1591 template<typename _ITp>
1592 inline _ITp
1593 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1594 __atomic_val_t<_ITp> __i) noexcept
1595 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1596
1597 template<typename _ITp>
1598 inline _ITp
1599 atomic_fetch_or(__atomic_base<_ITp>* __a,
1600 __atomic_val_t<_ITp> __i) noexcept
1601 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1602
1603 template<typename _ITp>
1604 inline _ITp
1605 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1606 __atomic_val_t<_ITp> __i) noexcept
1607 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1608
1609 template<typename _ITp>
1610 inline _ITp
1611 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1612 __atomic_val_t<_ITp> __i) noexcept
1613 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1614
1615 template<typename _ITp>
1616 inline _ITp
1617 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1618 __atomic_val_t<_ITp> __i) noexcept
1619 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1620
1621#ifdef __cpp_lib_atomic_float
// C++20 (P0020) floating-point specializations: all atomic behavior,
// including fetch_add/fetch_sub, comes from the __atomic_float base;
// these specializations only pin down the special member functions.
1622 template<>
1623 struct atomic<float> : __atomic_float<float>
1624 {
1625 atomic() noexcept = default;
1626
1627 constexpr
1628 atomic(float __fp) noexcept : __atomic_float<float>(__fp)
1629 { }
1630
1631 atomic& operator=(const atomic&) volatile = delete;
1632 atomic& operator=(const atomic&) = delete;
1633
1634 using __atomic_float<float>::operator=;
1635 };
1636
1637 template<>
1638 struct atomic<double> : __atomic_float<double>
1639 {
1640 atomic() noexcept = default;
1641
1642 constexpr
1643 atomic(double __fp) noexcept : __atomic_float<double>(__fp)
1644 { }
1645
1646 atomic& operator=(const atomic&) volatile = delete;
1647 atomic& operator=(const atomic&) = delete;
1648
1649 using __atomic_float<double>::operator=;
1650 };
1651
1652 template<>
1653 struct atomic<long double> : __atomic_float<long double>
1654 {
1655 atomic() noexcept = default;
1656
1657 constexpr
1658 atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
1659 { }
1660
1661 atomic& operator=(const atomic&) volatile = delete;
1662 atomic& operator=(const atomic&) = delete;
1663
1664 using __atomic_float<long double>::operator=;
1665 };
1666
// Extended floating-point types (C++23 <stdfloat>): each is provided
// only when the corresponding type exists on the target.
1667#ifdef __STDCPP_FLOAT16_T__
1668 template<>
1669 struct atomic<_Float16> : __atomic_float<_Float16>
1670 {
1671 atomic() noexcept = default;
1672
1673 constexpr
1674 atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
1675 { }
1676
1677 atomic& operator=(const atomic&) volatile = delete;
1678 atomic& operator=(const atomic&) = delete;
1679
1680 using __atomic_float<_Float16>::operator=;
1681 };
1682#endif
1683
1684#ifdef __STDCPP_FLOAT32_T__
1685 template<>
1686 struct atomic<_Float32> : __atomic_float<_Float32>
1687 {
1688 atomic() noexcept = default;
1689
1690 constexpr
1691 atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
1692 { }
1693
1694 atomic& operator=(const atomic&) volatile = delete;
1695 atomic& operator=(const atomic&) = delete;
1696
1697 using __atomic_float<_Float32>::operator=;
1698 };
1699#endif
1700
1701#ifdef __STDCPP_FLOAT64_T__
1702 template<>
1703 struct atomic<_Float64> : __atomic_float<_Float64>
1704 {
1705 atomic() noexcept = default;
1706
1707 constexpr
1708 atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
1709 { }
1710
1711 atomic& operator=(const atomic&) volatile = delete;
1712 atomic& operator=(const atomic&) = delete;
1713
1714 using __atomic_float<_Float64>::operator=;
1715 };
1716#endif
1717
1718#ifdef __STDCPP_FLOAT128_T__
1719 template<>
1720 struct atomic<_Float128> : __atomic_float<_Float128>
1721 {
1722 atomic() noexcept = default;
1723
1724 constexpr
1725 atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
1726 { }
1727
1728 atomic& operator=(const atomic&) volatile = delete;
1729 atomic& operator=(const atomic&) = delete;
1730
1731 using __atomic_float<_Float128>::operator=;
1732 };
1733#endif
1734
1735#ifdef __STDCPP_BFLOAT16_T__
1736 template<>
1737 struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
1738 {
1739 atomic() noexcept = default;
1740
1741 constexpr
1742 atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
1743 { }
1744
1745 atomic& operator=(const atomic&) volatile = delete;
1746 atomic& operator=(const atomic&) = delete;
1747
1748 using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
1749 };
1750#endif
1751#endif // __cpp_lib_atomic_float
1752
1753#ifdef __cpp_lib_atomic_ref
1754 /// Class template to provide atomic operations on a non-atomic variable.
// C++20 (P0019): atomic_ref applies atomic operations to an existing
// object it refers to. Copying an atomic_ref is allowed (it rebinds no
// state, both refer to the same object); assignment through operator=
// comes from __atomic_ref and stores to the referenced object.
1755 template<typename _Tp>
1756 struct atomic_ref : __atomic_ref<_Tp>
1757 {
1758 explicit
1759 atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
1760 { }
1761
1762 atomic_ref& operator=(const atomic_ref&) = delete;
1763
1764 atomic_ref(const atomic_ref&) = default;
1765
1766 using __atomic_ref<_Tp>::operator=;
1767 };
1768#endif // __cpp_lib_atomic_ref
1769
#ifdef __cpp_lib_atomic_lock_free_type_aliases
# ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
  // Prefer the platform's native wait type, so these aliases are both
  // always lock-free and efficiently waitable.
  // NOTE(review): the initializers on the next two declarations were
  // elided by the documentation extraction (doc lines 1773/1775 are
  // missing); restored from the libstdc++ sources -- confirm upstream.
  using atomic_signed_lock_free
    = atomic<make_signed_t<__detail::__platform_wait_t>>;
  using atomic_unsigned_lock_free
    = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
# elif ATOMIC_INT_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed int>;
  using atomic_unsigned_lock_free = atomic<unsigned int>;
# elif ATOMIC_LONG_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed long>;
  using atomic_unsigned_lock_free = atomic<unsigned long>;
# elif ATOMIC_CHAR_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed char>;
  using atomic_unsigned_lock_free = atomic<unsigned char>;
# else
# error "libstdc++ bug: no lock-free atomics but they were emitted in <version>"
# endif
#endif
1789
1790 /// @} group atomics
1791
1792_GLIBCXX_END_NAMESPACE_VERSION
1793} // namespace
1794
1795#endif // C++11
1796
1797#endif // _GLIBCXX_ATOMIC
constexpr _Tp * addressof(_Tp &__r) noexcept
Returns the actual address of the object or function referenced by r, even in the presence of an over...
Definition move.h:175
constexpr _Tp * __addressof(_Tp &__r) noexcept
Same as C++11 std::addressof.
Definition move.h:51
atomic< unsigned long > atomic_ulong
atomic_ulong
Definition atomic:1088
atomic< intmax_t > atomic_intmax_t
atomic_intmax_t
Definition atomic:1202
atomic< uintptr_t > atomic_uintptr_t
atomic_uintptr_t
Definition atomic:1193
atomic< signed char > atomic_schar
atomic_schar
Definition atomic:1067
atomic< int_least8_t > atomic_int_least8_t
atomic_int_least8_t
Definition atomic:1140
atomic< unsigned long long > atomic_ullong
atomic_ullong
Definition atomic:1094
atomic< uint_fast8_t > atomic_uint_fast8_t
atomic_uint_fast8_t
Definition atomic:1168
atomic< intptr_t > atomic_intptr_t
atomic_intptr_t
Definition atomic:1190
atomic< int16_t > atomic_int16_t
atomic_int16_t
Definition atomic:1121
atomic< size_t > atomic_size_t
atomic_size_t
Definition atomic:1196
atomic< long > atomic_long
atomic_long
Definition atomic:1085
atomic< uint_least8_t > atomic_uint_least8_t
atomic_uint_least8_t
Definition atomic:1143
atomic< short > atomic_short
atomic_short
Definition atomic:1073
atomic< uint_least16_t > atomic_uint_least16_t
atomic_uint_least16_t
Definition atomic:1149
atomic< uint16_t > atomic_uint16_t
atomic_uint16_t
Definition atomic:1124
atomic< uint64_t > atomic_uint64_t
atomic_uint64_t
Definition atomic:1136
atomic< int_least32_t > atomic_int_least32_t
atomic_int_least32_t
Definition atomic:1152
atomic< uint8_t > atomic_uint8_t
atomic_uint8_t
Definition atomic:1118
#define ATOMIC_BOOL_LOCK_FREE
atomic< wchar_t > atomic_wchar_t
atomic_wchar_t
Definition atomic:1097
atomic< unsigned int > atomic_uint
atomic_uint
Definition atomic:1082
atomic< uint_least32_t > atomic_uint_least32_t
atomic_uint_least32_t
Definition atomic:1155
atomic< uint_fast64_t > atomic_uint_fast64_t
atomic_uint_fast64_t
Definition atomic:1186
atomic< int_fast32_t > atomic_int_fast32_t
atomic_int_fast32_t
Definition atomic:1177
atomic< char > atomic_char
atomic_char
Definition atomic:1064
atomic< int > atomic_int
atomic_int
Definition atomic:1079
atomic< uint_least64_t > atomic_uint_least64_t
atomic_uint_least64_t
Definition atomic:1161
atomic< int64_t > atomic_int64_t
atomic_int64_t
Definition atomic:1133
atomic< uintmax_t > atomic_uintmax_t
atomic_uintmax_t
Definition atomic:1205
atomic< int_fast16_t > atomic_int_fast16_t
atomic_int_fast16_t
Definition atomic:1171
atomic< int32_t > atomic_int32_t
atomic_int32_t
Definition atomic:1127
atomic< uint_fast16_t > atomic_uint_fast16_t
atomic_uint_fast16_t
Definition atomic:1174
atomic< int8_t > atomic_int8_t
atomic_int8_t
Definition atomic:1115
atomic< long long > atomic_llong
atomic_llong
Definition atomic:1091
atomic< char16_t > atomic_char16_t
atomic_char16_t
Definition atomic:1105
atomic< int_fast64_t > atomic_int_fast64_t
atomic_int_fast64_t
Definition atomic:1183
atomic< ptrdiff_t > atomic_ptrdiff_t
atomic_ptrdiff_t
Definition atomic:1199
atomic< char32_t > atomic_char32_t
atomic_char32_t
Definition atomic:1108
atomic< int_least16_t > atomic_int_least16_t
atomic_int_least16_t
Definition atomic:1146
atomic< unsigned char > atomic_uchar
atomic_uchar
Definition atomic:1070
atomic< int_fast8_t > atomic_int_fast8_t
atomic_int_fast8_t
Definition atomic:1165
memory_order
Enumeration for memory_order.
Definition atomic_base.h:65
atomic< unsigned short > atomic_ushort
atomic_ushort
Definition atomic:1076
atomic< int_least64_t > atomic_int_least64_t
atomic_int_least64_t
Definition atomic:1158
atomic< bool > atomic_bool
atomic_bool
Definition atomic:1061
atomic< uint_fast32_t > atomic_uint_fast32_t
atomic_uint_fast32_t
Definition atomic:1180
atomic< uint32_t > atomic_uint32_t
atomic_uint32_t
Definition atomic:1130
ISO C++ entities: the top-level namespace is std.
Generic atomic type, primary class template.
Definition atomic:204
atomic_flag