libstdc++ — header <atomic>: source listing of this Standard C++ Library header (generated documentation page).
1// -*- C++ -*- header.
2
3// Copyright (C) 2008-2023 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file include/atomic
26 * This is a Standard C++ Library header.
27 */
28
29// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31
32#ifndef _GLIBCXX_ATOMIC
33#define _GLIBCXX_ATOMIC 1
34
35#pragma GCC system_header
36
37#if __cplusplus < 201103L
38# include <bits/c++0x_warning.h>
39#else
40
41#include <bits/atomic_base.h>
42
43namespace std _GLIBCXX_VISIBILITY(default)
44{
45_GLIBCXX_BEGIN_NAMESPACE_VERSION
46
47 /**
48 * @addtogroup atomics
49 * @{
50 */
51
52#if __cplusplus >= 201703L
53# define __cpp_lib_atomic_is_always_lock_free 201603L
54#endif
55
  // Forward declaration of the primary template; the definition appears
  // later in this header, after the atomic<bool> specialization.
  template<typename _Tp>
    struct atomic;
  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      // Every operation forwards to this base object, which wraps the
      // compiler's atomic built-ins for a plain bool.
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Non-atomic initialization to @p __i.
      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      /// Atomic assignment (seq_cst store); returns the stored value.
      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      /// Implicit conversion performs a seq_cst load.
      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      /// True if operations on this object are implemented lock-free.
      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
      /// True if bool atomics are always lock-free on this target (C++17).
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      /// Atomically replace the value with @p __i under ordering @p __m.
      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      /// Atomically read the current value under ordering @p __m.
      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      /// Atomically replace the value with @p __i, returning the old value.
      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      /// Weak CAS (may fail spuriously). @p __m1 / @p __m2 are the
      /// success / failure orderings; on failure @p __i1 is updated with
      /// the value observed.
      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      /// Weak CAS with the failure ordering derived from @p __m.
      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      /// Strong CAS: fails only if the stored value differs from @p __i1.
      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
      /// Block until notified and the value differs from @p __old (C++20).
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      // TODO add const volatile overload

      /// Wake one thread blocked in wait() on this object.
      void
      notify_one() noexcept
      { _M_base.notify_one(); }

      /// Wake all threads blocked in wait() on this object.
      void
      notify_all() noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
183
184/// @cond undocumented
185#if __cpp_lib_atomic_value_initialization
186# define _GLIBCXX20_INIT(I) = I
187#else
188# define _GLIBCXX20_INIT(I)
189#endif
190/// @endcond
191
  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      // A size that is not a power of two, or exceeds 16 bytes, gets no
      // extra requirement (value 0): such objects cannot be handled as a
      // single hardware atomic unit anyway.
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      // Final alignment: the stricter of the size-based minimum above and
      // the type's natural alignment.
      static constexpr int _S_alignment
	= _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      // The stored value; value-initialized when the C++20 value-init
      // macro is active (see _GLIBCXX20_INIT above).
      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      // Additional C++20 requirements on the value type.
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wc++14-extensions" // constexpr ctor body
      /// Non-atomic initialization. Padding bits are cleared (at run time
      /// only) so that whole-object comparisons in compare_exchange are
      /// not confused by indeterminate padding.
      constexpr atomic(_Tp __i) noexcept : _M_i(__i)
      {
#if __has_builtin(__builtin_clear_padding)
	if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
	  if (!std::__is_constant_evaluated())
	    __builtin_clear_padding(std::__addressof(_M_i));
#endif
      }
#pragma GCC diagnostic pop

      /// Implicit conversion performs a seq_cst load.
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      /// Atomic assignment (seq_cst store); returns the assigned value.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      /// True if operations on this object are implemented lock-free.
      bool
      is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      // NOTE(review): the null second argument asks the built-in about an
      // object with typical alignment for the size — see the GCC __atomic
      // built-ins documentation.
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      /// Atomically replace the value with @p __i under ordering @p __m.
      /// Padding bits of the new value are cleared first.
      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	__atomic_store(std::__addressof(_M_i),
		       __atomic_impl::__clear_padding(__i),
		       int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	__atomic_store(std::__addressof(_M_i),
		       __atomic_impl::__clear_padding(__i),
		       int(__m));
      }

      /// Atomically read the current value under ordering @p __m.
      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	// Load into aligned raw storage: _Tp need not be
	// default-constructible.
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      /// Atomically replace the value with @p __i, returning the old value.
      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i),
			  __atomic_impl::__clear_padding(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i),
			  __atomic_impl::__clear_padding(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      /// Weak CAS (may fail spuriously; the fourth argument true selects
      /// the weak form). @p __s / @p __f are the success / failure
      /// orderings; on failure @p __e is updated with the observed value.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
						 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
						 __s, __f);
      }

      /// Weak CAS with the failure ordering derived from @p __m.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      /// Strong CAS: fails only if the stored value differs from @p __e.
      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
						 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
						 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait
      /// Block until notified and the value differs from @p __old (C++20).
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
	std::__atomic_wait_address_v(std::addressof(_M_i), __old,
				     [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      /// Wake one thread blocked in wait() on this object.
      void
      notify_one() noexcept
      { std::__atomic_notify_address(std::addressof(_M_i), false); }

      /// Wake all threads blocked in wait() on this object.
      void
      notify_all() noexcept
      { std::__atomic_notify_address(std::addressof(_M_i), true); }
#endif // __cpp_lib_atomic_wait
    };
412#undef _GLIBCXX20_INIT
413
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      // Every operation forwards to this base object.
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Non-atomic initialization to @p __p.
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      /// Implicit conversion performs a seq_cst load.
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      /// Atomic assignment (seq_cst store); returns the stored pointer.
      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Increment/decrement and += / -= perform atomic pointer
      // arithmetic. The C++17-mode static_asserts below reject pointers
      // to non-object types, for which arithmetic is ill-formed.

      /// Atomic post-increment.
      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      /// Atomic post-decrement.
      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      /// Atomic pre-increment.
      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      /// Atomic pre-decrement.
      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      /// Atomic advance by @p __d elements; returns the new pointer.
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      /// Atomic retreat by @p __d elements; returns the new pointer.
      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      /// True if operations on this object are implemented lock-free.
      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      /// True if pointer atomics are always lock-free on this target.
      static constexpr bool is_always_lock_free
	= ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      /// Atomically replace the pointer with @p __p under ordering @p __m.
      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      /// Atomically read the current pointer under ordering @p __m.
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      /// Atomically replace the pointer, returning the old value.
      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      /// Weak CAS (may fail spuriously); on failure @p __p1 is updated
      /// with the observed pointer.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      /// Weak CAS with the failure ordering derived from @p __m.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      /// Strong CAS: fails only if the stored pointer differs from @p __p1.
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      // NOTE(review): unlike the weak overloads above, these default-order
      // strong overloads call _M_b directly rather than the two-order
      // member above; the behavior is equivalent.
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      /// Block until notified and the pointer differs from @p __old (C++20).
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      /// Wake one thread blocked in wait() on this object.
      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      /// Wake all threads blocked in wait() on this object.
      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      /// Atomically add @p __d elements; returns the previous pointer.
      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      /// Atomically subtract @p __d elements; returns the previous pointer.
      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }
    };
706
707
708 /// Explicit specialization for char.
709 template<>
710 struct atomic<char> : __atomic_base<char>
711 {
712 typedef char __integral_type;
713 typedef __atomic_base<char> __base_type;
714
715 atomic() noexcept = default;
716 ~atomic() noexcept = default;
717 atomic(const atomic&) = delete;
718 atomic& operator=(const atomic&) = delete;
719 atomic& operator=(const atomic&) volatile = delete;
720
721 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
722
723 using __base_type::operator __integral_type;
724 using __base_type::operator=;
725
726#if __cplusplus >= 201703L
727 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
728#endif
729 };
730
731 /// Explicit specialization for signed char.
732 template<>
733 struct atomic<signed char> : __atomic_base<signed char>
734 {
735 typedef signed char __integral_type;
736 typedef __atomic_base<signed char> __base_type;
737
738 atomic() noexcept= default;
739 ~atomic() noexcept = default;
740 atomic(const atomic&) = delete;
741 atomic& operator=(const atomic&) = delete;
742 atomic& operator=(const atomic&) volatile = delete;
743
744 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
745
746 using __base_type::operator __integral_type;
747 using __base_type::operator=;
748
749#if __cplusplus >= 201703L
750 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
751#endif
752 };
753
754 /// Explicit specialization for unsigned char.
755 template<>
756 struct atomic<unsigned char> : __atomic_base<unsigned char>
757 {
758 typedef unsigned char __integral_type;
759 typedef __atomic_base<unsigned char> __base_type;
760
761 atomic() noexcept= default;
762 ~atomic() noexcept = default;
763 atomic(const atomic&) = delete;
764 atomic& operator=(const atomic&) = delete;
765 atomic& operator=(const atomic&) volatile = delete;
766
767 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
768
769 using __base_type::operator __integral_type;
770 using __base_type::operator=;
771
772#if __cplusplus >= 201703L
773 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
774#endif
775 };
776
777 /// Explicit specialization for short.
778 template<>
779 struct atomic<short> : __atomic_base<short>
780 {
781 typedef short __integral_type;
782 typedef __atomic_base<short> __base_type;
783
784 atomic() noexcept = default;
785 ~atomic() noexcept = default;
786 atomic(const atomic&) = delete;
787 atomic& operator=(const atomic&) = delete;
788 atomic& operator=(const atomic&) volatile = delete;
789
790 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
791
792 using __base_type::operator __integral_type;
793 using __base_type::operator=;
794
795#if __cplusplus >= 201703L
796 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
797#endif
798 };
799
800 /// Explicit specialization for unsigned short.
801 template<>
802 struct atomic<unsigned short> : __atomic_base<unsigned short>
803 {
804 typedef unsigned short __integral_type;
805 typedef __atomic_base<unsigned short> __base_type;
806
807 atomic() noexcept = default;
808 ~atomic() noexcept = default;
809 atomic(const atomic&) = delete;
810 atomic& operator=(const atomic&) = delete;
811 atomic& operator=(const atomic&) volatile = delete;
812
813 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
814
815 using __base_type::operator __integral_type;
816 using __base_type::operator=;
817
818#if __cplusplus >= 201703L
819 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
820#endif
821 };
822
823 /// Explicit specialization for int.
824 template<>
825 struct atomic<int> : __atomic_base<int>
826 {
827 typedef int __integral_type;
828 typedef __atomic_base<int> __base_type;
829
830 atomic() noexcept = default;
831 ~atomic() noexcept = default;
832 atomic(const atomic&) = delete;
833 atomic& operator=(const atomic&) = delete;
834 atomic& operator=(const atomic&) volatile = delete;
835
836 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
837
838 using __base_type::operator __integral_type;
839 using __base_type::operator=;
840
841#if __cplusplus >= 201703L
842 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
843#endif
844 };
845
846 /// Explicit specialization for unsigned int.
847 template<>
848 struct atomic<unsigned int> : __atomic_base<unsigned int>
849 {
850 typedef unsigned int __integral_type;
851 typedef __atomic_base<unsigned int> __base_type;
852
853 atomic() noexcept = default;
854 ~atomic() noexcept = default;
855 atomic(const atomic&) = delete;
856 atomic& operator=(const atomic&) = delete;
857 atomic& operator=(const atomic&) volatile = delete;
858
859 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
860
861 using __base_type::operator __integral_type;
862 using __base_type::operator=;
863
864#if __cplusplus >= 201703L
865 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
866#endif
867 };
868
869 /// Explicit specialization for long.
870 template<>
871 struct atomic<long> : __atomic_base<long>
872 {
873 typedef long __integral_type;
874 typedef __atomic_base<long> __base_type;
875
876 atomic() noexcept = default;
877 ~atomic() noexcept = default;
878 atomic(const atomic&) = delete;
879 atomic& operator=(const atomic&) = delete;
880 atomic& operator=(const atomic&) volatile = delete;
881
882 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
883
884 using __base_type::operator __integral_type;
885 using __base_type::operator=;
886
887#if __cplusplus >= 201703L
888 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
889#endif
890 };
891
892 /// Explicit specialization for unsigned long.
893 template<>
894 struct atomic<unsigned long> : __atomic_base<unsigned long>
895 {
896 typedef unsigned long __integral_type;
897 typedef __atomic_base<unsigned long> __base_type;
898
899 atomic() noexcept = default;
900 ~atomic() noexcept = default;
901 atomic(const atomic&) = delete;
902 atomic& operator=(const atomic&) = delete;
903 atomic& operator=(const atomic&) volatile = delete;
904
905 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
906
907 using __base_type::operator __integral_type;
908 using __base_type::operator=;
909
910#if __cplusplus >= 201703L
911 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
912#endif
913 };
914
915 /// Explicit specialization for long long.
916 template<>
917 struct atomic<long long> : __atomic_base<long long>
918 {
919 typedef long long __integral_type;
920 typedef __atomic_base<long long> __base_type;
921
922 atomic() noexcept = default;
923 ~atomic() noexcept = default;
924 atomic(const atomic&) = delete;
925 atomic& operator=(const atomic&) = delete;
926 atomic& operator=(const atomic&) volatile = delete;
927
928 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
929
930 using __base_type::operator __integral_type;
931 using __base_type::operator=;
932
933#if __cplusplus >= 201703L
934 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
935#endif
936 };
937
938 /// Explicit specialization for unsigned long long.
939 template<>
940 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
941 {
942 typedef unsigned long long __integral_type;
943 typedef __atomic_base<unsigned long long> __base_type;
944
945 atomic() noexcept = default;
946 ~atomic() noexcept = default;
947 atomic(const atomic&) = delete;
948 atomic& operator=(const atomic&) = delete;
949 atomic& operator=(const atomic&) volatile = delete;
950
951 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
952
953 using __base_type::operator __integral_type;
954 using __base_type::operator=;
955
956#if __cplusplus >= 201703L
957 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
958#endif
959 };
960
961 /// Explicit specialization for wchar_t.
962 template<>
963 struct atomic<wchar_t> : __atomic_base<wchar_t>
964 {
965 typedef wchar_t __integral_type;
966 typedef __atomic_base<wchar_t> __base_type;
967
968 atomic() noexcept = default;
969 ~atomic() noexcept = default;
970 atomic(const atomic&) = delete;
971 atomic& operator=(const atomic&) = delete;
972 atomic& operator=(const atomic&) volatile = delete;
973
974 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
975
976 using __base_type::operator __integral_type;
977 using __base_type::operator=;
978
979#if __cplusplus >= 201703L
980 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
981#endif
982 };
983
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      // Aliases for the wrapped integral type and the base class.
      using __integral_type = char8_t;
      using __base_type = __atomic_base<char8_t>;

      // Atomics are neither copyable nor copy-assignable.
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      /// Non-atomic initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Conversion and assignment come straight from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      /// Compile-time lock-freedom for char8_t atomics on this target.
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif
1009
1010 /// Explicit specialization for char16_t.
1011 template<>
1012 struct atomic<char16_t> : __atomic_base<char16_t>
1013 {
1014 typedef char16_t __integral_type;
1015 typedef __atomic_base<char16_t> __base_type;
1016
1017 atomic() noexcept = default;
1018 ~atomic() noexcept = default;
1019 atomic(const atomic&) = delete;
1020 atomic& operator=(const atomic&) = delete;
1021 atomic& operator=(const atomic&) volatile = delete;
1022
1023 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1024
1025 using __base_type::operator __integral_type;
1026 using __base_type::operator=;
1027
1028#if __cplusplus >= 201703L
1029 static constexpr bool is_always_lock_free
1030 = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1031#endif
1032 };
1033
1034 /// Explicit specialization for char32_t.
1035 template<>
1036 struct atomic<char32_t> : __atomic_base<char32_t>
1037 {
1038 typedef char32_t __integral_type;
1039 typedef __atomic_base<char32_t> __base_type;
1040
1041 atomic() noexcept = default;
1042 ~atomic() noexcept = default;
1043 atomic(const atomic&) = delete;
1044 atomic& operator=(const atomic&) = delete;
1045 atomic& operator=(const atomic&) volatile = delete;
1046
1047 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1048
1049 using __base_type::operator __integral_type;
1050 using __base_type::operator=;
1051
1052#if __cplusplus >= 201703L
1053 static constexpr bool is_always_lock_free
1054 = ATOMIC_CHAR32_T_LOCK_FREE == 2;
1055#endif
1056 };
1057
1058
1059 /// atomic_bool
1061
1062 /// atomic_char
1064
1065 /// atomic_schar
1067
1068 /// atomic_uchar
1070
1071 /// atomic_short
1073
1074 /// atomic_ushort
1076
1077 /// atomic_int
1079
1080 /// atomic_uint
1082
1083 /// atomic_long
1085
1086 /// atomic_ulong
1088
1089 /// atomic_llong
1091
1092 /// atomic_ullong
1094
1095 /// atomic_wchar_t
1097
1098#ifdef _GLIBCXX_USE_CHAR8_T
1099 /// atomic_char8_t
1100 typedef atomic<char8_t> atomic_char8_t;
1101#endif
1102
1103 /// atomic_char16_t
1105
1106 /// atomic_char32_t
1108
1109#ifdef _GLIBCXX_USE_C99_STDINT_TR1
1110 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1111 // 2441. Exact-width atomic typedefs should be provided
1112
1113 /// atomic_int8_t
1115
1116 /// atomic_uint8_t
1118
1119 /// atomic_int16_t
1121
1122 /// atomic_uint16_t
1124
1125 /// atomic_int32_t
1127
1128 /// atomic_uint32_t
1130
1131 /// atomic_int64_t
1133
1134 /// atomic_uint64_t
1136
1137
1138 /// atomic_int_least8_t
1140
1141 /// atomic_uint_least8_t
1143
1144 /// atomic_int_least16_t
1146
1147 /// atomic_uint_least16_t
1149
1150 /// atomic_int_least32_t
1152
1153 /// atomic_uint_least32_t
1155
1156 /// atomic_int_least64_t
1158
1159 /// atomic_uint_least64_t
1161
1162
1163 /// atomic_int_fast8_t
1165
1166 /// atomic_uint_fast8_t
1168
1169 /// atomic_int_fast16_t
1171
1172 /// atomic_uint_fast16_t
1174
1175 /// atomic_int_fast32_t
1177
1178 /// atomic_uint_fast32_t
1180
1181 /// atomic_int_fast64_t
1183
1184 /// atomic_uint_fast64_t
1186#endif
1187
1188
1189 /// atomic_intptr_t
1191
1192 /// atomic_uintptr_t
1194
1195 /// atomic_size_t
1197
1198 /// atomic_ptrdiff_t
1200
1201#ifdef _GLIBCXX_USE_C99_STDINT_TR1
1202 /// atomic_intmax_t
1204
1205 /// atomic_uintmax_t
1207#endif
1208
  // Function definitions, atomic_flag operations.

  /// Atomically set the flag to true with the given memory order;
  /// returns the previous value.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  /// Atomically set the flag to true with the given memory order;
  /// returns the previous value.
  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

#if __cpp_lib_atomic_flag_test
  /// Atomically read the flag's current value (C++20).
  inline bool
  atomic_flag_test(const atomic_flag* __a) noexcept
  { return __a->test(); }

  /// Atomically read the flag's current value (C++20).
  inline bool
  atomic_flag_test(const volatile atomic_flag* __a) noexcept
  { return __a->test(); }

  /// Atomically read the flag's value with the given memory order (C++20).
  inline bool
  atomic_flag_test_explicit(const atomic_flag* __a,
			    memory_order __m) noexcept
  { return __a->test(__m); }

  /// Atomically read the flag's value with the given memory order (C++20).
  inline bool
  atomic_flag_test_explicit(const volatile atomic_flag* __a,
			    memory_order __m) noexcept
  { return __a->test(__m); }
#endif

  /// Atomically clear the flag with the given memory order.
  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  /// Atomically clear the flag with the given memory order.
  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  /// Atomically set the flag to true (seq_cst); returns the previous value.
  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  /// Atomically set the flag to true (seq_cst); returns the previous value.
  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  /// Atomically clear the flag (seq_cst ordering).
  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  /// Atomically clear the flag (seq_cst ordering).
  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

#if __cpp_lib_atomic_wait
  /// Block until the flag's value changes from __old (C++20).
  inline void
  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
  { __a->wait(__old); }

  /// Block until the flag's value changes from __old, using the given
  /// memory order for the loads performed while waiting (C++20).
  inline void
  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
			    memory_order __m) noexcept
  { __a->wait(__old, __m); }

  /// Wake at least one thread blocked waiting on the flag (C++20).
  inline void
  atomic_flag_notify_one(atomic_flag* __a) noexcept
  { __a->notify_one(); }

  /// Wake all threads blocked waiting on the flag (C++20).
  inline void
  atomic_flag_notify_all(atomic_flag* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1283
  /// @cond undocumented
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
  // A non-deduced context for the value parameter, so that in e.g.
  // atomic_store(&a, 0) the element type is deduced only from the
  // atomic<_Tp>* argument and the value converts to it.
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  // The difference type used by the fetch_add/fetch_sub non-members.
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  /// @endcond
1292
  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.

  /// Whether operations on the atomic object are lock-free.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  /// Whether operations on the atomic object are lock-free.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  /// Initialize the atomic object with __i, using a relaxed store.
  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  /// Initialize the atomic object with __i, using a relaxed store.
  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  /// Atomically store __i into *__a with the given memory order.
  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  /// Atomically store __i into *__a with the given memory order.
  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  /// Atomically load the value of *__a with the given memory order.
  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  /// Atomically load the value of *__a with the given memory order.
  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  /// Atomically replace the value of *__a with __i; returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  /// Atomically replace the value of *__a with __i; returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  /// If *__a equals *__i1, store __i2 and return true; otherwise copy the
  /// current value into *__i1 and return false. May fail spuriously.
  /// __m1 orders the success case, __m2 the failure case.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  /// If *__a equals *__i1, store __i2 and return true; otherwise copy the
  /// current value into *__i1 and return false. May fail spuriously.
  /// __m1 orders the success case, __m2 the failure case.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  /// If *__a equals *__i1, store __i2 and return true; otherwise copy the
  /// current value into *__i1 and return false. Never fails spuriously.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  /// If *__a equals *__i1, store __i2 and return true; otherwise copy the
  /// current value into *__i1 and return false. Never fails spuriously.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  /// Atomically store __i into *__a (seq_cst ordering).
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically store __i into *__a (seq_cst ordering).
  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically load the value of *__a (seq_cst ordering).
  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  /// Atomically load the value of *__a (seq_cst ordering).
  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  /// Atomically replace the value of *__a with __i (seq_cst ordering);
  /// returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically replace the value of *__a with __i (seq_cst ordering);
  /// returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  /// Weak compare-and-exchange with seq_cst ordering for both outcomes.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  /// Weak compare-and-exchange with seq_cst ordering for both outcomes.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  /// Strong compare-and-exchange with seq_cst ordering for both outcomes.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  /// Strong compare-and-exchange with seq_cst ordering for both outcomes.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
1462
1463
#if __cpp_lib_atomic_wait
  /// Block until the value of *__a changes from __old (C++20).
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
		typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  /// Block until the value of *__a changes from __old, using the given
  /// memory order for the loads performed while waiting (C++20).
  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
			 typename std::atomic<_Tp>::value_type __old,
			 std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  /// Wake at least one thread blocked in atomic_wait on *__a (C++20).
  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  /// Wake all threads blocked in atomic_wait on *__a (C++20).
  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1488
  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.

  /// Atomically add __i to *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Atomically add __i to *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Atomically subtract __i from *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Atomically subtract __i from *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Atomically AND __i into *__a (integral atomics only);
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Atomically AND __i into *__a (integral atomics only);
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Atomically OR __i into *__a (integral atomics only);
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Atomically OR __i into *__a (integral atomics only);
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Atomically XOR __i into *__a (integral atomics only);
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  /// Atomically XOR __i into *__a (integral atomics only);
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  /// Atomically add __i to *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically add __i to *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically subtract __i from *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically subtract __i from *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically AND __i into *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically AND __i into *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically OR __i into *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically OR __i into *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically XOR __i into *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically XOR __i into *__a (seq_cst); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1622
#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L
  /// Explicit specialization for float (C++20, P0020).
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      /// Construct with the given initial value.
      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Re-expose assignment from float, hidden by the deleted overloads.
      using __atomic_float<float>::operator=;
    };

  /// Explicit specialization for double (C++20).
  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  /// Explicit specialization for long double (C++20).
  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };

#ifdef __STDCPP_FLOAT16_T__
  /// Explicit specialization for the std::float16_t extended type.
  template<>
    struct atomic<_Float16> : __atomic_float<_Float16>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float16>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT32_T__
  /// Explicit specialization for the std::float32_t extended type.
  template<>
    struct atomic<_Float32> : __atomic_float<_Float32>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float32>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT64_T__
  /// Explicit specialization for the std::float64_t extended type.
  template<>
    struct atomic<_Float64> : __atomic_float<_Float64>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float64>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT128_T__
  /// Explicit specialization for the std::float128_t extended type.
  template<>
    struct atomic<_Float128> : __atomic_float<_Float128>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float128>::operator=;
    };
#endif

#ifdef __STDCPP_BFLOAT16_T__
  /// Explicit specialization for the std::bfloat16_t extended type.
  template<>
    struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
    {
      atomic() noexcept = default;

      constexpr
      atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
    };
#endif
1754
#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      /// Bind the atomic_ref to __t; all operations act on that object.
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      // Copy assignment is deleted; use the inherited operator=(_Tp)
      // below to store a value through the reference.
      atomic_ref& operator=(const atomic_ref&) = delete;

      // Copying an atomic_ref binds another reference to the same object.
      atomic_ref(const atomic_ref&) = default;

      // Inherit operator=(_Tp), which performs an atomic store.
      using __atomic_ref<_Tp>::operator=;
    };
1771
1772#define __cpp_lib_atomic_lock_free_type_aliases 201907L
1773#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
1774 using atomic_signed_lock_free
1776 using atomic_unsigned_lock_free
1778#elif ATOMIC_INT_LOCK_FREE || !(ATOMIC_LONG_LOCK_FREE || ATOMIC_CHAR_LOCK_FREE)
1779 using atomic_signed_lock_free = atomic<signed int>;
1780 using atomic_unsigned_lock_free = atomic<unsigned int>;
1781#elif ATOMIC_LONG_LOCK_FREE
1782 using atomic_signed_lock_free = atomic<signed long>;
1783 using atomic_unsigned_lock_free = atomic<unsigned long>;
1784#elif ATOMIC_CHAR_LOCK_FREE
1785 using atomic_signed_lock_free = atomic<signed char>;
1786 using atomic_unsigned_lock_free = atomic<unsigned char>;
1787#endif
1788
1789#endif // C++2a
1790
1791 /// @} group atomics
1792
1793_GLIBCXX_END_NAMESPACE_VERSION
1794} // namespace
1795
1796#endif // C++11
1797
1798#endif // _GLIBCXX_ATOMIC
constexpr _Tp * addressof(_Tp &__r) noexcept
Returns the actual address of the object or function referenced by r, even in the presence of an over...
Definition move.h:138
constexpr _Tp * __addressof(_Tp &__r) noexcept
Same as C++11 std::addressof.
Definition move.h:51
atomic< unsigned long > atomic_ulong
atomic_ulong
Definition atomic:1087
atomic< intmax_t > atomic_intmax_t
atomic_intmax_t
Definition atomic:1203
atomic< uintptr_t > atomic_uintptr_t
atomic_uintptr_t
Definition atomic:1193
atomic< signed char > atomic_schar
atomic_schar
Definition atomic:1066
atomic< int_least8_t > atomic_int_least8_t
atomic_int_least8_t
Definition atomic:1139
atomic< unsigned long long > atomic_ullong
atomic_ullong
Definition atomic:1093
atomic< uint_fast8_t > atomic_uint_fast8_t
atomic_uint_fast8_t
Definition atomic:1167
atomic< intptr_t > atomic_intptr_t
atomic_intptr_t
Definition atomic:1190
atomic< int16_t > atomic_int16_t
atomic_int16_t
Definition atomic:1120
atomic< size_t > atomic_size_t
atomic_size_t
Definition atomic:1196
atomic< long > atomic_long
atomic_long
Definition atomic:1084
atomic< uint_least8_t > atomic_uint_least8_t
atomic_uint_least8_t
Definition atomic:1142
atomic< short > atomic_short
atomic_short
Definition atomic:1072
atomic< uint_least16_t > atomic_uint_least16_t
atomic_uint_least16_t
Definition atomic:1148
atomic< uint16_t > atomic_uint16_t
atomic_uint16_t
Definition atomic:1123
atomic< uint64_t > atomic_uint64_t
atomic_uint64_t
Definition atomic:1135
atomic< int_least32_t > atomic_int_least32_t
atomic_int_least32_t
Definition atomic:1151
atomic< uint8_t > atomic_uint8_t
atomic_uint8_t
Definition atomic:1117
#define ATOMIC_BOOL_LOCK_FREE
atomic< wchar_t > atomic_wchar_t
atomic_wchar_t
Definition atomic:1096
atomic< unsigned int > atomic_uint
atomic_uint
Definition atomic:1081
atomic< uint_least32_t > atomic_uint_least32_t
atomic_uint_least32_t
Definition atomic:1154
atomic< uint_fast64_t > atomic_uint_fast64_t
atomic_uint_fast64_t
Definition atomic:1185
atomic< int_fast32_t > atomic_int_fast32_t
atomic_int_fast32_t
Definition atomic:1176
atomic< char > atomic_char
atomic_char
Definition atomic:1063
atomic< int > atomic_int
atomic_int
Definition atomic:1078
atomic< uint_least64_t > atomic_uint_least64_t
atomic_uint_least64_t
Definition atomic:1160
atomic< int64_t > atomic_int64_t
atomic_int64_t
Definition atomic:1132
atomic< uintmax_t > atomic_uintmax_t
atomic_uintmax_t
Definition atomic:1206
atomic< int_fast16_t > atomic_int_fast16_t
atomic_int_fast16_t
Definition atomic:1170
atomic< int32_t > atomic_int32_t
atomic_int32_t
Definition atomic:1126
atomic< uint_fast16_t > atomic_uint_fast16_t
atomic_uint_fast16_t
Definition atomic:1173
atomic< int8_t > atomic_int8_t
atomic_int8_t
Definition atomic:1114
atomic< long long > atomic_llong
atomic_llong
Definition atomic:1090
atomic< char16_t > atomic_char16_t
atomic_char16_t
Definition atomic:1104
atomic< int_fast64_t > atomic_int_fast64_t
atomic_int_fast64_t
Definition atomic:1182
atomic< ptrdiff_t > atomic_ptrdiff_t
atomic_ptrdiff_t
Definition atomic:1199
atomic< char32_t > atomic_char32_t
atomic_char32_t
Definition atomic:1107
atomic< int_least16_t > atomic_int_least16_t
atomic_int_least16_t
Definition atomic:1145
atomic< unsigned char > atomic_uchar
atomic_uchar
Definition atomic:1069
atomic< int_fast8_t > atomic_int_fast8_t
atomic_int_fast8_t
Definition atomic:1164
memory_order
Enumeration for memory_order.
Definition atomic_base.h:63
atomic< unsigned short > atomic_ushort
atomic_ushort
Definition atomic:1075
atomic< int_least64_t > atomic_int_least64_t
atomic_int_least64_t
Definition atomic:1157
atomic< bool > atomic_bool
atomic_bool
Definition atomic:1060
atomic< uint_fast32_t > atomic_uint_fast32_t
atomic_uint_fast32_t
Definition atomic:1179
atomic< uint32_t > atomic_uint32_t
atomic_uint32_t
Definition atomic:1129
ISO C++ entities toplevel namespace is std.
constexpr _Tp exchange(_Tp &__obj, _Up &&__new_val) noexcept(__and_< is_nothrow_move_constructible< _Tp >, is_nothrow_assignable< _Tp &, _Up > >::value)
Assign __new_val to __obj and return its previous value.
Definition utility:97
Generic atomic type, primary class template.
Definition atomic:199
is_object
Definition type_traits:674
atomic_flag