libstdc++
atomic
1 // -*- C++ -*- header.
2 
3 // Copyright (C) 2008-2020 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file include/atomic
26  * This is a Standard C++ Library header.
27  */
28 
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31 
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
34 
35 #pragma GCC system_header
36 
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
39 #else
40 
41 #include <bits/atomic_base.h>
42 
43 namespace std _GLIBCXX_VISIBILITY(default)
44 {
45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
46 
47  /**
48  * @addtogroup atomics
49  * @{
50  */
51 
52 #if __cplusplus >= 201703L
53 # define __cpp_lib_atomic_is_always_lock_free 201603
54 #endif
55 
56  template<typename _Tp>
57  struct atomic;
58 
59  /// atomic<bool>
60  // NB: No operators or fetch-operations for this type.
61  template<>
62  struct atomic<bool>
63  {
64  using value_type = bool;
65 
66  private:
67  __atomic_base<bool> _M_base;
68 
69  public:
70  atomic() noexcept = default;
71  ~atomic() noexcept = default;
72  atomic(const atomic&) = delete;
73  atomic& operator=(const atomic&) = delete;
74  atomic& operator=(const atomic&) volatile = delete;
75 
76  constexpr atomic(bool __i) noexcept : _M_base(__i) { }
77 
78  bool
79  operator=(bool __i) noexcept
80  { return _M_base.operator=(__i); }
81 
82  bool
83  operator=(bool __i) volatile noexcept
84  { return _M_base.operator=(__i); }
85 
86  operator bool() const noexcept
87  { return _M_base.load(); }
88 
89  operator bool() const volatile noexcept
90  { return _M_base.load(); }
91 
92  bool
93  is_lock_free() const noexcept { return _M_base.is_lock_free(); }
94 
95  bool
96  is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
97 
98 #if __cplusplus >= 201703L
99  static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
100 #endif
101 
102  void
103  store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
104  { _M_base.store(__i, __m); }
105 
106  void
107  store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
108  { _M_base.store(__i, __m); }
109 
110  bool
111  load(memory_order __m = memory_order_seq_cst) const noexcept
112  { return _M_base.load(__m); }
113 
114  bool
115  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
116  { return _M_base.load(__m); }
117 
118  bool
119  exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
120  { return _M_base.exchange(__i, __m); }
121 
122  bool
123  exchange(bool __i,
124  memory_order __m = memory_order_seq_cst) volatile noexcept
125  { return _M_base.exchange(__i, __m); }
126 
127  bool
128  compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
129  memory_order __m2) noexcept
130  { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
131 
132  bool
133  compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
134  memory_order __m2) volatile noexcept
135  { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
136 
137  bool
138  compare_exchange_weak(bool& __i1, bool __i2,
139  memory_order __m = memory_order_seq_cst) noexcept
140  { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
141 
142  bool
143  compare_exchange_weak(bool& __i1, bool __i2,
144  memory_order __m = memory_order_seq_cst) volatile noexcept
145  { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
146 
147  bool
148  compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
149  memory_order __m2) noexcept
150  { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
151 
152  bool
153  compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
154  memory_order __m2) volatile noexcept
155  { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
156 
157  bool
158  compare_exchange_strong(bool& __i1, bool __i2,
159  memory_order __m = memory_order_seq_cst) noexcept
160  { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
161 
162  bool
163  compare_exchange_strong(bool& __i1, bool __i2,
164  memory_order __m = memory_order_seq_cst) volatile noexcept
165  { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
166  };
167 
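An illustrative usage sketch (not part of this header) of the atomic<bool> interface above, with one thread publishing a flag and another observing it:

#include <atomic>
#include <thread>

int main()
{
  std::atomic<bool> ready(false);

  std::thread producer([&ready] {
    // Release store: writes made before this store are visible to a
    // thread that observes ready == true with an acquire load.
    ready.store(true, std::memory_order_release);
  });

  while (!ready.load(std::memory_order_acquire))
    ; // spin until the producer publishes the flag

  producer.join();
  return 0;
}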
168 #if __cplusplus <= 201703L
169 # define _GLIBCXX20_INIT(I)
170 #else
171 # define _GLIBCXX20_INIT(I) = I
172 #endif
173 
174  /**
175  * @brief Generic atomic type, primary class template.
176  *
177  * @tparam _Tp Type to be made atomic, must be trivially copyable.
178  */
179  template<typename _Tp>
180  struct atomic
181  {
182  using value_type = _Tp;
183 
184  private:
185  // Align 1/2/4/8/16-byte types to at least their size.
186  static constexpr int _S_min_alignment
187  = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
188  ? 0 : sizeof(_Tp);
189 
190  static constexpr int _S_alignment
191  = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
192 
193  alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
194 
195  static_assert(__is_trivially_copyable(_Tp),
196  "std::atomic requires a trivially copyable type");
197 
198  static_assert(sizeof(_Tp) > 0,
199  "Incomplete or zero-sized types are not supported");
200 
201 #if __cplusplus > 201703L
202  static_assert(is_copy_constructible_v<_Tp>);
203  static_assert(is_move_constructible_v<_Tp>);
204  static_assert(is_copy_assignable_v<_Tp>);
205  static_assert(is_move_assignable_v<_Tp>);
206 #endif
207 
208  public:
209  atomic() = default;
210  ~atomic() noexcept = default;
211  atomic(const atomic&) = delete;
212  atomic& operator=(const atomic&) = delete;
213  atomic& operator=(const atomic&) volatile = delete;
214 
215  constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
216 
217  operator _Tp() const noexcept
218  { return load(); }
219 
220  operator _Tp() const volatile noexcept
221  { return load(); }
222 
223  _Tp
224  operator=(_Tp __i) noexcept
225  { store(__i); return __i; }
226 
227  _Tp
228  operator=(_Tp __i) volatile noexcept
229  { store(__i); return __i; }
230 
231  bool
232  is_lock_free() const noexcept
233  {
234  // Produce a fake, minimally aligned pointer.
235  return __atomic_is_lock_free(sizeof(_M_i),
236  reinterpret_cast<void *>(-_S_alignment));
237  }
238 
239  bool
240  is_lock_free() const volatile noexcept
241  {
242  // Produce a fake, minimally aligned pointer.
243  return __atomic_is_lock_free(sizeof(_M_i),
244  reinterpret_cast<void *>(-_S_alignment));
245  }
246 
247 #if __cplusplus >= 201703L
248  static constexpr bool is_always_lock_free
249  = __atomic_always_lock_free(sizeof(_M_i), 0);
250 #endif
251 
252  void
253  store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
254  { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
255 
256  void
257  store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
258  { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
259 
260  _Tp
261  load(memory_order __m = memory_order_seq_cst) const noexcept
262  {
263  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
264  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
265  __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
266  return *__ptr;
267  }
268 
269  _Tp
270  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
271  {
272  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
273  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
274  __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
275  return *__ptr;
276  }
277 
278  _Tp
279  exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
280  {
281  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
282  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
283  __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
284  __ptr, int(__m));
285  return *__ptr;
286  }
287 
288  _Tp
289  exchange(_Tp __i,
290  memory_order __m = memory_order_seq_cst) volatile noexcept
291  {
292  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
293  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
294  __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
295  __ptr, int(__m));
296  return *__ptr;
297  }
298 
299  bool
300  compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
301  memory_order __f) noexcept
302  {
303  return __atomic_compare_exchange(std::__addressof(_M_i),
304  std::__addressof(__e),
305  std::__addressof(__i),
306  true, int(__s), int(__f));
307  }
308 
309  bool
310  compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
311  memory_order __f) volatile noexcept
312  {
313  return __atomic_compare_exchange(std::__addressof(_M_i),
314  std::__addressof(__e),
315  std::__addressof(__i),
316  true, int(__s), int(__f));
317  }
318 
319  bool
320  compare_exchange_weak(_Tp& __e, _Tp __i,
321  memory_order __m = memory_order_seq_cst) noexcept
322  { return compare_exchange_weak(__e, __i, __m,
323  __cmpexch_failure_order(__m)); }
324 
325  bool
326  compare_exchange_weak(_Tp& __e, _Tp __i,
327  memory_order __m = memory_order_seq_cst) volatile noexcept
328  { return compare_exchange_weak(__e, __i, __m,
329  __cmpexch_failure_order(__m)); }
330 
331  bool
332  compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
333  memory_order __f) noexcept
334  {
335  return __atomic_compare_exchange(std::__addressof(_M_i),
336  std::__addressof(__e),
337  std::__addressof(__i),
338  false, int(__s), int(__f));
339  }
340 
341  bool
342  compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
343  memory_order __f) volatile noexcept
344  {
345  return __atomic_compare_exchange(std::__addressof(_M_i),
346  std::__addressof(__e),
347  std::__addressof(__i),
348  false, int(__s), int(__f));
349  }
350 
351  bool
352  compare_exchange_strong(_Tp& __e, _Tp __i,
353  memory_order __m = memory_order_seq_cst) noexcept
354  { return compare_exchange_strong(__e, __i, __m,
355  __cmpexch_failure_order(__m)); }
356 
357  bool
358  compare_exchange_strong(_Tp& __e, _Tp __i,
359  memory_order __m = memory_order_seq_cst) volatile noexcept
360  { return compare_exchange_strong(__e, __i, __m,
361  __cmpexch_failure_order(__m)); }
362  };
363 #undef _GLIBCXX20_INIT
364 
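A sketch (not part of the header) of typical use of the primary template with a small trivially copyable type, including the conventional compare_exchange_weak retry loop:

#include <atomic>

struct Point { int x, y; };                  // trivially copyable

std::atomic<Point> origin{Point{0, 0}};

// Atomically translate the stored point. The weak CAS may fail spuriously,
// so it is retried; on failure 'expected' is refreshed with the current value.
void translate(int dx, int dy)
{
  Point expected = origin.load(std::memory_order_relaxed);
  Point desired;
  do
    desired = Point{expected.x + dx, expected.y + dy};
  while (!origin.compare_exchange_weak(expected, desired,
                                       std::memory_order_release,
                                       std::memory_order_relaxed));
}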
365  /// Partial specialization for pointer types.
366  template<typename _Tp>
367  struct atomic<_Tp*>
368  {
369  using value_type = _Tp*;
370  using difference_type = ptrdiff_t;
371 
 372  typedef _Tp* __pointer_type;
 373  typedef __atomic_base<_Tp*> __base_type;
374  __base_type _M_b;
375 
376  atomic() noexcept = default;
377  ~atomic() noexcept = default;
378  atomic(const atomic&) = delete;
379  atomic& operator=(const atomic&) = delete;
380  atomic& operator=(const atomic&) volatile = delete;
381 
382  constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
383 
384  operator __pointer_type() const noexcept
385  { return __pointer_type(_M_b); }
386 
387  operator __pointer_type() const volatile noexcept
388  { return __pointer_type(_M_b); }
389 
390  __pointer_type
391  operator=(__pointer_type __p) noexcept
392  { return _M_b.operator=(__p); }
393 
394  __pointer_type
395  operator=(__pointer_type __p) volatile noexcept
396  { return _M_b.operator=(__p); }
397 
398  __pointer_type
399  operator++(int) noexcept
400  {
401 #if __cplusplus >= 201703L
402  static_assert( is_object<_Tp>::value, "pointer to object type" );
403 #endif
404  return _M_b++;
405  }
406 
407  __pointer_type
408  operator++(int) volatile noexcept
409  {
410 #if __cplusplus >= 201703L
411  static_assert( is_object<_Tp>::value, "pointer to object type" );
412 #endif
413  return _M_b++;
414  }
415 
416  __pointer_type
417  operator--(int) noexcept
418  {
419 #if __cplusplus >= 201703L
420  static_assert( is_object<_Tp>::value, "pointer to object type" );
421 #endif
422  return _M_b--;
423  }
424 
425  __pointer_type
426  operator--(int) volatile noexcept
427  {
428 #if __cplusplus >= 201703L
429  static_assert( is_object<_Tp>::value, "pointer to object type" );
430 #endif
431  return _M_b--;
432  }
433 
434  __pointer_type
435  operator++() noexcept
436  {
437 #if __cplusplus >= 201703L
438  static_assert( is_object<_Tp>::value, "pointer to object type" );
439 #endif
440  return ++_M_b;
441  }
442 
443  __pointer_type
444  operator++() volatile noexcept
445  {
446 #if __cplusplus >= 201703L
447  static_assert( is_object<_Tp>::value, "pointer to object type" );
448 #endif
449  return ++_M_b;
450  }
451 
452  __pointer_type
453  operator--() noexcept
454  {
455 #if __cplusplus >= 201703L
456  static_assert( is_object<_Tp>::value, "pointer to object type" );
457 #endif
458  return --_M_b;
459  }
460 
461  __pointer_type
462  operator--() volatile noexcept
463  {
464 #if __cplusplus >= 201703L
465  static_assert( is_object<_Tp>::value, "pointer to object type" );
466 #endif
467  return --_M_b;
468  }
469 
470  __pointer_type
471  operator+=(ptrdiff_t __d) noexcept
472  {
473 #if __cplusplus >= 201703L
474  static_assert( is_object<_Tp>::value, "pointer to object type" );
475 #endif
476  return _M_b.operator+=(__d);
477  }
478 
479  __pointer_type
480  operator+=(ptrdiff_t __d) volatile noexcept
481  {
482 #if __cplusplus >= 201703L
483  static_assert( is_object<_Tp>::value, "pointer to object type" );
484 #endif
485  return _M_b.operator+=(__d);
486  }
487 
488  __pointer_type
489  operator-=(ptrdiff_t __d) noexcept
490  {
491 #if __cplusplus >= 201703L
492  static_assert( is_object<_Tp>::value, "pointer to object type" );
493 #endif
494  return _M_b.operator-=(__d);
495  }
496 
497  __pointer_type
498  operator-=(ptrdiff_t __d) volatile noexcept
499  {
500 #if __cplusplus >= 201703L
501  static_assert( is_object<_Tp>::value, "pointer to object type" );
502 #endif
503  return _M_b.operator-=(__d);
504  }
505 
506  bool
507  is_lock_free() const noexcept
508  { return _M_b.is_lock_free(); }
509 
510  bool
511  is_lock_free() const volatile noexcept
512  { return _M_b.is_lock_free(); }
513 
514 #if __cplusplus >= 201703L
515  static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
516 #endif
517 
518  void
519  store(__pointer_type __p,
520  memory_order __m = memory_order_seq_cst) noexcept
521  { return _M_b.store(__p, __m); }
522 
523  void
524  store(__pointer_type __p,
525  memory_order __m = memory_order_seq_cst) volatile noexcept
526  { return _M_b.store(__p, __m); }
527 
528  __pointer_type
529  load(memory_order __m = memory_order_seq_cst) const noexcept
530  { return _M_b.load(__m); }
531 
532  __pointer_type
533  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
534  { return _M_b.load(__m); }
535 
536  __pointer_type
537  exchange(__pointer_type __p,
538  memory_order __m = memory_order_seq_cst) noexcept
539  { return _M_b.exchange(__p, __m); }
540 
541  __pointer_type
542  exchange(__pointer_type __p,
543  memory_order __m = memory_order_seq_cst) volatile noexcept
544  { return _M_b.exchange(__p, __m); }
545 
546  bool
547  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
548  memory_order __m1, memory_order __m2) noexcept
549  { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
550 
551  bool
552  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
553  memory_order __m1,
554  memory_order __m2) volatile noexcept
555  { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
556 
557  bool
558  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
559  memory_order __m = memory_order_seq_cst) noexcept
560  {
561  return compare_exchange_weak(__p1, __p2, __m,
562  __cmpexch_failure_order(__m));
563  }
564 
565  bool
566  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
567  memory_order __m = memory_order_seq_cst) volatile noexcept
568  {
569  return compare_exchange_weak(__p1, __p2, __m,
570  __cmpexch_failure_order(__m));
571  }
572 
573  bool
574  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
575  memory_order __m1, memory_order __m2) noexcept
576  { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
577 
578  bool
579  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
580  memory_order __m1,
581  memory_order __m2) volatile noexcept
582  { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
583 
584  bool
585  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
586  memory_order __m = memory_order_seq_cst) noexcept
587  {
588  return _M_b.compare_exchange_strong(__p1, __p2, __m,
589  __cmpexch_failure_order(__m));
590  }
591 
592  bool
593  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
594  memory_order __m = memory_order_seq_cst) volatile noexcept
595  {
596  return _M_b.compare_exchange_strong(__p1, __p2, __m,
597  __cmpexch_failure_order(__m));
598  }
599 
600  __pointer_type
601  fetch_add(ptrdiff_t __d,
602  memory_order __m = memory_order_seq_cst) noexcept
603  {
604 #if __cplusplus >= 201703L
605  static_assert( is_object<_Tp>::value, "pointer to object type" );
606 #endif
607  return _M_b.fetch_add(__d, __m);
608  }
609 
610  __pointer_type
611  fetch_add(ptrdiff_t __d,
612  memory_order __m = memory_order_seq_cst) volatile noexcept
613  {
614 #if __cplusplus >= 201703L
615  static_assert( is_object<_Tp>::value, "pointer to object type" );
616 #endif
617  return _M_b.fetch_add(__d, __m);
618  }
619 
620  __pointer_type
621  fetch_sub(ptrdiff_t __d,
622  memory_order __m = memory_order_seq_cst) noexcept
623  {
624 #if __cplusplus >= 201703L
625  static_assert( is_object<_Tp>::value, "pointer to object type" );
626 #endif
627  return _M_b.fetch_sub(__d, __m);
628  }
629 
630  __pointer_type
631  fetch_sub(ptrdiff_t __d,
632  memory_order __m = memory_order_seq_cst) volatile noexcept
633  {
634 #if __cplusplus >= 201703L
635  static_assert( is_object<_Tp>::value, "pointer to object type" );
636 #endif
637  return _M_b.fetch_sub(__d, __m);
638  }
639  };
640 
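An illustrative sketch (not part of the header) of the pointer partial specialization: fetch_add on an atomic<char*> hands out disjoint regions of a buffer to concurrent callers:

#include <atomic>
#include <cstddef>

char buffer[1024];
std::atomic<char*> bump(buffer);             // next free byte

// fetch_add returns the previous pointer value and advances it atomically,
// so concurrent callers receive non-overlapping regions.
// (Bounds checking is omitted for brevity.)
char* allocate(std::size_t n)
{
  return bump.fetch_add(static_cast<std::ptrdiff_t>(n),
                        std::memory_order_relaxed);
}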
641 
642  /// Explicit specialization for char.
643  template<>
644  struct atomic<char> : __atomic_base<char>
645  {
 646  typedef char __integral_type;
 647  typedef __atomic_base<char> __base_type;
648 
649  atomic() noexcept = default;
650  ~atomic() noexcept = default;
651  atomic(const atomic&) = delete;
652  atomic& operator=(const atomic&) = delete;
653  atomic& operator=(const atomic&) volatile = delete;
654 
655  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
656 
657  using __base_type::operator __integral_type;
658  using __base_type::operator=;
659 
660 #if __cplusplus >= 201703L
661  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
662 #endif
663  };
664 
665  /// Explicit specialization for signed char.
666  template<>
667  struct atomic<signed char> : __atomic_base<signed char>
668  {
 669  typedef signed char __integral_type;
 670  typedef __atomic_base<signed char> __base_type;
671 
 672  atomic() noexcept = default;
673  ~atomic() noexcept = default;
674  atomic(const atomic&) = delete;
675  atomic& operator=(const atomic&) = delete;
676  atomic& operator=(const atomic&) volatile = delete;
677 
678  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
679 
680  using __base_type::operator __integral_type;
681  using __base_type::operator=;
682 
683 #if __cplusplus >= 201703L
684  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
685 #endif
686  };
687 
688  /// Explicit specialization for unsigned char.
689  template<>
690  struct atomic<unsigned char> : __atomic_base<unsigned char>
691  {
 692  typedef unsigned char __integral_type;
 693  typedef __atomic_base<unsigned char> __base_type;
694 
 695  atomic() noexcept = default;
696  ~atomic() noexcept = default;
697  atomic(const atomic&) = delete;
698  atomic& operator=(const atomic&) = delete;
699  atomic& operator=(const atomic&) volatile = delete;
700 
701  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
702 
703  using __base_type::operator __integral_type;
704  using __base_type::operator=;
705 
706 #if __cplusplus >= 201703L
707  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
708 #endif
709  };
710 
711  /// Explicit specialization for short.
712  template<>
713  struct atomic<short> : __atomic_base<short>
714  {
 715  typedef short __integral_type;
 716  typedef __atomic_base<short> __base_type;
717 
718  atomic() noexcept = default;
719  ~atomic() noexcept = default;
720  atomic(const atomic&) = delete;
721  atomic& operator=(const atomic&) = delete;
722  atomic& operator=(const atomic&) volatile = delete;
723 
724  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
725 
726  using __base_type::operator __integral_type;
727  using __base_type::operator=;
728 
729 #if __cplusplus >= 201703L
730  static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
731 #endif
732  };
733 
734  /// Explicit specialization for unsigned short.
735  template<>
736  struct atomic<unsigned short> : __atomic_base<unsigned short>
737  {
 738  typedef unsigned short __integral_type;
 739  typedef __atomic_base<unsigned short> __base_type;
740 
741  atomic() noexcept = default;
742  ~atomic() noexcept = default;
743  atomic(const atomic&) = delete;
744  atomic& operator=(const atomic&) = delete;
745  atomic& operator=(const atomic&) volatile = delete;
746 
747  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
748 
749  using __base_type::operator __integral_type;
750  using __base_type::operator=;
751 
752 #if __cplusplus >= 201703L
753  static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
754 #endif
755  };
756 
757  /// Explicit specialization for int.
758  template<>
759  struct atomic<int> : __atomic_base<int>
760  {
 761  typedef int __integral_type;
 762  typedef __atomic_base<int> __base_type;
763 
764  atomic() noexcept = default;
765  ~atomic() noexcept = default;
766  atomic(const atomic&) = delete;
767  atomic& operator=(const atomic&) = delete;
768  atomic& operator=(const atomic&) volatile = delete;
769 
770  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
771 
772  using __base_type::operator __integral_type;
773  using __base_type::operator=;
774 
775 #if __cplusplus >= 201703L
776  static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
777 #endif
778  };
779 
780  /// Explicit specialization for unsigned int.
781  template<>
782  struct atomic<unsigned int> : __atomic_base<unsigned int>
783  {
 784  typedef unsigned int __integral_type;
 785  typedef __atomic_base<unsigned int> __base_type;
786 
787  atomic() noexcept = default;
788  ~atomic() noexcept = default;
789  atomic(const atomic&) = delete;
790  atomic& operator=(const atomic&) = delete;
791  atomic& operator=(const atomic&) volatile = delete;
792 
793  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
794 
795  using __base_type::operator __integral_type;
796  using __base_type::operator=;
797 
798 #if __cplusplus >= 201703L
799  static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
800 #endif
801  };
802 
803  /// Explicit specialization for long.
804  template<>
805  struct atomic<long> : __atomic_base<long>
806  {
 807  typedef long __integral_type;
 808  typedef __atomic_base<long> __base_type;
809 
810  atomic() noexcept = default;
811  ~atomic() noexcept = default;
812  atomic(const atomic&) = delete;
813  atomic& operator=(const atomic&) = delete;
814  atomic& operator=(const atomic&) volatile = delete;
815 
816  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
817 
818  using __base_type::operator __integral_type;
819  using __base_type::operator=;
820 
821 #if __cplusplus >= 201703L
822  static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
823 #endif
824  };
825 
826  /// Explicit specialization for unsigned long.
827  template<>
828  struct atomic<unsigned long> : __atomic_base<unsigned long>
829  {
 830  typedef unsigned long __integral_type;
 831  typedef __atomic_base<unsigned long> __base_type;
832 
833  atomic() noexcept = default;
834  ~atomic() noexcept = default;
835  atomic(const atomic&) = delete;
836  atomic& operator=(const atomic&) = delete;
837  atomic& operator=(const atomic&) volatile = delete;
838 
839  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
840 
841  using __base_type::operator __integral_type;
842  using __base_type::operator=;
843 
844 #if __cplusplus >= 201703L
845  static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
846 #endif
847  };
848 
849  /// Explicit specialization for long long.
850  template<>
851  struct atomic<long long> : __atomic_base<long long>
852  {
 853  typedef long long __integral_type;
 854  typedef __atomic_base<long long> __base_type;
855 
856  atomic() noexcept = default;
857  ~atomic() noexcept = default;
858  atomic(const atomic&) = delete;
859  atomic& operator=(const atomic&) = delete;
860  atomic& operator=(const atomic&) volatile = delete;
861 
862  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
863 
864  using __base_type::operator __integral_type;
865  using __base_type::operator=;
866 
867 #if __cplusplus >= 201703L
868  static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
869 #endif
870  };
871 
872  /// Explicit specialization for unsigned long long.
873  template<>
874  struct atomic<unsigned long long> : __atomic_base<unsigned long long>
875  {
 876  typedef unsigned long long __integral_type;
 877  typedef __atomic_base<unsigned long long> __base_type;
878 
879  atomic() noexcept = default;
880  ~atomic() noexcept = default;
881  atomic(const atomic&) = delete;
882  atomic& operator=(const atomic&) = delete;
883  atomic& operator=(const atomic&) volatile = delete;
884 
885  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
886 
887  using __base_type::operator __integral_type;
888  using __base_type::operator=;
889 
890 #if __cplusplus >= 201703L
891  static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
892 #endif
893  };
894 
895  /// Explicit specialization for wchar_t.
896  template<>
897  struct atomic<wchar_t> : __atomic_base<wchar_t>
898  {
 899  typedef wchar_t __integral_type;
 900  typedef __atomic_base<wchar_t> __base_type;
901 
902  atomic() noexcept = default;
903  ~atomic() noexcept = default;
904  atomic(const atomic&) = delete;
905  atomic& operator=(const atomic&) = delete;
906  atomic& operator=(const atomic&) volatile = delete;
907 
908  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
909 
910  using __base_type::operator __integral_type;
911  using __base_type::operator=;
912 
913 #if __cplusplus >= 201703L
914  static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
915 #endif
916  };
917 
918 #ifdef _GLIBCXX_USE_CHAR8_T
919  /// Explicit specialization for char8_t.
920  template<>
921  struct atomic<char8_t> : __atomic_base<char8_t>
922  {
923  typedef char8_t __integral_type;
924  typedef __atomic_base<char8_t> __base_type;
925 
926  atomic() noexcept = default;
927  ~atomic() noexcept = default;
928  atomic(const atomic&) = delete;
929  atomic& operator=(const atomic&) = delete;
930  atomic& operator=(const atomic&) volatile = delete;
931 
932  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
933 
934  using __base_type::operator __integral_type;
935  using __base_type::operator=;
936 
937 #if __cplusplus > 201402L
938  static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
939 #endif
940  };
941 #endif
942 
943  /// Explicit specialization for char16_t.
944  template<>
945  struct atomic<char16_t> : __atomic_base<char16_t>
946  {
 947  typedef char16_t __integral_type;
 948  typedef __atomic_base<char16_t> __base_type;
949 
950  atomic() noexcept = default;
951  ~atomic() noexcept = default;
952  atomic(const atomic&) = delete;
953  atomic& operator=(const atomic&) = delete;
954  atomic& operator=(const atomic&) volatile = delete;
955 
956  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
957 
958  using __base_type::operator __integral_type;
959  using __base_type::operator=;
960 
961 #if __cplusplus >= 201703L
962  static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
963 #endif
964  };
965 
966  /// Explicit specialization for char32_t.
967  template<>
968  struct atomic<char32_t> : __atomic_base<char32_t>
969  {
 970  typedef char32_t __integral_type;
 971  typedef __atomic_base<char32_t> __base_type;
972 
973  atomic() noexcept = default;
974  ~atomic() noexcept = default;
975  atomic(const atomic&) = delete;
976  atomic& operator=(const atomic&) = delete;
977  atomic& operator=(const atomic&) volatile = delete;
978 
979  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
980 
981  using __base_type::operator __integral_type;
982  using __base_type::operator=;
983 
984 #if __cplusplus >= 201703L
985  static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
986 #endif
987  };
988 
989 
 990  /// atomic_bool
 991  typedef atomic<bool> atomic_bool;
 992 
 993  /// atomic_char
 994  typedef atomic<char> atomic_char;
 995 
 996  /// atomic_schar
 997  typedef atomic<signed char> atomic_schar;
 998 
 999  /// atomic_uchar
 1000  typedef atomic<unsigned char> atomic_uchar;
 1001 
 1002  /// atomic_short
 1003  typedef atomic<short> atomic_short;
 1004 
 1005  /// atomic_ushort
 1006  typedef atomic<unsigned short> atomic_ushort;
 1007 
 1008  /// atomic_int
 1009  typedef atomic<int> atomic_int;
 1010 
 1011  /// atomic_uint
 1012  typedef atomic<unsigned int> atomic_uint;
 1013 
 1014  /// atomic_long
 1015  typedef atomic<long> atomic_long;
 1016 
 1017  /// atomic_ulong
 1018  typedef atomic<unsigned long> atomic_ulong;
 1019 
 1020  /// atomic_llong
 1021  typedef atomic<long long> atomic_llong;
 1022 
 1023  /// atomic_ullong
 1024  typedef atomic<unsigned long long> atomic_ullong;
 1025 
 1026  /// atomic_wchar_t
 1027  typedef atomic<wchar_t> atomic_wchar_t;
 1028 
 1029 #ifdef _GLIBCXX_USE_CHAR8_T
 1030  /// atomic_char8_t
 1031  typedef atomic<char8_t> atomic_char8_t;
 1032 #endif
 1033 
 1034  /// atomic_char16_t
 1035  typedef atomic<char16_t> atomic_char16_t;
 1036 
 1037  /// atomic_char32_t
 1038  typedef atomic<char32_t> atomic_char32_t;
 1039 
 1040 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
 1041  // _GLIBCXX_RESOLVE_LIB_DEFECTS
 1042  // 2441. Exact-width atomic typedefs should be provided
 1043 
 1044  /// atomic_int8_t
 1045  typedef atomic<int8_t> atomic_int8_t;
 1046 
 1047  /// atomic_uint8_t
 1048  typedef atomic<uint8_t> atomic_uint8_t;
 1049 
 1050  /// atomic_int16_t
 1051  typedef atomic<int16_t> atomic_int16_t;
 1052 
 1053  /// atomic_uint16_t
 1054  typedef atomic<uint16_t> atomic_uint16_t;
 1055 
 1056  /// atomic_int32_t
 1057  typedef atomic<int32_t> atomic_int32_t;
 1058 
 1059  /// atomic_uint32_t
 1060  typedef atomic<uint32_t> atomic_uint32_t;
 1061 
 1062  /// atomic_int64_t
 1063  typedef atomic<int64_t> atomic_int64_t;
 1064 
 1065  /// atomic_uint64_t
 1066  typedef atomic<uint64_t> atomic_uint64_t;
 1067 
 1068 
 1069  /// atomic_int_least8_t
 1070  typedef atomic<int_least8_t> atomic_int_least8_t;
 1071 
 1072  /// atomic_uint_least8_t
 1073  typedef atomic<uint_least8_t> atomic_uint_least8_t;
 1074 
 1075  /// atomic_int_least16_t
 1076  typedef atomic<int_least16_t> atomic_int_least16_t;
 1077 
 1078  /// atomic_uint_least16_t
 1079  typedef atomic<uint_least16_t> atomic_uint_least16_t;
 1080 
 1081  /// atomic_int_least32_t
 1082  typedef atomic<int_least32_t> atomic_int_least32_t;
 1083 
 1084  /// atomic_uint_least32_t
 1085  typedef atomic<uint_least32_t> atomic_uint_least32_t;
 1086 
 1087  /// atomic_int_least64_t
 1088  typedef atomic<int_least64_t> atomic_int_least64_t;
 1089 
 1090  /// atomic_uint_least64_t
 1091  typedef atomic<uint_least64_t> atomic_uint_least64_t;
 1092 
 1093 
 1094  /// atomic_int_fast8_t
 1095  typedef atomic<int_fast8_t> atomic_int_fast8_t;
 1096 
 1097  /// atomic_uint_fast8_t
 1098  typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
 1099 
 1100  /// atomic_int_fast16_t
 1101  typedef atomic<int_fast16_t> atomic_int_fast16_t;
 1102 
 1103  /// atomic_uint_fast16_t
 1104  typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
 1105 
 1106  /// atomic_int_fast32_t
 1107  typedef atomic<int_fast32_t> atomic_int_fast32_t;
 1108 
 1109  /// atomic_uint_fast32_t
 1110  typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
 1111 
 1112  /// atomic_int_fast64_t
 1113  typedef atomic<int_fast64_t> atomic_int_fast64_t;
 1114 
 1115  /// atomic_uint_fast64_t
 1116  typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
 1117 #endif
 1118 
 1119 
 1120  /// atomic_intptr_t
 1121  typedef atomic<intptr_t> atomic_intptr_t;
 1122 
 1123  /// atomic_uintptr_t
 1124  typedef atomic<uintptr_t> atomic_uintptr_t;
 1125 
 1126  /// atomic_size_t
 1127  typedef atomic<size_t> atomic_size_t;
 1128 
 1129  /// atomic_ptrdiff_t
 1130  typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
 1131 
 1132 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
 1133  /// atomic_intmax_t
 1134  typedef atomic<intmax_t> atomic_intmax_t;
 1135 
 1136  /// atomic_uintmax_t
 1137  typedef atomic<uintmax_t> atomic_uintmax_t;
 1138 #endif
1139 
1140  // Function definitions, atomic_flag operations.
1141  inline bool
1142  atomic_flag_test_and_set_explicit(atomic_flag* __a,
1143  memory_order __m) noexcept
1144  { return __a->test_and_set(__m); }
1145 
1146  inline bool
1147  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1148  memory_order __m) noexcept
1149  { return __a->test_and_set(__m); }
1150 
1151  inline void
1152  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1153  { __a->clear(__m); }
1154 
1155  inline void
1156  atomic_flag_clear_explicit(volatile atomic_flag* __a,
1157  memory_order __m) noexcept
1158  { __a->clear(__m); }
1159 
1160  inline bool
1161  atomic_flag_test_and_set(atomic_flag* __a) noexcept
1162  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1163 
1164  inline bool
1165  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1166  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1167 
1168  inline void
1169  atomic_flag_clear(atomic_flag* __a) noexcept
1170  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1171 
1172  inline void
1173  atomic_flag_clear(volatile atomic_flag* __a) noexcept
1174  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1175 
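A sketch (not part of the header) of a minimal spinlock built from std::atomic_flag, exercised through the free functions defined above:

#include <atomic>

std::atomic_flag guard = ATOMIC_FLAG_INIT;

void lock()
{
  // test_and_set returns the previous value; spin while it was already set.
  while (std::atomic_flag_test_and_set_explicit(&guard,
                                                std::memory_order_acquire))
    ;
}

void unlock()
{
  std::atomic_flag_clear_explicit(&guard, std::memory_order_release);
}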
1176 
1177  template<typename _Tp>
1178  using __atomic_val_t = typename atomic<_Tp>::value_type;
1179  template<typename _Tp>
1180  using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1181 
1182  // [atomics.nonmembers] Non-member functions.
1183  // Function templates generally applicable to atomic types.
1184  template<typename _ITp>
1185  inline bool
1186  atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1187  { return __a->is_lock_free(); }
1188 
1189  template<typename _ITp>
1190  inline bool
1191  atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1192  { return __a->is_lock_free(); }
1193 
1194  template<typename _ITp>
1195  inline void
1196  atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1197  { __a->store(__i, memory_order_relaxed); }
1198 
1199  template<typename _ITp>
1200  inline void
1201  atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1202  { __a->store(__i, memory_order_relaxed); }
1203 
1204  template<typename _ITp>
1205  inline void
1206  atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1207  memory_order __m) noexcept
1208  { __a->store(__i, __m); }
1209 
1210  template<typename _ITp>
1211  inline void
1212  atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1213  memory_order __m) noexcept
1214  { __a->store(__i, __m); }
1215 
1216  template<typename _ITp>
1217  inline _ITp
1218  atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1219  { return __a->load(__m); }
1220 
1221  template<typename _ITp>
1222  inline _ITp
1223  atomic_load_explicit(const volatile atomic<_ITp>* __a,
1224  memory_order __m) noexcept
1225  { return __a->load(__m); }
1226 
1227  template<typename _ITp>
1228  inline _ITp
1229  atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1230  memory_order __m) noexcept
1231  { return __a->exchange(__i, __m); }
1232 
1233  template<typename _ITp>
1234  inline _ITp
1235  atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1236  __atomic_val_t<_ITp> __i,
1237  memory_order __m) noexcept
1238  { return __a->exchange(__i, __m); }
1239 
1240  template<typename _ITp>
1241  inline bool
1242  atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1243  __atomic_val_t<_ITp>* __i1,
1244  __atomic_val_t<_ITp> __i2,
1245  memory_order __m1,
1246  memory_order __m2) noexcept
1247  { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1248 
1249  template<typename _ITp>
1250  inline bool
1251  atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1252  __atomic_val_t<_ITp>* __i1,
1253  __atomic_val_t<_ITp> __i2,
1254  memory_order __m1,
1255  memory_order __m2) noexcept
1256  { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1257 
1258  template<typename _ITp>
1259  inline bool
1260  atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1261  __atomic_val_t<_ITp>* __i1,
1262  __atomic_val_t<_ITp> __i2,
1263  memory_order __m1,
1264  memory_order __m2) noexcept
1265  { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1266 
1267  template<typename _ITp>
1268  inline bool
1269  atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1270  __atomic_val_t<_ITp>* __i1,
1271  __atomic_val_t<_ITp> __i2,
1272  memory_order __m1,
1273  memory_order __m2) noexcept
1274  { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1275 
1276 
1277  template<typename _ITp>
1278  inline void
1279  atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1280  { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1281 
1282  template<typename _ITp>
1283  inline void
1284  atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1285  { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1286 
1287  template<typename _ITp>
1288  inline _ITp
1289  atomic_load(const atomic<_ITp>* __a) noexcept
1290  { return atomic_load_explicit(__a, memory_order_seq_cst); }
1291 
1292  template<typename _ITp>
1293  inline _ITp
1294  atomic_load(const volatile atomic<_ITp>* __a) noexcept
1295  { return atomic_load_explicit(__a, memory_order_seq_cst); }
1296 
1297  template<typename _ITp>
1298  inline _ITp
1299  atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1300  { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1301 
1302  template<typename _ITp>
1303  inline _ITp
1304  atomic_exchange(volatile atomic<_ITp>* __a,
1305  __atomic_val_t<_ITp> __i) noexcept
1306  { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1307 
1308  template<typename _ITp>
1309  inline bool
1310  atomic_compare_exchange_weak(atomic<_ITp>* __a,
1311  __atomic_val_t<_ITp>* __i1,
1312  __atomic_val_t<_ITp> __i2) noexcept
1313  {
1314  return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1315  memory_order_seq_cst,
1316  memory_order_seq_cst);
1317  }
1318 
1319  template<typename _ITp>
1320  inline bool
1321  atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1322  __atomic_val_t<_ITp>* __i1,
1323  __atomic_val_t<_ITp> __i2) noexcept
1324  {
1325  return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1326  memory_order_seq_cst,
1327  memory_order_seq_cst);
1328  }
1329 
1330  template<typename _ITp>
1331  inline bool
1332  atomic_compare_exchange_strong(atomic<_ITp>* __a,
1333  __atomic_val_t<_ITp>* __i1,
1334  __atomic_val_t<_ITp> __i2) noexcept
1335  {
1336  return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1337  memory_order_seq_cst,
1338  memory_order_seq_cst);
1339  }
1340 
1341  template<typename _ITp>
1342  inline bool
1343  atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1344  __atomic_val_t<_ITp>* __i1,
1345  __atomic_val_t<_ITp> __i2) noexcept
1346  {
1347  return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1348  memory_order_seq_cst,
1349  memory_order_seq_cst);
1350  }
1351 
1352  // Function templates for atomic_integral and atomic_pointer operations only.
1353  // Some operations (and, or, xor) are only available for atomic integrals,
1354  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1355 
1356  template<typename _ITp>
1357  inline _ITp
1358  atomic_fetch_add_explicit(atomic<_ITp>* __a,
1359  __atomic_diff_t<_ITp> __i,
1360  memory_order __m) noexcept
1361  { return __a->fetch_add(__i, __m); }
1362 
1363  template<typename _ITp>
1364  inline _ITp
1365  atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1366  __atomic_diff_t<_ITp> __i,
1367  memory_order __m) noexcept
1368  { return __a->fetch_add(__i, __m); }
1369 
1370  template<typename _ITp>
1371  inline _ITp
1372  atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1373  __atomic_diff_t<_ITp> __i,
1374  memory_order __m) noexcept
1375  { return __a->fetch_sub(__i, __m); }
1376 
1377  template<typename _ITp>
1378  inline _ITp
1379  atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1380  __atomic_diff_t<_ITp> __i,
1381  memory_order __m) noexcept
1382  { return __a->fetch_sub(__i, __m); }
1383 
1384  template<typename _ITp>
1385  inline _ITp
1386  atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1387  __atomic_val_t<_ITp> __i,
1388  memory_order __m) noexcept
1389  { return __a->fetch_and(__i, __m); }
1390 
1391  template<typename _ITp>
1392  inline _ITp
1393  atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1394  __atomic_val_t<_ITp> __i,
1395  memory_order __m) noexcept
1396  { return __a->fetch_and(__i, __m); }
1397 
1398  template<typename _ITp>
1399  inline _ITp
1400  atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1401  __atomic_val_t<_ITp> __i,
1402  memory_order __m) noexcept
1403  { return __a->fetch_or(__i, __m); }
1404 
1405  template<typename _ITp>
1406  inline _ITp
1407  atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1408  __atomic_val_t<_ITp> __i,
1409  memory_order __m) noexcept
1410  { return __a->fetch_or(__i, __m); }
1411 
1412  template<typename _ITp>
1413  inline _ITp
1414  atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1415  __atomic_val_t<_ITp> __i,
1416  memory_order __m) noexcept
1417  { return __a->fetch_xor(__i, __m); }
1418 
1419  template<typename _ITp>
1420  inline _ITp
1421  atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1422  __atomic_val_t<_ITp> __i,
1423  memory_order __m) noexcept
1424  { return __a->fetch_xor(__i, __m); }
1425 
1426  template<typename _ITp>
1427  inline _ITp
1428  atomic_fetch_add(atomic<_ITp>* __a,
1429  __atomic_diff_t<_ITp> __i) noexcept
1430  { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1431 
1432  template<typename _ITp>
1433  inline _ITp
1434  atomic_fetch_add(volatile atomic<_ITp>* __a,
1435  __atomic_diff_t<_ITp> __i) noexcept
1436  { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1437 
1438  template<typename _ITp>
1439  inline _ITp
1440  atomic_fetch_sub(atomic<_ITp>* __a,
1441  __atomic_diff_t<_ITp> __i) noexcept
1442  { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1443 
1444  template<typename _ITp>
1445  inline _ITp
1446  atomic_fetch_sub(volatile atomic<_ITp>* __a,
1447  __atomic_diff_t<_ITp> __i) noexcept
1448  { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1449 
1450  template<typename _ITp>
1451  inline _ITp
1452  atomic_fetch_and(__atomic_base<_ITp>* __a,
1453  __atomic_val_t<_ITp> __i) noexcept
1454  { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1455 
1456  template<typename _ITp>
1457  inline _ITp
1458  atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1459  __atomic_val_t<_ITp> __i) noexcept
1460  { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1461 
1462  template<typename _ITp>
1463  inline _ITp
1464  atomic_fetch_or(__atomic_base<_ITp>* __a,
1465  __atomic_val_t<_ITp> __i) noexcept
1466  { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1467 
1468  template<typename _ITp>
1469  inline _ITp
1470  atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1471  __atomic_val_t<_ITp> __i) noexcept
1472  { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1473 
1474  template<typename _ITp>
1475  inline _ITp
1476  atomic_fetch_xor(__atomic_base<_ITp>* __a,
1477  __atomic_val_t<_ITp> __i) noexcept
1478  { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1479 
1480  template<typename _ITp>
1481  inline _ITp
1482  atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1483  __atomic_val_t<_ITp> __i) noexcept
1484  { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1485 
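An illustrative sketch (not part of the header) using the non-member fetch functions above to maintain a shared bit mask:

#include <atomic>

std::atomic<unsigned> done_mask(0);

// Atomically set one bit of the shared mask; the non-member form mirrors
// the member call done_mask.fetch_or(1u << item).
void mark_done(unsigned item)                // requires item < 32
{
  std::atomic_fetch_or(&done_mask, 1u << item);
}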
1486 #if __cplusplus > 201703L
1487 #define __cpp_lib_atomic_float 201711L
1488  template<>
1489  struct atomic<float> : __atomic_float<float>
1490  {
1491  atomic() noexcept = default;
1492 
1493  constexpr
1494  atomic(float __fp) noexcept : __atomic_float<float>(__fp)
1495  { }
1496 
1497  atomic& operator=(const atomic&) volatile = delete;
1498  atomic& operator=(const atomic&) = delete;
1499 
1500  using __atomic_float<float>::operator=;
1501  };
1502 
1503  template<>
1504  struct atomic<double> : __atomic_float<double>
1505  {
1506  atomic() noexcept = default;
1507 
1508  constexpr
1509  atomic(double __fp) noexcept : __atomic_float<double>(__fp)
1510  { }
1511 
1512  atomic& operator=(const atomic&) volatile = delete;
1513  atomic& operator=(const atomic&) = delete;
1514 
1515  using __atomic_float<double>::operator=;
1516  };
1517 
1518  template<>
1519  struct atomic<long double> : __atomic_float<long double>
1520  {
1521  atomic() noexcept = default;
1522 
1523  constexpr
1524  atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
1525  { }
1526 
1527  atomic& operator=(const atomic&) volatile = delete;
1528  atomic& operator=(const atomic&) = delete;
1529 
1530  using __atomic_float<long double>::operator=;
1531  };
1532 
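A sketch (not part of the header) of the C++20 floating-point specializations above, which add fetch_add and fetch_sub for atomic<float>, atomic<double>, and atomic<long double>:

#include <atomic>

std::atomic<double> total(0.0);

// C++20: fetch_add on a floating-point atomic accumulates without a
// separate mutex; relaxed ordering suffices for a plain counter.
void accumulate(double sample)
{
  total.fetch_add(sample, std::memory_order_relaxed);
}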
1533 #define __cpp_lib_atomic_ref 201806L
1534 
1535  /// Class template to provide atomic operations on a non-atomic variable.
1536  template<typename _Tp>
1537  struct atomic_ref : __atomic_ref<_Tp>
1538  {
1539  explicit
1540  atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
1541  { }
1542 
1543  atomic_ref& operator=(const atomic_ref&) = delete;
1544 
1545  atomic_ref(const atomic_ref&) = default;
1546 
1547  using __atomic_ref<_Tp>::operator=;
1548  };
1549 
1550 #endif // C++2a
1551 
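A sketch (not part of the header) of C++20 std::atomic_ref, which applies atomic operations to an ordinary, suitably aligned object for the lifetime of the reference:

#include <atomic>

// The referenced int must not be accessed non-atomically while any
// atomic_ref to it exists.
void count_hit(int& counter)
{
  std::atomic_ref<int> ref(counter);
  ref.fetch_add(1, std::memory_order_relaxed);
}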
1552  /// @} group atomics
1553 
1554 _GLIBCXX_END_NAMESPACE_VERSION
1555 } // namespace
1556 
1557 #endif // C++11
1558 
1559 #endif // _GLIBCXX_ATOMIC