libstdc++
cstdatomic
Go to the documentation of this file.
1 // -*- C++ -*- header.
2 
3 // Copyright (C) 2008, 2009
4 // Free Software Foundation, Inc.
5 //
6 // This file is part of the GNU ISO C++ Library. This library is free
7 // software; you can redistribute it and/or modify it under the
8 // terms of the GNU General Public License as published by the
9 // Free Software Foundation; either version 3, or (at your option)
10 // any later version.
11 
12 // This library is distributed in the hope that it will be useful,
13 // but WITHOUT ANY WARRANTY; without even the implied warranty of
14 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 // GNU General Public License for more details.
16 
17 // Under Section 7 of GPL version 3, you are granted additional
18 // permissions described in the GCC Runtime Library Exception, version
19 // 3.1, as published by the Free Software Foundation.
20 
21 // You should have received a copy of the GNU General Public License and
22 // a copy of the GCC Runtime Library Exception along with this program;
23 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
24 // <http://www.gnu.org/licenses/>.
25 
26 /** @file cstdatomic
27  * This is a Standard C++ Library file. You should @c #include this file
28  * in your programs, rather than any of the "*.h" implementation files.
29  *
30  * This is the C++ version of the Standard C Library header @c stdatomic.h,
31  * and its contents are (mostly) the same as that header, but are all
32  * contained in the namespace @c std (except for names which are defined
33  * as macros in C).
34  */
35 
36 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
37 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
38 
39 #ifndef _GLIBCXX_STDATOMIC
40 #define _GLIBCXX_STDATOMIC 1
41 
42 #pragma GCC system_header
43 
44 #ifndef __GXX_EXPERIMENTAL_CXX0X__
45 # include <c++0x_warning.h>
46 #endif
47 
48 #include <stdatomic.h>
49 #include <cstddef>
50 
51 _GLIBCXX_BEGIN_NAMESPACE(std)
52 
53  /**
54  * @addtogroup atomics
55  * @{
56  */
57 
58  /// kill_dependency
59  template<typename _Tp>
60  inline _Tp
61  kill_dependency(_Tp __y)
62  {
63  _Tp ret(__y);
64  return ret;
65  }
66 
67  inline memory_order
68  __calculate_memory_order(memory_order __m)
69  {
70  const bool __cond1 = __m == memory_order_release;
71  const bool __cond2 = __m == memory_order_acq_rel;
72  memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
73  memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
74  return __mo2;
75  }
76 
77  //
78  // Three nested namespaces for atomic implementation details.
79  //
80  // The nested namespace inlined into std:: is determined by the value
81  // of the _GLIBCXX_ATOMIC_PROPERTY macro and the resulting
82  // ATOMIC_*_LOCK_FREE macros. See file stdatomic.h.
83  //
84  // 0 == __atomic0 == Never lock-free
85  // 1 == __atomic1 == Best available, sometimes lock-free
86  // 2 == __atomic2 == Always lock-free
87 #include <bits/atomic_0.h>
88 #include <bits/atomic_2.h>
89 
90  /// atomic
91  /// 29.4.3, Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;   // The contained value.

    public:
      // Atomics are default-constructible and destructible but never
      // copyable: a copy would require two separate atomic accesses.
      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      // Initialization from a value; construction itself is not atomic.
      atomic(_Tp __i) : _M_i(__i) { }

      // Conversion to the value type (definition not visible here;
      // presumably equivalent to load() -- see the class definitions in
      // bits/atomic_*.h).
      operator _Tp() const volatile;

      // Assignment stores the new value and returns it.
      _Tp
      operator=(_Tp __i) volatile { store(__i); return __i; }

      // True if operations on this object avoid locking.
      bool
      is_lock_free() const volatile;

      void
      store(_Tp, memory_order = memory_order_seq_cst) volatile;

      _Tp
      load(memory_order = memory_order_seq_cst) const volatile;

      // Atomically replace the value, returning the previous one.
      _Tp
      exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile;

      // Compare-exchange: on failure, the expected value (first argument)
      // is updated with the value actually observed.  The weak form may
      // fail spuriously.  The two-order overloads take separate success
      // and failure orderings; the one-order overloads derive the failure
      // order internally.
      bool
      compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile;

      bool
      compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile;

      bool
      compare_exchange_weak(_Tp&, _Tp,
			    memory_order = memory_order_seq_cst) volatile;

      bool
      compare_exchange_strong(_Tp&, _Tp,
			      memory_order = memory_order_seq_cst) volatile;
    };
137 
138 
139  /// Partial specialization for pointer types.
140  template<typename _Tp>
141  struct atomic<_Tp*> : atomic_address
142  {
143  atomic() = default;
144  ~atomic() = default;
145  atomic(const atomic&) = delete;
146  atomic& operator=(const atomic&) = delete;
147 
148  atomic(_Tp* __v) : atomic_address(__v) { }
149 
150  void
151  store(_Tp*, memory_order = memory_order_seq_cst) volatile;
152 
153  _Tp*
154  load(memory_order = memory_order_seq_cst) const volatile;
155 
156  _Tp*
157  exchange(_Tp*, memory_order = memory_order_seq_cst) volatile;
158 
159  bool
160  compare_exchange_weak(_Tp*&, _Tp*, memory_order, memory_order) volatile;
161 
162  bool
163  compare_exchange_strong(_Tp*&, _Tp*, memory_order, memory_order) volatile;
164 
165  bool
166  compare_exchange_weak(_Tp*&, _Tp*,
167  memory_order = memory_order_seq_cst) volatile;
168 
169  bool
170  compare_exchange_strong(_Tp*&, _Tp*,
171  memory_order = memory_order_seq_cst) volatile;
172 
173  _Tp*
174  fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
175 
176  _Tp*
177  fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;
178 
179  operator _Tp*() const volatile
180  { return load(); }
181 
182  _Tp*
183  operator=(_Tp* __v) volatile
184  {
185  store(__v);
186  return __v;
187  }
188 
189  _Tp*
190  operator++(int) volatile { return fetch_add(1); }
191 
192  _Tp*
193  operator--(int) volatile { return fetch_sub(1); }
194 
195  _Tp*
196  operator++() volatile { return fetch_add(1) + 1; }
197 
198  _Tp*
199  operator--() volatile { return fetch_sub(1) - 1; }
200 
201  _Tp*
202  operator+=(ptrdiff_t __d) volatile
203  { return fetch_add(__d) + __d; }
204 
205  _Tp*
206  operator-=(ptrdiff_t __d) volatile
207  { return fetch_sub(__d) - __d; }
208  };
209 
210 
211  /// Explicit specialization for void*
  template<>
    struct atomic<void*> : public atomic_address
    {
      // Typedef names kept for symmetry with the integral specializations.
      typedef void* __integral_type;
      typedef atomic_address __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // Conversion and assignment come from atomic_address.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
228 
229  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool __integral_type;
      typedef atomic_bool __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
246 
247  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char __integral_type;
      typedef atomic_char __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
264 
265  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char __integral_type;
      typedef atomic_schar __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
282 
283  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char __integral_type;
      typedef atomic_uchar __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
300 
301  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short __integral_type;
      typedef atomic_short __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
318 
319  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short __integral_type;
      typedef atomic_ushort __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
336 
337  /// Explicit specialization for int.
338  template<>
339  struct atomic<int> : atomic_int
340  {
341  typedef int __integral_type;
342  typedef atomic_int __base_type;
343 
344  atomic() = default;
345  ~atomic() = default;
346  atomic(const atomic&) = delete;
347  atomic& operator=(const atomic&) = delete;
348 
349  atomic(__integral_type __i) : __base_type(__i) { }
350 
351  using __base_type::operator __integral_type;
352  using __base_type::operator=;
353  };
354 
355  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int __integral_type;
      typedef atomic_uint __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
372 
373  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long __integral_type;
      typedef atomic_long __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
390 
391  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long __integral_type;
      typedef atomic_ulong __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
408 
409  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long __integral_type;
      typedef atomic_llong __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
426 
427  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
444 
445  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t __integral_type;
      typedef atomic_wchar_t __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
462 
463  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t __integral_type;
      typedef atomic_char16_t __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
480 
481  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t __integral_type;
      typedef atomic_char32_t __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;   // Atomics are not copyable.
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      // All operations are inherited from the base class.
      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
498 
499 
  // Load through the void*-based base class, restoring the pointer type.
  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::load(memory_order __m) const volatile
    { return static_cast<_Tp*>(atomic_address::load(__m)); }
504 
  // Exchange through the base class, restoring the pointer type of the
  // previous value.
  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::exchange(_Tp* __v, memory_order __m) volatile
    { return static_cast<_Tp*>(atomic_address::exchange(__v, __m)); }
509 
510  template<typename _Tp>
511  bool
512  atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v, memory_order __m1,
513  memory_order __m2) volatile
514  {
515  void** __vr = reinterpret_cast<void**>(&__r);
516  void* __vv = static_cast<void*>(__v);
517  return atomic_address::compare_exchange_weak(*__vr, __vv, __m1, __m2);
518  }
519 
520  template<typename _Tp>
521  bool
522  atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
523  memory_order __m1,
524  memory_order __m2) volatile
525  {
526  void** __vr = reinterpret_cast<void**>(&__r);
527  void* __vv = static_cast<void*>(__v);
528  return atomic_address::compare_exchange_strong(*__vr, __vv, __m1, __m2);
529  }
530 
  // Single-order overload: the failure order is derived from the success
  // order by __calculate_memory_order.
  template<typename _Tp>
    bool
    atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v,
					memory_order __m) volatile
    {
      return compare_exchange_weak(__r, __v, __m,
				   __calculate_memory_order(__m));
    }
539 
  // Single-order overload: the failure order is derived from the success
  // order by __calculate_memory_order.
  template<typename _Tp>
    bool
    atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
					  memory_order __m) volatile
    {
      return compare_exchange_strong(__r, __v, __m,
				     __calculate_memory_order(__m));
    }
548 
  // Scale the element count to a byte offset for atomic_address (the
  // atomic_fetch_add_explicit overload below takes a raw ptrdiff_t).
  // Note this requires _Tp to be a complete type (sizeof).
  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::fetch_add(ptrdiff_t __d, memory_order __m) volatile
    {
      void* __p = atomic_fetch_add_explicit(this, sizeof(_Tp) * __d, __m);
      return static_cast<_Tp*>(__p);
    }
556 
  // As fetch_add above: scale the element count to a byte offset and
  // delegate via the atomic_address convenience function.
  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::fetch_sub(ptrdiff_t __d, memory_order __m) volatile
    {
      void* __p = atomic_fetch_sub_explicit(this, sizeof(_Tp) * __d, __m);
      return static_cast<_Tp*>(__p);
    }
564 
565  // Convenience function definitions, atomic_flag.
  // C-compatible free function: test-and-set with an explicit memory
  // order, forwarding to the member function.
  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a, memory_order __m)
  { return __a->test_and_set(__m); }
569 
570  inline void
571  atomic_flag_clear_explicit(volatile atomic_flag* __a, memory_order __m)
572  { return __a->clear(__m); }
573 
574 
575  // Convenience function definitions, atomic_address.
  // Each function below is a thin C-compatible wrapper forwarding to the
  // corresponding atomic_address member; the *_explicit forms take a
  // memory order, the plain forms use the member default (seq_cst).
  inline bool
  atomic_is_lock_free(const volatile atomic_address* __a)
  { return __a->is_lock_free(); }

  inline void
  atomic_store(volatile atomic_address* __a, void* __v)
  { __a->store(__v); }

  inline void
  atomic_store_explicit(volatile atomic_address* __a, void* __v,
			memory_order __m)
  { __a->store(__v, __m); }

  inline void*
  atomic_load(const volatile atomic_address* __a)
  { return __a->load(); }

  inline void*
  atomic_load_explicit(const volatile atomic_address* __a, memory_order __m)
  { return __a->load(__m); }

  inline void*
  atomic_exchange(volatile atomic_address* __a, void* __v)
  { return __a->exchange(__v); }

  inline void*
  atomic_exchange_explicit(volatile atomic_address* __a, void* __v,
			   memory_order __m)
  { return __a->exchange(__v, __m); }

  // The compare-exchange wrappers take the expected value by pointer; on
  // failure the observed value is written back through it.
  inline bool
  atomic_compare_exchange_weak(volatile atomic_address* __a,
			       void** __v1, void* __v2)
  {
    return __a->compare_exchange_weak(*__v1, __v2, memory_order_seq_cst,
				      memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_strong(volatile atomic_address* __a,
				 void** __v1, void* __v2)
  {
    return __a->compare_exchange_strong(*__v1, __v2, memory_order_seq_cst,
					memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_weak_explicit(volatile atomic_address* __a,
					void** __v1, void* __v2,
					memory_order __m1, memory_order __m2)
  { return __a->compare_exchange_weak(*__v1, __v2, __m1, __m2); }

  inline bool
  atomic_compare_exchange_strong_explicit(volatile atomic_address* __a,
					  void** __v1, void* __v2,
					  memory_order __m1, memory_order __m2)
  { return __a->compare_exchange_strong(*__v1, __v2, __m1, __m2); }

  // Offsets here are raw (the atomic<_Tp*> members above pre-scale
  // element counts by sizeof(_Tp) before calling these).
  inline void*
  atomic_fetch_add_explicit(volatile atomic_address* __a, ptrdiff_t __d,
			    memory_order __m)
  { return __a->fetch_add(__d, __m); }

  inline void*
  atomic_fetch_add(volatile atomic_address* __a, ptrdiff_t __d)
  { return __a->fetch_add(__d); }

  inline void*
  atomic_fetch_sub_explicit(volatile atomic_address* __a, ptrdiff_t __d,
			    memory_order __m)
  { return __a->fetch_sub(__d, __m); }

  inline void*
  atomic_fetch_sub(volatile atomic_address* __a, ptrdiff_t __d)
  { return __a->fetch_sub(__d); }
651 
652 
653  // Convenience function definitions, atomic_bool.
  // Thin C-compatible wrappers forwarding to atomic_bool members; the
  // *_explicit forms take a memory order, the plain forms use the member
  // default (seq_cst).
  inline bool
  atomic_is_lock_free(const volatile atomic_bool* __a)
  { return __a->is_lock_free(); }

  inline void
  atomic_store(volatile atomic_bool* __a, bool __i)
  { __a->store(__i); }

  inline void
  atomic_store_explicit(volatile atomic_bool* __a, bool __i, memory_order __m)
  { __a->store(__i, __m); }

  inline bool
  atomic_load(const volatile atomic_bool* __a)
  { return __a->load(); }

  inline bool
  atomic_load_explicit(const volatile atomic_bool* __a, memory_order __m)
  { return __a->load(__m); }

  inline bool
  atomic_exchange(volatile atomic_bool* __a, bool __i)
  { return __a->exchange(__i); }

  inline bool
  atomic_exchange_explicit(volatile atomic_bool* __a, bool __i,
			   memory_order __m)
  { return __a->exchange(__i, __m); }

  // The expected value is passed by pointer; on failure the observed
  // value is written back through it.
  inline bool
  atomic_compare_exchange_weak(volatile atomic_bool* __a, bool* __i1, bool __i2)
  {
    return __a->compare_exchange_weak(*__i1, __i2, memory_order_seq_cst,
				      memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_strong(volatile atomic_bool* __a,
				 bool* __i1, bool __i2)
  {
    return __a->compare_exchange_strong(*__i1, __i2, memory_order_seq_cst,
					memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_weak_explicit(volatile atomic_bool* __a, bool* __i1,
					bool __i2, memory_order __m1,
					memory_order __m2)
  { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  inline bool
  atomic_compare_exchange_strong_explicit(volatile atomic_bool* __a,
					  bool* __i1, bool __i2,
					  memory_order __m1, memory_order __m2)
  { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
709 
710 
711 
  // Free standing functions. Template argument should be constrained
  // to integral types as specified in the standard.
  // Explicit-memory-order overloads for the integral __atomic_base types,
  // each forwarding to the corresponding member function.
  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			  memory_order __m)
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile __atomic_base<_ITp>* __a,
			 memory_order __m)
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile __atomic_base<_ITp>* __a,
			     _ITp __i, memory_order __m)
    { return __a->exchange(__i, __m); }

  // Expected value by pointer; on failure the observed value is written
  // back through it.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile __atomic_base<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1, memory_order __m2)
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile __atomic_base<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2)
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  // Read-modify-write operations, returning the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m)
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_xor(__i, __m); }
776 
  // Default-order convenience forms: each forwards to the *_explicit
  // overload (or member) with memory_order_seq_cst.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile __atomic_base<_ITp>* __a)
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_store(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile __atomic_base<_ITp>* __a)
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  // Expected value by pointer; on failure the observed value is written
  // back through it.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile __atomic_base<_ITp>* __a,
				 _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile __atomic_base<_ITp>* __a,
				   _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  // Read-modify-write operations, returning the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
841 
842  // @} group atomics
843 
844 _GLIBCXX_END_NAMESPACE
845 
846 #endif
847 
848 
atomic 29.4.3, Generic atomic type, primary class template.
Definition: cstdatomic:93
_Tp kill_dependency(_Tp __y)
kill_dependency
Definition: cstdatomic:61
memory_order
Enumeration for memory_order.
Definition: stdatomic.h:47