Date:      Thu, 3 May 2012 17:44:08 +0000 (UTC)
From:      David Chisnall <theraven@FreeBSD.org>
To:        src-committers@freebsd.org, svn-src-all@freebsd.org, svn-src-head@freebsd.org
Subject:   svn commit: r234976 - in head/contrib/libc++: include src
Message-ID:  <201205031744.q43Hi8fj054896@svn.freebsd.org>

Author: theraven
Date: Thu May  3 17:44:07 2012
New Revision: 234976
URL: http://svn.freebsd.org/changeset/base/234976

Log:
  Import a new version of libc++.  Among other improvements, this comes with an
  <atomic> header that works with clang 3.1 (and, importantly, the pre-3.1
  snapshot currently in head).
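
A minimal sketch of the kind of code the updated <atomic> header is intended
to accept under clang 3.1; the example below is illustrative only and is not
part of the commit:

    // Exercises a few of the operations whose declarations gained
    // noexcept (and constexpr constructors) in this import.
    #include <atomic>
    #include <cstdio>

    int main()
    {
        std::atomic<int> counter(0);              // constexpr-constructible in C++11
        counter.fetch_add(5, std::memory_order_relaxed);

        int expected = 5;
        // Strong CAS succeeds because counter currently holds 5.
        bool swapped = counter.compare_exchange_strong(expected, 7);

        std::atomic_flag flag = ATOMIC_FLAG_INIT;
        bool was_set = flag.test_and_set();       // first call returns false

        std::printf("counter=%d swapped=%d was_set=%d\n",
                    counter.load(), (int)swapped, (int)was_set);
        return 0;
    }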

Modified:
  head/contrib/libc++/include/__config
  head/contrib/libc++/include/__tuple
  head/contrib/libc++/include/algorithm
  head/contrib/libc++/include/atomic
  head/contrib/libc++/include/cmath
  head/contrib/libc++/include/limits
  head/contrib/libc++/include/memory
  head/contrib/libc++/include/random
  head/contrib/libc++/include/system_error
  head/contrib/libc++/include/tuple
  head/contrib/libc++/include/type_traits
  head/contrib/libc++/include/utility
  head/contrib/libc++/src/iostream.cpp
  head/contrib/libc++/src/stdexcept.cpp
  head/contrib/libc++/src/utility.cpp
Directory Properties:
  head/contrib/libc++/   (props changed)

Modified: head/contrib/libc++/include/__config
==============================================================================
--- head/contrib/libc++/include/__config	Thu May  3 17:08:40 2012	(r234975)
+++ head/contrib/libc++/include/__config	Thu May  3 17:44:07 2012	(r234976)
@@ -384,7 +384,9 @@ template <unsigned> struct __static_asse
 #endif
 
 #ifdef _LIBCPP_HAS_NO_CONSTEXPR
-#define constexpr const
+#define _LIBCPP_CONSTEXPR
+#else
+#define _LIBCPP_CONSTEXPR constexpr
 #endif
 
 #ifndef __has_feature

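The __config hunk above replaces the old "#define constexpr const" hack with a
_LIBCPP_CONSTEXPR compatibility macro that expands to the constexpr keyword
only when the compiler supports it.  A hypothetical sketch of the same pattern
(the macro and struct names here are made up for illustration):

    // Fall back to a no-op feature test on compilers without __has_feature.
    #ifndef __has_feature
    #  define __has_feature(x) 0
    #endif

    // Expands to constexpr only where the compiler implements it (clang's
    // cxx_constexpr feature test); otherwise it disappears entirely.
    #if __has_feature(cxx_constexpr)
    #  define MY_CONSTEXPR constexpr
    #else
    #  define MY_CONSTEXPR
    #endif

    struct limits_example {
        // Usable in constant expressions under C++11, still a plain
        // static member function under C++03.
        static MY_CONSTEXPR int max_value() { return 123; }
    };
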
Modified: head/contrib/libc++/include/__tuple
==============================================================================
--- head/contrib/libc++/include/__tuple	Thu May  3 17:08:40 2012	(r234975)
+++ head/contrib/libc++/include/__tuple	Thu May  3 17:44:07 2012	(r234976)
@@ -216,7 +216,7 @@ struct __tuple_convertible_imp : public 
 template <class _Tp0, class ..._Tp, class _Up0, class ..._Up>
 struct __tuple_convertible_imp<true, __tuple_types<_Tp0, _Tp...>, __tuple_types<_Up0, _Up...> >
     : public integral_constant<bool,
-                               is_constructible<_Up0, _Tp0>::value &&
+                               is_convertible<_Tp0, _Up0>::value &&
                                __tuple_convertible_imp<true, __tuple_types<_Tp...>, __tuple_types<_Up...> >::value> {};
 
 template <>
@@ -235,6 +235,33 @@ struct __tuple_convertible<_Tp, _Up, tru
              typename __make_tuple_types<_Tp>::type, typename __make_tuple_types<_Up>::type>
 {};
 
+// __tuple_constructible
+
+template <bool, class _Tp, class _Up>
+struct __tuple_constructible_imp : public false_type {};
+
+template <class _Tp0, class ..._Tp, class _Up0, class ..._Up>
+struct __tuple_constructible_imp<true, __tuple_types<_Tp0, _Tp...>, __tuple_types<_Up0, _Up...> >
+    : public integral_constant<bool,
+                               is_constructible<_Up0, _Tp0>::value &&
+                               __tuple_constructible_imp<true, __tuple_types<_Tp...>, __tuple_types<_Up...> >::value> {};
+
+template <>
+struct __tuple_constructible_imp<true, __tuple_types<>, __tuple_types<> >
+    : public true_type {};
+
+template <class _Tp, class _Up, bool = __tuple_like<typename remove_reference<_Tp>::type>::value,
+                                bool = __tuple_like<_Up>::value>
+struct __tuple_constructible
+    : public false_type {};
+
+template <class _Tp, class _Up>
+struct __tuple_constructible<_Tp, _Up, true, true>
+    : public __tuple_constructible_imp<tuple_size<typename remove_reference<_Tp>::type>::value ==
+                                     tuple_size<_Up>::value,
+             typename __make_tuple_types<_Tp>::type, typename __make_tuple_types<_Up>::type>
+{};
+
 // __tuple_assignable
 
 template <bool, class _Tp, class _Up>

Modified: head/contrib/libc++/include/algorithm
==============================================================================
--- head/contrib/libc++/include/algorithm	Thu May  3 17:08:40 2012	(r234975)
+++ head/contrib/libc++/include/algorithm	Thu May  3 17:44:07 2012	(r234976)
@@ -2508,11 +2508,16 @@ private:
     _Engine_result_type __mask0_;
     _Engine_result_type __mask1_;
 
+#ifdef _LIBCPP_HAS_NO_CONSTEXPR
     static const _Working_result_type _Rp = _Engine::_Max - _Engine::_Min
-                                                         + _Working_result_type(1);
-    static const size_t __m = __log2<_Working_result_type, _Rp>::value;
-    static const size_t _WDt = numeric_limits<_Working_result_type>::digits;
-    static const size_t _EDt = numeric_limits<_Engine_result_type>::digits;
+                                          + _Working_result_type(1);
+#else
+    static _LIBCPP_CONSTEXPR const _Working_result_type _Rp = _Engine::max() - _Engine::min()
+                                                      + _Working_result_type(1);
+#endif
+    static _LIBCPP_CONSTEXPR const size_t __m = __log2<_Working_result_type, _Rp>::value;
+    static _LIBCPP_CONSTEXPR const size_t _WDt = numeric_limits<_Working_result_type>::digits;
+    static _LIBCPP_CONSTEXPR const size_t _EDt = numeric_limits<_Engine_result_type>::digits;
 
 public:
     // constructors and seeding functions
@@ -2712,8 +2717,8 @@ public:
 
     result_type operator()();
 
-    static constexpr result_type min() {return _Min;}
-    static constexpr result_type max() {return _Max;}
+    static _LIBCPP_CONSTEXPR result_type min() {return _Min;}
+    static _LIBCPP_CONSTEXPR result_type max() {return _Max;}
 
     friend __rs_default __rs_get();
 };

Modified: head/contrib/libc++/include/atomic
==============================================================================
--- head/contrib/libc++/include/atomic	Thu May  3 17:08:40 2012	(r234975)
+++ head/contrib/libc++/include/atomic	Thu May  3 17:44:07 2012	(r234976)
@@ -29,7 +29,7 @@ typedef enum memory_order
     memory_order_seq_cst   // store-release load-acquire
 } memory_order;
 
-template <class T> T kill_dependency(T y);
+template <class T> T kill_dependency(T y) noexcept;
 
 // lock-free property
 
@@ -46,40 +46,40 @@ template <class T> T kill_dependency(T y
 
 typedef struct atomic_flag
 {
-    bool test_and_set(memory_order m = memory_order_seq_cst) volatile;
-    bool test_and_set(memory_order m = memory_order_seq_cst);
-    void clear(memory_order m = memory_order_seq_cst) volatile;
-    void clear(memory_order m = memory_order_seq_cst);
-    atomic_flag() = default;
+    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
+    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
+    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
+    void clear(memory_order m = memory_order_seq_cst) noexcept;
+    atomic_flag()  noexcept = default;
     atomic_flag(const atomic_flag&) = delete;
     atomic_flag& operator=(const atomic_flag&) = delete;
     atomic_flag& operator=(const atomic_flag&) volatile = delete;
 } atomic_flag;
 
 bool
-    atomic_flag_test_and_set(volatile atomic_flag* obj);
+    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
 
 bool
-    atomic_flag_test_and_set(atomic_flag* obj);
+    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
 
 bool
     atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
-                                      memory_order m);
+                                      memory_order m) noexcept;
 
 bool
-    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m);
+    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
 
 void
-    atomic_flag_clear(volatile atomic_flag* obj);
+    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
 
 void
-    atomic_flag_clear(atomic_flag* obj);
+    atomic_flag_clear(atomic_flag* obj) noexcept;
 
 void
-    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m);
+    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
 
 void
-    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m);
+    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
 
 #define ATOMIC_FLAG_INIT see below
 #define ATOMIC_VAR_INIT(value) see below
@@ -87,388 +87,388 @@ void
 template <class T>
 struct atomic
 {
-    bool is_lock_free() const volatile;
-    bool is_lock_free() const;
-    void store(T desr, memory_order m = memory_order_seq_cst) volatile;
-    void store(T desr, memory_order m = memory_order_seq_cst);
-    T load(memory_order m = memory_order_seq_cst) const volatile;
-    T load(memory_order m = memory_order_seq_cst) const;
-    operator T() const volatile;
-    operator T() const;
-    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile;
-    T exchange(T desr, memory_order m = memory_order_seq_cst);
+    bool is_lock_free() const volatile noexcept;
+    bool is_lock_free() const noexcept;
+    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
+    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
+    T load(memory_order m = memory_order_seq_cst) const noexcept;
+    operator T() const volatile noexcept;
+    operator T() const noexcept;
+    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_weak(T& expc, T desr,
-                               memory_order s, memory_order f) volatile;
-    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f);
+                               memory_order s, memory_order f) volatile noexcept;
+    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
     bool compare_exchange_strong(T& expc, T desr,
-                                 memory_order s, memory_order f) volatile;
+                                 memory_order s, memory_order f) volatile noexcept;
     bool compare_exchange_strong(T& expc, T desr,
-                                 memory_order s, memory_order f);
+                                 memory_order s, memory_order f) noexcept;
     bool compare_exchange_weak(T& expc, T desr,
-                               memory_order m = memory_order_seq_cst) volatile;
+                               memory_order m = memory_order_seq_cst) volatile noexcept;
     bool compare_exchange_weak(T& expc, T desr,
-                               memory_order m = memory_order_seq_cst);
+                               memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_strong(T& expc, T desr,
-                                memory_order m = memory_order_seq_cst) volatile;
+                                memory_order m = memory_order_seq_cst) volatile noexcept;
     bool compare_exchange_strong(T& expc, T desr,
-                                 memory_order m = memory_order_seq_cst);
+                                 memory_order m = memory_order_seq_cst) noexcept;
 
-    atomic() = default;
-    constexpr atomic(T desr);
+    atomic() noexcept = default;
+    constexpr atomic(T desr) noexcept;
     atomic(const atomic&) = delete;
     atomic& operator=(const atomic&) = delete;
     atomic& operator=(const atomic&) volatile = delete;
-    T operator=(T) volatile;
-    T operator=(T);
+    T operator=(T) volatile noexcept;
+    T operator=(T) noexcept;
 };
 
 template <>
 struct atomic<integral>
 {
-    bool is_lock_free() const volatile;
-    bool is_lock_free() const;
-    void store(integral desr, memory_order m = memory_order_seq_cst) volatile;
-    void store(integral desr, memory_order m = memory_order_seq_cst);
-    integral load(memory_order m = memory_order_seq_cst) const volatile;
-    integral load(memory_order m = memory_order_seq_cst) const;
-    operator integral() const volatile;
-    operator integral() const;
+    bool is_lock_free() const volatile noexcept;
+    bool is_lock_free() const noexcept;
+    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
+    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
+    integral load(memory_order m = memory_order_seq_cst) const noexcept;
+    operator integral() const volatile noexcept;
+    operator integral() const noexcept;
     integral exchange(integral desr,
-                      memory_order m = memory_order_seq_cst) volatile;
-    integral exchange(integral desr, memory_order m = memory_order_seq_cst);
+                      memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_weak(integral& expc, integral desr,
-                               memory_order s, memory_order f) volatile;
+                               memory_order s, memory_order f) volatile noexcept;
     bool compare_exchange_weak(integral& expc, integral desr,
-                               memory_order s, memory_order f);
+                               memory_order s, memory_order f) noexcept;
     bool compare_exchange_strong(integral& expc, integral desr,
-                                 memory_order s, memory_order f) volatile;
+                                 memory_order s, memory_order f) volatile noexcept;
     bool compare_exchange_strong(integral& expc, integral desr,
-                                 memory_order s, memory_order f);
+                                 memory_order s, memory_order f) noexcept;
     bool compare_exchange_weak(integral& expc, integral desr,
-                               memory_order m = memory_order_seq_cst) volatile;
+                               memory_order m = memory_order_seq_cst) volatile noexcept;
     bool compare_exchange_weak(integral& expc, integral desr,
-                               memory_order m = memory_order_seq_cst);
+                               memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_strong(integral& expc, integral desr,
-                                memory_order m = memory_order_seq_cst) volatile;
+                                memory_order m = memory_order_seq_cst) volatile noexcept;
     bool compare_exchange_strong(integral& expc, integral desr,
-                                 memory_order m = memory_order_seq_cst);
+                                 memory_order m = memory_order_seq_cst) noexcept;
 
     integral
-        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile;
-    integral fetch_add(integral op, memory_order m = memory_order_seq_cst);
+        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
     integral
-        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile;
-    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst);
+        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
     integral
-        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile;
-    integral fetch_and(integral op, memory_order m = memory_order_seq_cst);
+        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
     integral
-        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile;
-    integral fetch_or(integral op, memory_order m = memory_order_seq_cst);
+        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
     integral
-        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile;
-    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst);
+        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
 
-    atomic() = default;
-    constexpr atomic(integral desr);
+    atomic() noexcept = default;
+    constexpr atomic(integral desr) noexcept;
     atomic(const atomic&) = delete;
     atomic& operator=(const atomic&) = delete;
     atomic& operator=(const atomic&) volatile = delete;
-    integral operator=(integral desr) volatile;
-    integral operator=(integral desr);
+    integral operator=(integral desr) volatile noexcept;
+    integral operator=(integral desr) noexcept;
 
-    integral operator++(int) volatile;
-    integral operator++(int);
-    integral operator--(int) volatile;
-    integral operator--(int);
-    integral operator++() volatile;
-    integral operator++();
-    integral operator--() volatile;
-    integral operator--();
-    integral operator+=(integral op) volatile;
-    integral operator+=(integral op);
-    integral operator-=(integral op) volatile;
-    integral operator-=(integral op);
-    integral operator&=(integral op) volatile;
-    integral operator&=(integral op);
-    integral operator|=(integral op) volatile;
-    integral operator|=(integral op);
-    integral operator^=(integral op) volatile;
-    integral operator^=(integral op);
+    integral operator++(int) volatile noexcept;
+    integral operator++(int) noexcept;
+    integral operator--(int) volatile noexcept;
+    integral operator--(int) noexcept;
+    integral operator++() volatile noexcept;
+    integral operator++() noexcept;
+    integral operator--() volatile noexcept;
+    integral operator--() noexcept;
+    integral operator+=(integral op) volatile noexcept;
+    integral operator+=(integral op) noexcept;
+    integral operator-=(integral op) volatile noexcept;
+    integral operator-=(integral op) noexcept;
+    integral operator&=(integral op) volatile noexcept;
+    integral operator&=(integral op) noexcept;
+    integral operator|=(integral op) volatile noexcept;
+    integral operator|=(integral op) noexcept;
+    integral operator^=(integral op) volatile noexcept;
+    integral operator^=(integral op) noexcept;
 };
 
 template <class T>
 struct atomic<T*>
 {
-    bool is_lock_free() const volatile;
-    bool is_lock_free() const;
-    void store(T* desr, memory_order m = memory_order_seq_cst) volatile;
-    void store(T* desr, memory_order m = memory_order_seq_cst);
-    T* load(memory_order m = memory_order_seq_cst) const volatile;
-    T* load(memory_order m = memory_order_seq_cst) const;
-    operator T*() const volatile;
-    operator T*() const;
-    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile;
-    T* exchange(T* desr, memory_order m = memory_order_seq_cst);
+    bool is_lock_free() const volatile noexcept;
+    bool is_lock_free() const noexcept;
+    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
+    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
+    T* load(memory_order m = memory_order_seq_cst) const noexcept;
+    operator T*() const volatile noexcept;
+    operator T*() const noexcept;
+    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_weak(T*& expc, T* desr,
-                               memory_order s, memory_order f) volatile;
+                               memory_order s, memory_order f) volatile noexcept;
     bool compare_exchange_weak(T*& expc, T* desr,
-                               memory_order s, memory_order f);
+                               memory_order s, memory_order f) noexcept;
     bool compare_exchange_strong(T*& expc, T* desr,
-                                 memory_order s, memory_order f) volatile;
+                                 memory_order s, memory_order f) volatile noexcept;
     bool compare_exchange_strong(T*& expc, T* desr,
-                                 memory_order s, memory_order f);
+                                 memory_order s, memory_order f) noexcept;
     bool compare_exchange_weak(T*& expc, T* desr,
-                               memory_order m = memory_order_seq_cst) volatile;
+                               memory_order m = memory_order_seq_cst) volatile noexcept;
     bool compare_exchange_weak(T*& expc, T* desr,
-                               memory_order m = memory_order_seq_cst);
+                               memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_strong(T*& expc, T* desr,
-                                memory_order m = memory_order_seq_cst) volatile;
+                                memory_order m = memory_order_seq_cst) volatile noexcept;
     bool compare_exchange_strong(T*& expc, T* desr,
-                                 memory_order m = memory_order_seq_cst);
-    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile;
-    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst);
-    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile;
-    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst);
+                                 memory_order m = memory_order_seq_cst) noexcept;
+    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
+    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
 
-    atomic() = default;
-    constexpr atomic(T* desr);
+    atomic() noexcept = default;
+    constexpr atomic(T* desr) noexcept;
     atomic(const atomic&) = delete;
     atomic& operator=(const atomic&) = delete;
     atomic& operator=(const atomic&) volatile = delete;
 
-    T* operator=(T*) volatile;
-    T* operator=(T*);
-    T* operator++(int) volatile;
-    T* operator++(int);
-    T* operator--(int) volatile;
-    T* operator--(int);
-    T* operator++() volatile;
-    T* operator++();
-    T* operator--() volatile;
-    T* operator--();
-    T* operator+=(ptrdiff_t op) volatile;
-    T* operator+=(ptrdiff_t op);
-    T* operator-=(ptrdiff_t op) volatile;
-    T* operator-=(ptrdiff_t op);
+    T* operator=(T*) volatile noexcept;
+    T* operator=(T*) noexcept;
+    T* operator++(int) volatile noexcept;
+    T* operator++(int) noexcept;
+    T* operator--(int) volatile noexcept;
+    T* operator--(int) noexcept;
+    T* operator++() volatile noexcept;
+    T* operator++() noexcept;
+    T* operator--() volatile noexcept;
+    T* operator--() noexcept;
+    T* operator+=(ptrdiff_t op) volatile noexcept;
+    T* operator+=(ptrdiff_t op) noexcept;
+    T* operator-=(ptrdiff_t op) volatile noexcept;
+    T* operator-=(ptrdiff_t op) noexcept;
 };
 
 
 template <class T>
     bool
-    atomic_is_lock_free(const volatile atomic<T>* obj);
+    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
 
 template <class T>
     bool
-    atomic_is_lock_free(const atomic<T>* obj);
+    atomic_is_lock_free(const atomic<T>* obj) noexcept;
 
 template <class T>
     void
-    atomic_init(volatile atomic<T>* obj, T desr);
+    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
 
 template <class T>
     void
-    atomic_init(atomic<T>* obj, T desr);
+    atomic_init(atomic<T>* obj, T desr) noexcept;
 
 template <class T>
     void
-    atomic_store(volatile atomic<T>* obj, T desr);
+    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
 
 template <class T>
     void
-    atomic_store(atomic<T>* obj, T desr);
+    atomic_store(atomic<T>* obj, T desr) noexcept;
 
 template <class T>
     void
-    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m);
+    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
 
 template <class T>
     void
-    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m);
+    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
 
 template <class T>
     T
-    atomic_load(const volatile atomic<T>* obj);
+    atomic_load(const volatile atomic<T>* obj) noexcept;
 
 template <class T>
     T
-    atomic_load(const atomic<T>* obj);
+    atomic_load(const atomic<T>* obj) noexcept;
 
 template <class T>
     T
-    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m);
+    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
 
 template <class T>
     T
-    atomic_load_explicit(const atomic<T>* obj, memory_order m);
+    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
 
 template <class T>
     T
-    atomic_exchange(volatile atomic<T>* obj, T desr);
+    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
 
 template <class T>
     T
-    atomic_exchange(atomic<T>* obj, T desr);
+    atomic_exchange(atomic<T>* obj, T desr) noexcept;
 
 template <class T>
     T
-    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m);
+    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
 
 template <class T>
     T
-    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m);
+    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
 
 template <class T>
     bool
-    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr);
+    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
 
 template <class T>
     bool
-    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr);
+    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
 
 template <class T>
     bool
-    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr);
+    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
 
 template <class T>
     bool
-    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr);
+    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
 
 template <class T>
     bool
     atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
                                           T desr,
-                                          memory_order s, memory_order f);
+                                          memory_order s, memory_order f) noexcept;
 
 template <class T>
     bool
     atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
-                                          memory_order s, memory_order f);
+                                          memory_order s, memory_order f) noexcept;
 
 template <class T>
     bool
     atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
                                             T* expc, T desr,
-                                            memory_order s, memory_order f);
+                                            memory_order s, memory_order f) noexcept;
 
 template <class T>
     bool
     atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
                                             T desr,
-                                            memory_order s, memory_order f);
+                                            memory_order s, memory_order f) noexcept;
 
 template <class Integral>
     Integral
-    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op);
+    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
-    atomic_fetch_add(atomic<Integral>* obj, Integral op);
+    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
     atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class Integral>
     Integral
     atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class Integral>
     Integral
-    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op);
+    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
-    atomic_fetch_sub(atomic<Integral>* obj, Integral op);
+    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
     atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class Integral>
     Integral
     atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class Integral>
     Integral
-    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op);
+    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
-    atomic_fetch_and(atomic<Integral>* obj, Integral op);
+    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
     atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class Integral>
     Integral
     atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class Integral>
     Integral
-    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op);
+    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
-    atomic_fetch_or(atomic<Integral>* obj, Integral op);
+    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
     atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
-                             memory_order m);
+                             memory_order m) noexcept;
 template <class Integral>
     Integral
     atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
-                             memory_order m);
+                             memory_order m) noexcept;
 template <class Integral>
     Integral
-    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op);
+    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
-    atomic_fetch_xor(atomic<Integral>* obj, Integral op);
+    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
 
 template <class Integral>
     Integral
     atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class Integral>
     Integral
     atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
-                              memory_order m);
+                              memory_order m) noexcept;
 
 template <class T>
     T*
-    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op);
+    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
 
 template <class T>
     T*
-    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op);
+    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
 
 template <class T>
     T*
     atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class T>
     T*
-    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m);
+    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
 
 template <class T>
     T*
-    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op);
+    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
 
 template <class T>
     T*
-    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op);
+    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
 
 template <class T>
     T*
     atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
-                              memory_order m);
+                              memory_order m) noexcept;
 template <class T>
     T*
-    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m);
+    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
 
 // Atomics for standard typedef types
 
@@ -514,8 +514,8 @@ typedef atomic<uintmax_t> atomic_uintmax
 
 // fences
 
-void atomic_thread_fence(memory_order m);
-void atomic_signal_fence(memory_order m);
+void atomic_thread_fence(memory_order m) noexcept;
+void atomic_signal_fence(memory_order m) noexcept;
 
 }  // std
 
@@ -545,7 +545,7 @@ typedef enum memory_order
 template <class _Tp>
 inline _LIBCPP_INLINE_VISIBILITY
 _Tp
-kill_dependency(_Tp __y)
+kill_dependency(_Tp __y) _NOEXCEPT
 {
     return __y;
 }
@@ -558,70 +558,70 @@ struct __atomic_base  // false
     _Atomic(_Tp) __a_;
 
     _LIBCPP_INLINE_VISIBILITY
-    bool is_lock_free() const volatile
-        {return __atomic_is_lock_free(_Tp());}
+    bool is_lock_free() const volatile _NOEXCEPT
+        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
     _LIBCPP_INLINE_VISIBILITY
-    bool is_lock_free() const
-        {return __atomic_is_lock_free(_Tp());}
+    bool is_lock_free() const _NOEXCEPT
+        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
     _LIBCPP_INLINE_VISIBILITY
-    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile
-        {__atomic_store(&__a_, __d, __m);}
+    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {__c11_atomic_store(&__a_, __d, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    void store(_Tp __d, memory_order __m = memory_order_seq_cst)
-        {__atomic_store(&__a_, __d, __m);}
+    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {__c11_atomic_store(&__a_, __d, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp load(memory_order __m = memory_order_seq_cst) const volatile
-        {return __atomic_load(&__a_, __m);}
+    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
+        {return __c11_atomic_load(&__a_, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp load(memory_order __m = memory_order_seq_cst) const
-        {return __atomic_load(&__a_, __m);}
+    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
+        {return __c11_atomic_load(&__a_, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    operator _Tp() const volatile {return load();}
+    operator _Tp() const volatile _NOEXCEPT {return load();}
     _LIBCPP_INLINE_VISIBILITY
-    operator _Tp() const          {return load();}
+    operator _Tp() const _NOEXCEPT          {return load();}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_exchange(&__a_, __d, __m);}
+    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_exchange(&__a_, __d, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst)
-        {return __atomic_exchange(&__a_, __d, __m);}
+    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_exchange(&__a_, __d, __m);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d,
-                               memory_order __s, memory_order __f) volatile
-        {return __atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
+                               memory_order __s, memory_order __f) volatile _NOEXCEPT
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d,
-                               memory_order __s, memory_order __f)
-        {return __atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
+                               memory_order __s, memory_order __f) _NOEXCEPT
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d,
-                                 memory_order __s, memory_order __f) volatile
-        {return __atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
+                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d,
-                                 memory_order __s, memory_order __f)
-        {return __atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
+                                 memory_order __s, memory_order __f) _NOEXCEPT
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d,
-                              memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
+                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d,
-                               memory_order __m = memory_order_seq_cst)
-        {return __atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
+                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d,
-                              memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
+                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d,
-                                 memory_order __m = memory_order_seq_cst)
-        {return __atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
+                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
 
     _LIBCPP_INLINE_VISIBILITY
-    __atomic_base() {} // = default;
+    __atomic_base() _NOEXCEPT {} // = default;
     _LIBCPP_INLINE_VISIBILITY
-    /*constexpr*/ __atomic_base(_Tp __d) { __atomic_store(&__a_, __d, memory_order_seq_cst); }
+    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
     __atomic_base(const __atomic_base&) = delete;
     __atomic_base& operator=(const __atomic_base&) = delete;
@@ -642,77 +642,77 @@ struct __atomic_base<_Tp, true>
 {
     typedef __atomic_base<_Tp, false> __base;
     _LIBCPP_INLINE_VISIBILITY
-    __atomic_base() {} // = default;
+    __atomic_base() _NOEXCEPT {} // = default;
     _LIBCPP_INLINE_VISIBILITY
-    /*constexpr*/ __atomic_base(_Tp __d) : __base(__d) {}
+    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
 
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_fetch_add(&this->__a_, __op, __m);}
+    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst)
-        {return __atomic_fetch_add(&this->__a_, __op, __m);}
+    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_fetch_sub(&this->__a_, __op, __m);}
+    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst)
-        {return __atomic_fetch_sub(&this->__a_, __op, __m);}
+    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_fetch_and(&this->__a_, __op, __m);}
+    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst)
-        {return __atomic_fetch_and(&this->__a_, __op, __m);}
+    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_fetch_or(&this->__a_, __op, __m);}
+    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst)
-        {return __atomic_fetch_or(&this->__a_, __op, __m);}
+    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile
-        {return __atomic_fetch_xor(&this->__a_, __op, __m);}
+    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst)
-        {return __atomic_fetch_xor(&this->__a_, __op, __m);}
+    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
 
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator++(int) volatile      {return fetch_add(_Tp(1));}
+    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator++(int)               {return fetch_add(_Tp(1));}
+    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator--(int) volatile      {return fetch_sub(_Tp(1));}
+    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator--(int)               {return fetch_sub(_Tp(1));}
+    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator++() volatile         {return fetch_add(_Tp(1)) + _Tp(1);}
+    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator++()                  {return fetch_add(_Tp(1)) + _Tp(1);}
+    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator--() volatile         {return fetch_sub(_Tp(1)) - _Tp(1);}
+    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator--()                  {return fetch_sub(_Tp(1)) - _Tp(1);}
+    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator+=(_Tp __op) volatile {return fetch_add(__op) + __op;}
+    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator+=(_Tp __op)          {return fetch_add(__op) + __op;}
+    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator-=(_Tp __op) volatile {return fetch_sub(__op) - __op;}
+    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator-=(_Tp __op)          {return fetch_sub(__op) - __op;}
+    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator&=(_Tp __op) volatile {return fetch_and(__op) & __op;}
+    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator&=(_Tp __op)          {return fetch_and(__op) & __op;}
+    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator|=(_Tp __op) volatile {return fetch_or(__op) | __op;}
+    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator|=(_Tp __op)          {return fetch_or(__op) | __op;}
+    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator^=(_Tp __op) volatile {return fetch_xor(__op) ^ __op;}
+    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator^=(_Tp __op)          {return fetch_xor(__op) ^ __op;}
+    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
 };
 
 // atomic<T>
@@ -723,15 +723,15 @@ struct atomic
 {
     typedef __atomic_base<_Tp> __base;
     _LIBCPP_INLINE_VISIBILITY
-    atomic() {} // = default;
+    atomic() _NOEXCEPT {} // = default;
     _LIBCPP_INLINE_VISIBILITY
-    /*constexpr*/ atomic(_Tp __d) : __base(__d) {}
+    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
 
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator=(_Tp __d) volatile
+    _Tp operator=(_Tp __d) volatile _NOEXCEPT
         {__base::store(__d); return __d;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp operator=(_Tp __d)
+    _Tp operator=(_Tp __d) _NOEXCEPT
         {__base::store(__d); return __d;}
 };
 
@@ -743,56 +743,56 @@ struct atomic<_Tp*>
 {
     typedef __atomic_base<_Tp*> __base;
     _LIBCPP_INLINE_VISIBILITY
-    atomic() {} // = default;
+    atomic() _NOEXCEPT {} // = default;
     _LIBCPP_INLINE_VISIBILITY
-    /*constexpr*/ atomic(_Tp* __d) : __base(__d) {}
+    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
 
     _LIBCPP_INLINE_VISIBILITY
-    _Tp* operator=(_Tp* __d) volatile
+    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
         {__base::store(__d); return __d;}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp* operator=(_Tp* __d)
+    _Tp* operator=(_Tp* __d) _NOEXCEPT
         {__base::store(__d); return __d;}
 
     _LIBCPP_INLINE_VISIBILITY
     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
-                                                                        volatile
-        {return __atomic_fetch_add(&this->__a_, __op, __m);}
+                                                                        volatile _NOEXCEPT
+        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
     _LIBCPP_INLINE_VISIBILITY
-    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)

*** DIFF OUTPUT TRUNCATED AT 1000 LINES ***


