Index: include/atomic
===================================================================
--- include/atomic
+++ include/atomic
@@ -577,11 +577,30 @@
 _LIBCPP_BEGIN_NAMESPACE_STD
 
-typedef enum memory_order
+enum class memory_order
 {
-    memory_order_relaxed, memory_order_consume, memory_order_acquire,
-    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
-} memory_order;
+    relaxed, consume, acquire, release, acq_rel, seq_cst
+};
+
+#if _LIBCPP_STD_VER > 14
+
+inline constexpr auto memory_order_relaxed = memory_order::relaxed;
+inline constexpr auto memory_order_consume = memory_order::consume;
+inline constexpr auto memory_order_acquire = memory_order::acquire;
+inline constexpr auto memory_order_release = memory_order::release;
+inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
+inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
+
+#else // these not being inline isn't currently a problem, but it may become one
+
+constexpr auto memory_order_relaxed = memory_order::relaxed;
+constexpr auto memory_order_consume = memory_order::consume;
+constexpr auto memory_order_acquire = memory_order::acquire;
+constexpr auto memory_order_release = memory_order::release;
+constexpr auto memory_order_acq_rel = memory_order::acq_rel;
+constexpr auto memory_order_seq_cst = memory_order::seq_cst;
+
+#endif
 
 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
 namespace __gcc_atomic {
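For context, a usage sketch (not part of the patch) of why the compatibility constants above matter: after the switch to a scoped enum, the old memory_order_* spellings and the new memory_order::* spellings name the same enumerators, so existing callers keep compiling. The counter and touch names are made up for illustration.

    #include <atomic>

    std::atomic<int> counter{0};

    void touch() {
        counter.store(1, std::memory_order_relaxed);   // pre-change spelling still works
        counter.store(2, std::memory_order::relaxed);  // new scoped spelling
        static_assert(std::memory_order_relaxed == std::memory_order::relaxed, "");
    }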
@@ -910,65 +929,65 @@
     _LIBCPP_INLINE_VISIBILITY
     void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
       _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
-        {__c11_atomic_store(&__a_, __d, __m);}
+        {__c11_atomic_store(&__a_, __d, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
       _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
-        {__c11_atomic_store(&__a_, __d, __m);}
+        {__c11_atomic_store(&__a_, __d, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
       _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
-        {return __c11_atomic_load(&__a_, __m);}
+        {return __c11_atomic_load(&__a_, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
       _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
-        {return __c11_atomic_load(&__a_, __m);}
+        {return __c11_atomic_load(&__a_, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     operator _Tp() const volatile _NOEXCEPT {return load();}
     _LIBCPP_INLINE_VISIBILITY
     operator _Tp() const _NOEXCEPT          {return load();}
     _LIBCPP_INLINE_VISIBILITY
     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_exchange(&__a_, __d, __m);}
+        {return __c11_atomic_exchange(&__a_, __d, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_exchange(&__a_, __d, __m);}
+        {return __c11_atomic_exchange(&__a_, __d, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
       _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
-        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<int>(__s), static_cast<int>(__f));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
-        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<int>(__s), static_cast<int>(__f));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
       _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
-        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<int>(__s), static_cast<int>(__f));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
       _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
-        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<int>(__s), static_cast<int>(__f));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<int>(__m), static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
+        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<int>(__m), static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<int>(__m), static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
+        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<int>(__m), static_cast<int>(__m));}
 
     _LIBCPP_INLINE_VISIBILITY
 #ifndef _LIBCPP_CXX03_LANG
@@ -1010,34 +1029,34 @@
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_add(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_add(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_sub(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_sub(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_and(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_and(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_or(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_or(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_xor(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_xor(&this->__a_, __op, static_cast<int>(__m));}
 
     _LIBCPP_INLINE_VISIBILITY
     _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
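A sketch of a typical caller after the hunks above (fetch_double is a hypothetical helper, not part of the patch): the compare_exchange_weak overloads are what make the usual CAS retry loop work when spelled with the scoped enumerators, assuming a C++17-or-later translation unit.

    #include <atomic>

    // Atomically double the stored value; returns the value observed
    // just before the successful exchange.
    int fetch_double(std::atomic<int>& a) {
        int expected = a.load(std::memory_order::relaxed);
        // On failure, compare_exchange_weak reloads expected with the
        // current value, so each retry works with fresh data.
        while (!a.compare_exchange_weak(expected, expected * 2,
                                        std::memory_order::acq_rel,
                                        std::memory_order::relaxed)) {
        }
        return expected;
    }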
@@ -1119,17 +1138,17 @@
     _LIBCPP_INLINE_VISIBILITY
     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_add(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_add(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_sub(&this->__a_, __op, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
+        {return __c11_atomic_fetch_sub(&this->__a_, __op, static_cast<int>(__m));}
 
     _LIBCPP_INLINE_VISIBILITY
     _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
@@ -1721,17 +1740,17 @@
     _Atomic(bool) __a_;
 
     _LIBCPP_INLINE_VISIBILITY
-    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {return __c11_atomic_exchange(&__a_, true, __m);}
+    bool test_and_set(memory_order __m = memory_order::seq_cst) volatile _NOEXCEPT
+        {return __c11_atomic_exchange(&__a_, true, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
-    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {return __c11_atomic_exchange(&__a_, true, __m);}
+    bool test_and_set(memory_order __m = memory_order::seq_cst) _NOEXCEPT
+        {return __c11_atomic_exchange(&__a_, true, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
-    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-        {__c11_atomic_store(&__a_, false, __m);}
+    void clear(memory_order __m = memory_order::seq_cst) volatile _NOEXCEPT
+        {__c11_atomic_store(&__a_, false, static_cast<int>(__m));}
     _LIBCPP_INLINE_VISIBILITY
-    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
-        {__c11_atomic_store(&__a_, false, __m);}
+    void clear(memory_order __m = memory_order::seq_cst) _NOEXCEPT
+        {__c11_atomic_store(&__a_, false, static_cast<int>(__m));}
 
     _LIBCPP_INLINE_VISIBILITY
 #ifndef _LIBCPP_CXX03_LANG
@@ -1817,14 +1836,14 @@
 void
 atomic_thread_fence(memory_order __m) _NOEXCEPT
 {
-    __c11_atomic_thread_fence(__m);
+    __c11_atomic_thread_fence(static_cast<int>(__m));
 }
 
 inline _LIBCPP_INLINE_VISIBILITY
 void
 atomic_signal_fence(memory_order __m) _NOEXCEPT
 {
-    __c11_atomic_signal_fence(__m);
+    __c11_atomic_signal_fence(static_cast<int>(__m));
 }
 
 // Atomics for standard typedef types
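To illustrate the atomic_flag and fence interfaces touched above, a minimal spinlock sketch (lock_flag, lock, and unlock are hypothetical names, not part of the patch); explicit orders are still spelled through the scoped enum:

    #include <atomic>

    std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;

    void lock() {
        // acquire: reads/writes in the critical section cannot move above this
        while (lock_flag.test_and_set(std::memory_order::acquire))
            ; // spin until the current holder clears the flag
    }

    void unlock() {
        // release: writes in the critical section become visible to the next acquirer
        lock_flag.clear(std::memory_order::release);
    }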
Index: src/experimental/memory_resource.cpp
===================================================================
--- src/experimental/memory_resource.cpp
+++ src/experimental/memory_resource.cpp
@@ -109,11 +109,11 @@
         new_res = new_res ? new_res : new_delete_resource();
         // TODO: Can a weaker ordering be used?
         return _VSTD::atomic_exchange_explicit(
-            &__res, new_res, memory_order::memory_order_acq_rel);
+            &__res, new_res, memory_order::acq_rel);
     } else {
         return _VSTD::atomic_load_explicit(
-            &__res, memory_order::memory_order_acquire);
+            &__res, memory_order::acquire);
     }
 #elif !defined(_LIBCPP_HAS_NO_THREADS)
     _LIBCPP_SAFE_STATIC static memory_resource * res = &res_init.resources.new_delete_res;
Index: test/std/atomics/atomics.order/memory_order.pass.cpp
===================================================================
--- test/std/atomics/atomics.order/memory_order.pass.cpp
+++ test/std/atomics/atomics.order/memory_order.pass.cpp
@@ -21,14 +21,22 @@
 int main(int, char**)
 {
-    assert(std::memory_order_relaxed == 0);
-    assert(std::memory_order_consume == 1);
-    assert(std::memory_order_acquire == 2);
-    assert(std::memory_order_release == 3);
-    assert(std::memory_order_acq_rel == 4);
-    assert(std::memory_order_seq_cst == 5);
+    assert(std::memory_order_relaxed == std::memory_order::relaxed);
+    assert(std::memory_order_consume == std::memory_order::consume);
+    assert(std::memory_order_acquire == std::memory_order::acquire);
+    assert(std::memory_order_release == std::memory_order::release);
+    assert(std::memory_order_acq_rel == std::memory_order::acq_rel);
+    assert(std::memory_order_seq_cst == std::memory_order::seq_cst);
+
+    assert(static_cast<int>(std::memory_order_relaxed) == 0);
+    assert(static_cast<int>(std::memory_order_consume) == 1);
+    assert(static_cast<int>(std::memory_order_acquire) == 2);
+    assert(static_cast<int>(std::memory_order_release) == 3);
+    assert(static_cast<int>(std::memory_order_acq_rel) == 4);
+    assert(static_cast<int>(std::memory_order_seq_cst) == 5);
+
     std::memory_order o = std::memory_order_seq_cst;
-    assert(o == 5);
+    assert(static_cast<int>(o) == 5);
 
-    return 0;
+  return 0;
 }
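The numeric asserts in the test pin down the enumerator values that the static_cast<int> conversions in include/atomic rely on. The same invariant can be checked at compile time; the sketch below assumes the scoped enum keeps int, the default underlying type for scoped enums, and is not part of the test suite:

    #include <atomic>
    #include <type_traits>

    // memory_order must round-trip through int for the __c11_atomic_* calls.
    static_assert(std::is_same<std::underlying_type<std::memory_order>::type,
                               int>::value,
                  "memory_order is expected to convert losslessly to int");
    static_assert(static_cast<int>(std::memory_order::relaxed) == 0, "");
    static_assert(static_cast<int>(std::memory_order::seq_cst) == 5, "");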