Diffstat (limited to 'erts')
54 files changed, 19393 insertions, 2980 deletions
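The aclocal.m4 changes that follow replace the long series of hand-written AC_TRY_LINK checks (one per _Interlocked* intrinsic) with two reusable macros: ETHR_CHK_SYNC_OP for the gcc __sync_* builtins and ETHR_CHK_INTERLOCKED for the Windows _Interlocked* intrinsics. Each invocation expands to a link test for the named operation at a given width. As a rough sketch of what one generated probe amounts to, the 3-argument, 32-bit __sync_val_compare_and_swap check compiles and links roughly the following (here `int` stands in for whichever 32-bit type configure detected; the check passes if this links):

    /* Approximate conftest body produced by
       ETHR_CHK_SYNC_OP([__sync_val_compare_and_swap], [3], [32], [$int32], ...) */
    int main(void)
    {
        int res;              /* $4: the detected 32-bit integer type */
        volatile int var;
        res = __sync_val_compare_and_swap(&var, (int) 0, (int) 0);
        return 0;
    }
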
diff --git a/erts/aclocal.m4 b/erts/aclocal.m4 index b64380e817..3ebea66d30 100644 --- a/erts/aclocal.m4 +++ b/erts/aclocal.m4 @@ -679,6 +679,55 @@ AC_SUBST(ERTS_INTERNAL_X_LIBS) ]) +AC_DEFUN(ETHR_CHK_SYNC_OP, +[ + AC_MSG_CHECKING([for $3-bit $1()]) + case "$2" in + "1") sync_call="$1(&var);";; + "2") sync_call="$1(&var, ($4) 0);";; + "3") sync_call="$1(&var, ($4) 0, ($4) 0);";; + esac + have_sync_op=no + AC_TRY_LINK([], + [ + $4 res; + volatile $4 var; + res = $sync_call + ], + [have_sync_op=yes]) + test $have_sync_op = yes && $5 + AC_MSG_RESULT([$have_sync_op]) +]) + +AC_DEFUN(ETHR_CHK_INTERLOCKED, +[ + ilckd="$1" + AC_MSG_CHECKING([for ${ilckd}()]) + case "$2" in + "1") ilckd_call="${ilckd}(var);";; + "2") ilckd_call="${ilckd}(var, ($3) 0);";; + "3") ilckd_call="${ilckd}(var, ($3) 0, ($3) 0);";; + "4") ilckd_call="${ilckd}(var, ($3) 0, ($3) 0, arr);";; + esac + have_interlocked_op=no + AC_TRY_LINK( + [ + #define WIN32_LEAN_AND_MEAN + #include <windows.h> + #include <intrin.h> + ], + [ + volatile $3 *var; + volatile $3 arr[2]; + + $ilckd_call + return 0; + ], + [have_interlocked_op=yes]) + test $have_interlocked_op = yes && $4 + AC_MSG_RESULT([$have_interlocked_op]) +]) + dnl ---------------------------------------------------------------------- dnl dnl ERL_FIND_ETHR_LIB @@ -750,121 +799,41 @@ case "$THR_LIB_NAME" in AC_DEFINE(ETHR_WIN32_THREADS, 1, [Define if you have win32 threads]) - have_ilckd=no - AC_MSG_CHECKING([for _InterlockedCompareExchange64()]) - AC_TRY_LINK([ - #define WIN32_LEAN_AND_MEAN - #include <windows.h> - ], - [ - volatile __int64 *var; - _InterlockedCompareExchange64(var, (__int64) 1, (__int64) 0); - return 0; - ], - have_ilckd=yes) - AC_MSG_RESULT([$have_ilckd]) - test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64, 1, [Define if you have _InterlockedCompareExchange64()]) - - AC_CHECK_SIZEOF(void *) - case "$ac_cv_sizeof_void_p-$have_ilckd" in - 8-no) - ethr_have_native_atomics=no - ethr_have_native_spinlock=no;; - *) - ethr_have_native_atomics=yes - ethr_have_native_spinlock=yes;; - esac - - have_ilckd=no - AC_MSG_CHECKING([for _InterlockedDecrement64()]) - AC_TRY_LINK([ - #define WIN32_LEAN_AND_MEAN - #include <windows.h> - ], - [ - volatile __int64 *var; - _InterlockedDecrement64(var); - return 0; - ], - have_ilckd=yes) - AC_MSG_RESULT([$have_ilckd]) - test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDDECREMENT64, 1, [Define if you have _InterlockedDecrement64()]) - - have_ilckd=no - AC_MSG_CHECKING([for _InterlockedIncrement64()]) - AC_TRY_LINK([ - #define WIN32_LEAN_AND_MEAN - #include <windows.h> - ], - [ - volatile __int64 *var; - _InterlockedIncrement64(var); - return 0; - ], - have_ilckd=yes) - AC_MSG_RESULT([$have_ilckd]) - test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDINCREMENT64, 1, [Define if you have _InterlockedIncrement64()]) - - have_ilckd=no - AC_MSG_CHECKING([for _InterlockedExchangeAdd64()]) - AC_TRY_LINK([ - #define WIN32_LEAN_AND_MEAN - #include <windows.h> - ], - [ - volatile __int64 *var; - _InterlockedExchangeAdd64(var, (__int64) 1); - return 0; - ], - have_ilckd=yes) - AC_MSG_RESULT([$have_ilckd]) - test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDEXCHANGEADD64, 1, [Define if you have _InterlockedExchangeAdd64()]) - - have_ilckd=no - AC_MSG_CHECKING([for _InterlockedExchange64()]) - AC_TRY_LINK([ - #define WIN32_LEAN_AND_MEAN - #include <windows.h> - ], - [ - volatile __int64 *var; - _InterlockedExchange64(var, (__int64) 1); - return 0; - ], - have_ilckd=yes) - 
AC_MSG_RESULT([$have_ilckd]) - test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDEXCHANGE64, 1, [Define if you have _InterlockedExchange64()]) - - have_ilckd=no - AC_MSG_CHECKING([for _InterlockedAnd64()]) - AC_TRY_LINK([ - #define WIN32_LEAN_AND_MEAN - #include <windows.h> - ], - [ - volatile __int64 *var; - _InterlockedAnd64(var, (__int64) 1); - return 0; - ], - have_ilckd=yes) - AC_MSG_RESULT([$have_ilckd]) - test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDAND64, 1, [Define if you have _InterlockedAnd64()]) - - have_ilckd=no - AC_MSG_CHECKING([for _InterlockedOr64()]) - AC_TRY_LINK([ - #define WIN32_LEAN_AND_MEAN - #include <windows.h> - ], - [ - volatile __int64 *var; - _InterlockedOr64(var, (__int64) 1); - return 0; - ], - have_ilckd=yes) - AC_MSG_RESULT([$have_ilckd]) - test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDOR64, 1, [Define if you have _InterlockedOr64()]) - + ETHR_CHK_INTERLOCKED([_InterlockedDecrement], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT, 1, [Define if you have _InterlockedDecrement()])) + ETHR_CHK_INTERLOCKED([_InterlockedDecrement_rel], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT_REL, 1, [Define if you have _InterlockedDecrement_rel()])) + ETHR_CHK_INTERLOCKED([_InterlockedIncrement], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT, 1, [Define if you have _InterlockedIncrement()])) + ETHR_CHK_INTERLOCKED([_InterlockedIncrement_acq], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ, 1, [Define if you have _InterlockedIncrement_acq()])) + ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD, 1, [Define if you have _InterlockedExchangeAdd()])) + ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd_acq], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ, 1, [Define if you have _InterlockedExchangeAdd_acq()])) + ETHR_CHK_INTERLOCKED([_InterlockedAnd], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDAND, 1, [Define if you have _InterlockedAnd()])) + ETHR_CHK_INTERLOCKED([_InterlockedOr], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDOR, 1, [Define if you have _InterlockedOr()])) + ETHR_CHK_INTERLOCKED([_InterlockedExchange], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGE, 1, [Define if you have _InterlockedExchange()])) + ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange], [3], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE, 1, [Define if you have _InterlockedCompareExchange()])) + test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes + ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange_acq], [3], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ, 1, [Define if you have _InterlockedCompareExchange_acq()])) + test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes + ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange_rel], [3], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL, 1, [Define if you have _InterlockedCompareExchange_rel()])) + test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes + + ETHR_CHK_INTERLOCKED([_InterlockedDecrement64], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT64, 1, [Define if you have _InterlockedDecrement64()])) + ETHR_CHK_INTERLOCKED([_InterlockedDecrement64_rel], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT64_REL, 1, [Define if you have _InterlockedDecrement64_rel()])) + 
ETHR_CHK_INTERLOCKED([_InterlockedIncrement64], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT64, 1, [Define if you have _InterlockedIncrement64()])) + ETHR_CHK_INTERLOCKED([_InterlockedIncrement64_acq], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ, 1, [Define if you have _InterlockedIncrement64_acq()])) + ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD64, 1, [Define if you have _InterlockedExchangeAdd64()])) + ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd64_acq], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ, 1, [Define if you have _InterlockedExchangeAdd64_acq()])) + ETHR_CHK_INTERLOCKED([_InterlockedAnd64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDAND64, 1, [Define if you have _InterlockedAnd64()])) + ETHR_CHK_INTERLOCKED([_InterlockedOr64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDOR64, 1, [Define if you have _InterlockedOr64()])) + ETHR_CHK_INTERLOCKED([_InterlockedExchange64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGE64, 1, [Define if you have _InterlockedExchange64()])) + ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange64], [3], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64, 1, [Define if you have _InterlockedCompareExchange64()])) + test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes + ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange64_acq], [3], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ, 1, [Define if you have _InterlockedCompareExchange64_acq()])) + test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes + ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange64_rel], [3], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL, 1, [Define if you have _InterlockedCompareExchange64_rel()])) + test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes + + ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange128], [4], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE128, 1, [Define if you have _InterlockedCompareExchange128()])) + + test "$ethr_have_native_atomics" = "yes" && ethr_have_native_spinlock=yes ;; pthread) @@ -1100,35 +1069,51 @@ case "$THR_LIB_NAME" in AC_MSG_RESULT([$linux_futex]) test $linux_futex = yes && AC_DEFINE(ETHR_HAVE_LINUX_FUTEX, 1, [Define if you have a linux futex implementation.]) - AC_MSG_CHECKING([for GCC atomic operations]) - ethr_have_gcc_atomic_ops=no - AC_TRY_LINK([], - [ - long res; - volatile long val; - res = __sync_val_compare_and_swap(&val, (long) 1, (long) 0); - res = __sync_add_and_fetch(&val, (long) 1); - res = __sync_sub_and_fetch(&val, (long) 1); - res = __sync_fetch_and_and(&val, (long) 1); - res = __sync_fetch_and_or(&val, (long) 1); - ], - [ethr_have_native_atomics=yes - ethr_have_gcc_atomic_ops=yes]) - AC_MSG_RESULT([$ethr_have_gcc_atomic_ops]) - test $ethr_have_gcc_atomic_ops = yes && AC_DEFINE(ETHR_HAVE_GCC_ATOMIC_OPS, 1, [Define if you have gcc atomic operations]) + AC_CHECK_SIZEOF(int) + AC_CHECK_SIZEOF(long) + AC_CHECK_SIZEOF(long long) + AC_CHECK_SIZEOF(__int128_t) + + if test "$ac_cv_sizeof_int" = "4"; then + int32="int" + elif test "$ac_cv_sizeof_long" = "4"; then + int32="long" + elif test "$ac_cv_sizeof_long_long" = "4"; then + int32="long long" + else + AC_MSG_ERROR([No 32-bit type found]) + fi - case "$host_cpu" in - sun4u | sparc64 | sun4v) - ethr_have_native_atomics=yes;; - i86pc | i*86 | x86_64 | 
amd64) - ethr_have_native_atomics=yes;; - macppc | ppc | "Power Macintosh") - ethr_have_native_atomics=yes;; - tile) - ethr_have_native_atomics=yes;; - *) - ;; - esac + if test "$ac_cv_sizeof_int" = "8"; then + int64="int" + elif test "$ac_cv_sizeof_long" = "8"; then + int64="long" + elif test "$ac_cv_sizeof_long_long" = "8"; then + int64="long long" + else + AC_MSG_ERROR([No 64-bit type found]) + fi + + int128=no + if test "$ac_cv_sizeof___int128_t" = "16"; then + int128="__int128_t" + fi + + ETHR_CHK_SYNC_OP([__sync_val_compare_and_swap], [3], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32, 1, [Define if you have __sync_val_compare_and_swap() for 32-bit integers])) + test "$have_sync_op" = "yes" && ethr_have_native_atomics=yes + ETHR_CHK_SYNC_OP([__sync_add_and_fetch], [2], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_ADD_AND_FETCH32, 1, [Define if you have __sync_add_and_fetch() for 32-bit integers])) + ETHR_CHK_SYNC_OP([__sync_fetch_and_and], [2], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_AND32, 1, [Define if you have __sync_fetch_and_and() for 32-bit integers])) + ETHR_CHK_SYNC_OP([__sync_fetch_and_or], [2], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_OR32, 1, [Define if you have __sync_fetch_and_or() for 32-bit integers])) + + ETHR_CHK_SYNC_OP([__sync_val_compare_and_swap], [3], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64, 1, [Define if you have __sync_val_compare_and_swap() for 64-bit integers])) + test "$have_sync_op" = "yes" && ethr_have_native_atomics=yes + ETHR_CHK_SYNC_OP([__sync_add_and_fetch], [2], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_ADD_AND_FETCH64, 1, [Define if you have __sync_add_and_fetch() for 64-bit integers])) + ETHR_CHK_SYNC_OP([__sync_fetch_and_and], [2], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_AND64, 1, [Define if you have __sync_fetch_and_and() for 64-bit integers])) + ETHR_CHK_SYNC_OP([__sync_fetch_and_or], [2], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_OR64, 1, [Define if you have __sync_fetch_and_or() for 64-bit integers])) + + if test $int128 != no; then + ETHR_CHK_SYNC_OP([__sync_val_compare_and_swap], [3], [128], [$int128], AC_DEFINE(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128, 1, [Define if you have __sync_val_compare_and_swap() for 128-bit integers])) + fi AC_MSG_CHECKING([for a usable libatomic_ops implementation]) case "x$with_libatomic_ops" in @@ -1175,6 +1160,34 @@ case "$THR_LIB_NAME" in AC_MSG_ERROR([No usable libatomic_ops implementation found]) fi + case "$host_cpu" in + sparc | sun4u | sparc64 | sun4v) + case "$with_sparc_memory_order" in + "TSO") + AC_DEFINE(ETHR_SPARC_TSO, 1, [Define if only run in Sparc TSO mode]);; + "PSO") + AC_DEFINE(ETHR_SPARC_PSO, 1, [Define if only run in Sparc PSO, or TSO mode]);; + "RMO"|"") + AC_DEFINE(ETHR_SPARC_RMO, 1, [Define if run in Sparc RMO, PSO, or TSO mode]);; + *) + AC_MSG_ERROR([Unsupported Sparc memory order: $with_sparc_memory_order]);; + esac + ethr_have_native_atomics=yes;; + i86pc | i*86 | x86_64 | amd64) + if test "$enable_x86_out_of_order" = "yes"; then + AC_DEFINE(ETHR_X86_OUT_OF_ORDER, 1, [Define if x86/x86_64 out of order instructions should be synchronized]) + fi + ethr_have_native_atomics=yes;; + macppc | ppc | "Power Macintosh") + ethr_have_native_atomics=yes;; + tile) + ethr_have_native_atomics=yes;; + *) + ;; + esac + + test ethr_have_native_atomics = "yes" && ethr_have_native_spinlock=yes + dnl Restore LIBS LIBS=$saved_libs dnl restore CPPFLAGS @@ -1210,6 +1223,8 @@ AC_CHECK_SIZEOF(long long) 
AC_DEFINE_UNQUOTED(ETHR_SIZEOF_LONG_LONG, $ac_cv_sizeof_long_long, [Define to the size of long long]) AC_CHECK_SIZEOF(__int64) AC_DEFINE_UNQUOTED(ETHR_SIZEOF___INT64, $ac_cv_sizeof___int64, [Define to the size of __int64]) +AC_CHECK_SIZEOF(__int128_t) +AC_DEFINE_UNQUOTED(ETHR_SIZEOF___INT128_T, $ac_cv_sizeof___int128_t, [Define to the size of __int128_t]) case X$erl_xcomp_bigendian in @@ -1232,6 +1247,10 @@ AC_ARG_ENABLE(native-ethr-impls, *) disable_native_ethr_impls=no ;; esac ], disable_native_ethr_impls=no) +AC_ARG_ENABLE(x86-out-of-order, + AS_HELP_STRING([--enable-x86-out-of-order], + [enable x86/x84_64 out of order support (default disabled)])) + test "X$disable_native_ethr_impls" = "Xyes" && AC_DEFINE(ETHR_DISABLE_NATIVE_IMPLS, 1, [Define if you want to disable native ethread implementations]) @@ -1250,55 +1269,101 @@ AC_ARG_WITH(libatomic_ops, AS_HELP_STRING([--with-libatomic_ops=PATH], [specify and prefer usage of libatomic_ops in the ethread library])) -AC_ARG_ENABLE(ethread-pre-pentium4-compatibility, - AS_HELP_STRING([--enable-ethread-pre-pentium4-compatibility], - [enable compatibility with x86 processors before pentium 4 (back to 486) in the ethread library]), -[ - case "$enable_ethread_pre_pentium4_compatibility" in - yes|no) ;; - *) enable_ethread_pre_pentium4_compatibility=check;; - esac -], -[enable_ethread_pre_pentium4_compatibility=check]) - -test "$cross_compiling" != "yes" || enable_ethread_pre_pentium4_compatibility=no - -case "$enable_ethread_pre_pentium4_compatibility-$host_cpu" in - check-i86pc | check-i*86) - AC_MSG_CHECKING([whether pre pentium 4 compatibility should forced]) - AC_RUN_IFELSE([ -#if defined(__GNUC__) -# if defined(ETHR_PREFER_LIBATOMIC_OPS_NATIVE_IMPLS) -# define CHECK_LIBATOMIC_OPS__ -# else -# define CHECK_GCC_ASM__ -# endif -#elif defined(ETHR_HAVE_LIBATOMIC_OPS) -# define CHECK_LIBATOMIC_OPS__ +AC_ARG_WITH(with_sparc_memory_order, + AS_HELP_STRING([--with-sparc-memory-order=TSO|PSO|RMO], + [specify sparc memory order (defaults to RMO)])) + +ETHR_X86_SSE2_ASM=no +case "$GCC-$ac_cv_sizeof_void_p-$host_cpu" in + yes-4-i86pc | yes-4-i*86 | yes-4-x86_64 | yes-4-amd64) + AC_MSG_CHECKING([for gcc sse2 asm support]) + save_CFLAGS="$CFLAGS" + CFLAGS="$CFLAGS -msse2" + gcc_sse2_asm=no + AC_TRY_COMPILE([], + [ + long long x, *y; + __asm__ __volatile__("movq %1, %0\n\t" : "=x"(x) : "m"(*y) : "memory"); + ], + [gcc_sse2_asm=yes]) + CFLAGS="$save_CFLAGS" + AC_MSG_RESULT([$gcc_sse2_asm]) + if test "$gcc_sse2_asm" = "yes"; then + AC_DEFINE(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT, 1, [Define if you use a gcc that supports -msse2 and understand sse2 specific asm statements]) + ETHR_X86_SSE2_ASM=yes + fi + ;; + *) + ;; +esac + +case "$GCC-$host_cpu" in + yes-i86pc | yes-i*86 | yes-x86_64 | yes-amd64) + gcc_dw_cmpxchg_asm=no + AC_MSG_CHECKING([for gcc double word cmpxchg asm support]) + AC_TRY_COMPILE([], + [ + char xchgd; + long new[2], xchg[2], *p; + __asm__ __volatile__( +#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__ + "pushl %%ebx\n\t" + "movl %8, %%ebx\n\t" #endif -#if defined(CHECK_LIBATOMIC_OPS__) -#include "atomic_ops.h" +#if ETHR_SIZEOF_PTR == 4 + "lock; cmpxchg8b %0\n\t" +#else + "lock; cmpxchg16b %0\n\t" #endif -int main(void) -{ -#if defined(CHECK_GCC_ASM__) - __asm__ __volatile__("mfence" : : : "memory"); -#elif defined(CHECK_LIBATOMIC_OPS__) - AO_nop_full(); + "setz %3\n\t" +#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__ + "popl %%ebx\n\t" #endif - return 0; -} + : "=m"(*p), "=d"(xchg[1]), "=a"(xchg[0]), "=c"(xchgd) + : "m"(*p), 
"1"(xchg[1]), "2"(xchg[0]), "3"(new[1]), +#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__ + "r"(new[0]) +#else + "b"(new[0]) +#endif + : "cc", "memory"); + ], - [enable_ethread_pre_pentium4_compatibility=no], - [enable_ethread_pre_pentium4_compatibility=yes], - [enable_ethread_pre_pentium4_compatibility=no]) - AC_MSG_RESULT([$enable_ethread_pre_pentium4_compatibility]);; + [gcc_dw_cmpxchg_asm=yes]) + if test $gcc_dw_cmpxchg_asm = no && test $ac_cv_sizeof_void_p = 4; then + AC_TRY_COMPILE([], + [ + char xchgd; + long new[2], xchg[2], *p; +#if !defined(__PIC__) || !__PIC__ +# error nope +#endif + __asm__ __volatile__( + "pushl %%ebx\n\t" + "movl (%7), %%ebx\n\t" + "movl 4(%7), %%ecx\n\t" + "lock; cmpxchg8b %0\n\t" + "setz %3\n\t" + "popl %%ebx\n\t" + : "=m"(*p), "=d"(xchg[1]), "=a"(xchg[0]), "=c"(xchgd) + : "m"(*p), "1"(xchg[1]), "2"(xchg[0]), "3"(new) + : "cc", "memory"); + + ], + [gcc_dw_cmpxchg_asm=yes]) + if test "$gcc_dw_cmpxchg_asm" = "yes"; then + AC_DEFINE(ETHR_CMPXCHG8B_REGISTER_SHORTAGE, 1, [Define if you get a register shortage with cmpxchg8b and position independent code]) + fi + fi + AC_MSG_RESULT([$gcc_dw_cmpxchg_asm]) + if test "$gcc_dw_cmpxchg_asm" = "yes"; then + AC_DEFINE(ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT, 1, [Define if you use a gcc that supports the double word cmpxchg instruction]) + fi;; *) ;; esac -test $enable_ethread_pre_pentium4_compatibility = yes && - AC_DEFINE(ETHR_PRE_PENTIUM4_COMPAT, 1, [Define if you want compatibilty with x86 processors before pentium4.]) + AC_DEFINE(ETHR_HAVE_ETHREAD_DEFINES, 1, \ [Define if you have all ethread defines]) @@ -1309,6 +1374,7 @@ AC_SUBST(ETHR_LIB_NAME) AC_SUBST(ETHR_DEFS) AC_SUBST(ETHR_THR_LIB_BASE) AC_SUBST(ETHR_THR_LIB_BASE_DIR) +AC_SUBST(ETHR_X86_SSE2_ASM) ]) diff --git a/erts/emulator/beam/erl_bif_info.c b/erts/emulator/beam/erl_bif_info.c index f264bf44df..ee5258f2ee 100644 --- a/erts/emulator/beam/erl_bif_info.c +++ b/erts/emulator/beam/erl_bif_info.c @@ -2545,6 +2545,70 @@ BIF_RETTYPE system_info_1(BIF_ALIST_1) hp = hsz ? HAlloc(BIF_P, hsz) : NULL; res = erts_bld_uint(&hp, NULL, erts_dist_buf_busy_limit); BIF_RET(res); + } else if (ERTS_IS_ATOM_STR("print_ethread_info", BIF_ARG_1)) { + int i; + char **str; +#ifdef ETHR_NATIVE_ATOMIC32_IMPL + erts_printf("32-bit native atomics: %s\n", + ETHR_NATIVE_ATOMIC32_IMPL); + str = ethr_native_atomic32_ops(); + for (i = 0; str[i]; i++) + erts_printf("ethr_native_atomic32_%s()\n", str[i]); +#endif +#ifdef ETHR_NATIVE_ATOMIC64_IMPL + erts_printf("64-bit native atomics: %s\n", + ETHR_NATIVE_ATOMIC64_IMPL); + str = ethr_native_atomic64_ops(); + for (i = 0; str[i]; i++) + erts_printf("ethr_native_atomic64_%s()\n", str[i]); +#endif +#ifdef ETHR_NATIVE_DW_ATOMIC_IMPL + if (ethr_have_native_dw_atomic()) { + erts_printf("Double word native atomics: %s\n", + ETHR_NATIVE_DW_ATOMIC_IMPL); + str = ethr_native_dw_atomic_ops(); + for (i = 0; str[i]; i++) + erts_printf("ethr_native_dw_atomic_%s()\n", str[i]); + str = ethr_native_su_dw_atomic_ops(); + for (i = 0; str[i]; i++) + erts_printf("ethr_native_su_dw_atomic_%s()\n", str[i]); + } +#endif +#ifdef ETHR_NATIVE_SPINLOCK_IMPL + erts_printf("Native spin-locks: %s\n", ETHR_NATIVE_SPINLOCK_IMPL); +#endif +#ifdef ETHR_NATIVE_RWSPINLOCK_IMPL + erts_printf("Native rwspin-locks: %s\n", ETHR_NATIVE_RWSPINLOCK_IMPL); +#endif +#ifdef ETHR_X86_RUNTIME_CONF_HAVE_SSE2__ + erts_printf("SSE2 support: %s\n", (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__ + ? 
"yes" : "no")); +#endif +#ifdef ETHR_X86_OUT_OF_ORDER + erts_printf("x86" +#ifdef ARCH_64 + "_64" +#endif + " out of order\n"); +#endif +#ifdef ETHR_SPARC_TSO + erts_printf("Sparc TSO\n"); +#endif +#ifdef ETHR_SPARC_PSO + erts_printf("Sparc PSO\n"); +#endif +#ifdef ETHR_SPARC_RMO + erts_printf("Sparc RMO\n"); +#endif +#if defined(ETHR_PPC_HAVE_LWSYNC) + erts_printf("Have lwsync instruction: yes\n"); +#elif defined(ETHR_PPC_HAVE_NO_LWSYNC) + erts_printf("Have lwsync instruction: no\n"); +#elif defined(ETHR_PPC_RUNTIME_CONF_HAVE_LWSYNC__) + erts_printf("Have lwsync instruction: %s (runtime test)\n", + ETHR_PPC_RUNTIME_CONF_HAVE_LWSYNC__ ? "yes" : "no"); +#endif + BIF_RET(am_true); } BIF_ERROR(BIF_P, BADARG); diff --git a/erts/emulator/beam/erl_process_lock.h b/erts/emulator/beam/erl_process_lock.h index 355179f084..1b96646999 100644 --- a/erts/emulator/beam/erl_process_lock.h +++ b/erts/emulator/beam/erl_process_lock.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2007-2010. All Rights Reserved. + * Copyright Ericsson AB 2007-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -41,10 +41,10 @@ #define ERTS_PROC_LOCK_SPINLOCK_IMPL 0 #define ERTS_PROC_LOCK_MUTEX_IMPL 0 -#if defined(ETHR_HAVE_OPTIMIZED_ATOMIC_OPS) +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) # undef ERTS_PROC_LOCK_ATOMIC_IMPL # define ERTS_PROC_LOCK_ATOMIC_IMPL 1 -#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCK) +#elif defined(ETHR_HAVE_NATIVE_SPINLOCKS) # undef ERTS_PROC_LOCK_SPINLOCK_IMPL # define ERTS_PROC_LOCK_SPINLOCK_IMPL 1 #else diff --git a/erts/emulator/beam/erl_smp.h b/erts/emulator/beam/erl_smp.h index 287327bfe1..797378b315 100644 --- a/erts/emulator/beam/erl_smp.h +++ b/erts/emulator/beam/erl_smp.h @@ -54,8 +54,9 @@ typedef erts_cnd_t erts_smp_cnd_t; typedef erts_rwmtx_opt_t erts_smp_rwmtx_opt_t; typedef erts_rwmtx_t erts_smp_rwmtx_t; typedef erts_tsd_key_t erts_smp_tsd_key_t; -typedef erts_atomic_t erts_smp_atomic_t; -typedef erts_atomic32_t erts_smp_atomic32_t; +#define erts_smp_dw_atomic_t erts_dw_atomic_t +#define erts_smp_atomic_t erts_atomic_t +#define erts_smp_atomic32_t erts_atomic32_t typedef erts_spinlock_t erts_smp_spinlock_t; typedef erts_rwlock_t erts_smp_rwlock_t; void erts_thr_fatal_error(int, char *); /* implemented in erl_init.c */ @@ -83,8 +84,9 @@ typedef struct { } erts_smp_rwmtx_opt_t; typedef int erts_smp_rwmtx_t; typedef int erts_smp_tsd_key_t; -typedef SWord erts_smp_atomic_t; -typedef Uint32 erts_smp_atomic32_t; +#define erts_smp_dw_atomic_t erts_no_dw_atomic_t +#define erts_smp_atomic_t erts_no_atomic_t +#define erts_smp_atomic32_t erts_no_atomic32_t #if __GNUC__ > 2 typedef struct { } erts_smp_spinlock_t; typedef struct { } erts_smp_rwlock_t; @@ -160,82 +162,6 @@ ERTS_GLB_INLINE int erts_smp_rwmtx_tryrwlock(erts_smp_rwmtx_t *rwmtx); ERTS_GLB_INLINE void erts_smp_rwmtx_rwunlock(erts_smp_rwmtx_t *rwmtx); ERTS_GLB_INLINE int erts_smp_lc_rwmtx_is_rlocked(erts_smp_rwmtx_t *mtx); ERTS_GLB_INLINE int erts_smp_lc_rwmtx_is_rwlocked(erts_smp_rwmtx_t *mtx); -ERTS_GLB_INLINE void erts_smp_atomic_init(erts_smp_atomic_t *var, - erts_aint_t i); -ERTS_GLB_INLINE void erts_smp_atomic_set(erts_smp_atomic_t *var, erts_aint_t i); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_read(erts_smp_atomic_t *var); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_inctest(erts_smp_atomic_t *incp); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_dectest(erts_smp_atomic_t *decp); -ERTS_GLB_INLINE 
void erts_smp_atomic_inc(erts_smp_atomic_t *incp); -ERTS_GLB_INLINE void erts_smp_atomic_dec(erts_smp_atomic_t *decp); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_addtest(erts_smp_atomic_t *addp, - erts_aint_t i); -ERTS_GLB_INLINE void erts_smp_atomic_add(erts_smp_atomic_t *addp, - erts_aint_t i); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_xchg(erts_smp_atomic_t *xchgp, - erts_aint_t new); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_cmpxchg(erts_smp_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t expected); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_bor(erts_smp_atomic_t *var, - erts_aint_t mask); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_band(erts_smp_atomic_t *var, - erts_aint_t mask); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_read_acqb(erts_smp_atomic_t *var); -ERTS_GLB_INLINE void erts_smp_atomic_set_relb(erts_smp_atomic_t *var, - erts_aint_t i); -ERTS_GLB_INLINE void erts_smp_atomic_dec_relb(erts_smp_atomic_t *decp); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_dectest_relb(erts_smp_atomic_t *decp); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_cmpxchg_acqb(erts_smp_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp); -ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_cmpxchg_relb(erts_smp_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp); -ERTS_GLB_INLINE void -erts_smp_atomic32_init(erts_smp_atomic32_t *var, erts_aint32_t i); -ERTS_GLB_INLINE void -erts_smp_atomic32_set(erts_smp_atomic32_t *var, erts_aint32_t i); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_read(erts_smp_atomic32_t *var); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_inctest(erts_smp_atomic32_t *incp); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_dectest(erts_smp_atomic32_t *decp); -ERTS_GLB_INLINE void -erts_smp_atomic32_inc(erts_smp_atomic32_t *incp); -ERTS_GLB_INLINE void -erts_smp_atomic32_dec(erts_smp_atomic32_t *decp); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_addtest(erts_smp_atomic32_t *addp, erts_aint32_t i); -ERTS_GLB_INLINE void -erts_smp_atomic32_add(erts_smp_atomic32_t *addp, erts_aint32_t i); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_xchg(erts_smp_atomic32_t *xchgp, erts_aint32_t new); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_cmpxchg(erts_smp_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t expected); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_bor(erts_smp_atomic32_t *var, erts_aint32_t mask); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_band(erts_smp_atomic32_t *var, erts_aint32_t mask); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_read_acqb(erts_smp_atomic32_t *var); -ERTS_GLB_INLINE void -erts_smp_atomic32_set_relb(erts_smp_atomic32_t *var, erts_aint32_t i); -ERTS_GLB_INLINE void -erts_smp_atomic32_dec_relb(erts_smp_atomic32_t *decp); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_dectest_relb(erts_smp_atomic32_t *decp); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_cmpxchg_acqb(erts_smp_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp); -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_cmpxchg_relb(erts_smp_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp); ERTS_GLB_INLINE void erts_smp_spinlock_init_x(erts_smp_spinlock_t *lock, char *name, Eterm extra); @@ -279,6 +205,464 @@ ERTS_GLB_INLINE void erts_smp_thr_sigmask(int how, ERTS_GLB_INLINE void erts_smp_thr_sigwait(const sigset_t *set, int *sig); #endif /* #ifdef ERTS_THR_HAVE_SIG_FUNCS */ +/* + * Functions implementing atomic operations with with no (nob), + * full (mb), acquire (acqb), release (relb), read (rb), and + * write (wb) memory 
barriers. + * + * If SMP support has been disabled, they are mapped to functions + * that performs the same operation, but aren't atomic and don't + * imply memory barriers. + */ + +#ifdef ERTS_SMP + +/* Double word size atomics */ + +#define erts_smp_dw_atomic_init_nob erts_dw_atomic_init_nob +#define erts_smp_dw_atomic_set_nob erts_dw_atomic_set_nob +#define erts_smp_dw_atomic_read_nob erts_dw_atomic_read_nob +#define erts_smp_dw_atomic_cmpxchg_nob erts_dw_atomic_cmpxchg_nob + +#define erts_smp_dw_atomic_init_mb erts_dw_atomic_init_mb +#define erts_smp_dw_atomic_set_mb erts_dw_atomic_set_mb +#define erts_smp_dw_atomic_read_mb erts_dw_atomic_read_mb +#define erts_smp_dw_atomic_cmpxchg_mb erts_dw_atomic_cmpxchg_mb + +#define erts_smp_dw_atomic_init_acqb erts_dw_atomic_init_acqb +#define erts_smp_dw_atomic_set_acqb erts_dw_atomic_set_acqb +#define erts_smp_dw_atomic_read_acqb erts_dw_atomic_read_acqb +#define erts_smp_dw_atomic_cmpxchg_acqb erts_dw_atomic_cmpxchg_acqb + +#define erts_smp_dw_atomic_init_relb erts_dw_atomic_init_relb +#define erts_smp_dw_atomic_set_relb erts_dw_atomic_set_relb +#define erts_smp_dw_atomic_read_relb erts_dw_atomic_read_relb +#define erts_smp_dw_atomic_cmpxchg_relb erts_dw_atomic_cmpxchg_relb + +#define erts_smp_dw_atomic_init_rb erts_dw_atomic_init_rb +#define erts_smp_dw_atomic_set_rb erts_dw_atomic_set_rb +#define erts_smp_dw_atomic_read_rb erts_dw_atomic_read_rb +#define erts_smp_dw_atomic_cmpxchg_rb erts_dw_atomic_cmpxchg_rb + +#define erts_smp_dw_atomic_init_wb erts_dw_atomic_init_wb +#define erts_smp_dw_atomic_set_wb erts_dw_atomic_set_wb +#define erts_smp_dw_atomic_read_wb erts_dw_atomic_read_wb +#define erts_smp_dw_atomic_cmpxchg_wb erts_dw_atomic_cmpxchg_wb + +/* Word size atomics */ + +#define erts_smp_atomic_init_nob erts_atomic_init_nob +#define erts_smp_atomic_set_nob erts_atomic_set_nob +#define erts_smp_atomic_read_nob erts_atomic_read_nob +#define erts_smp_atomic_inc_read_nob erts_atomic_inc_read_nob +#define erts_smp_atomic_dec_read_nob erts_atomic_dec_read_nob +#define erts_smp_atomic_inc_nob erts_atomic_inc_nob +#define erts_smp_atomic_dec_nob erts_atomic_dec_nob +#define erts_smp_atomic_add_read_nob erts_atomic_add_read_nob +#define erts_smp_atomic_add_nob erts_atomic_add_nob +#define erts_smp_atomic_read_bor_nob erts_atomic_read_bor_nob +#define erts_smp_atomic_read_band_nob erts_atomic_read_band_nob +#define erts_smp_atomic_xchg_nob erts_atomic_xchg_nob +#define erts_smp_atomic_cmpxchg_nob erts_atomic_cmpxchg_nob + +#define erts_smp_atomic_init_mb erts_atomic_init_mb +#define erts_smp_atomic_set_mb erts_atomic_set_mb +#define erts_smp_atomic_read_mb erts_atomic_read_mb +#define erts_smp_atomic_inc_read_mb erts_atomic_inc_read_mb +#define erts_smp_atomic_dec_read_mb erts_atomic_dec_read_mb +#define erts_smp_atomic_inc_mb erts_atomic_inc_mb +#define erts_smp_atomic_dec_mb erts_atomic_dec_mb +#define erts_smp_atomic_add_read_mb erts_atomic_add_read_mb +#define erts_smp_atomic_add_mb erts_atomic_add_mb +#define erts_smp_atomic_read_bor_mb erts_atomic_read_bor_mb +#define erts_smp_atomic_read_band_mb erts_atomic_read_band_mb +#define erts_smp_atomic_xchg_mb erts_atomic_xchg_mb +#define erts_smp_atomic_cmpxchg_mb erts_atomic_cmpxchg_mb + +#define erts_smp_atomic_init_acqb erts_atomic_init_acqb +#define erts_smp_atomic_set_acqb erts_atomic_set_acqb +#define erts_smp_atomic_read_acqb erts_atomic_read_acqb +#define erts_smp_atomic_inc_read_acqb erts_atomic_inc_read_acqb +#define erts_smp_atomic_dec_read_acqb erts_atomic_dec_read_acqb +#define 
erts_smp_atomic_inc_acqb erts_atomic_inc_acqb +#define erts_smp_atomic_dec_acqb erts_atomic_dec_acqb +#define erts_smp_atomic_add_read_acqb erts_atomic_add_read_acqb +#define erts_smp_atomic_add_acqb erts_atomic_add_acqb +#define erts_smp_atomic_read_bor_acqb erts_atomic_read_bor_acqb +#define erts_smp_atomic_read_band_acqb erts_atomic_read_band_acqb +#define erts_smp_atomic_xchg_acqb erts_atomic_xchg_acqb +#define erts_smp_atomic_cmpxchg_acqb erts_atomic_cmpxchg_acqb + +#define erts_smp_atomic_init_relb erts_atomic_init_relb +#define erts_smp_atomic_set_relb erts_atomic_set_relb +#define erts_smp_atomic_read_relb erts_atomic_read_relb +#define erts_smp_atomic_inc_read_relb erts_atomic_inc_read_relb +#define erts_smp_atomic_dec_read_relb erts_atomic_dec_read_relb +#define erts_smp_atomic_inc_relb erts_atomic_inc_relb +#define erts_smp_atomic_dec_relb erts_atomic_dec_relb +#define erts_smp_atomic_add_read_relb erts_atomic_add_read_relb +#define erts_smp_atomic_add_relb erts_atomic_add_relb +#define erts_smp_atomic_read_bor_relb erts_atomic_read_bor_relb +#define erts_smp_atomic_read_band_relb erts_atomic_read_band_relb +#define erts_smp_atomic_xchg_relb erts_atomic_xchg_relb +#define erts_smp_atomic_cmpxchg_relb erts_atomic_cmpxchg_relb + +#define erts_smp_atomic_init_rb erts_atomic_init_rb +#define erts_smp_atomic_set_rb erts_atomic_set_rb +#define erts_smp_atomic_read_rb erts_atomic_read_rb +#define erts_smp_atomic_inc_read_rb erts_atomic_inc_read_rb +#define erts_smp_atomic_dec_read_rb erts_atomic_dec_read_rb +#define erts_smp_atomic_inc_rb erts_atomic_inc_rb +#define erts_smp_atomic_dec_rb erts_atomic_dec_rb +#define erts_smp_atomic_add_read_rb erts_atomic_add_read_rb +#define erts_smp_atomic_add_rb erts_atomic_add_rb +#define erts_smp_atomic_read_bor_rb erts_atomic_read_bor_rb +#define erts_smp_atomic_read_band_rb erts_atomic_read_band_rb +#define erts_smp_atomic_xchg_rb erts_atomic_xchg_rb +#define erts_smp_atomic_cmpxchg_rb erts_atomic_cmpxchg_rb + +#define erts_smp_atomic_init_wb erts_atomic_init_wb +#define erts_smp_atomic_set_wb erts_atomic_set_wb +#define erts_smp_atomic_read_wb erts_atomic_read_wb +#define erts_smp_atomic_inc_read_wb erts_atomic_inc_read_wb +#define erts_smp_atomic_dec_read_wb erts_atomic_dec_read_wb +#define erts_smp_atomic_inc_wb erts_atomic_inc_wb +#define erts_smp_atomic_dec_wb erts_atomic_dec_wb +#define erts_smp_atomic_add_read_wb erts_atomic_add_read_wb +#define erts_smp_atomic_add_wb erts_atomic_add_wb +#define erts_smp_atomic_read_bor_wb erts_atomic_read_bor_wb +#define erts_smp_atomic_read_band_wb erts_atomic_read_band_wb +#define erts_smp_atomic_xchg_wb erts_atomic_xchg_wb +#define erts_smp_atomic_cmpxchg_wb erts_atomic_cmpxchg_wb + +/* 32-bit atomics */ + +#define erts_smp_atomic32_init_nob erts_atomic32_init_nob +#define erts_smp_atomic32_set_nob erts_atomic32_set_nob +#define erts_smp_atomic32_read_nob erts_atomic32_read_nob +#define erts_smp_atomic32_inc_read_nob erts_atomic32_inc_read_nob +#define erts_smp_atomic32_dec_read_nob erts_atomic32_dec_read_nob +#define erts_smp_atomic32_inc_nob erts_atomic32_inc_nob +#define erts_smp_atomic32_dec_nob erts_atomic32_dec_nob +#define erts_smp_atomic32_add_read_nob erts_atomic32_add_read_nob +#define erts_smp_atomic32_add_nob erts_atomic32_add_nob +#define erts_smp_atomic32_read_bor_nob erts_atomic32_read_bor_nob +#define erts_smp_atomic32_read_band_nob erts_atomic32_read_band_nob +#define erts_smp_atomic32_xchg_nob erts_atomic32_xchg_nob +#define erts_smp_atomic32_cmpxchg_nob erts_atomic32_cmpxchg_nob + 
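Under ERTS_SMP each erts_smp_* name above is a one-to-one alias for the corresponding erts_* atomic, in the six barrier flavours (nob, mb, acqb, relb, rb, wb) described in the comment at the top of this block. A minimal usage sketch of the suffixed API, using a hypothetical counter that is not part of the patch:

    /* Hypothetical reference counter; illustrates the explicit-barrier names. */
    static erts_smp_atomic32_t refc;

    void refc_init(void)
    {
        erts_smp_atomic32_init_nob(&refc, 0);        /* no barrier implied */
    }

    erts_aint32_t refc_bump(void)
    {
        return erts_smp_atomic32_inc_read_mb(&refc); /* full memory barrier */
    }
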
+#define erts_smp_atomic32_init_mb erts_atomic32_init_mb +#define erts_smp_atomic32_set_mb erts_atomic32_set_mb +#define erts_smp_atomic32_read_mb erts_atomic32_read_mb +#define erts_smp_atomic32_inc_read_mb erts_atomic32_inc_read_mb +#define erts_smp_atomic32_dec_read_mb erts_atomic32_dec_read_mb +#define erts_smp_atomic32_inc_mb erts_atomic32_inc_mb +#define erts_smp_atomic32_dec_mb erts_atomic32_dec_mb +#define erts_smp_atomic32_add_read_mb erts_atomic32_add_read_mb +#define erts_smp_atomic32_add_mb erts_atomic32_add_mb +#define erts_smp_atomic32_read_bor_mb erts_atomic32_read_bor_mb +#define erts_smp_atomic32_read_band_mb erts_atomic32_read_band_mb +#define erts_smp_atomic32_xchg_mb erts_atomic32_xchg_mb +#define erts_smp_atomic32_cmpxchg_mb erts_atomic32_cmpxchg_mb + +#define erts_smp_atomic32_init_acqb erts_atomic32_init_acqb +#define erts_smp_atomic32_set_acqb erts_atomic32_set_acqb +#define erts_smp_atomic32_read_acqb erts_atomic32_read_acqb +#define erts_smp_atomic32_inc_read_acqb erts_atomic32_inc_read_acqb +#define erts_smp_atomic32_dec_read_acqb erts_atomic32_dec_read_acqb +#define erts_smp_atomic32_inc_acqb erts_atomic32_inc_acqb +#define erts_smp_atomic32_dec_acqb erts_atomic32_dec_acqb +#define erts_smp_atomic32_add_read_acqb erts_atomic32_add_read_acqb +#define erts_smp_atomic32_add_acqb erts_atomic32_add_acqb +#define erts_smp_atomic32_read_bor_acqb erts_atomic32_read_bor_acqb +#define erts_smp_atomic32_read_band_acqb erts_atomic32_read_band_acqb +#define erts_smp_atomic32_xchg_acqb erts_atomic32_xchg_acqb +#define erts_smp_atomic32_cmpxchg_acqb erts_atomic32_cmpxchg_acqb + +#define erts_smp_atomic32_init_relb erts_atomic32_init_relb +#define erts_smp_atomic32_set_relb erts_atomic32_set_relb +#define erts_smp_atomic32_read_relb erts_atomic32_read_relb +#define erts_smp_atomic32_inc_read_relb erts_atomic32_inc_read_relb +#define erts_smp_atomic32_dec_read_relb erts_atomic32_dec_read_relb +#define erts_smp_atomic32_inc_relb erts_atomic32_inc_relb +#define erts_smp_atomic32_dec_relb erts_atomic32_dec_relb +#define erts_smp_atomic32_add_read_relb erts_atomic32_add_read_relb +#define erts_smp_atomic32_add_relb erts_atomic32_add_relb +#define erts_smp_atomic32_read_bor_relb erts_atomic32_read_bor_relb +#define erts_smp_atomic32_read_band_relb erts_atomic32_read_band_relb +#define erts_smp_atomic32_xchg_relb erts_atomic32_xchg_relb +#define erts_smp_atomic32_cmpxchg_relb erts_atomic32_cmpxchg_relb + +#define erts_smp_atomic32_init_rb erts_atomic32_init_rb +#define erts_smp_atomic32_set_rb erts_atomic32_set_rb +#define erts_smp_atomic32_read_rb erts_atomic32_read_rb +#define erts_smp_atomic32_inc_read_rb erts_atomic32_inc_read_rb +#define erts_smp_atomic32_dec_read_rb erts_atomic32_dec_read_rb +#define erts_smp_atomic32_inc_rb erts_atomic32_inc_rb +#define erts_smp_atomic32_dec_rb erts_atomic32_dec_rb +#define erts_smp_atomic32_add_read_rb erts_atomic32_add_read_rb +#define erts_smp_atomic32_add_rb erts_atomic32_add_rb +#define erts_smp_atomic32_read_bor_rb erts_atomic32_read_bor_rb +#define erts_smp_atomic32_read_band_rb erts_atomic32_read_band_rb +#define erts_smp_atomic32_xchg_rb erts_atomic32_xchg_rb +#define erts_smp_atomic32_cmpxchg_rb erts_atomic32_cmpxchg_rb + +#define erts_smp_atomic32_init_wb erts_atomic32_init_wb +#define erts_smp_atomic32_set_wb erts_atomic32_set_wb +#define erts_smp_atomic32_read_wb erts_atomic32_read_wb +#define erts_smp_atomic32_inc_read_wb erts_atomic32_inc_read_wb +#define erts_smp_atomic32_dec_read_wb erts_atomic32_dec_read_wb +#define 
erts_smp_atomic32_inc_wb erts_atomic32_inc_wb +#define erts_smp_atomic32_dec_wb erts_atomic32_dec_wb +#define erts_smp_atomic32_add_read_wb erts_atomic32_add_read_wb +#define erts_smp_atomic32_add_wb erts_atomic32_add_wb +#define erts_smp_atomic32_read_bor_wb erts_atomic32_read_bor_wb +#define erts_smp_atomic32_read_band_wb erts_atomic32_read_band_wb +#define erts_smp_atomic32_xchg_wb erts_atomic32_xchg_wb +#define erts_smp_atomic32_cmpxchg_wb erts_atomic32_cmpxchg_wb + +#else /* !ERTS_SMP */ + +/* Double word size atomics */ + +#define erts_smp_dw_atomic_init_nob erts_no_dw_atomic_set +#define erts_smp_dw_atomic_set_nob erts_no_dw_atomic_set +#define erts_smp_dw_atomic_read_nob erts_no_dw_atomic_read +#define erts_smp_dw_atomic_cmpxchg_nob erts_no_dw_atomic_cmpxchg + +#define erts_smp_dw_atomic_init_mb erts_no_dw_atomic_init +#define erts_smp_dw_atomic_set_mb erts_no_dw_atomic_set +#define erts_smp_dw_atomic_read_mb erts_no_dw_atomic_read +#define erts_smp_dw_atomic_cmpxchg_mb erts_no_dw_atomic_cmpxchg + +#define erts_smp_dw_atomic_init_acqb erts_no_dw_atomic_init +#define erts_smp_dw_atomic_set_acqb erts_no_dw_atomic_set +#define erts_smp_dw_atomic_read_acqb erts_no_dw_atomic_read +#define erts_smp_dw_atomic_cmpxchg_acqb erts_no_dw_atomic_cmpxchg + +#define erts_smp_dw_atomic_init_relb erts_no_dw_atomic_init +#define erts_smp_dw_atomic_set_relb erts_no_dw_atomic_set +#define erts_smp_dw_atomic_read_relb erts_no_dw_atomic_read +#define erts_smp_dw_atomic_cmpxchg_relb erts_no_dw_atomic_cmpxchg + +#define erts_smp_dw_atomic_init_rb erts_no_dw_atomic_init +#define erts_smp_dw_atomic_set_rb erts_no_dw_atomic_set +#define erts_smp_dw_atomic_read_rb erts_no_dw_atomic_read +#define erts_smp_dw_atomic_cmpxchg_rb erts_no_dw_atomic_cmpxchg + +#define erts_smp_dw_atomic_init_wb erts_no_dw_atomic_init +#define erts_smp_dw_atomic_set_wb erts_no_dw_atomic_set +#define erts_smp_dw_atomic_read_wb erts_no_dw_atomic_read +#define erts_smp_dw_atomic_cmpxchg_wb erts_no_dw_atomic_cmpxchg + +/* Word size atomics */ + +#define erts_smp_atomic_init_nob erts_no_atomic_set +#define erts_smp_atomic_set_nob erts_no_atomic_set +#define erts_smp_atomic_read_nob erts_no_atomic_read +#define erts_smp_atomic_inc_read_nob erts_no_atomic_inc_read +#define erts_smp_atomic_dec_read_nob erts_no_atomic_dec_read +#define erts_smp_atomic_inc_nob erts_no_atomic_inc +#define erts_smp_atomic_dec_nob erts_no_atomic_dec +#define erts_smp_atomic_add_read_nob erts_no_atomic_add_read +#define erts_smp_atomic_add_nob erts_no_atomic_add +#define erts_smp_atomic_read_bor_nob erts_no_atomic_read_bor +#define erts_smp_atomic_read_band_nob erts_no_atomic_read_band +#define erts_smp_atomic_xchg_nob erts_no_atomic_xchg +#define erts_smp_atomic_cmpxchg_nob erts_no_atomic_cmpxchg + +#define erts_smp_atomic_init_mb erts_no_atomic_set +#define erts_smp_atomic_set_mb erts_no_atomic_set +#define erts_smp_atomic_read_mb erts_no_atomic_read +#define erts_smp_atomic_inc_read_mb erts_no_atomic_inc_read +#define erts_smp_atomic_dec_read_mb erts_no_atomic_dec_read +#define erts_smp_atomic_inc_mb erts_no_atomic_inc +#define erts_smp_atomic_dec_mb erts_no_atomic_dec +#define erts_smp_atomic_add_read_mb erts_no_atomic_add_read +#define erts_smp_atomic_add_mb erts_no_atomic_add +#define erts_smp_atomic_read_bor_mb erts_no_atomic_read_bor +#define erts_smp_atomic_read_band_mb erts_no_atomic_read_band +#define erts_smp_atomic_xchg_mb erts_no_atomic_xchg +#define erts_smp_atomic_cmpxchg_mb erts_no_atomic_cmpxchg + +#define erts_smp_atomic_init_acqb 
erts_no_atomic_set +#define erts_smp_atomic_set_acqb erts_no_atomic_set +#define erts_smp_atomic_read_acqb erts_no_atomic_read +#define erts_smp_atomic_inc_read_acqb erts_no_atomic_inc_read +#define erts_smp_atomic_dec_read_acqb erts_no_atomic_dec_read +#define erts_smp_atomic_inc_acqb erts_no_atomic_inc +#define erts_smp_atomic_dec_acqb erts_no_atomic_dec +#define erts_smp_atomic_add_read_acqb erts_no_atomic_add_read +#define erts_smp_atomic_add_acqb erts_no_atomic_add +#define erts_smp_atomic_read_bor_acqb erts_no_atomic_read_bor +#define erts_smp_atomic_read_band_acqb erts_no_atomic_read_band +#define erts_smp_atomic_xchg_acqb erts_no_atomic_xchg +#define erts_smp_atomic_cmpxchg_acqb erts_no_atomic_cmpxchg + +#define erts_smp_atomic_init_relb erts_no_atomic_set +#define erts_smp_atomic_set_relb erts_no_atomic_set +#define erts_smp_atomic_read_relb erts_no_atomic_read +#define erts_smp_atomic_inc_read_relb erts_no_atomic_inc_read +#define erts_smp_atomic_dec_read_relb erts_no_atomic_dec_read +#define erts_smp_atomic_inc_relb erts_no_atomic_inc +#define erts_smp_atomic_dec_relb erts_no_atomic_dec +#define erts_smp_atomic_add_read_relb erts_no_atomic_add_read +#define erts_smp_atomic_add_relb erts_no_atomic_add +#define erts_smp_atomic_read_bor_relb erts_no_atomic_read_bor +#define erts_smp_atomic_read_band_relb erts_no_atomic_read_band +#define erts_smp_atomic_xchg_relb erts_no_atomic_xchg +#define erts_smp_atomic_cmpxchg_relb erts_no_atomic_cmpxchg + +#define erts_smp_atomic_init_rb erts_no_atomic_set +#define erts_smp_atomic_set_rb erts_no_atomic_set +#define erts_smp_atomic_read_rb erts_no_atomic_read +#define erts_smp_atomic_inc_read_rb erts_no_atomic_inc_read +#define erts_smp_atomic_dec_read_rb erts_no_atomic_dec_read +#define erts_smp_atomic_inc_rb erts_no_atomic_inc +#define erts_smp_atomic_dec_rb erts_no_atomic_dec +#define erts_smp_atomic_add_read_rb erts_no_atomic_add_read +#define erts_smp_atomic_add_rb erts_no_atomic_add +#define erts_smp_atomic_read_bor_rb erts_no_atomic_read_bor +#define erts_smp_atomic_read_band_rb erts_no_atomic_read_band +#define erts_smp_atomic_xchg_rb erts_no_atomic_xchg +#define erts_smp_atomic_cmpxchg_rb erts_no_atomic_cmpxchg + +#define erts_smp_atomic_init_wb erts_no_atomic_set +#define erts_smp_atomic_set_wb erts_no_atomic_set +#define erts_smp_atomic_read_wb erts_no_atomic_read +#define erts_smp_atomic_inc_read_wb erts_no_atomic_inc_read +#define erts_smp_atomic_dec_read_wb erts_no_atomic_dec_read +#define erts_smp_atomic_inc_wb erts_no_atomic_inc +#define erts_smp_atomic_dec_wb erts_no_atomic_dec +#define erts_smp_atomic_add_read_wb erts_no_atomic_add_read +#define erts_smp_atomic_add_wb erts_no_atomic_add +#define erts_smp_atomic_read_bor_wb erts_no_atomic_read_bor +#define erts_smp_atomic_read_band_wb erts_no_atomic_read_band +#define erts_smp_atomic_xchg_wb erts_no_atomic_xchg +#define erts_smp_atomic_cmpxchg_wb erts_no_atomic_cmpxchg + +/* 32-bit atomics */ + +#define erts_smp_atomic32_init_nob erts_no_atomic32_set +#define erts_smp_atomic32_set_nob erts_no_atomic32_set +#define erts_smp_atomic32_read_nob erts_no_atomic32_read +#define erts_smp_atomic32_inc_read_nob erts_no_atomic32_inc_read +#define erts_smp_atomic32_dec_read_nob erts_no_atomic32_dec_read +#define erts_smp_atomic32_inc_nob erts_no_atomic32_inc +#define erts_smp_atomic32_dec_nob erts_no_atomic32_dec +#define erts_smp_atomic32_add_read_nob erts_no_atomic32_add_read +#define erts_smp_atomic32_add_nob erts_no_atomic32_add +#define erts_smp_atomic32_read_bor_nob 
erts_no_atomic32_read_bor +#define erts_smp_atomic32_read_band_nob erts_no_atomic32_read_band +#define erts_smp_atomic32_xchg_nob erts_no_atomic32_xchg +#define erts_smp_atomic32_cmpxchg_nob erts_no_atomic32_cmpxchg + +#define erts_smp_atomic32_init_mb erts_no_atomic32_set +#define erts_smp_atomic32_set_mb erts_no_atomic32_set +#define erts_smp_atomic32_read_mb erts_no_atomic32_read +#define erts_smp_atomic32_inc_read_mb erts_no_atomic32_inc_read +#define erts_smp_atomic32_dec_read_mb erts_no_atomic32_dec_read +#define erts_smp_atomic32_inc_mb erts_no_atomic32_inc +#define erts_smp_atomic32_dec_mb erts_no_atomic32_dec +#define erts_smp_atomic32_add_read_mb erts_no_atomic32_add_read +#define erts_smp_atomic32_add_mb erts_no_atomic32_add +#define erts_smp_atomic32_read_bor_mb erts_no_atomic32_read_bor +#define erts_smp_atomic32_read_band_mb erts_no_atomic32_read_band +#define erts_smp_atomic32_xchg_mb erts_no_atomic32_xchg +#define erts_smp_atomic32_cmpxchg_mb erts_no_atomic32_cmpxchg + +#define erts_smp_atomic32_init_acqb erts_no_atomic32_set +#define erts_smp_atomic32_set_acqb erts_no_atomic32_set +#define erts_smp_atomic32_read_acqb erts_no_atomic32_read +#define erts_smp_atomic32_inc_read_acqb erts_no_atomic32_inc_read +#define erts_smp_atomic32_dec_read_acqb erts_no_atomic32_dec_read +#define erts_smp_atomic32_inc_acqb erts_no_atomic32_inc +#define erts_smp_atomic32_dec_acqb erts_no_atomic32_dec +#define erts_smp_atomic32_add_read_acqb erts_no_atomic32_add_read +#define erts_smp_atomic32_add_acqb erts_no_atomic32_add +#define erts_smp_atomic32_read_bor_acqb erts_no_atomic32_read_bor +#define erts_smp_atomic32_read_band_acqb erts_no_atomic32_read_band +#define erts_smp_atomic32_xchg_acqb erts_no_atomic32_xchg +#define erts_smp_atomic32_cmpxchg_acqb erts_no_atomic32_cmpxchg + +#define erts_smp_atomic32_init_relb erts_no_atomic32_set +#define erts_smp_atomic32_set_relb erts_no_atomic32_set +#define erts_smp_atomic32_read_relb erts_no_atomic32_read +#define erts_smp_atomic32_inc_read_relb erts_no_atomic32_inc_read +#define erts_smp_atomic32_dec_read_relb erts_no_atomic32_dec_read +#define erts_smp_atomic32_inc_relb erts_no_atomic32_inc +#define erts_smp_atomic32_dec_relb erts_no_atomic32_dec +#define erts_smp_atomic32_add_read_relb erts_no_atomic32_add_read +#define erts_smp_atomic32_add_relb erts_no_atomic32_add +#define erts_smp_atomic32_read_bor_relb erts_no_atomic32_read_bor +#define erts_smp_atomic32_read_band_relb erts_no_atomic32_read_band +#define erts_smp_atomic32_xchg_relb erts_no_atomic32_xchg +#define erts_smp_atomic32_cmpxchg_relb erts_no_atomic32_cmpxchg + +#define erts_smp_atomic32_init_rb erts_no_atomic32_set +#define erts_smp_atomic32_set_rb erts_no_atomic32_set +#define erts_smp_atomic32_read_rb erts_no_atomic32_read +#define erts_smp_atomic32_inc_read_rb erts_no_atomic32_inc_read +#define erts_smp_atomic32_dec_read_rb erts_no_atomic32_dec_read +#define erts_smp_atomic32_inc_rb erts_no_atomic32_inc +#define erts_smp_atomic32_dec_rb erts_no_atomic32_dec +#define erts_smp_atomic32_add_read_rb erts_no_atomic32_add_read +#define erts_smp_atomic32_add_rb erts_no_atomic32_add +#define erts_smp_atomic32_read_bor_rb erts_no_atomic32_read_bor +#define erts_smp_atomic32_read_band_rb erts_no_atomic32_read_band +#define erts_smp_atomic32_xchg_rb erts_no_atomic32_xchg +#define erts_smp_atomic32_cmpxchg_rb erts_no_atomic32_cmpxchg + +#define erts_smp_atomic32_init_wb erts_no_atomic32_set +#define erts_smp_atomic32_set_wb erts_no_atomic32_set +#define erts_smp_atomic32_read_wb 
erts_no_atomic32_read +#define erts_smp_atomic32_inc_read_wb erts_no_atomic32_inc_read +#define erts_smp_atomic32_dec_read_wb erts_no_atomic32_dec_read +#define erts_smp_atomic32_inc_wb erts_no_atomic32_inc +#define erts_smp_atomic32_dec_wb erts_no_atomic32_dec +#define erts_smp_atomic32_add_read_wb erts_no_atomic32_add_read +#define erts_smp_atomic32_add_wb erts_no_atomic32_add +#define erts_smp_atomic32_read_bor_wb erts_no_atomic32_read_bor +#define erts_smp_atomic32_read_band_wb erts_no_atomic32_read_band +#define erts_smp_atomic32_xchg_wb erts_no_atomic32_xchg +#define erts_smp_atomic32_cmpxchg_wb erts_no_atomic32_cmpxchg + +#endif /* !ERTS_SMP */ + +#ifndef ERTS_NO_DEPRECATED_ATOMICS + +/* Deprecated functions to replace */ + +#define erts_smp_atomic_init erts_smp_atomic_init_nob +#define erts_smp_atomic_set erts_smp_atomic_set_nob +#define erts_smp_atomic_read erts_smp_atomic_read_nob +#define erts_smp_atomic_inctest erts_smp_atomic_inc_read_mb +#define erts_smp_atomic_dectest erts_smp_atomic_dec_read_mb +#define erts_smp_atomic_inc erts_smp_atomic_inc_mb +#define erts_smp_atomic_dec erts_smp_atomic_dec_mb +#define erts_smp_atomic_addtest erts_smp_atomic_add_read_mb +#define erts_smp_atomic_add erts_smp_atomic_add_mb +#define erts_smp_atomic_xchg erts_smp_atomic_xchg_mb +#define erts_smp_atomic_cmpxchg erts_smp_atomic_cmpxchg_mb +#define erts_smp_atomic_bor erts_smp_atomic_read_bor_mb +#define erts_smp_atomic_band erts_smp_atomic_read_band_mb + +#define erts_smp_atomic32_init erts_smp_atomic32_init_nob +#define erts_smp_atomic32_set erts_smp_atomic32_set_nob +#define erts_smp_atomic32_read erts_smp_atomic32_read_nob +#define erts_smp_atomic32_inctest erts_smp_atomic32_inc_read_mb +#define erts_smp_atomic32_dectest erts_smp_atomic32_dec_read_mb +#define erts_smp_atomic32_inc erts_smp_atomic32_inc_mb +#define erts_smp_atomic32_dec erts_smp_atomic32_dec_mb +#define erts_smp_atomic32_addtest erts_smp_atomic32_add_read_mb +#define erts_smp_atomic32_add erts_smp_atomic32_add_mb +#define erts_smp_atomic32_xchg erts_smp_atomic32_xchg_mb +#define erts_smp_atomic32_cmpxchg erts_smp_atomic32_cmpxchg_mb +#define erts_smp_atomic32_bor erts_smp_atomic32_read_bor_mb +#define erts_smp_atomic32_band erts_smp_atomic32_read_band_mb + +#endif + #if ERTS_GLB_INLINE_INCL_FUNC_DEF @@ -655,434 +1039,6 @@ erts_smp_lc_rwmtx_is_rwlocked(erts_smp_rwmtx_t *mtx) } ERTS_GLB_INLINE void -erts_smp_atomic_init(erts_smp_atomic_t *var, erts_aint_t i) -{ -#ifdef ERTS_SMP - erts_atomic_init(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic_set(erts_smp_atomic_t *var, erts_aint_t i) -{ -#ifdef ERTS_SMP - erts_atomic_set(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_read(erts_smp_atomic_t *var) -{ -#ifdef ERTS_SMP - return erts_atomic_read(var); -#else - return *var; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_inctest(erts_smp_atomic_t *incp) -{ -#ifdef ERTS_SMP - return erts_atomic_inctest(incp); -#else - return ++(*incp); -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_dectest(erts_smp_atomic_t *decp) -{ -#ifdef ERTS_SMP - return erts_atomic_dectest(decp); -#else - return --(*decp); -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic_inc(erts_smp_atomic_t *incp) -{ -#ifdef ERTS_SMP - erts_atomic_inc(incp); -#else - ++(*incp); -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic_dec(erts_smp_atomic_t *decp) -{ -#ifdef ERTS_SMP - erts_atomic_dec(decp); -#else - --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint_t 
-erts_smp_atomic_addtest(erts_smp_atomic_t *addp, erts_aint_t i) -{ -#ifdef ERTS_SMP - return erts_atomic_addtest(addp, i); -#else - return *addp += i; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic_add(erts_smp_atomic_t *addp, erts_aint_t i) -{ -#ifdef ERTS_SMP - erts_atomic_add(addp, i); -#else - *addp += i; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_xchg(erts_smp_atomic_t *xchgp, erts_aint_t new) -{ -#ifdef ERTS_SMP - return erts_atomic_xchg(xchgp, new); -#else - erts_aint_t old; - old = *xchgp; - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_cmpxchg(erts_smp_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t expected) -{ -#ifdef ERTS_SMP - return erts_atomic_cmpxchg(xchgp, new, expected); -#else - erts_aint_t old = *xchgp; - if (old == expected) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_bor(erts_smp_atomic_t *var, erts_aint_t mask) -{ -#ifdef ERTS_SMP - return erts_atomic_bor(var, mask); -#else - erts_aint_t old; - old = *var; - *var |= mask; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_band(erts_smp_atomic_t *var, erts_aint_t mask) -{ -#ifdef ERTS_SMP - return erts_atomic_band(var, mask); -#else - erts_aint_t old; - old = *var; - *var &= mask; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_read_acqb(erts_smp_atomic_t *var) -{ -#ifdef ERTS_SMP - return erts_atomic_read_acqb(var); -#else - return *var; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic_set_relb(erts_smp_atomic_t *var, erts_aint_t i) -{ -#ifdef ERTS_SMP - erts_atomic_set_relb(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic_dec_relb(erts_smp_atomic_t *decp) -{ -#ifdef ERTS_SMP - erts_atomic_dec_relb(decp); -#else - --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_dectest_relb(erts_smp_atomic_t *decp) -{ -#ifdef ERTS_SMP - return erts_atomic_dectest_relb(decp); -#else - return --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_cmpxchg_acqb(erts_smp_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp) -{ -#ifdef ERTS_SMP - return erts_atomic_cmpxchg_acqb(xchgp, new, exp); -#else - erts_aint_t old = *xchgp; - if (old == exp) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_smp_atomic_cmpxchg_relb(erts_smp_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp) -{ -#ifdef ERTS_SMP - return erts_atomic_cmpxchg_relb(xchgp, new, exp); -#else - erts_aint_t old = *xchgp; - if (old == exp) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic32_init(erts_smp_atomic32_t *var, erts_aint32_t i) -{ -#ifdef ERTS_SMP - erts_atomic32_init(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic32_set(erts_smp_atomic32_t *var, erts_aint32_t i) -{ -#ifdef ERTS_SMP - erts_atomic32_set(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_read(erts_smp_atomic32_t *var) -{ -#ifdef ERTS_SMP - return erts_atomic32_read(var); -#else - return *var; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_inctest(erts_smp_atomic32_t *incp) -{ -#ifdef ERTS_SMP - return erts_atomic32_inctest(incp); -#else - return ++(*incp); -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_dectest(erts_smp_atomic32_t *decp) -{ -#ifdef ERTS_SMP - return erts_atomic32_dectest(decp); -#else - return --(*decp); -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic32_inc(erts_smp_atomic32_t *incp) -{ -#ifdef 
ERTS_SMP - erts_atomic32_inc(incp); -#else - ++(*incp); -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic32_dec(erts_smp_atomic32_t *decp) -{ -#ifdef ERTS_SMP - erts_atomic32_dec(decp); -#else - --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_addtest(erts_smp_atomic32_t *addp, erts_aint32_t i) -{ -#ifdef ERTS_SMP - return erts_atomic32_addtest(addp, i); -#else - return *addp += i; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic32_add(erts_smp_atomic32_t *addp, erts_aint32_t i) -{ -#ifdef ERTS_SMP - erts_atomic32_add(addp, i); -#else - *addp += i; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_xchg(erts_smp_atomic32_t *xchgp, erts_aint32_t new) -{ -#ifdef ERTS_SMP - return erts_atomic32_xchg(xchgp, new); -#else - erts_aint32_t old; - old = *xchgp; - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_cmpxchg(erts_smp_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t expected) -{ -#ifdef ERTS_SMP - return erts_atomic32_cmpxchg(xchgp, new, expected); -#else - erts_aint32_t old = *xchgp; - if (old == expected) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_bor(erts_smp_atomic32_t *var, erts_aint32_t mask) -{ -#ifdef ERTS_SMP - return erts_atomic32_bor(var, mask); -#else - erts_aint32_t old; - old = *var; - *var |= mask; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_band(erts_smp_atomic32_t *var, erts_aint32_t mask) -{ -#ifdef ERTS_SMP - return erts_atomic32_band(var, mask); -#else - erts_aint32_t old; - old = *var; - *var &= mask; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_read_acqb(erts_smp_atomic32_t *var) -{ -#ifdef ERTS_SMP - return erts_atomic32_read_acqb(var); -#else - return *var; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic32_set_relb(erts_smp_atomic32_t *var, erts_aint32_t i) -{ -#ifdef ERTS_SMP - erts_atomic32_set_relb(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE void -erts_smp_atomic32_dec_relb(erts_smp_atomic32_t *decp) -{ -#ifdef ERTS_SMP - erts_atomic32_dec_relb(decp); -#else - --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_dectest_relb(erts_smp_atomic32_t *decp) -{ -#ifdef ERTS_SMP - return erts_atomic32_dectest_relb(decp); -#else - return --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_cmpxchg_acqb(erts_smp_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp) -{ -#ifdef ERTS_SMP - return erts_atomic32_cmpxchg_acqb(xchgp, new, exp); -#else - erts_aint32_t old = *xchgp; - if (old == exp) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_smp_atomic32_cmpxchg_relb(erts_smp_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp) -{ -#ifdef ERTS_SMP - return erts_atomic32_cmpxchg_relb(xchgp, new, exp); -#else - erts_aint32_t old = *xchgp; - if (old == exp) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE void erts_smp_spinlock_init_x(erts_smp_spinlock_t *lock, char *name, Eterm extra) { #ifdef ERTS_SMP @@ -1308,3 +1264,37 @@ erts_smp_thr_sigwait(const sigset_t *set, int *sig) #endif /* #if ERTS_GLB_INLINE_INCL_FUNC_DEF */ #endif /* ERL_SMP_H */ + +#ifdef ERTS_UNDEF_DEPRECATED_ATOMICS + +/* Deprecated functions to replace */ + +#undef erts_smp_atomic_init +#undef erts_smp_atomic_set +#undef erts_smp_atomic_read +#undef erts_smp_atomic_inctest +#undef erts_smp_atomic_dectest +#undef erts_smp_atomic_inc +#undef erts_smp_atomic_dec +#undef erts_smp_atomic_addtest 
+#undef erts_smp_atomic_add +#undef erts_smp_atomic_xchg +#undef erts_smp_atomic_cmpxchg +#undef erts_smp_atomic_bor +#undef erts_smp_atomic_band + +#undef erts_smp_atomic32_init +#undef erts_smp_atomic32_set +#undef erts_smp_atomic32_read +#undef erts_smp_atomic32_inctest +#undef erts_smp_atomic32_dectest +#undef erts_smp_atomic32_inc +#undef erts_smp_atomic32_dec +#undef erts_smp_atomic32_addtest +#undef erts_smp_atomic32_add +#undef erts_smp_atomic32_xchg +#undef erts_smp_atomic32_cmpxchg +#undef erts_smp_atomic32_bor +#undef erts_smp_atomic32_band + +#endif diff --git a/erts/emulator/beam/erl_threads.h b/erts/emulator/beam/erl_threads.h index 8c9cace0c5..a2833b3580 100644 --- a/erts/emulator/beam/erl_threads.h +++ b/erts/emulator/beam/erl_threads.h @@ -28,6 +28,11 @@ #define ERTS_SPIN_BODY ETHR_SPIN_BODY #include "sys.h" + +typedef struct { SWord sint[2]; } erts_no_dw_atomic_t; +typedef SWord erts_no_atomic_t; +typedef Sint32 erts_no_atomic32_t; + #ifdef USE_THREADS #define ETHR_TRY_INLINE_FUNCS @@ -99,10 +104,12 @@ typedef ethr_rwmutex_opt erts_rwmtx_opt_t; typedef ethr_tsd_key erts_tsd_key_t; typedef ethr_ts_event erts_tse_t; -typedef ethr_sint_t erts_aint_t; -typedef ethr_atomic_t erts_atomic_t; -typedef ethr_sint32_t erts_aint32_t; -typedef ethr_atomic32_t erts_atomic32_t; +#define erts_dw_aint_t ethr_dw_sint_t +#define erts_dw_atomic_t ethr_dw_atomic_t +#define erts_aint_t ethr_sint_t +#define erts_atomic_t ethr_atomic_t +#define erts_aint32_t ethr_sint32_t +#define erts_atomic32_t ethr_atomic32_t /* spinlock */ typedef struct { @@ -164,10 +171,12 @@ typedef struct { typedef int erts_rwmtx_t; typedef int erts_tsd_key_t; typedef int erts_tse_t; -typedef SWord erts_aint_t; -typedef SWord erts_atomic_t; -typedef SWord erts_aint32_t; -typedef SWord erts_atomic32_t; +#define erts_dw_aint_t erts_no_dw_atomic_t +#define erts_dw_atomic_t erts_no_dw_atomic_t +#define erts_aint_t SWord +#define erts_atomic_t erts_no_atomic_t +#define erts_aint32_t Sint32 +#define erts_atomic32_t erts_no_atomic32_t #if __GNUC__ > 2 typedef struct { } erts_spinlock_t; typedef struct { } erts_rwlock_t; @@ -247,65 +256,51 @@ ERTS_GLB_INLINE int erts_rwmtx_tryrwlock(erts_rwmtx_t *rwmtx); ERTS_GLB_INLINE void erts_rwmtx_rwunlock(erts_rwmtx_t *rwmtx); ERTS_GLB_INLINE int erts_lc_rwmtx_is_rlocked(erts_rwmtx_t *mtx); ERTS_GLB_INLINE int erts_lc_rwmtx_is_rwlocked(erts_rwmtx_t *mtx); -ERTS_GLB_INLINE void erts_atomic_init(erts_atomic_t *var, erts_aint_t i); -ERTS_GLB_INLINE void erts_atomic_set(erts_atomic_t *var, erts_aint_t i); -ERTS_GLB_INLINE erts_aint_t erts_atomic_read(erts_atomic_t *var); -ERTS_GLB_INLINE erts_aint_t erts_atomic_inctest(erts_atomic_t *incp); -ERTS_GLB_INLINE erts_aint_t erts_atomic_dectest(erts_atomic_t *decp); -ERTS_GLB_INLINE void erts_atomic_inc(erts_atomic_t *incp); -ERTS_GLB_INLINE void erts_atomic_dec(erts_atomic_t *decp); -ERTS_GLB_INLINE erts_aint_t erts_atomic_addtest(erts_atomic_t *addp, - erts_aint_t i); -ERTS_GLB_INLINE void erts_atomic_add(erts_atomic_t *addp, erts_aint_t i); -ERTS_GLB_INLINE erts_aint_t erts_atomic_xchg(erts_atomic_t *xchgp, - erts_aint_t new); -ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg(erts_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t expected); -ERTS_GLB_INLINE erts_aint_t erts_atomic_bor(erts_atomic_t *var, - erts_aint_t mask); -ERTS_GLB_INLINE erts_aint_t erts_atomic_band(erts_atomic_t *var, - erts_aint_t mask); -ERTS_GLB_INLINE erts_aint_t erts_atomic_read_acqb(erts_atomic_t *var); -ERTS_GLB_INLINE void erts_atomic_set_relb(erts_atomic_t *var, 
erts_aint_t i); -ERTS_GLB_INLINE void erts_atomic_dec_relb(erts_atomic_t *decp); -ERTS_GLB_INLINE erts_aint_t erts_atomic_dectest_relb(erts_atomic_t *decp); -ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_acqb(erts_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp); -ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_relb(erts_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp); -ERTS_GLB_INLINE void erts_atomic32_init(erts_atomic32_t *var, erts_aint32_t i); -ERTS_GLB_INLINE void erts_atomic32_set(erts_atomic32_t *var, erts_aint32_t i); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_read(erts_atomic32_t *var); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_inctest(erts_atomic32_t *incp); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_dectest(erts_atomic32_t *decp); -ERTS_GLB_INLINE void erts_atomic32_inc(erts_atomic32_t *incp); -ERTS_GLB_INLINE void erts_atomic32_dec(erts_atomic32_t *decp); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_addtest(erts_atomic32_t *addp, - erts_aint32_t i); -ERTS_GLB_INLINE void erts_atomic32_add(erts_atomic32_t *addp, erts_aint32_t i); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_xchg(erts_atomic32_t *xchgp, - erts_aint32_t new); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_cmpxchg(erts_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t expected); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_bor(erts_atomic32_t *var, - erts_aint32_t mask); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_band(erts_atomic32_t *var, - erts_aint32_t mask); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_read_acqb(erts_atomic32_t *var); -ERTS_GLB_INLINE void erts_atomic32_set_relb(erts_atomic32_t *var, - erts_aint32_t i); -ERTS_GLB_INLINE void erts_atomic32_dec_relb(erts_atomic32_t *decp); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_dectest_relb(erts_atomic32_t *decp); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_cmpxchg_acqb(erts_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp); -ERTS_GLB_INLINE erts_aint32_t erts_atomic32_cmpxchg_relb(erts_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp); + +ERTS_GLB_INLINE void erts_no_dw_atomic_set(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val); +ERTS_GLB_INLINE void erts_no_dw_atomic_read(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val); +ERTS_GLB_INLINE int erts_no_dw_atomic_cmpxchg(erts_no_dw_atomic_t *var, + erts_no_dw_atomic_t *val, + erts_no_dw_atomic_t *old_val); +ERTS_GLB_INLINE void erts_no_atomic_set(erts_no_atomic_t *var, erts_aint_t i); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_read(erts_no_atomic_t *var); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_inc_read(erts_no_atomic_t *incp); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_dec_read(erts_no_atomic_t *decp); +ERTS_GLB_INLINE void erts_no_atomic_inc(erts_no_atomic_t *incp); +ERTS_GLB_INLINE void erts_no_atomic_dec(erts_no_atomic_t *decp); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_add_read(erts_no_atomic_t *addp, + erts_aint_t i); +ERTS_GLB_INLINE void erts_no_atomic_add(erts_no_atomic_t *addp, erts_aint_t i); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_read_bor(erts_no_atomic_t *var, + erts_aint_t mask); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_read_band(erts_no_atomic_t *var, + erts_aint_t mask); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_xchg(erts_no_atomic_t *xchgp, + erts_aint_t new); +ERTS_GLB_INLINE erts_aint_t erts_no_atomic_cmpxchg(erts_no_atomic_t *xchgp, + erts_aint_t new, + erts_aint_t expected); +ERTS_GLB_INLINE void erts_no_atomic32_set(erts_no_atomic32_t *var, + erts_aint32_t i); +ERTS_GLB_INLINE erts_aint32_t 
erts_no_atomic32_read(erts_no_atomic32_t *var); +ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_inc_read(erts_no_atomic32_t *incp); +ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_dec_read(erts_no_atomic32_t *decp); +ERTS_GLB_INLINE void erts_no_atomic32_inc(erts_no_atomic32_t *incp); +ERTS_GLB_INLINE void erts_no_atomic32_dec(erts_no_atomic32_t *decp); +ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_add_read(erts_no_atomic32_t *addp, + erts_aint32_t i); +ERTS_GLB_INLINE void erts_no_atomic32_add(erts_no_atomic32_t *addp, + erts_aint32_t i); +ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_read_bor(erts_no_atomic32_t *var, + erts_aint32_t mask); +ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_read_band(erts_no_atomic32_t *var, + erts_aint32_t mask); +ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_xchg(erts_no_atomic32_t *xchgp, + erts_aint32_t new); +ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_cmpxchg(erts_no_atomic32_t *xchgp, + erts_aint32_t new, + erts_aint32_t expected); + ERTS_GLB_INLINE void erts_spinlock_init_x_opt(erts_spinlock_t *lock, char *name, Eterm extra, @@ -362,6 +357,464 @@ ERTS_GLB_INLINE void erts_thr_sigmask(int how, const sigset_t *set, ERTS_GLB_INLINE void erts_thr_sigwait(const sigset_t *set, int *sig); #endif /* #ifdef HAVE_ETHR_SIG_FUNCS */ +/* + * Functions implementing atomic operations with with no (nob), + * full (mb), acquire (acqb), release (relb), read (rb), and + * write (wb) memory barriers. + * + * If thread support has been disabled, they are mapped to + * functions that performs the same operation, but aren't atomic + * and don't imply memory barriers. + */ + +#ifdef USE_THREADS + +/* Double word size atomics */ + +#define erts_dw_atomic_init_nob ethr_dw_atomic_init +#define erts_dw_atomic_set_nob ethr_dw_atomic_set +#define erts_dw_atomic_read_nob ethr_dw_atomic_read +#define erts_dw_atomic_cmpxchg_nob ethr_dw_atomic_cmpxchg + +#define erts_dw_atomic_init_mb ethr_dw_atomic_init_mb +#define erts_dw_atomic_set_mb ethr_dw_atomic_set_mb +#define erts_dw_atomic_read_mb ethr_dw_atomic_read_mb +#define erts_dw_atomic_cmpxchg_mb ethr_dw_atomic_cmpxchg_mb + +#define erts_dw_atomic_init_acqb ethr_dw_atomic_init_acqb +#define erts_dw_atomic_set_acqb ethr_dw_atomic_set_acqb +#define erts_dw_atomic_read_acqb ethr_dw_atomic_read_acqb +#define erts_dw_atomic_cmpxchg_acqb ethr_dw_atomic_cmpxchg_acqb + +#define erts_dw_atomic_init_relb ethr_dw_atomic_init_relb +#define erts_dw_atomic_set_relb ethr_dw_atomic_set_relb +#define erts_dw_atomic_read_relb ethr_dw_atomic_read_relb +#define erts_dw_atomic_cmpxchg_relb ethr_dw_atomic_cmpxchg_relb + +#define erts_dw_atomic_init_rb ethr_dw_atomic_init_rb +#define erts_dw_atomic_set_rb ethr_dw_atomic_set_rb +#define erts_dw_atomic_read_rb ethr_dw_atomic_read_rb +#define erts_dw_atomic_cmpxchg_rb ethr_dw_atomic_cmpxchg_rb + +#define erts_dw_atomic_init_wb ethr_dw_atomic_init_wb +#define erts_dw_atomic_set_wb ethr_dw_atomic_set_wb +#define erts_dw_atomic_read_wb ethr_dw_atomic_read_wb +#define erts_dw_atomic_cmpxchg_wb ethr_dw_atomic_cmpxchg_wb + +/* Word size atomics */ + +#define erts_atomic_init_nob ethr_atomic_init +#define erts_atomic_set_nob ethr_atomic_set +#define erts_atomic_read_nob ethr_atomic_read +#define erts_atomic_inc_read_nob ethr_atomic_inc_read +#define erts_atomic_dec_read_nob ethr_atomic_dec_read +#define erts_atomic_inc_nob ethr_atomic_inc +#define erts_atomic_dec_nob ethr_atomic_dec +#define erts_atomic_add_read_nob ethr_atomic_add_read +#define erts_atomic_add_nob ethr_atomic_add +#define 
erts_atomic_read_bor_nob ethr_atomic_read_bor +#define erts_atomic_read_band_nob ethr_atomic_read_band +#define erts_atomic_xchg_nob ethr_atomic_xchg +#define erts_atomic_cmpxchg_nob ethr_atomic_cmpxchg + +#define erts_atomic_init_mb ethr_atomic_init_mb +#define erts_atomic_set_mb ethr_atomic_set_mb +#define erts_atomic_read_mb ethr_atomic_read_mb +#define erts_atomic_inc_read_mb ethr_atomic_inc_read_mb +#define erts_atomic_dec_read_mb ethr_atomic_dec_read_mb +#define erts_atomic_inc_mb ethr_atomic_inc_mb +#define erts_atomic_dec_mb ethr_atomic_dec_mb +#define erts_atomic_add_read_mb ethr_atomic_add_read_mb +#define erts_atomic_add_mb ethr_atomic_add_mb +#define erts_atomic_read_bor_mb ethr_atomic_read_bor_mb +#define erts_atomic_read_band_mb ethr_atomic_read_band_mb +#define erts_atomic_xchg_mb ethr_atomic_xchg_mb +#define erts_atomic_cmpxchg_mb ethr_atomic_cmpxchg_mb + +#define erts_atomic_init_acqb ethr_atomic_init_acqb +#define erts_atomic_set_acqb ethr_atomic_set_acqb +#define erts_atomic_read_acqb ethr_atomic_read_acqb +#define erts_atomic_inc_read_acqb ethr_atomic_inc_read_acqb +#define erts_atomic_dec_read_acqb ethr_atomic_dec_read_acqb +#define erts_atomic_inc_acqb ethr_atomic_inc_acqb +#define erts_atomic_dec_acqb ethr_atomic_dec_acqb +#define erts_atomic_add_read_acqb ethr_atomic_add_read_acqb +#define erts_atomic_add_acqb ethr_atomic_add_acqb +#define erts_atomic_read_bor_acqb ethr_atomic_read_bor_acqb +#define erts_atomic_read_band_acqb ethr_atomic_read_band_acqb +#define erts_atomic_xchg_acqb ethr_atomic_xchg_acqb +#define erts_atomic_cmpxchg_acqb ethr_atomic_cmpxchg_acqb + +#define erts_atomic_init_relb ethr_atomic_init_relb +#define erts_atomic_set_relb ethr_atomic_set_relb +#define erts_atomic_read_relb ethr_atomic_read_relb +#define erts_atomic_inc_read_relb ethr_atomic_inc_read_relb +#define erts_atomic_dec_read_relb ethr_atomic_dec_read_relb +#define erts_atomic_inc_relb ethr_atomic_inc_relb +#define erts_atomic_dec_relb ethr_atomic_dec_relb +#define erts_atomic_add_read_relb ethr_atomic_add_read_relb +#define erts_atomic_add_relb ethr_atomic_add_relb +#define erts_atomic_read_bor_relb ethr_atomic_read_bor_relb +#define erts_atomic_read_band_relb ethr_atomic_read_band_relb +#define erts_atomic_xchg_relb ethr_atomic_xchg_relb +#define erts_atomic_cmpxchg_relb ethr_atomic_cmpxchg_relb + +#define erts_atomic_init_rb ethr_atomic_init_rb +#define erts_atomic_set_rb ethr_atomic_set_rb +#define erts_atomic_read_rb ethr_atomic_read_rb +#define erts_atomic_inc_read_rb ethr_atomic_inc_read_rb +#define erts_atomic_dec_read_rb ethr_atomic_dec_read_rb +#define erts_atomic_inc_rb ethr_atomic_inc_rb +#define erts_atomic_dec_rb ethr_atomic_dec_rb +#define erts_atomic_add_read_rb ethr_atomic_add_read_rb +#define erts_atomic_add_rb ethr_atomic_add_rb +#define erts_atomic_read_bor_rb ethr_atomic_read_bor_rb +#define erts_atomic_read_band_rb ethr_atomic_read_band_rb +#define erts_atomic_xchg_rb ethr_atomic_xchg_rb +#define erts_atomic_cmpxchg_rb ethr_atomic_cmpxchg_rb + +#define erts_atomic_init_wb ethr_atomic_init_wb +#define erts_atomic_set_wb ethr_atomic_set_wb +#define erts_atomic_read_wb ethr_atomic_read_wb +#define erts_atomic_inc_read_wb ethr_atomic_inc_read_wb +#define erts_atomic_dec_read_wb ethr_atomic_dec_read_wb +#define erts_atomic_inc_wb ethr_atomic_inc_wb +#define erts_atomic_dec_wb ethr_atomic_dec_wb +#define erts_atomic_add_read_wb ethr_atomic_add_read_wb +#define erts_atomic_add_wb ethr_atomic_add_wb +#define erts_atomic_read_bor_wb ethr_atomic_read_bor_wb +#define 
erts_atomic_read_band_wb ethr_atomic_read_band_wb +#define erts_atomic_xchg_wb ethr_atomic_xchg_wb +#define erts_atomic_cmpxchg_wb ethr_atomic_cmpxchg_wb + +/* 32-bit atomics */ + +#define erts_atomic32_init_nob ethr_atomic32_init +#define erts_atomic32_set_nob ethr_atomic32_set +#define erts_atomic32_read_nob ethr_atomic32_read +#define erts_atomic32_inc_read_nob ethr_atomic32_inc_read +#define erts_atomic32_dec_read_nob ethr_atomic32_dec_read +#define erts_atomic32_inc_nob ethr_atomic32_inc +#define erts_atomic32_dec_nob ethr_atomic32_dec +#define erts_atomic32_add_read_nob ethr_atomic32_add_read +#define erts_atomic32_add_nob ethr_atomic32_add +#define erts_atomic32_read_bor_nob ethr_atomic32_read_bor +#define erts_atomic32_read_band_nob ethr_atomic32_read_band +#define erts_atomic32_xchg_nob ethr_atomic32_xchg +#define erts_atomic32_cmpxchg_nob ethr_atomic32_cmpxchg + +#define erts_atomic32_init_mb ethr_atomic32_init_mb +#define erts_atomic32_set_mb ethr_atomic32_set_mb +#define erts_atomic32_read_mb ethr_atomic32_read_mb +#define erts_atomic32_inc_read_mb ethr_atomic32_inc_read_mb +#define erts_atomic32_dec_read_mb ethr_atomic32_dec_read_mb +#define erts_atomic32_inc_mb ethr_atomic32_inc_mb +#define erts_atomic32_dec_mb ethr_atomic32_dec_mb +#define erts_atomic32_add_read_mb ethr_atomic32_add_read_mb +#define erts_atomic32_add_mb ethr_atomic32_add_mb +#define erts_atomic32_read_bor_mb ethr_atomic32_read_bor_mb +#define erts_atomic32_read_band_mb ethr_atomic32_read_band_mb +#define erts_atomic32_xchg_mb ethr_atomic32_xchg_mb +#define erts_atomic32_cmpxchg_mb ethr_atomic32_cmpxchg_mb + +#define erts_atomic32_init_acqb ethr_atomic32_init_acqb +#define erts_atomic32_set_acqb ethr_atomic32_set_acqb +#define erts_atomic32_read_acqb ethr_atomic32_read_acqb +#define erts_atomic32_inc_read_acqb ethr_atomic32_inc_read_acqb +#define erts_atomic32_dec_read_acqb ethr_atomic32_dec_read_acqb +#define erts_atomic32_inc_acqb ethr_atomic32_inc_acqb +#define erts_atomic32_dec_acqb ethr_atomic32_dec_acqb +#define erts_atomic32_add_read_acqb ethr_atomic32_add_read_acqb +#define erts_atomic32_add_acqb ethr_atomic32_add_acqb +#define erts_atomic32_read_bor_acqb ethr_atomic32_read_bor_acqb +#define erts_atomic32_read_band_acqb ethr_atomic32_read_band_acqb +#define erts_atomic32_xchg_acqb ethr_atomic32_xchg_acqb +#define erts_atomic32_cmpxchg_acqb ethr_atomic32_cmpxchg_acqb + +#define erts_atomic32_init_relb ethr_atomic32_init_relb +#define erts_atomic32_set_relb ethr_atomic32_set_relb +#define erts_atomic32_read_relb ethr_atomic32_read_relb +#define erts_atomic32_inc_read_relb ethr_atomic32_inc_read_relb +#define erts_atomic32_dec_read_relb ethr_atomic32_dec_read_relb +#define erts_atomic32_inc_relb ethr_atomic32_inc_relb +#define erts_atomic32_dec_relb ethr_atomic32_dec_relb +#define erts_atomic32_add_read_relb ethr_atomic32_add_read_relb +#define erts_atomic32_add_relb ethr_atomic32_add_relb +#define erts_atomic32_read_bor_relb ethr_atomic32_read_bor_relb +#define erts_atomic32_read_band_relb ethr_atomic32_read_band_relb +#define erts_atomic32_xchg_relb ethr_atomic32_xchg_relb +#define erts_atomic32_cmpxchg_relb ethr_atomic32_cmpxchg_relb + +#define erts_atomic32_init_rb ethr_atomic32_init_rb +#define erts_atomic32_set_rb ethr_atomic32_set_rb +#define erts_atomic32_read_rb ethr_atomic32_read_rb +#define erts_atomic32_inc_read_rb ethr_atomic32_inc_read_rb +#define erts_atomic32_dec_read_rb ethr_atomic32_dec_read_rb +#define erts_atomic32_inc_rb ethr_atomic32_inc_rb +#define erts_atomic32_dec_rb 
ethr_atomic32_dec_rb +#define erts_atomic32_add_read_rb ethr_atomic32_add_read_rb +#define erts_atomic32_add_rb ethr_atomic32_add_rb +#define erts_atomic32_read_bor_rb ethr_atomic32_read_bor_rb +#define erts_atomic32_read_band_rb ethr_atomic32_read_band_rb +#define erts_atomic32_xchg_rb ethr_atomic32_xchg_rb +#define erts_atomic32_cmpxchg_rb ethr_atomic32_cmpxchg_rb + +#define erts_atomic32_init_wb ethr_atomic32_init_wb +#define erts_atomic32_set_wb ethr_atomic32_set_wb +#define erts_atomic32_read_wb ethr_atomic32_read_wb +#define erts_atomic32_inc_read_wb ethr_atomic32_inc_read_wb +#define erts_atomic32_dec_read_wb ethr_atomic32_dec_read_wb +#define erts_atomic32_inc_wb ethr_atomic32_inc_wb +#define erts_atomic32_dec_wb ethr_atomic32_dec_wb +#define erts_atomic32_add_read_wb ethr_atomic32_add_read_wb +#define erts_atomic32_add_wb ethr_atomic32_add_wb +#define erts_atomic32_read_bor_wb ethr_atomic32_read_bor_wb +#define erts_atomic32_read_band_wb ethr_atomic32_read_band_wb +#define erts_atomic32_xchg_wb ethr_atomic32_xchg_wb +#define erts_atomic32_cmpxchg_wb ethr_atomic32_cmpxchg_wb + +#else /* !USE_THREADS */ + +/* Double word size atomics */ + +#define erts_dw_atomic_init_nob erts_no_dw_atomic_set +#define erts_dw_atomic_set_nob erts_no_dw_atomic_set +#define erts_dw_atomic_read_nob erts_no_dw_atomic_read +#define erts_dw_atomic_cmpxchg_nob erts_no_dw_atomic_cmpxchg + +#define erts_dw_atomic_init_mb erts_no_dw_atomic_init +#define erts_dw_atomic_set_mb erts_no_dw_atomic_set +#define erts_dw_atomic_read_mb erts_no_dw_atomic_read +#define erts_dw_atomic_cmpxchg_mb erts_no_dw_atomic_cmpxchg + +#define erts_dw_atomic_init_acqb erts_no_dw_atomic_init +#define erts_dw_atomic_set_acqb erts_no_dw_atomic_set +#define erts_dw_atomic_read_acqb erts_no_dw_atomic_read +#define erts_dw_atomic_cmpxchg_acqb erts_no_dw_atomic_cmpxchg + +#define erts_dw_atomic_init_relb erts_no_dw_atomic_init +#define erts_dw_atomic_set_relb erts_no_dw_atomic_set +#define erts_dw_atomic_read_relb erts_no_dw_atomic_read +#define erts_dw_atomic_cmpxchg_relb erts_no_dw_atomic_cmpxchg + +#define erts_dw_atomic_init_rb erts_no_dw_atomic_init +#define erts_dw_atomic_set_rb erts_no_dw_atomic_set +#define erts_dw_atomic_read_rb erts_no_dw_atomic_read +#define erts_dw_atomic_cmpxchg_rb erts_no_dw_atomic_cmpxchg + +#define erts_dw_atomic_init_wb erts_no_dw_atomic_init +#define erts_dw_atomic_set_wb erts_no_dw_atomic_set +#define erts_dw_atomic_read_wb erts_no_dw_atomic_read +#define erts_dw_atomic_cmpxchg_wb erts_no_dw_atomic_cmpxchg + +/* Word size atomics */ + +#define erts_atomic_init_nob erts_no_atomic_set +#define erts_atomic_set_nob erts_no_atomic_set +#define erts_atomic_read_nob erts_no_atomic_read +#define erts_atomic_inc_read_nob erts_no_atomic_inc_read +#define erts_atomic_dec_read_nob erts_no_atomic_dec_read +#define erts_atomic_inc_nob erts_no_atomic_inc +#define erts_atomic_dec_nob erts_no_atomic_dec +#define erts_atomic_add_read_nob erts_no_atomic_add_read +#define erts_atomic_add_nob erts_no_atomic_add +#define erts_atomic_read_bor_nob erts_no_atomic_read_bor +#define erts_atomic_read_band_nob erts_no_atomic_read_band +#define erts_atomic_xchg_nob erts_no_atomic_xchg +#define erts_atomic_cmpxchg_nob erts_no_atomic_cmpxchg + +#define erts_atomic_init_mb erts_no_atomic_set +#define erts_atomic_set_mb erts_no_atomic_set +#define erts_atomic_read_mb erts_no_atomic_read +#define erts_atomic_inc_read_mb erts_no_atomic_inc_read +#define erts_atomic_dec_read_mb erts_no_atomic_dec_read +#define erts_atomic_inc_mb 
erts_no_atomic_inc +#define erts_atomic_dec_mb erts_no_atomic_dec +#define erts_atomic_add_read_mb erts_no_atomic_add_read +#define erts_atomic_add_mb erts_no_atomic_add +#define erts_atomic_read_bor_mb erts_no_atomic_read_bor +#define erts_atomic_read_band_mb erts_no_atomic_read_band +#define erts_atomic_xchg_mb erts_no_atomic_xchg +#define erts_atomic_cmpxchg_mb erts_no_atomic_cmpxchg + +#define erts_atomic_init_acqb erts_no_atomic_set +#define erts_atomic_set_acqb erts_no_atomic_set +#define erts_atomic_read_acqb erts_no_atomic_read +#define erts_atomic_inc_read_acqb erts_no_atomic_inc_read +#define erts_atomic_dec_read_acqb erts_no_atomic_dec_read +#define erts_atomic_inc_acqb erts_no_atomic_inc +#define erts_atomic_dec_acqb erts_no_atomic_dec +#define erts_atomic_add_read_acqb erts_no_atomic_add_read +#define erts_atomic_add_acqb erts_no_atomic_add +#define erts_atomic_read_bor_acqb erts_no_atomic_read_bor +#define erts_atomic_read_band_acqb erts_no_atomic_read_band +#define erts_atomic_xchg_acqb erts_no_atomic_xchg +#define erts_atomic_cmpxchg_acqb erts_no_atomic_cmpxchg + +#define erts_atomic_init_relb erts_no_atomic_set +#define erts_atomic_set_relb erts_no_atomic_set +#define erts_atomic_read_relb erts_no_atomic_read +#define erts_atomic_inc_read_relb erts_no_atomic_inc_read +#define erts_atomic_dec_read_relb erts_no_atomic_dec_read +#define erts_atomic_inc_relb erts_no_atomic_inc +#define erts_atomic_dec_relb erts_no_atomic_dec +#define erts_atomic_add_read_relb erts_no_atomic_add_read +#define erts_atomic_add_relb erts_no_atomic_add +#define erts_atomic_read_bor_relb erts_no_atomic_read_bor +#define erts_atomic_read_band_relb erts_no_atomic_read_band +#define erts_atomic_xchg_relb erts_no_atomic_xchg +#define erts_atomic_cmpxchg_relb erts_no_atomic_cmpxchg + +#define erts_atomic_init_rb erts_no_atomic_set +#define erts_atomic_set_rb erts_no_atomic_set +#define erts_atomic_read_rb erts_no_atomic_read +#define erts_atomic_inc_read_rb erts_no_atomic_inc_read +#define erts_atomic_dec_read_rb erts_no_atomic_dec_read +#define erts_atomic_inc_rb erts_no_atomic_inc +#define erts_atomic_dec_rb erts_no_atomic_dec +#define erts_atomic_add_read_rb erts_no_atomic_add_read +#define erts_atomic_add_rb erts_no_atomic_add +#define erts_atomic_read_bor_rb erts_no_atomic_read_bor +#define erts_atomic_read_band_rb erts_no_atomic_read_band +#define erts_atomic_xchg_rb erts_no_atomic_xchg +#define erts_atomic_cmpxchg_rb erts_no_atomic_cmpxchg + +#define erts_atomic_init_wb erts_no_atomic_set +#define erts_atomic_set_wb erts_no_atomic_set +#define erts_atomic_read_wb erts_no_atomic_read +#define erts_atomic_inc_read_wb erts_no_atomic_inc_read +#define erts_atomic_dec_read_wb erts_no_atomic_dec_read +#define erts_atomic_inc_wb erts_no_atomic_inc +#define erts_atomic_dec_wb erts_no_atomic_dec +#define erts_atomic_add_read_wb erts_no_atomic_add_read +#define erts_atomic_add_wb erts_no_atomic_add +#define erts_atomic_read_bor_wb erts_no_atomic_read_bor +#define erts_atomic_read_band_wb erts_no_atomic_read_band +#define erts_atomic_xchg_wb erts_no_atomic_xchg +#define erts_atomic_cmpxchg_wb erts_no_atomic_cmpxchg + +/* 32-bit atomics */ + +#define erts_atomic32_init_nob erts_no_atomic32_set +#define erts_atomic32_set_nob erts_no_atomic32_set +#define erts_atomic32_read_nob erts_no_atomic32_read +#define erts_atomic32_inc_read_nob erts_no_atomic32_inc_read +#define erts_atomic32_dec_read_nob erts_no_atomic32_dec_read +#define erts_atomic32_inc_nob erts_no_atomic32_inc +#define erts_atomic32_dec_nob 
erts_no_atomic32_dec +#define erts_atomic32_add_read_nob erts_no_atomic32_add_read +#define erts_atomic32_add_nob erts_no_atomic32_add +#define erts_atomic32_read_bor_nob erts_no_atomic32_read_bor +#define erts_atomic32_read_band_nob erts_no_atomic32_read_band +#define erts_atomic32_xchg_nob erts_no_atomic32_xchg +#define erts_atomic32_cmpxchg_nob erts_no_atomic32_cmpxchg + +#define erts_atomic32_init_mb erts_no_atomic32_set +#define erts_atomic32_set_mb erts_no_atomic32_set +#define erts_atomic32_read_mb erts_no_atomic32_read +#define erts_atomic32_inc_read_mb erts_no_atomic32_inc_read +#define erts_atomic32_dec_read_mb erts_no_atomic32_dec_read +#define erts_atomic32_inc_mb erts_no_atomic32_inc +#define erts_atomic32_dec_mb erts_no_atomic32_dec +#define erts_atomic32_add_read_mb erts_no_atomic32_add_read +#define erts_atomic32_add_mb erts_no_atomic32_add +#define erts_atomic32_read_bor_mb erts_no_atomic32_read_bor +#define erts_atomic32_read_band_mb erts_no_atomic32_read_band +#define erts_atomic32_xchg_mb erts_no_atomic32_xchg +#define erts_atomic32_cmpxchg_mb erts_no_atomic32_cmpxchg + +#define erts_atomic32_init_acqb erts_no_atomic32_set +#define erts_atomic32_set_acqb erts_no_atomic32_set +#define erts_atomic32_read_acqb erts_no_atomic32_read +#define erts_atomic32_inc_read_acqb erts_no_atomic32_inc_read +#define erts_atomic32_dec_read_acqb erts_no_atomic32_dec_read +#define erts_atomic32_inc_acqb erts_no_atomic32_inc +#define erts_atomic32_dec_acqb erts_no_atomic32_dec +#define erts_atomic32_add_read_acqb erts_no_atomic32_add_read +#define erts_atomic32_add_acqb erts_no_atomic32_add +#define erts_atomic32_read_bor_acqb erts_no_atomic32_read_bor +#define erts_atomic32_read_band_acqb erts_no_atomic32_read_band +#define erts_atomic32_xchg_acqb erts_no_atomic32_xchg +#define erts_atomic32_cmpxchg_acqb erts_no_atomic32_cmpxchg + +#define erts_atomic32_init_relb erts_no_atomic32_set +#define erts_atomic32_set_relb erts_no_atomic32_set +#define erts_atomic32_read_relb erts_no_atomic32_read +#define erts_atomic32_inc_read_relb erts_no_atomic32_inc_read +#define erts_atomic32_dec_read_relb erts_no_atomic32_dec_read +#define erts_atomic32_inc_relb erts_no_atomic32_inc +#define erts_atomic32_dec_relb erts_no_atomic32_dec +#define erts_atomic32_add_read_relb erts_no_atomic32_add_read +#define erts_atomic32_add_relb erts_no_atomic32_add +#define erts_atomic32_read_bor_relb erts_no_atomic32_read_bor +#define erts_atomic32_read_band_relb erts_no_atomic32_read_band +#define erts_atomic32_xchg_relb erts_no_atomic32_xchg +#define erts_atomic32_cmpxchg_relb erts_no_atomic32_cmpxchg + +#define erts_atomic32_init_rb erts_no_atomic32_set +#define erts_atomic32_set_rb erts_no_atomic32_set +#define erts_atomic32_read_rb erts_no_atomic32_read +#define erts_atomic32_inc_read_rb erts_no_atomic32_inc_read +#define erts_atomic32_dec_read_rb erts_no_atomic32_dec_read +#define erts_atomic32_inc_rb erts_no_atomic32_inc +#define erts_atomic32_dec_rb erts_no_atomic32_dec +#define erts_atomic32_add_read_rb erts_no_atomic32_add_read +#define erts_atomic32_add_rb erts_no_atomic32_add +#define erts_atomic32_read_bor_rb erts_no_atomic32_read_bor +#define erts_atomic32_read_band_rb erts_no_atomic32_read_band +#define erts_atomic32_xchg_rb erts_no_atomic32_xchg +#define erts_atomic32_cmpxchg_rb erts_no_atomic32_cmpxchg + +#define erts_atomic32_init_wb erts_no_atomic32_set +#define erts_atomic32_set_wb erts_no_atomic32_set +#define erts_atomic32_read_wb erts_no_atomic32_read +#define erts_atomic32_inc_read_wb 
erts_no_atomic32_inc_read +#define erts_atomic32_dec_read_wb erts_no_atomic32_dec_read +#define erts_atomic32_inc_wb erts_no_atomic32_inc +#define erts_atomic32_dec_wb erts_no_atomic32_dec +#define erts_atomic32_add_read_wb erts_no_atomic32_add_read +#define erts_atomic32_add_wb erts_no_atomic32_add +#define erts_atomic32_read_bor_wb erts_no_atomic32_read_bor +#define erts_atomic32_read_band_wb erts_no_atomic32_read_band +#define erts_atomic32_xchg_wb erts_no_atomic32_xchg +#define erts_atomic32_cmpxchg_wb erts_no_atomic32_cmpxchg + +#endif /* !USE_THREADS */ + +#ifndef ERTS_NO_DEPRECATED_ATOMICS + +/* Deprecated functions to replace */ + +#define erts_atomic_init erts_atomic_init_nob +#define erts_atomic_set erts_atomic_set_nob +#define erts_atomic_read erts_atomic_read_nob +#define erts_atomic_inctest erts_atomic_inc_read_mb +#define erts_atomic_dectest erts_atomic_dec_read_mb +#define erts_atomic_inc erts_atomic_inc_mb +#define erts_atomic_dec erts_atomic_dec_mb +#define erts_atomic_addtest erts_atomic_add_read_mb +#define erts_atomic_add erts_atomic_add_mb +#define erts_atomic_xchg erts_atomic_xchg_mb +#define erts_atomic_cmpxchg erts_atomic_cmpxchg_mb +#define erts_atomic_bor erts_atomic_read_bor_mb +#define erts_atomic_band erts_atomic_read_band_mb + +#define erts_atomic32_init erts_atomic32_init_nob +#define erts_atomic32_set erts_atomic32_set_nob +#define erts_atomic32_read erts_atomic32_read_nob +#define erts_atomic32_inctest erts_atomic32_inc_read_mb +#define erts_atomic32_dectest erts_atomic32_dec_read_mb +#define erts_atomic32_inc erts_atomic32_inc_mb +#define erts_atomic32_dec erts_atomic32_dec_mb +#define erts_atomic32_addtest erts_atomic32_add_read_mb +#define erts_atomic32_add erts_atomic32_add_mb +#define erts_atomic32_xchg erts_atomic32_xchg_mb +#define erts_atomic32_cmpxchg erts_atomic32_cmpxchg_mb +#define erts_atomic32_bor erts_atomic32_read_bor_mb +#define erts_atomic32_band erts_atomic32_read_band_mb + +#endif + #if ERTS_GLB_INLINE_INCL_FUNC_DEF ERTS_GLB_INLINE void @@ -995,428 +1448,206 @@ erts_lc_rwmtx_is_rwlocked(erts_rwmtx_t *mtx) #endif } +/* No atomic ops */ + ERTS_GLB_INLINE void -erts_atomic_init(erts_atomic_t *var, erts_aint_t i) +erts_no_dw_atomic_set(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val) { -#ifdef USE_THREADS - ethr_atomic_init(var, i); -#else - *var = i; -#endif + var->sint[0] = val->sint[0]; + var->sint[1] = val->sint[1]; } ERTS_GLB_INLINE void -erts_atomic_set(erts_atomic_t *var, erts_aint_t i) +erts_no_dw_atomic_read(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val) +{ + val->sint[0] = var->sint[0]; + val->sint[1] = var->sint[1]; +} + +ERTS_GLB_INLINE int erts_no_dw_atomic_cmpxchg(erts_no_dw_atomic_t *var, + erts_no_dw_atomic_t *new_val, + erts_no_dw_atomic_t *old_val) +{ + if (var->sint[0] != old_val->sint[0] || var->sint[1] != old_val->sint[1]) { + erts_no_dw_atomic_read(var, old_val); + return 0; + } + else { + erts_no_dw_atomic_set(var, new_val); + return !0; + } +} + +ERTS_GLB_INLINE void +erts_no_atomic_set(erts_no_atomic_t *var, erts_aint_t i) { -#ifdef USE_THREADS - ethr_atomic_set(var, i); -#else *var = i; -#endif } ERTS_GLB_INLINE erts_aint_t -erts_atomic_read(erts_atomic_t *var) +erts_no_atomic_read(erts_no_atomic_t *var) { -#ifdef USE_THREADS - return ethr_atomic_read(var); -#else return *var; -#endif } ERTS_GLB_INLINE erts_aint_t -erts_atomic_inctest(erts_atomic_t *incp) +erts_no_atomic_inc_read(erts_no_atomic_t *incp) { -#ifdef USE_THREADS - return ethr_atomic_inc_read(incp); -#else return ++(*incp); -#endif } 
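
The erts_no_dw_atomic_cmpxchg() fallback defined earlier in this hunk returns non-zero on success and, on failure, writes the value it observed in *var back into the old-value argument. A minimal sketch of a hypothetical caller follows (illustration only, not part of the patch); it assumes the same calling convention as the double word cmpxchg mappings used when USE_THREADS is defined, so the same caller code would work in both configurations.

/* Hypothetical caller of the double word cmpxchg fallback above; only
 * meaningful in a build without thread support, where these helpers are
 * not actually atomic. */
static void
dw_cmpxchg_example(erts_no_dw_atomic_t *var)
{
    erts_no_dw_atomic_t expected, desired;

    erts_no_dw_atomic_read(var, &expected);   /* snapshot the current value */
    desired.sint[0] = 17;
    desired.sint[1] = 42;

    if (!erts_no_dw_atomic_cmpxchg(var, &desired, &expected)) {
        /* Mismatch: expected has been reloaded with the value seen in
         * *var.  (This cannot happen here, since nothing runs
         * concurrently in a build without thread support.) */
    }
}
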
ERTS_GLB_INLINE erts_aint_t -erts_atomic_dectest(erts_atomic_t *decp) +erts_no_atomic_dec_read(erts_no_atomic_t *decp) { -#ifdef USE_THREADS - return ethr_atomic_dec_read(decp); -#else return --(*decp); -#endif } ERTS_GLB_INLINE void -erts_atomic_inc(erts_atomic_t *incp) +erts_no_atomic_inc(erts_no_atomic_t *incp) { -#ifdef USE_THREADS - ethr_atomic_inc(incp); -#else ++(*incp); -#endif } ERTS_GLB_INLINE void -erts_atomic_dec(erts_atomic_t *decp) +erts_no_atomic_dec(erts_no_atomic_t *decp) { -#ifdef USE_THREADS - ethr_atomic_dec(decp); -#else --(*decp); -#endif } ERTS_GLB_INLINE erts_aint_t -erts_atomic_addtest(erts_atomic_t *addp, erts_aint_t i) +erts_no_atomic_add_read(erts_no_atomic_t *addp, erts_aint_t i) { -#ifdef USE_THREADS - return ethr_atomic_add_read(addp, i); -#else return *addp += i; -#endif } ERTS_GLB_INLINE void -erts_atomic_add(erts_atomic_t *addp, erts_aint_t i) +erts_no_atomic_add(erts_no_atomic_t *addp, erts_aint_t i) { -#ifdef USE_THREADS - ethr_atomic_add(addp, i); -#else *addp += i; -#endif } ERTS_GLB_INLINE erts_aint_t -erts_atomic_xchg(erts_atomic_t *xchgp, erts_aint_t new) +erts_no_atomic_read_bor(erts_no_atomic_t *var, erts_aint_t mask) { -#ifdef USE_THREADS - return ethr_atomic_xchg(xchgp, new); -#else - erts_aint_t old = *xchgp; - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_atomic_cmpxchg(erts_atomic_t *xchgp, erts_aint_t new, erts_aint_t expected) -{ -#ifdef USE_THREADS - return ethr_atomic_cmpxchg(xchgp, new, expected); -#else - erts_aint_t old = *xchgp; - if (old == expected) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_atomic_bor(erts_atomic_t *var, erts_aint_t mask) -{ -#ifdef USE_THREADS - return ethr_atomic_read_bor(var, mask); -#else erts_aint_t old; old = *var; *var |= mask; return old; -#endif } ERTS_GLB_INLINE erts_aint_t -erts_atomic_band(erts_atomic_t *var, erts_aint_t mask) +erts_no_atomic_read_band(erts_no_atomic_t *var, erts_aint_t mask) { -#ifdef USE_THREADS - return ethr_atomic_read_band(var, mask); -#else erts_aint_t old; old = *var; *var &= mask; return old; -#endif } ERTS_GLB_INLINE erts_aint_t -erts_atomic_read_acqb(erts_atomic_t *var) +erts_no_atomic_xchg(erts_no_atomic_t *xchgp, erts_aint_t new) { -#ifdef USE_THREADS - return ethr_atomic_read_acqb(var); -#else - return *var; -#endif -} - -ERTS_GLB_INLINE void -erts_atomic_set_relb(erts_atomic_t *var, erts_aint_t i) -{ -#ifdef USE_THREADS - ethr_atomic_set_relb(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE void -erts_atomic_dec_relb(erts_atomic_t *decp) -{ -#ifdef USE_THREADS - ethr_atomic_dec_relb(decp); -#else - --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint_t -erts_atomic_dectest_relb(erts_atomic_t *decp) -{ -#ifdef USE_THREADS - return ethr_atomic_dec_read_relb(decp); -#else - return --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_acqb(erts_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp) -{ -#ifdef USE_THREADS - return ethr_atomic_cmpxchg_acqb(xchgp, new, exp); -#else erts_aint_t old = *xchgp; - if (old == exp) - *xchgp = new; + *xchgp = new; return old; -#endif } -ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_relb(erts_atomic_t *xchgp, - erts_aint_t new, - erts_aint_t exp) +ERTS_GLB_INLINE erts_aint_t +erts_no_atomic_cmpxchg(erts_no_atomic_t *xchgp, + erts_aint_t new, + erts_aint_t expected) { -#ifdef USE_THREADS - return ethr_atomic_cmpxchg_relb(xchgp, new, exp); -#else erts_aint_t old = *xchgp; - if (old == exp) + if (old == expected) *xchgp = new; return old; -#endif } 
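
For reference, a hypothetical caller (not taken from the patch) showing how the barrier-suffixed API collapses onto these plain helpers when USE_THREADS is undefined; the expansions noted in the comments follow the mapping tables earlier in this file.

/* Hypothetical reference-count sketch; every call expands to one of the
 * plain, non-atomic helpers defined above when USE_THREADS is undefined. */
static void
refc_example(void)
{
    erts_atomic_t refc;  /* erts_atomic_t is erts_no_atomic_t here */

    erts_atomic_init_nob(&refc, 0);             /* -> erts_no_atomic_set      */
    erts_atomic_inc_mb(&refc);                  /* -> erts_no_atomic_inc      */
    if (erts_atomic_dec_read_relb(&refc) == 0)  /* -> erts_no_atomic_dec_read */
        ; /* the count dropped back to zero */
}
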
/* atomic32 */ ERTS_GLB_INLINE void -erts_atomic32_init(erts_atomic32_t *var, erts_aint32_t i) -{ -#ifdef USE_THREADS - ethr_atomic32_init(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE void -erts_atomic32_set(erts_atomic32_t *var, erts_aint32_t i) +erts_no_atomic32_set(erts_no_atomic32_t *var, erts_aint32_t i) { -#ifdef USE_THREADS - ethr_atomic32_set(var, i); -#else *var = i; -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_read(erts_atomic32_t *var) +erts_no_atomic32_read(erts_no_atomic32_t *var) { -#ifdef USE_THREADS - return ethr_atomic32_read(var); -#else return *var; -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_inctest(erts_atomic32_t *incp) +erts_no_atomic32_inc_read(erts_no_atomic32_t *incp) { -#ifdef USE_THREADS - return ethr_atomic32_inc_read(incp); -#else return ++(*incp); -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_dectest(erts_atomic32_t *decp) +erts_no_atomic32_dec_read(erts_no_atomic32_t *decp) { -#ifdef USE_THREADS - return ethr_atomic32_dec_read(decp); -#else return --(*decp); -#endif } ERTS_GLB_INLINE void -erts_atomic32_inc(erts_atomic32_t *incp) +erts_no_atomic32_inc(erts_no_atomic32_t *incp) { -#ifdef USE_THREADS - ethr_atomic32_inc(incp); -#else ++(*incp); -#endif } ERTS_GLB_INLINE void -erts_atomic32_dec(erts_atomic32_t *decp) +erts_no_atomic32_dec(erts_no_atomic32_t *decp) { -#ifdef USE_THREADS - ethr_atomic32_dec(decp); -#else --(*decp); -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_addtest(erts_atomic32_t *addp, erts_aint32_t i) +erts_no_atomic32_add_read(erts_no_atomic32_t *addp, erts_aint32_t i) { -#ifdef USE_THREADS - return ethr_atomic32_add_read(addp, i); -#else return *addp += i; -#endif } ERTS_GLB_INLINE void -erts_atomic32_add(erts_atomic32_t *addp, erts_aint32_t i) +erts_no_atomic32_add(erts_no_atomic32_t *addp, erts_aint32_t i) { -#ifdef USE_THREADS - ethr_atomic32_add(addp, i); -#else *addp += i; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_xchg(erts_atomic32_t *xchgp, erts_aint32_t new) -{ -#ifdef USE_THREADS - return ethr_atomic32_xchg(xchgp, new); -#else - erts_aint32_t old = *xchgp; - *xchgp = new; - return old; -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_cmpxchg(erts_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t expected) +erts_no_atomic32_read_bor(erts_no_atomic32_t *var, erts_aint32_t mask) { -#ifdef USE_THREADS - return ethr_atomic32_cmpxchg(xchgp, new, expected); -#else - erts_aint32_t old = *xchgp; - if (old == expected) - *xchgp = new; - return old; -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_bor(erts_atomic32_t *var, erts_aint32_t mask) -{ -#ifdef USE_THREADS - return ethr_atomic32_read_bor(var, mask); -#else erts_aint32_t old; old = *var; *var |= mask; return old; -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_band(erts_atomic32_t *var, erts_aint32_t mask) +erts_no_atomic32_read_band(erts_no_atomic32_t *var, erts_aint32_t mask) { -#ifdef USE_THREADS - return ethr_atomic32_read_band(var, mask); -#else erts_aint32_t old; old = *var; *var &= mask; return old; -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_read_acqb(erts_atomic32_t *var) -{ -#ifdef USE_THREADS - return ethr_atomic32_read_acqb(var); -#else - return *var; -#endif -} - -ERTS_GLB_INLINE void -erts_atomic32_set_relb(erts_atomic32_t *var, erts_aint32_t i) -{ -#ifdef USE_THREADS - ethr_atomic32_set_relb(var, i); -#else - *var = i; -#endif -} - -ERTS_GLB_INLINE void -erts_atomic32_dec_relb(erts_atomic32_t *decp) +erts_no_atomic32_xchg(erts_no_atomic32_t *xchgp, 
erts_aint32_t new) { -#ifdef USE_THREADS - ethr_atomic32_dec_relb(decp); -#else - --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_dectest_relb(erts_atomic32_t *decp) -{ -#ifdef USE_THREADS - return ethr_atomic32_dec_read_relb(decp); -#else - return --(*decp); -#endif -} - -ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_cmpxchg_acqb(erts_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp) -{ -#ifdef USE_THREADS - return ethr_atomic32_cmpxchg_acqb(xchgp, new, exp); -#else erts_aint32_t old = *xchgp; - if (old == exp) - *xchgp = new; + *xchgp = new; return old; -#endif } ERTS_GLB_INLINE erts_aint32_t -erts_atomic32_cmpxchg_relb(erts_atomic32_t *xchgp, - erts_aint32_t new, - erts_aint32_t exp) +erts_no_atomic32_cmpxchg(erts_no_atomic32_t *xchgp, + erts_aint32_t new, + erts_aint32_t expected) { -#ifdef USE_THREADS - return ethr_atomic32_cmpxchg_relb(xchgp, new, exp); -#else erts_aint32_t old = *xchgp; - if (old == exp) + if (old == expected) *xchgp = new; return old; -#endif } /* spinlock */ @@ -1887,3 +2118,37 @@ erts_thr_sigwait(const sigset_t *set, int *sig) #endif /* #if ERTS_GLB_INLINE_INCL_FUNC_DEF */ #endif /* #ifndef ERL_THREAD_H__ */ + +#ifdef ERTS_UNDEF_DEPRECATED_ATOMICS + +/* Deprecated functions to replace */ + +#undef erts_atomic_init +#undef erts_atomic_set +#undef erts_atomic_read +#undef erts_atomic_inctest +#undef erts_atomic_dectest +#undef erts_atomic_inc +#undef erts_atomic_dec +#undef erts_atomic_addtest +#undef erts_atomic_add +#undef erts_atomic_xchg +#undef erts_atomic_cmpxchg +#undef erts_atomic_bor +#undef erts_atomic_band + +#undef erts_atomic32_init +#undef erts_atomic32_set +#undef erts_atomic32_read +#undef erts_atomic32_inctest +#undef erts_atomic32_dectest +#undef erts_atomic32_inc +#undef erts_atomic32_dec +#undef erts_atomic32_addtest +#undef erts_atomic32_add +#undef erts_atomic32_xchg +#undef erts_atomic32_cmpxchg +#undef erts_atomic32_bor +#undef erts_atomic32_band + +#endif diff --git a/erts/include/internal/ethr_atomics.h b/erts/include/internal/ethr_atomics.h index 1caf4d0567..0f3c26f1df 100644 --- a/erts/include/internal/ethr_atomics.h +++ b/erts/include/internal/ethr_atomics.h @@ -1,7 +1,16 @@ /* + * --------------- DO NOT EDIT THIS FILE! --------------- + * This file was automatically generated by the + * $ERL_TOP/erts/lib_src/utils/make_atomics_api script. + * If you need to make changes, edit the script and + * regenerate this file. + * --------------- DO NOT EDIT THIS FILE! --------------- + */ + +/* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -18,709 +27,8818 @@ */ /* - * Description: The ethread atomic API + * Description: The ethread atomics API * Author: Rickard Green */ -#ifndef ETHR_ATOMIC_H__ -#define ETHR_ATOMIC_H__ +/* + * This file maps native atomic implementations to ethread + * API atomics. If no native atomic implementation + * is available, a less efficient fallback is used instead. + * The API consists of 32-bit size, word size (pointer size), + * and double word size atomics. 
+ * + * The following atomic operations are implemented for + * 32-bit size, and word size atomics: + * - cmpxchg + * - xchg + * - set + * - init + * - add_read + * - read + * - inc_read + * - dec_read + * - add + * - inc + * - dec + * - read_band + * - read_bor + * + * The following atomic operations are implemented for + * double word size atomics: + * - cmpxchg + * - set + * - read + * - init + * + * Apart from a function implementing the atomic operation + * with unspecified memory barrier semantics, there are + * functions implementing each operation with the following + * memory barrier semantics: + * - rb (read barrier) + * - wb (write barrier) + * - acqb (acquire barrier) + * - relb (release barrier) + * - mb (full memory barrier) + * + * We implement all of these operation/barrier + * combinations, regardless of whether they are useful + * or not (some of them are useless). + * + * Double word size atomic functions are of the following + * form: + * ethr_dw_atomic_<OP>[_<BARRIER>] + * + * Word size atomic functions are of the following + * form: + * ethr_atomic_<OP>[_<BARRIER>] + * + * 32-bit size atomic functions are of the following + * form: + * ethr_atomic32_<OP>[_<BARRIER>] + * + * Apart from the operation/barrier functions + * described above, 'addr' functions are also implemented + * which return the actual memory address used by the + * atomic variable. The 'addr' functions have no barrier + * versions. + * + * The native atomic implementation does not need to + * implement all operation/barrier combinations. + * Functions that have no native implementation will be + * constructed from existing native functionality. These + * functions will perform the wanted operation and will + * produce sufficient memory barriers, but may + * in some cases be less efficient than pure native + * versions. + * + * When we create ethread API operation/barrier functions by + * adding barriers before and after native operations it is + * assumed that: + * - A native read operation begins, and ends with a load. + * - A native set operation begins, and ends with a store. + * - An init operation begins with either a load, or a store, + * and ends with either a load, or a store. + * - All other operations begin with a load, and end with + * either a load, or a store.
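/*
 * A few hypothetical calls, included only to make the naming scheme
 * above concrete; the variable names are illustrative and not taken
 * from this header:
 *
 *     ethr_atomic32_t a32;
 *     ethr_atomic_t a;
 *     ethr_dw_atomic_t dw;
 *     ethr_dw_sint_t old, new;
 *
 *     ethr_atomic32_init(&a32, 0);             32-bit size, unspecified barrier
 *     ethr_atomic_init(&a, 0);                 word size, unspecified barrier
 *     (void) ethr_atomic_inc_read_acqb(&a);    word size, inc_read, acquire barrier
 *     ethr_atomic32_set_relb(&a32, 1);         32-bit size, set, release barrier
 *     (void) ethr_dw_atomic_cmpxchg_mb(&dw, &new, &old);
 *                                              double word, cmpxchg, full barrier
 */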
+ * + * This is the minimum functionality that a native + * implementation needs to provide: + * + * - Functions that need to be implemented: + * + * - ethr_native_[dw_|su_dw_]atomic[BITS]_addr + * - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>] + * (at least one cmpxchg of optional barrier) + * + * - Macros that needs to be defined: + * + * A macro informing about the presence of the native + * implementation: + * + * - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS] + * + * A macro naming (a string constant) the implementation: + * + * - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL + * + * Each implemented native atomic function has to + * be accompanied by a defined macro on the following + * form informing about its presence: + * + * - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>] + * + * A (sparc-v9 style) membar macro: + * + * - ETHR_MEMBAR(B) + * + * Which takes a combination of the following macros + * or:ed (using |) together: + * + * - ETHR_LoadLoad + * - ETHR_LoadStore + * - ETHR_StoreLoad + * - ETHR_StoreStore + * + */ -#if !defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) -# define ETHR_NEED_ATOMIC_PROTOTYPES__ -#endif +#ifndef ETHR_ATOMICS_H__ +#define ETHR_ATOMICS_H__ -#ifndef ETHR_HAVE_NATIVE_ATOMICS +#undef ETHR_AMC_FALLBACK__ +#undef ETHR_AMC_NO_ATMCS__ +#undef ETHR_AMC_ATMC_T__ +#undef ETHR_AMC_ATMC_FUNC__ + +/* -- 32-bit atomics -- */ + +#undef ETHR_NAINT32_T__ +#undef ETHR_NATMC32_FUNC__ +#undef ETHR_NATMC32_ADDR_FUNC__ +#undef ETHR_NATMC32_BITS__ +#if defined(ETHR_HAVE_NATIVE_ATOMIC32) +# define ETHR_NEED_NATMC32_ADDR +# define ETHR_NATMC32_ADDR_FUNC__ ethr_native_atomic32_addr +typedef ethr_native_atomic32_t ethr_atomic32_t; +# define ETHR_NAINT32_T__ ethr_sint32_t +# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X +# define ETHR_NATMC32_BITS__ 32 +#elif defined(ETHR_HAVE_NATIVE_ATOMIC64) +# define ETHR_NEED_NATMC64_ADDR +#ifdef ETHR_BIGENDIAN +# define ETHR_NATMC32_ADDR_FUNC__(VAR) \ + (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1) +#else +# define ETHR_NATMC32_ADDR_FUNC__(VAR) \ + ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) +#endif +typedef ethr_native_atomic64_t ethr_atomic32_t; +# define ETHR_NAINT32_T__ ethr_sint64_t +# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_NATMC32_BITS__ 64 +#else /* - * No native atomic implementation available. :( + * No native atomics usable for 32-bits atomics :( * Use fallback... */ typedef ethr_sint32_t ethr_atomic32_t; -typedef ethr_sint_t ethr_atomic_t; -#else -/* - * Map ethread native atomics to ethread API atomics. - * - * We do at least have a native atomic implementation that - * can handle integers of a size larger than or equal to - * the size of pointers. 
- */ +#endif -/* -- Pointer size atomics -- */ +#undef ETHR_ATMC32_INLINE__ +#ifdef ETHR_NATMC32_BITS__ +# ifdef ETHR_TRY_INLINE_FUNCS +# define ETHR_ATMC32_INLINE__ +# endif +# define ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS +#endif + +#if !defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__) +# define ETHR_NEED_ATMC32_PROTOTYPES__ +#endif + +#ifndef ETHR_INLINE_ATMC32_FUNC_NAME_ +# define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X +#endif + +#undef ETHR_ATMC32_FUNC__ +#define ETHR_ATMC32_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X) + + +/* -- Word size atomics -- */ + +#undef ETHR_NEED_NATMC32_ADDR +#undef ETHR_NEED_NATMC64_ADDR #undef ETHR_NAINT_T__ #undef ETHR_NATMC_FUNC__ #undef ETHR_NATMC_ADDR_FUNC__ -#if ETHR_SIZEOF_PTR == 8 -# if defined(ETHR_HAVE_NATIVE_ATOMIC64) -# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr +#undef ETHR_NATMC_BITS__ +#if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC64) +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC64_ADDR +# endif +# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr typedef ethr_native_atomic64_t ethr_atomic_t; -# define ETHR_NAINT_T__ ethr_sint64_t -# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X -# else -# error "Missing native atomic implementation" +# define ETHR_NAINT_T__ ethr_sint64_t +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_NATMC_BITS__ 64 +#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC32) +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC32_ADDR # endif -#elif ETHR_SIZEOF_PTR == 4 # define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic32_addr -# ifdef ETHR_HAVE_NATIVE_ATOMIC32 typedef ethr_native_atomic32_t ethr_atomic_t; -# define ETHR_NAINT_T__ ethr_sint32_t -# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X -# elif defined(ETHR_HAVE_NATIVE_ATOMIC64) +# define ETHR_NAINT_T__ ethr_sint32_t +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X +# define ETHR_NATMC_BITS__ 32 +#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64) +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC64_ADDR +# endif +#ifdef ETHR_BIGENDIAN +# define ETHR_NATMC_ADDR_FUNC__(VAR) \ + (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1) +#else +# define ETHR_NATMC_ADDR_FUNC__(VAR) \ + ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) +#endif typedef ethr_native_atomic64_t ethr_atomic_t; -# define ETHR_NATMC_T__ ethr_native_atomic64_t -# define ETHR_NAINT_T__ ethr_sint64_t -# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_NATMC_T__ ethr_native_atomic64_t +# define ETHR_NAINT_T__ ethr_sint64_t +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_NATMC_BITS__ 64 +#else +/* + * No native atomics usable for pointer size atomics :( + * Use fallback... 
+ */ + +# if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) +# define ETHR_AMC_FALLBACK__ +# define ETHR_AMC_NO_ATMCS__ 2 +# define ETHR_AMC_SINT_T__ ethr_sint32_t +# define ETHR_AMC_ATMC_T__ ethr_atomic32_t +# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X) +typedef struct { + ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__]; +} ethr_amc_t; +typedef struct { + ethr_amc_t amc; + ethr_sint_t sint; +} ethr_atomic_t; +# else /* locked fallback */ +typedef ethr_sint_t ethr_atomic_t; +# endif +#endif + +#undef ETHR_ATMC_INLINE__ +#ifdef ETHR_NATMC_BITS__ +# ifdef ETHR_TRY_INLINE_FUNCS +# define ETHR_ATMC_INLINE__ +# endif +# define ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS +#endif + +#if !defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__) +# define ETHR_NEED_ATMC_PROTOTYPES__ +#endif + +#ifndef ETHR_INLINE_ATMC_FUNC_NAME_ +# define ETHR_INLINE_ATMC_FUNC_NAME_(X) X +#endif + +#undef ETHR_ATMC_FUNC__ +#define ETHR_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X) + +/* -- Double word atomics -- */ + +#undef ETHR_SU_DW_NAINT_T__ +#undef ETHR_SU_DW_NATMC_FUNC__ +#undef ETHR_SU_DW_NATMC_ADDR_FUNC__ +#undef ETHR_DW_NATMC_FUNC__ +#undef ETHR_DW_NATMC_ADDR_FUNC__ +#undef ETHR_DW_NATMC_BITS__ +#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC) +# define ETHR_NEED_DW_NATMC_ADDR +# define ETHR_DW_NATMC_ADDR_FUNC__ ethr_native_dw_atomic_addr +# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_dw_atomic_t +# define ETHR_DW_NATMC_FUNC__(X) ethr_native_dw_atomic_ ## X +# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_su_dw_atomic_ ## X +# if ETHR_SIZEOF_PTR == 8 +# define ETHR_DW_NATMC_BITS__ 128 +# elif ETHR_SIZEOF_PTR == 4 +# define ETHR_DW_NATMC_BITS__ 64 # else -# error "Missing native atomic implementation" +# error "Word size not supported" +# endif +# ifdef ETHR_NATIVE_SU_DW_SINT_T +# define ETHR_SU_DW_NAINT_T__ ETHR_NATIVE_SU_DW_SINT_T # endif +#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64) +# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC64_ADDR +# endif +# define ETHR_DW_NATMC_ADDR_FUNC__(VAR) \ + ((ethr_dw_sint_t *) ethr_native_atomic64_addr((VAR))) +# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_atomic64_t +# define ETHR_SU_DW_NAINT_T__ ethr_sint64_t +# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_DW_NATMC_BITS__ 64 #endif -/* -- 32-bit atomics -- */ +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) +#define ETHR_DW_ATOMIC_FUNC__(X) ethr_dw_atomic_ ## X ## _fallback__ +#else +#define ETHR_DW_ATOMIC_FUNC__(X) ethr_dw_atomic_ ## X +#endif -#undef ETHR_NAINT32_T__ -#undef ETHR_NATMC32_FUNC__ -#if defined(ETHR_HAVE_NATIVE_ATOMIC32) -typedef ethr_native_atomic32_t ethr_atomic32_t; -# define ETHR_NAINT32_T__ ethr_sint32_t -# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X -#elif defined(ETHR_HAVE_NATIVE_ATOMIC64) -typedef ethr_native_atomic64_t ethr_atomic32_t; -# define ETHR_NAINT32_T__ ethr_sint64_t -# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X +#if !defined(ETHR_DW_NATMC_BITS__) || defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) +# define ETHR_NEED_DW_FALLBACK__ +#endif + +#if defined(ETHR_NEED_DW_FALLBACK__) +/* + * No native atomics usable for double word atomics :( + * Use fallback... 
+ */ + +# ifndef ETHR_AMC_FALLBACK__ +# if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) +# define ETHR_AMC_FALLBACK__ +# define ETHR_AMC_NO_ATMCS__ 1 +# define ETHR_AMC_SINT_T__ ethr_sint_t +# define ETHR_AMC_ATMC_T__ ethr_atomic_t +# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X) +# elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) +# define ETHR_AMC_FALLBACK__ +# define ETHR_AMC_NO_ATMCS__ 2 +# define ETHR_AMC_SINT_T__ ethr_sint32_t +# define ETHR_AMC_ATMC_T__ ethr_atomic32_t +# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X) +# endif +# ifdef ETHR_AMC_FALLBACK__ +typedef struct { + ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__]; +} ethr_amc_t; +# endif +# endif + +typedef struct { +#ifdef ETHR_AMC_FALLBACK__ + ethr_amc_t amc; +#endif + ethr_sint_t sint[2]; +} ethr_dw_atomic_fallback_t; + +#endif + +typedef union { +#ifdef ETHR_NATIVE_DW_ATOMIC_T__ + ETHR_NATIVE_DW_ATOMIC_T__ native; +#endif +#ifdef ETHR_NEED_DW_FALLBACK__ + ethr_dw_atomic_fallback_t fallback; +#endif + ethr_sint_t sint[2]; +} ethr_dw_atomic_t; + +typedef union { +#ifdef ETHR_SU_DW_NAINT_T__ + ETHR_SU_DW_NAINT_T__ dw_sint; +#endif + ethr_sint_t sint[2]; +} ethr_dw_sint_t; + +#ifdef ETHR_BIGENDIAN +# define ETHR_DW_SINT_LOW_WORD 1 +# define ETHR_DW_SINT_HIGH_WORD 0 #else -# error "Missing native atomic implementation" +# define ETHR_DW_SINT_LOW_WORD 0 +# define ETHR_DW_SINT_HIGH_WORD 1 #endif +#undef ETHR_DW_ATMC_INLINE__ +#ifdef ETHR_DW_NATMC_BITS__ +# ifdef ETHR_TRY_INLINE_FUNCS +# define ETHR_ATMC32_INLINE__ +# endif +# define ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS #endif -#ifdef ETHR_NEED_ATOMIC_PROTOTYPES__ -ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *); -void ethr_atomic_init(ethr_atomic_t *, ethr_sint_t); -void ethr_atomic_set(ethr_atomic_t *, ethr_sint_t); -ethr_sint_t ethr_atomic_read(ethr_atomic_t *); -ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *); -ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *); -void ethr_atomic_inc(ethr_atomic_t *); -void ethr_atomic_dec(ethr_atomic_t *); -ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *, ethr_sint_t); -void ethr_atomic_add(ethr_atomic_t *, ethr_sint_t); -ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *, ethr_sint_t); -ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *, ethr_sint_t); -ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *, ethr_sint_t); -ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *, ethr_sint_t, ethr_sint_t); -ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *); -ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *); -void ethr_atomic_set_relb(ethr_atomic_t *, ethr_sint_t); -void ethr_atomic_dec_relb(ethr_atomic_t *); -ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *); -ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *, ethr_sint_t, ethr_sint_t); -ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *, ethr_sint_t, ethr_sint_t); - -ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *); -void ethr_atomic32_init(ethr_atomic32_t *, ethr_sint32_t); -void ethr_atomic32_set(ethr_atomic32_t *, ethr_sint32_t); -ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *); -ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *); -ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *); -void ethr_atomic32_inc(ethr_atomic32_t *); -void ethr_atomic32_dec(ethr_atomic32_t *); -ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *, ethr_sint32_t); -void ethr_atomic32_add(ethr_atomic32_t *, ethr_sint32_t); -ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *, ethr_sint32_t); 
-ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *, ethr_sint32_t); -ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *, ethr_sint32_t); -ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *, - ethr_sint32_t, - ethr_sint32_t); -ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *); -ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *); -void ethr_atomic32_set_relb(ethr_atomic32_t *, ethr_sint32_t); -void ethr_atomic32_dec_relb(ethr_atomic32_t *); -ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *); -ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *, - ethr_sint32_t, - ethr_sint32_t); -ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *, - ethr_sint32_t, - ethr_sint32_t); +#if !defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__) +# define ETHR_NEED_DW_ATMC_PROTOTYPES__ #endif -int ethr_init_atomics(void); +#ifndef ETHR_INLINE_DW_ATMC_FUNC_NAME_ +# define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X +#endif + +#undef ETHR_DW_ATMC_FUNC__ +#define ETHR_DW_ATMC_FUNC__(X) ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_dw_atomic_ ## X) + +#if defined(ETHR_NEED_DW_ATMC_PROTOTYPES__) +int ethr_have_native_dw_atomic(void); +#endif +#if defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__) +static ETHR_INLINE int +ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_have_native_dw_atomic)(void) +{ +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + return ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__; +#elif defined(ETHR_DW_NATMC_BITS__) + return 1; +#else + return 0; +#endif +} +#endif + +/* -- Misc -- */ #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) +/* + * Unusual values are used by read() fallbacks implemented via cmpxchg(). + * We want to use an unusual value in hope that it is more efficient + * not to match the value in memory. + * + * - Negative integer values are probably more unusual. + * - Very large absolute integer values are probably more unusual. + * - Odd pointers are probably more unusual (only char pointers can be odd). 
+ */ +# define ETHR_UNUSUAL_SINT32_VAL__ ((ethr_sint32_t) 0x81818181) +# if ETHR_SIZEOF_PTR == 4 +# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) ETHR_UNUSUAL_SINT32_VAL__) +# elif ETHR_SIZEOF_PTR == 8 +# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) 0x8181818181818181L) +# else +# error "Word size not supported" +# endif +# if defined(ETHR_NEED_DW_NATMC_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR) +# error "No ethr_native_dw_atomic_addr() available" +# endif +# if defined(ETHR_NEED_NATMC32_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR) +# error "No ethr_native_atomic32_addr() available" +# endif +# if defined(ETHR_NEED_NATMC64_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR) +# error "No ethr_native_atomic64_addr() available" +# endif +#endif + +#if defined(__GNUC__) +# ifndef ETHR_COMPILER_BARRIER +# define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory") +# endif +#elif defined(ETHR_WIN32_THREADS) +# ifndef ETHR_COMPILER_BARRIER +# include <intrin.h> +# pragma intrinsic(_ReadWriteBarrier) +# define ETHR_COMPILER_BARRIER _ReadWriteBarrier() +# endif +#endif -#ifndef ETHR_HAVE_NATIVE_ATOMICS +void ethr_compiler_barrier_fallback(void); +#ifndef ETHR_COMPILER_BARRIER +# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback() +#endif + +int ethr_init_atomics(void); + +/* info */ +char **ethr_native_atomic32_ops(void); +char **ethr_native_atomic64_ops(void); +char **ethr_native_dw_atomic_ops(void); +char **ethr_native_su_dw_atomic_ops(void); + +#if !defined(ETHR_DW_NATMC_BITS__) && !defined(ETHR_NATMC_BITS__) && !defined(ETHR_NATMC32_BITS__) /* - * Fallbacks for atomics used in absence of a native implementation. + * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only, + * i.e. when no native atomic implementation exist and only our lock + * based atomic fallback is used, a noop is sufficient. 
*/ +# undef ETHR_MEMORY_BARRIER +# undef ETHR_WRITE_MEMORY_BARRIER +# undef ETHR_READ_MEMORY_BARRIER +# undef ETHR_READ_DEPEND_MEMORY_BARRIER +# undef ETHR_MEMBAR +# define ETHR_MEMBAR(B) do { } while (0) +#endif + +#ifndef ETHR_MEMBAR +# error "No ETHR_MEMBAR defined" +#endif -#define ETHR_ATOMIC_ADDR_BITS 10 -#define ETHR_ATOMIC_ADDR_SHIFT 6 +#define ETHR_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore) +#define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMBAR(ETHR_StoreStore) +#define ETHR_READ_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad) +#ifdef ETHR_READ_DEPEND_MEMORY_BARRIER +# undef ETHR_ORDERED_READ_DEPEND +#else +# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER +# define ETHR_ORDERED_READ_DEPEND +#endif -typedef struct { - union { - ethr_spinlock_t lck; - char buf[ETHR_CACHE_LINE_SIZE]; - } u; -} ethr_atomic_protection_t; -extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATOMIC_ADDR_BITS]; +/* ---------- Double word size atomic implementation ---------- */ + + +#ifdef ETHR_NEED_DW_ATMC_PROTOTYPES__ +ethr_sint_t *ethr_dw_atomic_addr(ethr_dw_atomic_t *var); +int ethr_dw_atomic_cmpxchg(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ethr_dw_atomic_cmpxchg_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ethr_dw_atomic_cmpxchg_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ethr_dw_atomic_cmpxchg_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ethr_dw_atomic_cmpxchg_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ethr_dw_atomic_cmpxchg_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +void ethr_dw_atomic_set(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_set_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_set_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_set_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_set_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_set_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_read(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_read_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_read_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_read_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_read_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_read_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_init(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_init_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_init_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_init_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_init_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ethr_dw_atomic_init_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) +ethr_sint_t *ETHR_DW_ATOMIC_FUNC__(addr)(ethr_dw_atomic_t *var); +int ETHR_DW_ATOMIC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int 
ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val); +void ETHR_DW_ATOMIC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +void ETHR_DW_ATOMIC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val); +#endif +#endif /* ETHR_NEED_DW_ATMC_PROTOTYPES__ */ + +#if (defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) \ + && (defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__))) + +#if !defined(ETHR_DW_NATMC_BITS__) +# error "Missing native atomic implementation" +#elif defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC) +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG +# undef ETHR_HAVE_DW_NATMC_CMPXCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG +# define ETHR_HAVE_DW_NATMC_CMPXCHG 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RB +# define ETHR_HAVE_DW_NATMC_CMPXCHG_RB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_WB +# define ETHR_HAVE_DW_NATMC_CMPXCHG_WB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_WB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_ACQB +# define ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_ACQB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RELB +# define ETHR_HAVE_DW_NATMC_CMPXCHG_RELB 1 +# endif +# ifdef 
ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RELB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB +# define ETHR_HAVE_DW_NATMC_CMPXCHG_MB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_SET +# undef ETHR_HAVE_DW_NATMC_SET +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET +# define ETHR_HAVE_DW_NATMC_SET 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET +# define ETHR_HAVE_SU_DW_NATMC_SET 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_SET_RB +# undef ETHR_HAVE_DW_NATMC_SET_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RB +# define ETHR_HAVE_DW_NATMC_SET_RB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RB +# define ETHR_HAVE_SU_DW_NATMC_SET_RB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_SET_WB +# undef ETHR_HAVE_DW_NATMC_SET_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_WB +# define ETHR_HAVE_DW_NATMC_SET_WB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_WB +# define ETHR_HAVE_SU_DW_NATMC_SET_WB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_SET_ACQB +# undef ETHR_HAVE_DW_NATMC_SET_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_ACQB +# define ETHR_HAVE_DW_NATMC_SET_ACQB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_ACQB +# define ETHR_HAVE_SU_DW_NATMC_SET_ACQB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_SET_RELB +# undef ETHR_HAVE_DW_NATMC_SET_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RELB +# define ETHR_HAVE_DW_NATMC_SET_RELB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RELB +# define ETHR_HAVE_SU_DW_NATMC_SET_RELB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_SET_MB +# undef ETHR_HAVE_DW_NATMC_SET_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_MB +# define ETHR_HAVE_DW_NATMC_SET_MB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_MB +# define ETHR_HAVE_SU_DW_NATMC_SET_MB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_READ +# undef ETHR_HAVE_DW_NATMC_READ +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ +# define ETHR_HAVE_DW_NATMC_READ 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ +# define ETHR_HAVE_SU_DW_NATMC_READ 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_READ_RB +# undef ETHR_HAVE_DW_NATMC_READ_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RB +# define ETHR_HAVE_DW_NATMC_READ_RB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RB +# define ETHR_HAVE_SU_DW_NATMC_READ_RB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_READ_WB +# undef ETHR_HAVE_DW_NATMC_READ_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_WB +# define ETHR_HAVE_DW_NATMC_READ_WB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_WB +# define ETHR_HAVE_SU_DW_NATMC_READ_WB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_READ_ACQB +# undef ETHR_HAVE_DW_NATMC_READ_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_ACQB +# define ETHR_HAVE_DW_NATMC_READ_ACQB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_ACQB +# define ETHR_HAVE_SU_DW_NATMC_READ_ACQB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_READ_RELB +# undef ETHR_HAVE_DW_NATMC_READ_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RELB +# define ETHR_HAVE_DW_NATMC_READ_RELB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RELB +# define ETHR_HAVE_SU_DW_NATMC_READ_RELB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_READ_MB +# undef ETHR_HAVE_DW_NATMC_READ_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_MB +# define 
ETHR_HAVE_DW_NATMC_READ_MB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_MB +# define ETHR_HAVE_SU_DW_NATMC_READ_MB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_INIT +# undef ETHR_HAVE_DW_NATMC_INIT +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT +# define ETHR_HAVE_DW_NATMC_INIT 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT +# define ETHR_HAVE_SU_DW_NATMC_INIT 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_INIT_RB +# undef ETHR_HAVE_DW_NATMC_INIT_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RB +# define ETHR_HAVE_DW_NATMC_INIT_RB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RB +# define ETHR_HAVE_SU_DW_NATMC_INIT_RB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_INIT_WB +# undef ETHR_HAVE_DW_NATMC_INIT_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_WB +# define ETHR_HAVE_DW_NATMC_INIT_WB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_WB +# define ETHR_HAVE_SU_DW_NATMC_INIT_WB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_INIT_ACQB +# undef ETHR_HAVE_DW_NATMC_INIT_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_ACQB +# define ETHR_HAVE_DW_NATMC_INIT_ACQB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_ACQB +# define ETHR_HAVE_SU_DW_NATMC_INIT_ACQB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_INIT_RELB +# undef ETHR_HAVE_DW_NATMC_INIT_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RELB +# define ETHR_HAVE_DW_NATMC_INIT_RELB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RELB +# define ETHR_HAVE_SU_DW_NATMC_INIT_RELB 1 +# endif +# undef ETHR_HAVE_SU_DW_NATMC_INIT_MB +# undef ETHR_HAVE_DW_NATMC_INIT_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_MB +# define ETHR_HAVE_DW_NATMC_INIT_MB 1 +# endif +# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_MB +# define ETHR_HAVE_SU_DW_NATMC_INIT_MB 1 +# endif +#elif ETHR_DW_NATMC_BITS__ == 64 +# undef ETHR_HAVE_DW_NATMC_CMPXCHG +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG 1 +# endif +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RB +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB 1 +# endif +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_WB +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB 1 +# endif +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB 1 +# endif +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RELB +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB 1 +# endif +# undef ETHR_HAVE_DW_NATMC_CMPXCHG_MB +# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB +# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB 1 +# endif +# undef ETHR_HAVE_DW_NATMC_SET +# undef ETHR_HAVE_SU_DW_NATMC_SET +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET +# define ETHR_HAVE_SU_DW_NATMC_SET 1 +# endif +# undef ETHR_HAVE_DW_NATMC_SET_RB +# undef ETHR_HAVE_SU_DW_NATMC_SET_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB +# define ETHR_HAVE_SU_DW_NATMC_SET_RB 1 +# endif +# undef ETHR_HAVE_DW_NATMC_SET_WB +# undef ETHR_HAVE_SU_DW_NATMC_SET_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB +# define ETHR_HAVE_SU_DW_NATMC_SET_WB 1 +# endif +# undef ETHR_HAVE_DW_NATMC_SET_ACQB +# undef ETHR_HAVE_SU_DW_NATMC_SET_ACQB +# ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_MB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+# define ETHR_HAVE_SU_DW_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ
+# undef ETHR_HAVE_SU_DW_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+# define ETHR_HAVE_SU_DW_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_RB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_WB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+# define ETHR_HAVE_SU_DW_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_MB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+# define ETHR_HAVE_SU_DW_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT
+# undef ETHR_HAVE_SU_DW_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+# define ETHR_HAVE_SU_DW_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_RB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_WB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_MB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_MB 1
+# endif
+#else
+# error "Invalid native atomic size"
+#endif
+
-#define ETHR_ATOMIC_PTR2LCK__(PTR) \
-(&ethr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATOMIC_ADDR_SHIFT) \
- & ((1 << ETHR_ATOMIC_ADDR_BITS) - 1))].u.lck)
+#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC)
+#if (!defined(ETHR_HAVE_DW_NATMC_CMPXCHG) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
+
+
+/*
+ * Read op used together with cmpxchg() fallback when no native op present.
+ */ +#if defined(ETHR_HAVE_DW_NATMC_READ) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + ETHR_DW_NATMC_FUNC__(read)(VAR, VAL) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(VAR) +#elif defined(ETHR_HAVE_DW_NATMC_READ_RB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + ETHR_DW_NATMC_FUNC__(read_rb)(VAR, VAL) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(VAR) +#elif defined(ETHR_HAVE_DW_NATMC_READ_WB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + ETHR_DW_NATMC_FUNC__(read_wb)(VAR, VAL) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(VAR) +#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + ETHR_DW_NATMC_FUNC__(read_acqb)(VAR, VAL) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(VAR) +#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + ETHR_DW_NATMC_FUNC__(read_relb)(VAR, VAL) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(VAR) +#elif defined(ETHR_HAVE_DW_NATMC_READ_MB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + ETHR_DW_NATMC_FUNC__(read_mb)(VAR, VAL) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ + VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(VAR) +#else +/* + * We have no native read() op; guess zero and then use the + * the atomics actual value returned from cmpxchg(). + */ +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \ +do { \ + VAL.sint[0] = (ethr_sint_t) 0; \ + VAL.sint[1] = (ethr_sint_t) 0; \ +} while (0) +#endif -#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS) \ -do { \ - ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP)); \ - ethr_spin_lock(slp__); \ - { EXPS; } \ - ethr_spin_unlock(slp__); \ +/* + * Native cmpxchg() fallback used when no native op present. + */ +#define ETHR_DW_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \ +do { \ + int res__; \ + ethr_dw_sint_t AVAL, exp_act__; \ + ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, exp_act__); \ + do { \ + AVAL.sint[0] = exp_act__.sint[0]; \ + AVAL.sint[1] = exp_act__.sint[1]; \ + { OPS; } \ + res__ = CMPXCHG(VAR, AVAL.sint, exp_act__.sint); \ + } while (__builtin_expect(res__ == 0, 0)); \ } while (0) + +#elif defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC) + +#if (!defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) \ + && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) \ + && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) \ + && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) \ + && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) \ + && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)) +# error "No native cmpxchg() op available" +#endif + + +/* + * Read op used together with cmpxchg() fallback when no native op present. 
+ */ +#if defined(ETHR_HAVE_SU_DW_NATMC_READ) +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_SU_DW_NATMC_FUNC__(read)(VAR) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_SU_DW_NATMC_FUNC__(read_rb)(VAR) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_SU_DW_NATMC_FUNC__(read_wb)(VAR) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_SU_DW_NATMC_FUNC__(read_acqb)(VAR) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_SU_DW_NATMC_FUNC__(read_relb)(VAR) +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_SU_DW_NATMC_FUNC__(read_mb)(VAR) +#else +/* + * We have no native read() op; guess zero and then use the + * the atomics actual value returned from cmpxchg(). + */ +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ((ETHR_SU_DW_NAINT_T__) 0) +#endif + +/* + * Native cmpxchg() fallback used when no native op present. + */ +#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \ +do { \ + ETHR_SU_DW_NAINT_T__ AVAL; \ + ETHR_SU_DW_NAINT_T__ new__, act__, exp__; \ + act__ = ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR); \ + do { \ + exp__ = act__; \ + AVAL = (ETHR_SU_DW_NAINT_T__) act__; \ + { OPS; } \ + new__ = (ETHR_SU_DW_NAINT_T__) AVAL; \ + act__ = CMPXCHG(VAR, new__, exp__); \ + } while (__builtin_expect(act__ != exp__, 0)); \ +} while (0) + + +#else +# error "?!?" +#endif + + + +/* --- addr() --- */ + +static ETHR_INLINE ethr_sint_t *ETHR_DW_ATMC_FUNC__(addr)(ethr_dw_atomic_t *var) +{ +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + + return (ethr_sint_t *) ETHR_DW_NATMC_ADDR_FUNC__((&var->native)); + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { return ETHR_DW_ATOMIC_FUNC__(addr)(var); } +#endif + +} + + +/* --- cmpxchg() --- */ + + +static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NAINT_T__ 
act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint); +#else +#error "Missing implementation of ethr_dw_atomic_cmpxchg()!" +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg)(var, val, old_val); } +#endif + + return res; +} + +static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + res = 
ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_dw_atomic_cmpxchg_rb()!" +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(var, val, old_val); } +#endif + + return res; +} + +static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_StoreStore); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_StoreStore); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_StoreStore); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_StoreStore); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint); +#else +#error "Missing implementation of ethr_dw_atomic_cmpxchg_wb()!" 
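Each of the _rb/_wb/_acqb/_relb/_mb variants above follows the same recipe: use the nearest native barrier flavour that exists and make up the difference with an explicit ETHR_MEMBAR() before or after the operation. A condensed single word sketch of that recipe, written outside the generated code; my_atomic_t and native_cmpxchg() are placeholders and not part of the ethread API, and the gcc/clang builtin merely stands in for a native, unordered op:

#include "ethread.h"   /* assumed to provide ETHR_MEMBAR() and the ETHR_* ordering bits */

typedef volatile long my_atomic_t;

/* Placeholder for a native compare-and-swap with no implied ordering
 * (the builtin used here is in fact fully ordered, which does not
 * matter for the illustration). */
static int
native_cmpxchg(my_atomic_t *var, long new_val, long exp_val)
{
    return __sync_bool_compare_and_swap((long *) var, exp_val, new_val);
}

/* Write barrier variant: order earlier stores before the operation. */
static int
my_cmpxchg_wb(my_atomic_t *var, long new_val, long exp_val)
{
    ETHR_MEMBAR(ETHR_StoreStore);
    return native_cmpxchg(var, new_val, exp_val);
}

/* Acquire variant derived from an unordered native op: a conservative
 * barrier after the operation keeps later accesses from moving up
 * across it, matching the degradation used in the chains above. */
static int
my_cmpxchg_acqb(my_atomic_t *var, long new_val, long exp_val)
{
    int res = native_cmpxchg(var, new_val, exp_val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    return res;
}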
+#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(var, val, old_val); } +#endif + + return res; +} + +static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_dw_atomic_cmpxchg_acqb()!" 
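The cmpxchg() wrappers above share one calling convention: they return nonzero on success, and on failure the value actually found in memory is written back through old_val, which is what lets both the fallback macros and ordinary callers retry without an extra read. A small usage sketch under that convention, assuming the prototypes above are in scope via ethread.h; the function name is illustrative:

#include "ethread.h"

/* Atomically increment the high word of a double word variable. */
static void
dw_example_inc_high(ethr_dw_atomic_t *dwa)
{
    ethr_dw_sint_t expected, desired;
    ethr_dw_atomic_read(dwa, &expected);
    do {
        desired = expected;
        desired.sint[ETHR_DW_SINT_HIGH_WORD] += 1;
        /* on failure 'expected' now holds the value seen in memory */
    } while (!ethr_dw_atomic_cmpxchg(dwa, &desired, &expected));
}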
+#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(var, val, old_val); } +#endif + + return res; +} + +static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint); +#else +#error "Missing implementation of ethr_dw_atomic_cmpxchg_relb()!" 
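The read() fallbacks in this header, together with the ETHR_UNUSUAL_SINT_VAL__ constants defined earlier, rest on one trick: a read can be emulated by a cmpxchg() whose expected value is chosen to be unlikely to match, so in the common case nothing is stored and the operation simply hands back what was in memory. A single word sketch of the idea; cmpxchg_returning_actual() is a placeholder for a native op that returns the value found in memory, and ETHR_UNUSUAL_SINT_VAL__ is internal to this header, so the sketch is illustrative rather than something to build against the installed API:

/* Placeholder: compare-and-swap returning the value found in memory. */
extern ethr_sint_t cmpxchg_returning_actual(volatile ethr_sint_t *p,
                                            ethr_sint_t new_val,
                                            ethr_sint_t exp_val);

static ethr_sint_t
read_via_cmpxchg(volatile ethr_sint_t *p)
{
    /* Usually *p != ETHR_UNUSUAL_SINT_VAL__, so nothing is written and
     * the actual contents are returned. If the values happen to match,
     * the same value is stored back, which is still a correct read. */
    return cmpxchg_returning_actual(p, ETHR_UNUSUAL_SINT_VAL__,
                                    ETHR_UNUSUAL_SINT_VAL__);
}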
+#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(var, val, old_val); } +#endif + + return res; +} + +static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NAINT_T__ act; + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NAINT_T__ act; + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + res = (act == old_val->dw_sint); + old_val->dw_sint = act; +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of 
ethr_dw_atomic_cmpxchg_mb()!" +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(var, val, old_val); } +#endif + + return res; +} + + +/* --- set() --- */ + + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_SET) + ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB) + ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB) + ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB) + ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB) + ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB) + ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET) + ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RB) + ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_WB) + ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB) + ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB) + ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_MB) + ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + 
ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#else +#error "Missing implementation of ethr_dw_atomic_set()!" +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(set)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_SET_RB) + ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET) + ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB) + ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB) + ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB) + ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB) + ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RB) + ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET) + ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_SET_MB) + ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_WB) + ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB) + ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB) + ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif 
defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_dw_atomic_set_rb()!" +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(set_rb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_SET_WB) + ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB) + ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_WB) + ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_MB) + ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint); +#elif 
defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#else +#error "Missing implementation of ethr_dw_atomic_set_wb()!" 
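When none of the native set() flavours exist, the set() variants above fall back to the ETHR_DW_NATMC_CMPXCHG_FALLBACK__ retry loop. Expanded by hand, the plain set() case comes out roughly as below; this is a sketch of what the macro generates, not a verbatim expansion, and it only makes sense inside this header where the internal macros are visible:

/* Rough expansion of set() via the cmpxchg() fallback loop above. */
static void
dw_set_via_cmpxchg(ethr_native_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    int res;
    ethr_dw_sint_t aval, exp_act;
    /* initial guess of the current contents (or zero if no native read) */
    ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(var, exp_act);
    do {
        aval.sint[0] = exp_act.sint[0];
        aval.sint[1] = exp_act.sint[1];
        /* the OPS argument of the macro: overwrite with the new value */
        aval.sint[0] = val->sint[0];
        aval.sint[1] = val->sint[1];
        /* on failure exp_act is updated with what was actually found */
        res = ethr_native_dw_atomic_cmpxchg(var, aval.sint, exp_act.sint);
    } while (res == 0);
}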
+#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(set_wb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB) + ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB) + ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET) + ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB) + ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB) + ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB) + ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB) + ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RB) + ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET) + ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET_MB) + ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_WB) + ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB) + ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + 
ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_dw_atomic_set_acqb()!" +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(set_acqb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB) + ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB) + ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB) + ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_MB) + ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + 
ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#else +#error "Missing implementation of ethr_dw_atomic_set_relb()!" 
+#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(set_relb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_SET_MB) + ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB) + ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET_MB) + ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB) + ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) + 
ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_dw_atomic_set_mb()!" 
+#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(set_mb)(var, val); } +#endif + +} + + +/* --- read() --- */ + + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_READ) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native); +#elif defined(ETHR_HAVE_DW_NATMC_READ) + ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RB) + ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_WB) + ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB) + ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB) + ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_MB) + ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint); +#else + ethr_dw_sint_t tmp; + tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__; + tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[0] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[1] = ETHR_UNUSUAL_SINT_VAL__; + (void) ETHR_DW_ATMC_FUNC__(cmpxchg)(var, &tmp, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(read)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RB) + ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ) + ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_READ_MB) + ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_WB) + ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB) + 
ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB) + ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ethr_dw_sint_t tmp; + tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__; + tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[0] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[1] = ETHR_UNUSUAL_SINT_VAL__; + (void) ETHR_DW_ATMC_FUNC__(cmpxchg_rb)(var, &tmp, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(read_rb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ) + ETHR_MEMBAR(ETHR_StoreStore); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_StoreStore); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native); +#elif defined(ETHR_HAVE_DW_NATMC_READ_WB) + ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_MB) + ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint); +#else + ethr_dw_sint_t tmp; + tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__; + tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[0] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[1] = ETHR_UNUSUAL_SINT_VAL__; + (void) ETHR_DW_ATMC_FUNC__(cmpxchg_wb)(var, &tmp, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(read_wb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) + val->dw_sint = 
ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB) + ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RB) + ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ) + ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ_MB) + ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_WB) + ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB) + ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#else + ethr_dw_sint_t tmp; + tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__; + tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[0] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[1] = ETHR_UNUSUAL_SINT_VAL__; + (void) ETHR_DW_ATMC_FUNC__(cmpxchg_acqb)(var, &tmp, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(read_acqb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB) + ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_MB) + ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint); +#else + ethr_dw_sint_t tmp; + tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__; + tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[0] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[1] = ETHR_UNUSUAL_SINT_VAL__; + (void) ETHR_DW_ATMC_FUNC__(cmpxchg_relb)(var, &tmp, val); +#endif + +#if 
defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(read_relb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_READ_MB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB) + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ_MB) + ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB) + ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_DW_NATMC_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#else + ethr_dw_sint_t tmp; + tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__; + tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[0] = ETHR_UNUSUAL_SINT_VAL__; + val->sint[1] = ETHR_UNUSUAL_SINT_VAL__; + (void) ETHR_DW_ATMC_FUNC__(cmpxchg_mb)(var, &tmp, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(read_mb)(var, val); } +#endif + +} + + +/* --- init() --- */ + + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_INIT) + ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB) + ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB) + ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB) + ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB) + ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB) + 
ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT) + ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB) + ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB) + ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB) + ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB) + ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB) + ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint); +#else + ETHR_DW_ATMC_FUNC__(set)(var, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(init)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB) + ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT) + ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB) + ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB) + ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB) + ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB) + ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB) + ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT) + ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB) + ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB) + ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB) + ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB) + ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_DW_ATMC_FUNC__(set_rb)(var, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(init_rb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB) + ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB) + ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + 
ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB) + ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB) + ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint); +#else + ETHR_DW_ATMC_FUNC__(set_wb)(var, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(init_wb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB) + ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB) + ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT) + ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB) + ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB) + ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB) + ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB) + ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB) + ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_INIT) + ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB) + ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB) + ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB) + ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_DW_ATMC_FUNC__(set_acqb)(var, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(init_acqb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if 
(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB) + ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB) + ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB) + ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB) + ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint); +#else + ETHR_DW_ATMC_FUNC__(set_relb)(var, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(init_relb)(var, val); } +#endif + +} + +static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) { +#endif + +#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB) + ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB) + ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif 
defined(ETHR_HAVE_DW_NATMC_INIT_MB) + ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB) + ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_DW_NATMC_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_DW_ATMC_FUNC__(set_mb)(var, val); +#endif + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + } else { ETHR_DW_ATOMIC_FUNC__(init_mb)(var, val); } +#endif + +} + +#endif /* ETHR_DW_ATMC_INLINE__ */ + + +/* ---------- Word size atomic implementation ---------- */ + + +#ifdef ETHR_NEED_ATMC_PROTOTYPES__ +ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val); +ethr_sint_t ethr_atomic_cmpxchg_rb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val); +ethr_sint_t ethr_atomic_cmpxchg_wb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val); +ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val); +ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val); +ethr_sint_t ethr_atomic_cmpxchg_mb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val); +ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_xchg_rb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_xchg_wb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_xchg_acqb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_xchg_relb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_xchg_mb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_set_rb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_set_wb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_set_acqb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_set_mb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_init_rb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_init_wb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_init_acqb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_init_relb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_init_mb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_add_read_rb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_add_read_wb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t 
ethr_atomic_add_read_acqb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_add_read_relb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_add_read_mb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_read_rb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_read_wb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_read_relb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_read_mb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_inc_read_rb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_inc_read_wb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_inc_read_relb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_inc_read_mb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_dec_read_rb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_dec_read_wb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_dec_read_acqb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_dec_read_mb(ethr_atomic_t *var); +void ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_add_rb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_add_wb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_add_acqb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_add_relb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_add_mb(ethr_atomic_t *var, ethr_sint_t val); +void ethr_atomic_inc(ethr_atomic_t *var); +void ethr_atomic_inc_rb(ethr_atomic_t *var); +void ethr_atomic_inc_wb(ethr_atomic_t *var); +void ethr_atomic_inc_acqb(ethr_atomic_t *var); +void ethr_atomic_inc_relb(ethr_atomic_t *var); +void ethr_atomic_inc_mb(ethr_atomic_t *var); +void ethr_atomic_dec(ethr_atomic_t *var); +void ethr_atomic_dec_rb(ethr_atomic_t *var); +void ethr_atomic_dec_wb(ethr_atomic_t *var); +void ethr_atomic_dec_acqb(ethr_atomic_t *var); +void ethr_atomic_dec_relb(ethr_atomic_t *var); +void ethr_atomic_dec_mb(ethr_atomic_t *var); +ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_band_rb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_band_wb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_band_acqb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_band_relb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_band_mb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_bor_rb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_bor_wb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_bor_acqb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_bor_relb(ethr_atomic_t *var, ethr_sint_t val); +ethr_sint_t ethr_atomic_read_bor_mb(ethr_atomic_t *var, ethr_sint_t val); +#endif /* ETHR_NEED_ATMC_PROTOTYPES__ */ + +#if (defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \ + && (defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__))) + +#if !defined(ETHR_NATMC_BITS__) +# error "Missing native atomic implementation" +#elif ETHR_NATMC_BITS__ == 64 +# undef ETHR_HAVE_NATMC_CMPXCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG +# define ETHR_HAVE_NATMC_CMPXCHG 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_RB +# ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB +# define ETHR_HAVE_NATMC_CMPXCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB +# define ETHR_HAVE_NATMC_CMPXCHG_WB 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB +# define ETHR_HAVE_NATMC_CMPXCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB +# define ETHR_HAVE_NATMC_CMPXCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB +# define ETHR_HAVE_NATMC_CMPXCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG +# define ETHR_HAVE_NATMC_XCHG 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB +# define ETHR_HAVE_NATMC_XCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB +# define ETHR_HAVE_NATMC_XCHG_WB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB +# define ETHR_HAVE_NATMC_XCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB +# define ETHR_HAVE_NATMC_XCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB +# define ETHR_HAVE_NATMC_XCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC_SET +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET +# define ETHR_HAVE_NATMC_SET 1 +# endif +# undef ETHR_HAVE_NATMC_SET_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB +# define ETHR_HAVE_NATMC_SET_RB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB +# define ETHR_HAVE_NATMC_SET_WB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB +# define ETHR_HAVE_NATMC_SET_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB +# define ETHR_HAVE_NATMC_SET_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB +# define ETHR_HAVE_NATMC_SET_MB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT +# define ETHR_HAVE_NATMC_INIT 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB +# define ETHR_HAVE_NATMC_INIT_RB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB +# define ETHR_HAVE_NATMC_INIT_WB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB +# define ETHR_HAVE_NATMC_INIT_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB +# define ETHR_HAVE_NATMC_INIT_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB +# define ETHR_HAVE_NATMC_INIT_MB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN +# define ETHR_HAVE_NATMC_ADD_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB +# define ETHR_HAVE_NATMC_ADD_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB +# define ETHR_HAVE_NATMC_ADD_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB +# define ETHR_HAVE_NATMC_ADD_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_RELB +# ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB +# define ETHR_HAVE_NATMC_ADD_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB +# define ETHR_HAVE_NATMC_ADD_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC_READ +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ +# define ETHR_HAVE_NATMC_READ 1 +# endif +# undef ETHR_HAVE_NATMC_READ_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB +# define ETHR_HAVE_NATMC_READ_RB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB +# define ETHR_HAVE_NATMC_READ_WB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB +# define ETHR_HAVE_NATMC_READ_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB +# define ETHR_HAVE_NATMC_READ_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB +# define ETHR_HAVE_NATMC_READ_MB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN +# define ETHR_HAVE_NATMC_INC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB +# define ETHR_HAVE_NATMC_INC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB +# define ETHR_HAVE_NATMC_INC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB +# define ETHR_HAVE_NATMC_INC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB +# define ETHR_HAVE_NATMC_INC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB +# define ETHR_HAVE_NATMC_INC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN +# define ETHR_HAVE_NATMC_DEC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB +# define ETHR_HAVE_NATMC_DEC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB +# define ETHR_HAVE_NATMC_DEC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB +# define ETHR_HAVE_NATMC_DEC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB +# define ETHR_HAVE_NATMC_DEC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB +# define ETHR_HAVE_NATMC_DEC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD +# define ETHR_HAVE_NATMC_ADD 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB +# define ETHR_HAVE_NATMC_ADD_RB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB +# define ETHR_HAVE_NATMC_ADD_WB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB +# define ETHR_HAVE_NATMC_ADD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB +# define ETHR_HAVE_NATMC_ADD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB +# define ETHR_HAVE_NATMC_ADD_MB 1 +# endif +# undef ETHR_HAVE_NATMC_INC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC +# 
define ETHR_HAVE_NATMC_INC 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB +# define ETHR_HAVE_NATMC_INC_RB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB +# define ETHR_HAVE_NATMC_INC_WB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB +# define ETHR_HAVE_NATMC_INC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB +# define ETHR_HAVE_NATMC_INC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB +# define ETHR_HAVE_NATMC_INC_MB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC +# define ETHR_HAVE_NATMC_DEC 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB +# define ETHR_HAVE_NATMC_DEC_RB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB +# define ETHR_HAVE_NATMC_DEC_WB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB +# define ETHR_HAVE_NATMC_DEC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB +# define ETHR_HAVE_NATMC_DEC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB +# define ETHR_HAVE_NATMC_DEC_MB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD +# define ETHR_HAVE_NATMC_AND_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB +# define ETHR_HAVE_NATMC_AND_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB +# define ETHR_HAVE_NATMC_AND_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB +# define ETHR_HAVE_NATMC_AND_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB +# define ETHR_HAVE_NATMC_AND_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB +# define ETHR_HAVE_NATMC_AND_RETOLD_MB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD +# define ETHR_HAVE_NATMC_OR_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB +# define ETHR_HAVE_NATMC_OR_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB +# define ETHR_HAVE_NATMC_OR_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB +# define ETHR_HAVE_NATMC_OR_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB +# define ETHR_HAVE_NATMC_OR_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB +# define ETHR_HAVE_NATMC_OR_RETOLD_MB 1 +# endif +#elif ETHR_NATMC_BITS__ == 32 +# undef ETHR_HAVE_NATMC_CMPXCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG +# define ETHR_HAVE_NATMC_CMPXCHG 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB +# define ETHR_HAVE_NATMC_CMPXCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB +# define ETHR_HAVE_NATMC_CMPXCHG_WB 1 +# endif +# 
undef ETHR_HAVE_NATMC_CMPXCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB +# define ETHR_HAVE_NATMC_CMPXCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB +# define ETHR_HAVE_NATMC_CMPXCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_CMPXCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB +# define ETHR_HAVE_NATMC_CMPXCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG +# define ETHR_HAVE_NATMC_XCHG 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB +# define ETHR_HAVE_NATMC_XCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB +# define ETHR_HAVE_NATMC_XCHG_WB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB +# define ETHR_HAVE_NATMC_XCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB +# define ETHR_HAVE_NATMC_XCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_XCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB +# define ETHR_HAVE_NATMC_XCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC_SET +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET +# define ETHR_HAVE_NATMC_SET 1 +# endif +# undef ETHR_HAVE_NATMC_SET_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB +# define ETHR_HAVE_NATMC_SET_RB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB +# define ETHR_HAVE_NATMC_SET_WB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB +# define ETHR_HAVE_NATMC_SET_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB +# define ETHR_HAVE_NATMC_SET_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_SET_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB +# define ETHR_HAVE_NATMC_SET_MB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT +# define ETHR_HAVE_NATMC_INIT 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB +# define ETHR_HAVE_NATMC_INIT_RB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB +# define ETHR_HAVE_NATMC_INIT_WB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB +# define ETHR_HAVE_NATMC_INIT_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB +# define ETHR_HAVE_NATMC_INIT_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_INIT_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB +# define ETHR_HAVE_NATMC_INIT_MB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN +# define ETHR_HAVE_NATMC_ADD_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB +# define ETHR_HAVE_NATMC_ADD_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB +# define ETHR_HAVE_NATMC_ADD_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB +# define ETHR_HAVE_NATMC_ADD_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB +# define ETHR_HAVE_NATMC_ADD_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB +# define ETHR_HAVE_NATMC_ADD_RETURN_MB 1 +# endif +# undef 
ETHR_HAVE_NATMC_READ +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ +# define ETHR_HAVE_NATMC_READ 1 +# endif +# undef ETHR_HAVE_NATMC_READ_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB +# define ETHR_HAVE_NATMC_READ_RB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB +# define ETHR_HAVE_NATMC_READ_WB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB +# define ETHR_HAVE_NATMC_READ_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB +# define ETHR_HAVE_NATMC_READ_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_READ_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB +# define ETHR_HAVE_NATMC_READ_MB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN +# define ETHR_HAVE_NATMC_INC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB +# define ETHR_HAVE_NATMC_INC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB +# define ETHR_HAVE_NATMC_INC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB +# define ETHR_HAVE_NATMC_INC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB +# define ETHR_HAVE_NATMC_INC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB +# define ETHR_HAVE_NATMC_INC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN +# define ETHR_HAVE_NATMC_DEC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB +# define ETHR_HAVE_NATMC_DEC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB +# define ETHR_HAVE_NATMC_DEC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB +# define ETHR_HAVE_NATMC_DEC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB +# define ETHR_HAVE_NATMC_DEC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB +# define ETHR_HAVE_NATMC_DEC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD +# define ETHR_HAVE_NATMC_ADD 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB +# define ETHR_HAVE_NATMC_ADD_RB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB +# define ETHR_HAVE_NATMC_ADD_WB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB +# define ETHR_HAVE_NATMC_ADD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB +# define ETHR_HAVE_NATMC_ADD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_ADD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB +# define ETHR_HAVE_NATMC_ADD_MB 1 +# endif +# undef ETHR_HAVE_NATMC_INC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC +# define ETHR_HAVE_NATMC_INC 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB +# define ETHR_HAVE_NATMC_INC_RB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB +# define 
ETHR_HAVE_NATMC_INC_WB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB +# define ETHR_HAVE_NATMC_INC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB +# define ETHR_HAVE_NATMC_INC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_INC_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB +# define ETHR_HAVE_NATMC_INC_MB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC +# define ETHR_HAVE_NATMC_DEC 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB +# define ETHR_HAVE_NATMC_DEC_RB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB +# define ETHR_HAVE_NATMC_DEC_WB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB +# define ETHR_HAVE_NATMC_DEC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB +# define ETHR_HAVE_NATMC_DEC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_DEC_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB +# define ETHR_HAVE_NATMC_DEC_MB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD +# define ETHR_HAVE_NATMC_AND_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB +# define ETHR_HAVE_NATMC_AND_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB +# define ETHR_HAVE_NATMC_AND_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB +# define ETHR_HAVE_NATMC_AND_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB +# define ETHR_HAVE_NATMC_AND_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_AND_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB +# define ETHR_HAVE_NATMC_AND_RETOLD_MB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD +# define ETHR_HAVE_NATMC_OR_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB +# define ETHR_HAVE_NATMC_OR_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB +# define ETHR_HAVE_NATMC_OR_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB +# define ETHR_HAVE_NATMC_OR_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB +# define ETHR_HAVE_NATMC_OR_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC_OR_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB +# define ETHR_HAVE_NATMC_OR_RETOLD_MB 1 +# endif +#else +# error "Invalid native atomic size" #endif +#if (!defined(ETHR_HAVE_NATMC_CMPXCHG) \ + && !defined(ETHR_HAVE_NATMC_CMPXCHG_RB) \ + && !defined(ETHR_HAVE_NATMC_CMPXCHG_WB) \ + && !defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) \ + && !defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) \ + && !defined(ETHR_HAVE_NATMC_CMPXCHG_MB)) +# error "No native cmpxchg() op available" +#endif + + /* - * --- Pointer size atomics --------------------------------------------------- + * Read op used together with cmpxchg() fallback when no native op present. 
*/ +#if defined(ETHR_HAVE_NATMC_READ) +#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC_FUNC__(read)(VAR) +#elif defined(ETHR_HAVE_NATMC_READ_RB) +#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC_FUNC__(read_rb)(VAR) +#elif defined(ETHR_HAVE_NATMC_READ_WB) +#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC_FUNC__(read_wb)(VAR) +#elif defined(ETHR_HAVE_NATMC_READ_ACQB) +#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC_FUNC__(read_acqb)(VAR) +#elif defined(ETHR_HAVE_NATMC_READ_RELB) +#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC_FUNC__(read_relb)(VAR) +#elif defined(ETHR_HAVE_NATMC_READ_MB) +#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC_FUNC__(read_mb)(VAR) +#else +/* + * We have no native read() op; guess zero and then use the + * atomic's actual value returned from cmpxchg(). + */ +#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \ + ((ETHR_NAINT_T__) 0) +#endif + +/* + * Native cmpxchg() fallback used when no native op present. + */ +#define ETHR_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \ +do { \ + ethr_sint_t AVAL; \ + ETHR_NAINT_T__ new__, act__, exp__; \ + act__ = ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR); \ + do { \ + exp__ = act__; \ + AVAL = (ethr_sint_t) act__; \ + { OPS; } \ + new__ = (ETHR_NAINT_T__) AVAL; \ + act__ = CMPXCHG(VAR, new__, exp__); \ + } while (__builtin_expect(act__ != exp__, 0)); \ +} while (0) + + + +/* --- addr() --- */ -static ETHR_INLINE ethr_sint_t * -ETHR_INLINE_FUNC_NAME_(ethr_atomic_addr)(ethr_atomic_t *var) +static ETHR_INLINE ethr_sint_t *ETHR_ATMC_FUNC__(addr)(ethr_atomic_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS return (ethr_sint_t *) ETHR_NATMC_ADDR_FUNC__(var); + +} + + +/* --- cmpxchg() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_CMPXCHG) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); #else - return (ethr_sint_t *) var; +#error "Missing implementation of ethr_atomic_cmpxchg()!" 
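/*
 * [Editorial sketch, not part of the commit] The ETHR_NATMC_CMPXCHG_FALLBACK__
 * macro above generates a classic compare-and-swap retry loop: read (or guess)
 * the current value, apply the operation, and retry with whatever value the
 * cmpxchg() actually observed until no other thread raced in between.  A
 * minimal standalone illustration in C11 atomics follows; the types, names and
 * the relaxed ordering are assumptions of the sketch, not the generated code.
 */
#include <stdatomic.h>

static long add_read_via_cas(_Atomic long *var, long incr)
{
    /* corresponds to ETHR_NATMC_CMPXCHG_FALLBACK_READ__() */
    long exp = atomic_load_explicit(var, memory_order_relaxed);
    long new;
    do {
        new = exp + incr;   /* the OPS block supplied by the caller */
        /* on failure, exp is refreshed with the value actually seen */
    } while (!atomic_compare_exchange_weak_explicit(var, &exp, new,
                                                    memory_order_relaxed,
                                                    memory_order_relaxed));
    return new;             /* the op's result, as add_read() returns it */
}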
#endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic_init)(ethr_atomic_t *var, ethr_sint_t i) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_rb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) i); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i); +#error "Missing implementation of ethr_atomic_cmpxchg_rb()!" #endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic_set)(ethr_atomic_t *var, ethr_sint_t i) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_wb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) i); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i); +#error "Missing implementation of ethr_atomic_cmpxchg_wb()!" 
#endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_read)(ethr_atomic_t *var) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_acqb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else +#error "Missing implementation of ethr_atomic_cmpxchg_acqb()!" +#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_relb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#if defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#else +#error "Missing implementation of ethr_atomic_cmpxchg_relb()!" 
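/*
 * [Editorial sketch, not part of the commit] The wrappers above all follow the
 * same recipe for synthesising a barrier flavour the native layer lacks: pick
 * the closest native op and add the missing ETHR_MEMBAR() on the correct side,
 * after the op for the read/acquire flavours (_rb/_acqb) and before it for the
 * write/release flavours (_wb/_relb).  A rough C11 analogue of that
 * composition, assuming only a relaxed compare-and-swap is available:
 */
#include <stdatomic.h>

static long cas_acquire(_Atomic long *var, long desired, long expected)
{
    long exp = expected;
    atomic_compare_exchange_strong_explicit(var, &exp, desired,
                                            memory_order_relaxed,
                                            memory_order_relaxed);
    atomic_thread_fence(memory_order_acquire);   /* barrier after the op  */
    return exp;             /* old value, as ethread's cmpxchg returns it */
}

static long cas_release(_Atomic long *var, long desired, long expected)
{
    long exp = expected;
    atomic_thread_fence(memory_order_release);   /* barrier before the op */
    atomic_compare_exchange_strong_explicit(var, &exp, desired,
                                            memory_order_relaxed,
                                            memory_order_relaxed);
    return exp;
}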
+#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_mb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic_cmpxchg_mb()!" #endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic_add)(ethr_atomic_t *var, ethr_sint_t incr) + +/* --- xchg() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) incr); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_XCHG) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, 
*var += incr); +#error "Missing implementation of ethr_atomic_xchg()!" #endif -} - -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_add_read)(ethr_atomic_t *var, ethr_sint_t i) + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_rb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) i); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_XCHG_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_XCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_XCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); #else +#error "Missing implementation of ethr_atomic_xchg_rb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_wb)(ethr_atomic_t *var, ethr_sint_t val) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += i; res = *var); +#if defined(ETHR_HAVE_NATMC_XCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); +#else +#error "Missing implementation of ethr_atomic_xchg_wb()!" 
+#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_acqb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_XCHG_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_XCHG) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_XCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_XCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic_xchg_acqb()!" 
#endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc)(ethr_atomic_t *var) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_relb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC_FUNC__(inc)(var); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_XCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#error "Missing implementation of ethr_atomic_xchg_relb()!" 
#endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec)(ethr_atomic_t *var) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_mb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC_FUNC__(dec)(var); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_XCHG_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_XCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_XCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_XCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic_xchg_mb()!" 
+#endif + return res; +} + + +/* --- set() --- */ + + +static ETHR_INLINE void ETHR_ATMC_FUNC__(set)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_SET) + ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_RB) + ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_WB) + ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_ACQB) + ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_RELB) + ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_MB) + ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val); +#else + (void) ETHR_ATMC_FUNC__(xchg)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(set_rb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_SET_RB) + ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET) + ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_SET_MB) + ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_WB) + ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_SET_ACQB) + ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_SET_RELB) + ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + (void) ETHR_ATMC_FUNC__(xchg_rb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(set_wb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_SET_WB) + ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_MB) + ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val); +#else + (void) ETHR_ATMC_FUNC__(xchg_wb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(set_acqb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_SET_ACQB) + ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_RB) + ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_SET) + ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_SET_MB) + ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_WB) + ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_SET_RELB) + ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC_FUNC__(xchg_acqb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(set_relb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_SET_RELB) + 
ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_MB) + ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val); +#else + (void) ETHR_ATMC_FUNC__(xchg_relb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(set_mb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_SET_MB) + ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_RELB) + ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC_FUNC__(xchg_mb)(var, val); +#endif +} + + +/* --- init() --- */ + + +static ETHR_INLINE void ETHR_ATMC_FUNC__(init)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_INIT) + ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_RB) + ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_WB) + ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_ACQB) + ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_RELB) + ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_MB) + ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val); +#else + ETHR_ATMC_FUNC__(set)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(init_rb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_INIT_RB) + ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT) + ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INIT_MB) + ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_WB) + ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INIT_ACQB) + ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INIT_RELB) + ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATMC_FUNC__(set_rb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(init_wb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if 
defined(ETHR_HAVE_NATMC_INIT_WB) + ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_MB) + ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val); +#else + ETHR_ATMC_FUNC__(set_wb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(init_acqb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_INIT_ACQB) + ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_RB) + ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INIT) + ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INIT_MB) + ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_WB) + ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INIT_RELB) + ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATMC_FUNC__(set_acqb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(init_relb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_INIT_RELB) + ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_MB) + ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val); +#else + ETHR_ATMC_FUNC__(set_relb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(init_mb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_INIT_MB) + ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_RELB) + ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val); + 
ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATMC_FUNC__(set_mb)(var, val); +#endif +} + + +/* --- add_read() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_ADD_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#else +#error "Missing implementation of ethr_atomic_add_read()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_rb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_ADD_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_atomic_add_read_rb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_wb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_ADD_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); +#else +#error "Missing implementation of ethr_atomic_add_read_wb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_acqb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic_add_read_acqb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_relb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); +#error "Missing implementation of ethr_atomic_add_read_relb()!" 
#endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read)(ethr_atomic_t *var) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_mb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_ADD_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else +#error "Missing implementation of ethr_atomic_add_read_mb()!" 
+#endif + return res; +} + + +/* --- read() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read)(ethr_atomic_t *var) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#if defined(ETHR_HAVE_NATMC_READ) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var); +#elif defined(ETHR_HAVE_NATMC_READ_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var); +#else + res = ETHR_ATMC_FUNC__(cmpxchg)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__); +#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_rb)(ethr_atomic_t *var) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_READ_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC_READ) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_READ_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_READ_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_READ_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + res = ETHR_ATMC_FUNC__(cmpxchg_rb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__); #endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read)(ethr_atomic_t *var) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_wb)(ethr_atomic_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_READ_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var); +#elif defined(ETHR_HAVE_NATMC_READ) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var); +#elif defined(ETHR_HAVE_NATMC_READ_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var); #else + res = ETHR_ATMC_FUNC__(cmpxchg_wb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__); +#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_acqb)(ethr_atomic_t *var) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#if defined(ETHR_HAVE_NATMC_READ_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC_READ) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif 
defined(ETHR_HAVE_NATMC_READ_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC_READ_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#else + res = ETHR_ATMC_FUNC__(cmpxchg_acqb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__); +#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_relb)(ethr_atomic_t *var) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_READ_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var); +#elif defined(ETHR_HAVE_NATMC_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var); +#elif defined(ETHR_HAVE_NATMC_READ_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var); +#else + res = ETHR_ATMC_FUNC__(cmpxchg_relb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__); #endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_band)(ethr_atomic_t *var, - ethr_sint_t mask) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_mb)(ethr_atomic_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, - (ETHR_NAINT_T__) mask); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_READ_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); #else + res = ETHR_ATMC_FUNC__(cmpxchg_mb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__); +#endif + return res; +} + + +/* --- inc_read() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read)(ethr_atomic_t *var) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= mask); +#if defined(ETHR_HAVE_NATMC_INC_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB) + res = (ethr_sint_t) 
ETHR_NATMC_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var); +#else + res = ETHR_ATMC_FUNC__(add_read)(var, (ethr_sint_t) 1); +#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_rb)(ethr_atomic_t *var) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_INC_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) 1); #endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_bor)(ethr_atomic_t *var, - ethr_sint_t mask) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_wb)(ethr_atomic_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, - (ETHR_NAINT_T__) mask); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_INC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var); #else + res = ETHR_ATMC_FUNC__(add_read_wb)(var, (ethr_sint_t) 1); +#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_acqb)(ethr_atomic_t *var) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + 
res = ETHR_ATMC_FUNC__(add_read_acqb)(var, (ethr_sint_t) 1); +#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_relb)(ethr_atomic_t *var) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= mask); +#if defined(ETHR_HAVE_NATMC_INC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var); +#else + res = ETHR_ATMC_FUNC__(add_read_relb)(var, (ethr_sint_t) 1); +#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_mb)(ethr_atomic_t *var) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_INC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + res = ETHR_ATMC_FUNC__(add_read_mb)(var, (ethr_sint_t) 1); #endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_xchg)(ethr_atomic_t *var, ethr_sint_t new) + +/* --- dec_read() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read)(ethr_atomic_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, - (ETHR_NAINT_T__) new); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_DEC_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var); #else + res = ETHR_ATMC_FUNC__(add_read)(var, (ethr_sint_t) -1); +#endif + 
return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_rb)(ethr_atomic_t *var) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = new); +#if defined(ETHR_HAVE_NATMC_DEC_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) -1); +#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_wb)(ethr_atomic_t *var) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_DEC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var); +#else + res = ETHR_ATMC_FUNC__(add_read_wb)(var, (ethr_sint_t) -1); #endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(ethr_atomic_t *var, - ethr_sint_t new, - ethr_sint_t exp) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_acqb)(ethr_atomic_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, - (ETHR_NAINT_T__) new, - (ETHR_NAINT_T__) exp); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else + res = ETHR_ATMC_FUNC__(add_read_acqb)(var, (ethr_sint_t) -1); +#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_relb)(ethr_atomic_t *var) +{ ethr_sint_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, - 
{ - res = *var; - if (__builtin_expect(res == exp, 1)) - *var = new; - }); +#if defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var); +#else + res = ETHR_ATMC_FUNC__(add_read_relb)(var, (ethr_sint_t) -1); +#endif return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_mb)(ethr_atomic_t *var) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_DEC_RETURN_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + res = ETHR_ATMC_FUNC__(add_read_mb)(var, (ethr_sint_t) -1); #endif + return res; } -/* - * Important memory barrier requirements. 
- * - * The following atomic operations *must* supply a memory barrier of - * at least the type specified by its suffix: - * _acqb = acquire barrier - * _relb = release barrier - */ -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_acqb)(ethr_atomic_t *var) +/* --- add() --- */ + + +static ETHR_INLINE void ETHR_ATMC_FUNC__(add)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var); +#if defined(ETHR_HAVE_NATMC_ADD) + ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RB) + ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_WB) + ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_ACQB) + ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RELB) + ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_MB) + ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic_read)(var); + (void) ETHR_ATMC_FUNC__(add_read)(var, val); #endif } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read_acqb)(ethr_atomic_t *var) +static ETHR_INLINE void ETHR_ATMC_FUNC__(add_rb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var); +#if defined(ETHR_HAVE_NATMC_ADD_RB) + ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD) + ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_ADD_MB) + ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_WB) + ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_ADD_ACQB) + ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_ADD_RELB) + ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read)(var); + (void) ETHR_ATMC_FUNC__(add_read_rb)(var, val); #endif } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic_set_relb)(ethr_atomic_t *var, - ethr_sint_t val) +static ETHR_INLINE void ETHR_ATMC_FUNC__(add_wb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val); +#if defined(ETHR_HAVE_NATMC_ADD_WB) + ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_MB) + ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val); #else - ETHR_INLINE_FUNC_NAME_(ethr_atomic_set)(var, val); + (void) ETHR_ATMC_FUNC__(add_read_wb)(var, val); #endif } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_relb)(ethr_atomic_t *var) +static ETHR_INLINE void ETHR_ATMC_FUNC__(add_acqb)(ethr_atomic_t *var, 
ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS +#if defined(ETHR_HAVE_NATMC_ADD_ACQB) + ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RB) + ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD) + ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_MB) + ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_WB) + ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RELB) + ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC_FUNC__(add_read_acqb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(add_relb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_ADD_RELB) + ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_MB) + ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val); +#else + (void) ETHR_ATMC_FUNC__(add_read_relb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(add_mb)(ethr_atomic_t *var, ethr_sint_t val) +{ +#if defined(ETHR_HAVE_NATMC_ADD_MB) + ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_RELB) + ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_ADD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_ADD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC_FUNC__(add_read_mb)(var, val); +#endif +} + + +/* --- inc() --- */ + + +static ETHR_INLINE void ETHR_ATMC_FUNC__(inc)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_INC) + ETHR_NATMC_FUNC__(inc)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RB) + ETHR_NATMC_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_WB) + ETHR_NATMC_FUNC__(inc_wb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_ACQB) + ETHR_NATMC_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RELB) + 
ETHR_NATMC_FUNC__(inc_relb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_MB) + ETHR_NATMC_FUNC__(inc_mb)(var); +#else + (void) ETHR_ATMC_FUNC__(inc_read)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_rb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_INC_RB) + ETHR_NATMC_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC) + ETHR_NATMC_FUNC__(inc)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INC_MB) + ETHR_NATMC_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_WB) + ETHR_NATMC_FUNC__(inc_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INC_ACQB) + ETHR_NATMC_FUNC__(inc_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_INC_RELB) + ETHR_NATMC_FUNC__(inc_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + (void) ETHR_ATMC_FUNC__(inc_read_rb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_wb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_INC_WB) + ETHR_NATMC_FUNC__(inc_wb)(var); +#elif defined(ETHR_HAVE_NATMC_INC) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(inc)(var); +#elif defined(ETHR_HAVE_NATMC_INC_MB) + ETHR_NATMC_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(inc_relb)(var); +#else + (void) ETHR_ATMC_FUNC__(inc_read_wb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_acqb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_INC_ACQB) + ETHR_NATMC_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RB) + ETHR_NATMC_FUNC__(inc_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC) + ETHR_NATMC_FUNC__(inc)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_MB) + ETHR_NATMC_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_WB) + ETHR_NATMC_FUNC__(inc_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RELB) + ETHR_NATMC_FUNC__(inc_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC_FUNC__(inc_read_acqb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_relb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_INC_RELB) + ETHR_NATMC_FUNC__(inc_relb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc_wb)(var); +#elif defined(ETHR_HAVE_NATMC_INC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc)(var); +#elif defined(ETHR_HAVE_NATMC_INC_MB) + ETHR_NATMC_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc_acqb)(var); +#else + (void) ETHR_ATMC_FUNC__(inc_read_relb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_mb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_INC_MB) + ETHR_NATMC_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_RELB) + ETHR_NATMC_FUNC__(inc_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_ACQB) + 
ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_INC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_INC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(inc)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC_FUNC__(inc_read_mb)(var); +#endif +} + + +/* --- dec() --- */ + + +static ETHR_INLINE void ETHR_ATMC_FUNC__(dec)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_DEC) + ETHR_NATMC_FUNC__(dec)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RB) + ETHR_NATMC_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_WB) + ETHR_NATMC_FUNC__(dec_wb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_ACQB) + ETHR_NATMC_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RELB) + ETHR_NATMC_FUNC__(dec_relb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_MB) + ETHR_NATMC_FUNC__(dec_mb)(var); +#else + (void) ETHR_ATMC_FUNC__(dec_read)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_rb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_DEC_RB) + ETHR_NATMC_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC) + ETHR_NATMC_FUNC__(dec)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_DEC_MB) + ETHR_NATMC_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_WB) + ETHR_NATMC_FUNC__(dec_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_DEC_ACQB) + ETHR_NATMC_FUNC__(dec_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_DEC_RELB) + ETHR_NATMC_FUNC__(dec_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + (void) ETHR_ATMC_FUNC__(dec_read_rb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_wb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_DEC_WB) + ETHR_NATMC_FUNC__(dec_wb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(dec)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_MB) + ETHR_NATMC_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_FUNC__(dec_relb)(var); +#else + (void) ETHR_ATMC_FUNC__(dec_read_wb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_acqb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_DEC_ACQB) + ETHR_NATMC_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RB) + ETHR_NATMC_FUNC__(dec_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC) + ETHR_NATMC_FUNC__(dec)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_MB) + ETHR_NATMC_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_WB) + ETHR_NATMC_FUNC__(dec_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RELB) + ETHR_NATMC_FUNC__(dec_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) 
ETHR_ATMC_FUNC__(dec_read_acqb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_relb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_DEC_RELB) + ETHR_NATMC_FUNC__(dec_relb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec_wb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_MB) + ETHR_NATMC_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec_acqb)(var); +#else + (void) ETHR_ATMC_FUNC__(dec_read_relb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_mb)(ethr_atomic_t *var) +{ +#if defined(ETHR_HAVE_NATMC_DEC_MB) + ETHR_NATMC_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_RELB) ETHR_NATMC_FUNC__(dec_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC_DEC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_DEC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_FUNC__(dec)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC_FUNC__(dec_read_mb)(var); +#endif +} + + +/* --- read_band() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_AND_RETOLD) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + 
ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#else +#error "Missing implementation of ethr_atomic_read_band()!" +#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_rb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_AND_RETOLD_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_atomic_read_band_rb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_wb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_AND_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); #else - ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec)(var); +#error "Missing implementation of ethr_atomic_read_band_wb()!" 
#endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read_relb)(ethr_atomic_t *var) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_acqb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read)(var); +#error "Missing implementation of ethr_atomic_read_band_acqb()!" 
#endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg_acqb)(ethr_atomic_t *var, - ethr_sint_t new, - ethr_sint_t exp) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_relb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, - (ETHR_NAINT_T__) new, - (ETHR_NAINT_T__) exp); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(var, new, exp); +#error "Missing implementation of ethr_atomic_read_band_relb()!" 
#endif + return res; } -static ETHR_INLINE ethr_sint_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg_relb)(ethr_atomic_t *var, - ethr_sint_t new, - ethr_sint_t exp) +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_mb)(ethr_atomic_t *var, ethr_sint_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, - (ETHR_NAINT_T__) new, - (ETHR_NAINT_T__) exp); + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_AND_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_AND_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(var, new, exp); +#error "Missing implementation of ethr_atomic_read_band_mb()!" 
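/*
 * Rough standalone sketch (C11 atomics, hypothetical names; not taken from
 * the patch itself) of what the ETHR_NATMC_CMPXCHG_FALLBACK__ uses above
 * presumably amount to: when no native fetch-and-and/or operation exists,
 * the old value is read and the modification is retried with a
 * compare-and-exchange until it succeeds.
 */
#include <stdatomic.h>

static long sketch_read_band(_Atomic long *var, long mask)
{
    long old = atomic_load_explicit(var, memory_order_relaxed);
    /* retry until the masked value is installed atomically; a failed
       compare-and-exchange refreshes 'old' with the current value */
    while (!atomic_compare_exchange_weak_explicit(var, &old, old & mask,
                                                  memory_order_relaxed,
                                                  memory_order_relaxed))
        ;
    return old; /* previous value, matching what read_band() returns */
}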
#endif + return res; } + +/* --- read_bor() --- */ + + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_OR_RETOLD) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#else +#error "Missing implementation of ethr_atomic_read_bor()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_rb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_OR_RETOLD_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_atomic_read_bor_rb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_wb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_OR_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); +#else +#error "Missing implementation of ethr_atomic_read_bor_wb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_acqb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic_read_bor_acqb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_relb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#else +#error "Missing implementation of ethr_atomic_read_bor_relb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_mb)(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; +#if defined(ETHR_HAVE_NATMC_OR_RETOLD_MB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB) + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_OR_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic_read_bor_mb()!" 
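/*
 * Illustrative sketch (not part of the patch): each ethr_atomic_read_bor_<b>()
 * frontend above picks the best native fetch-or it can find and patches up any
 * missing ordering with an explicit membar. Assuming only a plain or_retold()
 * native op exists, the write-barrier flavour effectively reduces to:
 *
 *     ETHR_MEMBAR(ETHR_StoreStore);                    order earlier stores first
 *     old = ETHR_NATMC_FUNC__(or_retold)(var, mask);   plain atomic "or, return old"
 *
 * while the acquire flavour issues its membar after the native op instead, and
 * the full-barrier flavour brackets the op on both sides. The names old/mask
 * are illustrative only.
 */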
+#endif + return res; +} + +#endif /* ETHR_ATMC_INLINE__ */ + + +/* ---------- 32-bit atomic implementation ---------- */ + + +#ifdef ETHR_NEED_ATMC32_PROTOTYPES__ +ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val); +ethr_sint32_t ethr_atomic32_cmpxchg_rb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val); +ethr_sint32_t ethr_atomic32_cmpxchg_wb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val); +ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val); +ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val); +ethr_sint32_t ethr_atomic32_cmpxchg_mb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val); +ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_xchg_rb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_xchg_wb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_xchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_xchg_relb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_xchg_mb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_set_rb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_set_wb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_set_acqb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_set_mb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_init_rb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_init_wb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_init_acqb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_init_relb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_init_mb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_add_read_rb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_add_read_wb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_add_read_acqb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_add_read_relb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_add_read_mb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_read_rb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_read_wb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_read_relb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_read_mb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_inc_read_rb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_inc_read_wb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_inc_read_relb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_inc_read_mb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_dec_read_rb(ethr_atomic32_t *var); +ethr_sint32_t 
ethr_atomic32_dec_read_wb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_dec_read_acqb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_dec_read_mb(ethr_atomic32_t *var); +void ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_add_rb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_add_wb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_add_acqb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_add_relb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_add_mb(ethr_atomic32_t *var, ethr_sint32_t val); +void ethr_atomic32_inc(ethr_atomic32_t *var); +void ethr_atomic32_inc_rb(ethr_atomic32_t *var); +void ethr_atomic32_inc_wb(ethr_atomic32_t *var); +void ethr_atomic32_inc_acqb(ethr_atomic32_t *var); +void ethr_atomic32_inc_relb(ethr_atomic32_t *var); +void ethr_atomic32_inc_mb(ethr_atomic32_t *var); +void ethr_atomic32_dec(ethr_atomic32_t *var); +void ethr_atomic32_dec_rb(ethr_atomic32_t *var); +void ethr_atomic32_dec_wb(ethr_atomic32_t *var); +void ethr_atomic32_dec_acqb(ethr_atomic32_t *var); +void ethr_atomic32_dec_relb(ethr_atomic32_t *var); +void ethr_atomic32_dec_mb(ethr_atomic32_t *var); +ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_band_rb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_band_wb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_band_acqb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_band_relb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_band_mb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_bor_rb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_bor_wb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_bor_acqb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_bor_relb(ethr_atomic32_t *var, ethr_sint32_t val); +ethr_sint32_t ethr_atomic32_read_bor_mb(ethr_atomic32_t *var, ethr_sint32_t val); +#endif /* ETHR_NEED_ATMC32_PROTOTYPES__ */ + +#if (defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) \ + && (defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__))) + +#if !defined(ETHR_NATMC32_BITS__) +# error "Missing native atomic implementation" +#elif ETHR_NATMC32_BITS__ == 64 +# undef ETHR_HAVE_NATMC32_CMPXCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG +# define ETHR_HAVE_NATMC32_CMPXCHG 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB +# define ETHR_HAVE_NATMC32_CMPXCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB +# define ETHR_HAVE_NATMC32_CMPXCHG_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB +# define ETHR_HAVE_NATMC32_CMPXCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB +# define ETHR_HAVE_NATMC32_CMPXCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB +# define ETHR_HAVE_NATMC32_CMPXCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG +# define ETHR_HAVE_NATMC32_XCHG 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_RB +# ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB +# define ETHR_HAVE_NATMC32_XCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB +# define ETHR_HAVE_NATMC32_XCHG_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB +# define ETHR_HAVE_NATMC32_XCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB +# define ETHR_HAVE_NATMC32_XCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB +# define ETHR_HAVE_NATMC32_XCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET +# define ETHR_HAVE_NATMC32_SET 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB +# define ETHR_HAVE_NATMC32_SET_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB +# define ETHR_HAVE_NATMC32_SET_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB +# define ETHR_HAVE_NATMC32_SET_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB +# define ETHR_HAVE_NATMC32_SET_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB +# define ETHR_HAVE_NATMC32_SET_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT +# define ETHR_HAVE_NATMC32_INIT 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB +# define ETHR_HAVE_NATMC32_INIT_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB +# define ETHR_HAVE_NATMC32_INIT_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB +# define ETHR_HAVE_NATMC32_INIT_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB +# define ETHR_HAVE_NATMC32_INIT_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB +# define ETHR_HAVE_NATMC32_INIT_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN +# define ETHR_HAVE_NATMC32_ADD_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB +# define ETHR_HAVE_NATMC32_ADD_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB +# define ETHR_HAVE_NATMC32_ADD_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB +# define ETHR_HAVE_NATMC32_ADD_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB +# define ETHR_HAVE_NATMC32_ADD_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB +# define ETHR_HAVE_NATMC32_ADD_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ +# define ETHR_HAVE_NATMC32_READ 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB +# define ETHR_HAVE_NATMC32_READ_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB +# define ETHR_HAVE_NATMC32_READ_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB +# define 
ETHR_HAVE_NATMC32_READ_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB +# define ETHR_HAVE_NATMC32_READ_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB +# define ETHR_HAVE_NATMC32_READ_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN +# define ETHR_HAVE_NATMC32_INC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB +# define ETHR_HAVE_NATMC32_INC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB +# define ETHR_HAVE_NATMC32_INC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB +# define ETHR_HAVE_NATMC32_INC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB +# define ETHR_HAVE_NATMC32_INC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB +# define ETHR_HAVE_NATMC32_INC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN +# define ETHR_HAVE_NATMC32_DEC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB +# define ETHR_HAVE_NATMC32_DEC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB +# define ETHR_HAVE_NATMC32_DEC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB +# define ETHR_HAVE_NATMC32_DEC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB +# define ETHR_HAVE_NATMC32_DEC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB +# define ETHR_HAVE_NATMC32_DEC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD +# define ETHR_HAVE_NATMC32_ADD 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB +# define ETHR_HAVE_NATMC32_ADD_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB +# define ETHR_HAVE_NATMC32_ADD_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB +# define ETHR_HAVE_NATMC32_ADD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB +# define ETHR_HAVE_NATMC32_ADD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB +# define ETHR_HAVE_NATMC32_ADD_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC +# define ETHR_HAVE_NATMC32_INC 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB +# define ETHR_HAVE_NATMC32_INC_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB +# define ETHR_HAVE_NATMC32_INC_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB +# define ETHR_HAVE_NATMC32_INC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB +# define ETHR_HAVE_NATMC32_INC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_MB +# ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB +# define ETHR_HAVE_NATMC32_INC_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC +# define ETHR_HAVE_NATMC32_DEC 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB +# define ETHR_HAVE_NATMC32_DEC_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB +# define ETHR_HAVE_NATMC32_DEC_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB +# define ETHR_HAVE_NATMC32_DEC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB +# define ETHR_HAVE_NATMC32_DEC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB +# define ETHR_HAVE_NATMC32_DEC_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD +# define ETHR_HAVE_NATMC32_AND_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB +# define ETHR_HAVE_NATMC32_AND_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB +# define ETHR_HAVE_NATMC32_AND_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB +# define ETHR_HAVE_NATMC32_AND_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB +# define ETHR_HAVE_NATMC32_AND_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB +# define ETHR_HAVE_NATMC32_AND_RETOLD_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD +# define ETHR_HAVE_NATMC32_OR_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB +# define ETHR_HAVE_NATMC32_OR_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB +# define ETHR_HAVE_NATMC32_OR_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB +# define ETHR_HAVE_NATMC32_OR_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB +# define ETHR_HAVE_NATMC32_OR_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB +# define ETHR_HAVE_NATMC32_OR_RETOLD_MB 1 +# endif +#elif ETHR_NATMC32_BITS__ == 32 +# undef ETHR_HAVE_NATMC32_CMPXCHG +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG +# define ETHR_HAVE_NATMC32_CMPXCHG 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB +# define ETHR_HAVE_NATMC32_CMPXCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB +# define ETHR_HAVE_NATMC32_CMPXCHG_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB +# define ETHR_HAVE_NATMC32_CMPXCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB +# define ETHR_HAVE_NATMC32_CMPXCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_CMPXCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB +# define ETHR_HAVE_NATMC32_CMPXCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG +# ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG +# define ETHR_HAVE_NATMC32_XCHG 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB +# define ETHR_HAVE_NATMC32_XCHG_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB +# define ETHR_HAVE_NATMC32_XCHG_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB +# define ETHR_HAVE_NATMC32_XCHG_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB +# define ETHR_HAVE_NATMC32_XCHG_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_XCHG_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB +# define ETHR_HAVE_NATMC32_XCHG_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET +# define ETHR_HAVE_NATMC32_SET 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB +# define ETHR_HAVE_NATMC32_SET_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB +# define ETHR_HAVE_NATMC32_SET_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB +# define ETHR_HAVE_NATMC32_SET_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB +# define ETHR_HAVE_NATMC32_SET_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_SET_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB +# define ETHR_HAVE_NATMC32_SET_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT +# define ETHR_HAVE_NATMC32_INIT 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB +# define ETHR_HAVE_NATMC32_INIT_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB +# define ETHR_HAVE_NATMC32_INIT_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB +# define ETHR_HAVE_NATMC32_INIT_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB +# define ETHR_HAVE_NATMC32_INIT_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_INIT_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB +# define ETHR_HAVE_NATMC32_INIT_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN +# define ETHR_HAVE_NATMC32_ADD_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB +# define ETHR_HAVE_NATMC32_ADD_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB +# define ETHR_HAVE_NATMC32_ADD_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB +# define ETHR_HAVE_NATMC32_ADD_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB +# define ETHR_HAVE_NATMC32_ADD_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB +# define ETHR_HAVE_NATMC32_ADD_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ +# define ETHR_HAVE_NATMC32_READ 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB +# define ETHR_HAVE_NATMC32_READ_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB +# define ETHR_HAVE_NATMC32_READ_WB 1 
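/*
 * Rough idea of this #undef/#ifdef/#define ladder (a sketch, not the literal
 * definitions): it normalises capability names so that the generic 32-bit
 * frontends below only ever test ETHR_HAVE_NATMC32_<OP>, regardless of whether
 * the platform exposes a 32-bit or a 64-bit native atomic. Each available
 * ETHR_HAVE_ETHR_NATIVE_ATOMIC64_<OP> or ..._ATOMIC32_<OP> macro is re-exported
 * under the uniform NATMC32 name. Presumably the companion macros map the
 * function names and the native integer type in the same spirit, e.g.
 * something along the lines of:
 *
 *     #define ETHR_NATMC32_FUNC__(X)  ethr_native_atomic64_ ## X
 *     #define ETHR_NAINT32_T__        ethr_sint64_t
 *
 * with 32-bit values widened and narrowed by the casts visible in the
 * frontend functions further down. (The exact mapping lives elsewhere in the
 * header and is assumed here.)
 */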
+# endif +# undef ETHR_HAVE_NATMC32_READ_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB +# define ETHR_HAVE_NATMC32_READ_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB +# define ETHR_HAVE_NATMC32_READ_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_READ_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB +# define ETHR_HAVE_NATMC32_READ_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN +# define ETHR_HAVE_NATMC32_INC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB +# define ETHR_HAVE_NATMC32_INC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB +# define ETHR_HAVE_NATMC32_INC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB +# define ETHR_HAVE_NATMC32_INC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB +# define ETHR_HAVE_NATMC32_INC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB +# define ETHR_HAVE_NATMC32_INC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN +# define ETHR_HAVE_NATMC32_DEC_RETURN 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB +# define ETHR_HAVE_NATMC32_DEC_RETURN_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB +# define ETHR_HAVE_NATMC32_DEC_RETURN_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB +# define ETHR_HAVE_NATMC32_DEC_RETURN_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB +# define ETHR_HAVE_NATMC32_DEC_RETURN_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RETURN_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB +# define ETHR_HAVE_NATMC32_DEC_RETURN_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD +# define ETHR_HAVE_NATMC32_ADD 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB +# define ETHR_HAVE_NATMC32_ADD_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB +# define ETHR_HAVE_NATMC32_ADD_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB +# define ETHR_HAVE_NATMC32_ADD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB +# define ETHR_HAVE_NATMC32_ADD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_ADD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB +# define ETHR_HAVE_NATMC32_ADD_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC +# define ETHR_HAVE_NATMC32_INC 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB +# define ETHR_HAVE_NATMC32_INC_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB +# define ETHR_HAVE_NATMC32_INC_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB +# define ETHR_HAVE_NATMC32_INC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB +# 
define ETHR_HAVE_NATMC32_INC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_INC_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB +# define ETHR_HAVE_NATMC32_INC_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC +# define ETHR_HAVE_NATMC32_DEC 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB +# define ETHR_HAVE_NATMC32_DEC_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB +# define ETHR_HAVE_NATMC32_DEC_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB +# define ETHR_HAVE_NATMC32_DEC_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB +# define ETHR_HAVE_NATMC32_DEC_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_DEC_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB +# define ETHR_HAVE_NATMC32_DEC_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD +# define ETHR_HAVE_NATMC32_AND_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB +# define ETHR_HAVE_NATMC32_AND_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB +# define ETHR_HAVE_NATMC32_AND_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB +# define ETHR_HAVE_NATMC32_AND_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB +# define ETHR_HAVE_NATMC32_AND_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_AND_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB +# define ETHR_HAVE_NATMC32_AND_RETOLD_MB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD +# define ETHR_HAVE_NATMC32_OR_RETOLD 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_RB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB +# define ETHR_HAVE_NATMC32_OR_RETOLD_RB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_WB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB +# define ETHR_HAVE_NATMC32_OR_RETOLD_WB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_ACQB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB +# define ETHR_HAVE_NATMC32_OR_RETOLD_ACQB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_RELB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB +# define ETHR_HAVE_NATMC32_OR_RETOLD_RELB 1 +# endif +# undef ETHR_HAVE_NATMC32_OR_RETOLD_MB +# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB +# define ETHR_HAVE_NATMC32_OR_RETOLD_MB 1 +# endif +#else +# error "Invalid native atomic size" +#endif + +#if (!defined(ETHR_HAVE_NATMC32_CMPXCHG) \ + && !defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) \ + && !defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) \ + && !defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) \ + && !defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) \ + && !defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)) +# error "No native cmpxchg() op available" +#endif + + /* - * --- 32-bit atomics --------------------------------------------------------- + * Read op used together with cmpxchg() fallback when no native op present. 
*/ +#if defined(ETHR_HAVE_NATMC32_READ) +#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC32_FUNC__(read)(VAR) +#elif defined(ETHR_HAVE_NATMC32_READ_RB) +#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC32_FUNC__(read_rb)(VAR) +#elif defined(ETHR_HAVE_NATMC32_READ_WB) +#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC32_FUNC__(read_wb)(VAR) +#elif defined(ETHR_HAVE_NATMC32_READ_ACQB) +#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC32_FUNC__(read_acqb)(VAR) +#elif defined(ETHR_HAVE_NATMC32_READ_RELB) +#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC32_FUNC__(read_relb)(VAR) +#elif defined(ETHR_HAVE_NATMC32_READ_MB) +#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \ + ETHR_NATMC32_FUNC__(read_mb)(VAR) +#else +/* + * We have no native read() op; guess zero and then use the + * the atomics actual value returned from cmpxchg(). + */ +#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \ + ((ETHR_NAINT32_T__) 0) +#endif + +/* + * Native cmpxchg() fallback used when no native op present. + */ +#define ETHR_NATMC32_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \ +do { \ + ethr_sint32_t AVAL; \ + ETHR_NAINT32_T__ new__, act__, exp__; \ + act__ = ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR); \ + do { \ + exp__ = act__; \ + AVAL = (ethr_sint32_t) act__; \ + { OPS; } \ + new__ = (ETHR_NAINT32_T__) AVAL; \ + act__ = CMPXCHG(VAR, new__, exp__); \ + } while (__builtin_expect(act__ != exp__, 0)); \ +} while (0) -static ETHR_INLINE ethr_sint32_t * -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_addr)(ethr_atomic32_t *var) + + +/* --- addr() --- */ + +static ETHR_INLINE ethr_sint32_t *ETHR_ATMC32_FUNC__(addr)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return ethr_native_atomic32_addr(var); + return (ethr_sint32_t *) ETHR_NATMC32_ADDR_FUNC__(var); + +} + + +/* --- cmpxchg() --- */ + + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_CMPXCHG) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); #else - return (ethr_sint32_t *) var; +#error "Missing implementation of ethr_atomic32_cmpxchg()!" 
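/*
 * The ETHR_NATMC32_CMPXCHG_FALLBACK__() macro defined above is the workhorse
 * for every read-modify-write op that has no native implementation: it seeds
 * act__ with a native read() when one exists (or with a guess of 0 otherwise,
 * in which case the first cmpxchg() attempt returns the actual value if the
 * guess was wrong), applies the caller-supplied OPS to a copy, and retries
 * until the compare-and-exchange succeeds. A fetch-or built on it behaves
 * roughly like this sketch (names illustrative; the real macro keeps state in
 * the enclosing function via its AVAL parameter):
 *
 *     act = read(var);                      seed, possibly just 0
 *     do {
 *         exp = act;
 *         new = exp | mask;                 caller's OPS: res = exp; new = exp | mask
 *         act = cmpxchg(var, new, exp);     returns the value actually observed
 *     } while (act != exp);
 *     res = exp;                            old value, as "read_bor" requires
 */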
#endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_init)(ethr_atomic32_t *var, - ethr_sint32_t i) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_rb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) i); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i); +#error "Missing implementation of ethr_atomic32_cmpxchg_rb()!" #endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set)(ethr_atomic32_t *var, ethr_sint32_t i) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_wb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) i); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i); +#error "Missing implementation of ethr_atomic32_cmpxchg_wb()!" 
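/*
 * Suffix convention used throughout these frontends (summarised for reading
 * convenience; the mapping follows directly from the membars in the code):
 *
 *     <none>   atomicity only, no ordering guarantee
 *     _rb      read barrier       ~ ETHR_MEMBAR(ETHR_LoadLoad)
 *     _wb      write barrier      ~ ETHR_MEMBAR(ETHR_StoreStore)
 *     _acqb    acquire barrier    membar issued after the native op
 *     _relb    release barrier    membar issued before the native op
 *     _mb      full barrier       membars on both sides where needed
 *
 * A native op with a stronger suffix than requested is used as-is; a weaker
 * one is patched up with the corresponding ETHR_MEMBAR() calls.
 */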
#endif + return res; } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read)(ethr_atomic32_t *var) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_acqb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else +#error "Missing implementation of ethr_atomic32_cmpxchg_acqb()!" +#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_relb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#if defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#else +#error "Missing implementation of ethr_atomic32_cmpxchg_relb()!" 
+#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_mb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_cmpxchg_mb()!" #endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_add)(ethr_atomic32_t *var, - ethr_sint32_t incr) + +/* --- xchg() --- */ + + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg)(ethr_atomic32_t *var, ethr_sint32_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) incr); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_XCHG) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); +#elif 
defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += incr); +#error "Missing implementation of ethr_atomic32_xchg()!" #endif -} - -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_add_read)(ethr_atomic32_t *var, - ethr_sint32_t i) + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_rb)(ethr_atomic32_t *var, ethr_sint32_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) - ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) i); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_XCHG_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_XCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad); #else +#error "Missing implementation of ethr_atomic32_xchg_rb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_wb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += i; res = *var); +#if defined(ETHR_HAVE_NATMC32_XCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); +#else +#error "Missing implementation of ethr_atomic32_xchg_wb()!" 
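/*
 * When no native xchg() exists, the swap is expressed through the same
 * cmpxchg() fallback seen above: the OPS argument "res = aval; aval = val"
 * records the old value and unconditionally replaces the working copy, so the
 * expansion behaves like this sketch:
 *
 *     act = read(var);
 *     do {
 *         old = act;
 *         act = cmpxchg(var, val, old);
 *     } while (act != old);
 *     res = old;
 *
 * i.e. a plain atomic exchange returning the previous contents, with the
 * requested barrier semantics added around it exactly as for the native case.
 */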
+#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_acqb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_XCHG_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_XCHG) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_XCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_xchg_acqb()!" 
#endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc)(ethr_atomic32_t *var) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_relb)(ethr_atomic32_t *var, ethr_sint32_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC32_FUNC__(inc)(var); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_XCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#error "Missing implementation of ethr_atomic32_xchg_relb()!" 
#endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec)(ethr_atomic32_t *var) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_mb)(ethr_atomic32_t *var, ethr_sint32_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC32_FUNC__(dec)(var); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_XCHG_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_XCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_XCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_XCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_xchg_mb()!" 
+#endif + return res; +} + + +/* --- set() --- */ + + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(set)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_SET) + ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_RB) + ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_WB) + ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_ACQB) + ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_RELB) + ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_MB) + ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val); +#else + (void) ETHR_ATMC32_FUNC__(xchg)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_rb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_SET_RB) + ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET) + ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_SET_MB) + ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_WB) + ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_SET_ACQB) + ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_SET_RELB) + ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + (void) ETHR_ATMC32_FUNC__(xchg_rb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_wb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_SET_WB) + ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_MB) + ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val); +#else + (void) ETHR_ATMC32_FUNC__(xchg_wb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_acqb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_SET_ACQB) + ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_RB) + ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_SET) + ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_SET_MB) + ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_WB) + ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_SET_RELB) + ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC32_FUNC__(xchg_acqb)(var, val); 
+#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_relb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_SET_RELB) + ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_MB) + ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val); +#else + (void) ETHR_ATMC32_FUNC__(xchg_relb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_mb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_SET_MB) + ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_RELB) + ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_SET_ACQB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_SET_WB) + ETHR_MEMBAR(ETHR_LoadStore); + ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_SET_RB) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_SET) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC32_FUNC__(xchg_mb)(var, val); +#endif +} + + +/* --- init() --- */ + + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(init)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_INIT) + ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_RB) + ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_WB) + ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB) + ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_RELB) + ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_MB) + ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val); #else - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); + ETHR_ATMC32_FUNC__(set)(var, val); #endif } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read)(ethr_atomic32_t *var) +static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_rb)(ethr_atomic32_t *var, ethr_sint32_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var); +#if defined(ETHR_HAVE_NATMC32_INIT_RB) + ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT) + ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INIT_MB) + ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) 
val); +#elif defined(ETHR_HAVE_NATMC32_INIT_WB) + ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB) + ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INIT_RELB) + ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); #else + ETHR_ATMC32_FUNC__(set_rb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_wb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_INIT_WB) + ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_MB) + ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val); +#else + ETHR_ATMC32_FUNC__(set_wb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_acqb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_INIT_ACQB) + ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_RB) + ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INIT) + ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INIT_MB) + ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_WB) + ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INIT_RELB) + ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATMC32_FUNC__(set_acqb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_relb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_INIT_RELB) + ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_MB) + ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val); +#else + ETHR_ATMC32_FUNC__(set_relb)(var, val); +#endif +} + +static ETHR_INLINE void 
ETHR_ATMC32_FUNC__(init_mb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_INIT_MB) + ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_RELB) + ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_INIT_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INIT_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INIT) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATMC32_FUNC__(set_mb)(var, val); +#endif +} + + +/* --- add_read() --- */ + + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read)(ethr_atomic32_t *var, ethr_sint32_t val) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#if defined(ETHR_HAVE_NATMC32_ADD_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#else +#error "Missing implementation of ethr_atomic32_add_read()!" 
+#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_rb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_atomic32_add_read_rb()!" 
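When no native xchg or add_return exists in any flavour, the chains above drop down to ETHR_NATMC32_CMPXCHG_FALLBACK__, that is, the operation is emulated with a compare-and-swap retry loop built on whatever native cmpxchg variant is present. The macro's expansion is not part of this hunk; the sketch below only illustrates the general shape, with cas32() as a hypothetical stand-in for the native cmpxchg (returning the old value, argument order (ptr, new, expected) as in the removed ethr_atomic32_cmpxchg() code further down):

/* Conceptual CAS retry loop behind an add_read() fallback; cas32() is
 * a hypothetical helper, not an ethread API. */
static ethr_sint32_t
cas_add_read_sketch(volatile ethr_sint32_t *ptr, ethr_sint32_t val)
{
    ethr_sint32_t exp, old = *ptr;        /* initial snapshot */
    do {
        exp = old;                        /* expect what we last saw */
        old = cas32(ptr, exp + val, exp); /* try to install exp + val */
    } while (old != exp);                 /* someone raced us: retry */
    return exp + val;                     /* add_read() returns the new value */
}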
#endif + return res; } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read)(ethr_atomic32_t *var) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_wb)(ethr_atomic32_t *var, ethr_sint32_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); #else +#error "Missing implementation of ethr_atomic32_add_read_wb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_acqb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_add_read_acqb()!" 
+#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_relb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#else +#error "Missing implementation of ethr_atomic32_add_read_relb()!" 
#endif + return res; } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_band)(ethr_atomic32_t *var, - ethr_sint32_t mask) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_mb)(ethr_atomic32_t *var, ethr_sint32_t val) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) - ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) mask); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else +#error "Missing implementation of ethr_atomic32_add_read_mb()!" 
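The read() family that follows has a less obvious last resort: with no native load available at any barrier strength, the value is fetched with a cmpxchg against ETHR_UNUSUAL_SINT32_VAL__, presumably chosen as a value the variable is unlikely to hold. Because cmpxchg only stores when the current value equals the expected one, and new and expected are the same constant here, the variable is never actually changed while the returned old value yields the read:

/* Reading via cmpxchg (as in the fallback branch of read() below):
 *   current != UNUSUAL -> comparison fails, nothing stored, current returned
 *   current == UNUSUAL -> UNUSUAL stored over itself, current returned
 * Either way *var is left as-is and res holds the current value. */
res = ETHR_ATMC32_FUNC__(cmpxchg)(var,
                                  (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__,
                                  (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);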
+#endif + return res; +} + + +/* --- read() --- */ + + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read)(ethr_atomic32_t *var) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= mask); +#if defined(ETHR_HAVE_NATMC32_READ) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var); +#else + res = ETHR_ATMC32_FUNC__(cmpxchg)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__); +#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_rb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_READ_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_READ_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_READ_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_READ_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + res = ETHR_ATMC32_FUNC__(cmpxchg_rb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__); #endif + return res; } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_bor)(ethr_atomic32_t *var, - ethr_sint32_t mask) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_wb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return - (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, - (ETHR_NAINT32_T__) mask); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_READ_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var); #else + res = ETHR_ATMC32_FUNC__(cmpxchg_wb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__); +#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_acqb)(ethr_atomic32_t *var) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= mask); +#if defined(ETHR_HAVE_NATMC32_READ_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var); +#elif 
defined(ETHR_HAVE_NATMC32_READ_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC32_READ) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC32_READ_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC32_READ_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#else + res = ETHR_ATMC32_FUNC__(cmpxchg_acqb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__); +#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_relb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_READ_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var); +#else + res = ETHR_ATMC32_FUNC__(cmpxchg_relb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__); #endif + return res; } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_xchg)(ethr_atomic32_t *var, - ethr_sint32_t new) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_mb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, - (ETHR_NAINT32_T__) new); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_READ_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC32_READ_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_READ_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC32_READ_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore); +#elif defined(ETHR_HAVE_NATMC32_READ) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); #else + res = ETHR_ATMC32_FUNC__(cmpxchg_mb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__); +#endif + return res; +} + + +/* --- inc_read() --- */ + + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read)(ethr_atomic32_t *var) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = new); +#if 
defined(ETHR_HAVE_NATMC32_INC_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var); +#else + res = ETHR_ATMC32_FUNC__(add_read)(var, (ethr_sint32_t) 1); +#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_rb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_INC_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + res = ETHR_ATMC32_FUNC__(add_read_rb)(var, (ethr_sint32_t) 1); #endif + return res; } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(ethr_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t exp) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_wb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, - (ETHR_NAINT32_T__) new, - (ETHR_NAINT32_T__) exp); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_INC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var); #else + res = ETHR_ATMC32_FUNC__(add_read_wb)(var, (ethr_sint32_t) 1); +#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_acqb)(ethr_atomic32_t *var) +{ ethr_sint32_t res; - ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, - { - res = *var; - if (__builtin_expect(res == exp, 1)) - *var = new; - }); +#if defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var); + 
ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + res = ETHR_ATMC32_FUNC__(add_read_acqb)(var, (ethr_sint32_t) 1); +#endif return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_relb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var); +#else + res = ETHR_ATMC32_FUNC__(add_read_relb)(var, (ethr_sint32_t) 1); #endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_mb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_INC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + res = ETHR_ATMC32_FUNC__(add_read_mb)(var, (ethr_sint32_t) 1); +#endif + return res; } -/* - * Important memory barrier requirements. 
- * - * The following atomic operations *must* supply a memory barrier of - * at least the type specified by its suffix: - * _acqb = acquire barrier - * _relb = release barrier - */ -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_acqb)(ethr_atomic32_t *var) +/* --- dec_read() --- */ + + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_DEC_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read)(var); + res = ETHR_ATMC32_FUNC__(add_read)(var, (ethr_sint32_t) -1); #endif + return res; } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read_acqb)(ethr_atomic32_t *var) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_rb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read)(var); + res = ETHR_ATMC32_FUNC__(add_read_rb)(var, (ethr_sint32_t) -1); #endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set_relb)(ethr_atomic32_t *var, - ethr_sint32_t val) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_wb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val); + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = 
(ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var); +#else + res = ETHR_ATMC32_FUNC__(add_read_wb)(var, (ethr_sint32_t) -1); +#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_acqb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else - ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set)(var, val); + res = ETHR_ATMC32_FUNC__(add_read_acqb)(var, (ethr_sint32_t) -1); #endif + return res; } -static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_relb)(ethr_atomic32_t *var) +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_relb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var); +#else + res = ETHR_ATMC32_FUNC__(add_read_relb)(var, (ethr_sint32_t) -1); +#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_mb)(ethr_atomic32_t *var) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif 
defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + res = ETHR_ATMC32_FUNC__(add_read_mb)(var, (ethr_sint32_t) -1); +#endif + return res; +} + + +/* --- add() --- */ + + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(add)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_ADD) + ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RB) + ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_WB) + ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB) + ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RELB) + ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_MB) + ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val); +#else + (void) ETHR_ATMC32_FUNC__(add_read)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_rb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_ADD_RB) + ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD) + ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_ADD_MB) + ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_WB) + ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB) + ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_ADD_RELB) + ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + (void) ETHR_ATMC32_FUNC__(add_read_rb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_wb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_ADD_WB) + ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_MB) + ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val); +#else + (void) ETHR_ATMC32_FUNC__(add_read_wb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_acqb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_ADD_ACQB) + ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RB) + ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD) + 
ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_MB) + ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_WB) + ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RELB) + ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC32_FUNC__(add_read_acqb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_relb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_ADD_RELB) + ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_MB) + ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val); +#else + (void) ETHR_ATMC32_FUNC__(add_read_relb)(var, val); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_mb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ +#if defined(ETHR_HAVE_NATMC32_ADD_MB) + ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_RELB) + ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_ADD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_ADD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC32_FUNC__(add_read_mb)(var, val); +#endif +} + + +/* --- inc() --- */ + + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_INC) + ETHR_NATMC32_FUNC__(inc)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RB) + ETHR_NATMC32_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_WB) + ETHR_NATMC32_FUNC__(inc_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_ACQB) + ETHR_NATMC32_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RELB) + ETHR_NATMC32_FUNC__(inc_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_MB) + ETHR_NATMC32_FUNC__(inc_mb)(var); +#else + (void) ETHR_ATMC32_FUNC__(inc_read)(var); +#endif +} + +static ETHR_INLINE void 
ETHR_ATMC32_FUNC__(inc_rb)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_INC_RB) + ETHR_NATMC32_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC) + ETHR_NATMC32_FUNC__(inc)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INC_MB) + ETHR_NATMC32_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_WB) + ETHR_NATMC32_FUNC__(inc_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INC_ACQB) + ETHR_NATMC32_FUNC__(inc_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_INC_RELB) + ETHR_NATMC32_FUNC__(inc_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + (void) ETHR_ATMC32_FUNC__(inc_read_rb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_wb)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_INC_WB) + ETHR_NATMC32_FUNC__(inc_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(inc)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_MB) + ETHR_NATMC32_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(inc_relb)(var); +#else + (void) ETHR_ATMC32_FUNC__(inc_read_wb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_acqb)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_INC_ACQB) + ETHR_NATMC32_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RB) + ETHR_NATMC32_FUNC__(inc_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC) + ETHR_NATMC32_FUNC__(inc)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_MB) + ETHR_NATMC32_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_WB) + ETHR_NATMC32_FUNC__(inc_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RELB) + ETHR_NATMC32_FUNC__(inc_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC32_FUNC__(inc_read_acqb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_relb)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_INC_RELB) + ETHR_NATMC32_FUNC__(inc_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(inc_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(inc)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_MB) + ETHR_NATMC32_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(inc_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(inc_acqb)(var); +#else + (void) ETHR_ATMC32_FUNC__(inc_read_relb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_mb)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_INC_MB) + ETHR_NATMC32_FUNC__(inc_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_RELB) + ETHR_NATMC32_FUNC__(inc_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + 
ETHR_NATMC32_FUNC__(inc_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_INC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(inc_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(inc_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_INC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(inc)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + (void) ETHR_ATMC32_FUNC__(inc_read_mb)(var); +#endif +} + + +/* --- dec() --- */ + + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS +#if defined(ETHR_HAVE_NATMC32_DEC) + ETHR_NATMC32_FUNC__(dec)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RB) + ETHR_NATMC32_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_WB) + ETHR_NATMC32_FUNC__(dec_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB) + ETHR_NATMC32_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RELB) + ETHR_NATMC32_FUNC__(dec_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_MB) + ETHR_NATMC32_FUNC__(dec_mb)(var); +#else + (void) ETHR_ATMC32_FUNC__(dec_read)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_rb)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_DEC_RB) + ETHR_NATMC32_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC) + ETHR_NATMC32_FUNC__(dec)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_DEC_MB) + ETHR_NATMC32_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_WB) + ETHR_NATMC32_FUNC__(dec_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB) + ETHR_NATMC32_FUNC__(dec_acqb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_DEC_RELB) + ETHR_NATMC32_FUNC__(dec_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + (void) ETHR_ATMC32_FUNC__(dec_read_rb)(var); +#endif +} + +static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_wb)(ethr_atomic32_t *var) +{ +#if defined(ETHR_HAVE_NATMC32_DEC_WB) + ETHR_NATMC32_FUNC__(dec_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(dec)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_MB) + ETHR_NATMC32_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RELB) + ETHR_MEMBAR(ETHR_StoreStore); ETHR_NATMC32_FUNC__(dec_relb)(var); #else - ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec)(var); + (void) ETHR_ATMC32_FUNC__(dec_read_wb)(var); #endif } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read_relb)(ethr_atomic32_t *var) +static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_acqb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var); +#if defined(ETHR_HAVE_NATMC32_DEC_ACQB) + ETHR_NATMC32_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RB) + ETHR_NATMC32_FUNC__(dec_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC) + ETHR_NATMC32_FUNC__(dec)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_MB) + 
ETHR_NATMC32_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_WB) + ETHR_NATMC32_FUNC__(dec_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_RELB) + ETHR_NATMC32_FUNC__(dec_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read)(var); + (void) ETHR_ATMC32_FUNC__(dec_read_acqb)(var); #endif } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg_acqb)(ethr_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t exp) +static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_relb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) - ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, - (ETHR_NAINT32_T__) new, - (ETHR_NAINT32_T__) exp); +#if defined(ETHR_HAVE_NATMC32_DEC_RELB) + ETHR_NATMC32_FUNC__(dec_relb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec_wb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_MB) + ETHR_NATMC32_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec_rb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec_acqb)(var); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(var, new, exp); + (void) ETHR_ATMC32_FUNC__(dec_read_relb)(var); #endif } -static ETHR_INLINE ethr_sint32_t -ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg_relb)(ethr_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t exp) +static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_mb)(ethr_atomic32_t *var) { -#ifdef ETHR_HAVE_NATIVE_ATOMICS - return (ethr_sint32_t) - ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, - (ETHR_NAINT32_T__) new, - (ETHR_NAINT32_T__) exp); +#if defined(ETHR_HAVE_NATMC32_DEC_MB) + ETHR_NATMC32_FUNC__(dec_mb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_RELB) + ETHR_NATMC32_FUNC__(dec_relb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec_acqb)(var); +#elif defined(ETHR_HAVE_NATMC32_DEC_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec_wb)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec_rb)(var); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_DEC) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_FUNC__(dec)(var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); #else - return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(var, new, exp); + (void) ETHR_ATMC32_FUNC__(dec_read_mb)(var); #endif } -#endif /* ETHR_TRY_INLINE_FUNCS */ +/* --- read_band() --- */ -#undef ETHR_NAINT_T__ -#undef ETHR_NATMC_FUNC__ -#undef ETHR_NATMC_ADDR_FUNC__ -#undef ETHR_NAINT32_T__ -#undef ETHR_NATMC32_FUNC__ +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_AND_RETOLD) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val); +#elif 
defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#else +#error "Missing implementation of ethr_atomic32_read_band()!" +#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_rb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= 
val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_atomic32_read_band_rb()!" +#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_wb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); +#else +#error "Missing implementation of ethr_atomic32_read_band_wb()!" 
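For orientation, the suffixes used throughout these generated wrappers follow one scheme: _rb, _wb, _acqb, _relb and _mb denote read-, write-, acquire-, release- and full-barrier variants, and ETHR_MEMBAR() takes a bitwise OR of the orderings it must enforce (ETHR_LoadLoad, ETHR_LoadStore, ETHR_StoreLoad, ETHR_StoreStore). Each wrapper picks the best-matching native op and patches up the difference with an explicit barrier. A minimal sketch of that composition, assuming only a plain (unordered) native op is available; my_add_acqb and my_native_add are illustrative names, the barrier flags are the ones used in the hunks above:

    /* Sketch: derive an acquire-ordered add from a plain native add by
     * issuing an explicit barrier after the operation, mirroring the
     * ETHR_HAVE_NATMC32_ADD branches in the #elif chains above. */
    static ETHR_INLINE void
    my_add_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
    {
        my_native_add(var, (ETHR_NAINT32_T__) val);   /* no ordering guarantees */
        ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    }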
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_acqb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_read_band_acqb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_relb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#else +#error "Missing implementation of ethr_atomic32_read_band_relb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_mb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_read_band_mb()!" 
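When no native and_retold/or_retold variant exists at all, the wrappers above fall back on ETHR_NATMC32_CMPXCHG_FALLBACK__(cmpxchg_op, var, aval, ops), handing it a native compare-and-swap plus the statements that turn the old value (aval) into the new one. The macro's definition sits earlier in the generated header and is not part of these hunks; conceptually it expands to a retry loop along these lines (a sketch under that assumption, not the real expansion):

    /* Conceptual expansion inside e.g. read_band(): 'var', 'val' and 'res'
     * are the enclosing function's parameter and locals, and
     * cmpxchg(var, new, exp) returns the value actually found in *var. */
    ethr_sint32_t aval, seen;
    seen = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);  /* assumes a native read */
    do {
        aval = seen;                              /* value we expect to find   */
        res = aval;                               /* ops: res = aval;          */
        seen = cmpxchg(var, aval & val, aval);    /* ops: aval &= val; install */
    } while (seen != aval);                       /* retry until swap succeeds */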
+#endif + return res; +} + + +/* --- read_bor() --- */ + + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_OR_RETOLD) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#else +#error "Missing implementation of ethr_atomic32_read_bor()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_rb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else +#error "Missing implementation of ethr_atomic32_read_bor_rb()!" 
+#endif + return res; +} +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_wb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); +#else +#error "Missing implementation of ethr_atomic32_read_bor_wb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_acqb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_read_bor_acqb()!" 
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_relb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#else +#error "Missing implementation of ethr_atomic32_read_bor_relb()!" 
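As the cmpxchg fallbacks make explicit (res = aval before aval is modified), read_band() and read_bor() return the value the variable held before the bitwise update. A small usage sketch of the corresponding public API; the function names match the ones referenced by the #error messages, the flag values are only an example:

    ethr_atomic32_t flags;
    ethr_sint32_t old;

    ethr_atomic32_init(&flags, 0x0F0);
    old = ethr_atomic32_read_bor(&flags, 0x00F);   /* old == 0x0F0, flags now 0x0FF */
    old = ethr_atomic32_read_band(&flags, 0x700);  /* old == 0x0FF, flags now 0x000 */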
+#endif + return res; +} + +static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_mb)(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; +#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB) + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#elif defined(ETHR_HAVE_NATMC32_CMPXCHG) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else +#error "Missing implementation of ethr_atomic32_read_bor_mb()!" #endif + return res; +} + +#endif /* ETHR_ATMC32_INLINE__ */ + +#endif /* ETHR_ATOMICS_H__ */ diff --git a/erts/include/internal/ethr_internal.h b/erts/include/internal/ethr_internal.h index e9c3daf783..c9b1db5b46 100644 --- a/erts/include/internal/ethr_internal.h +++ b/erts/include/internal/ethr_internal.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -63,5 +63,9 @@ int ethr_late_init_common__(ethr_late_init_data *lid); void ethr_run_exit_handlers__(void); void ethr_ts_event_destructor__(void *vtsep); +#if defined(ETHR_X86_RUNTIME_CONF__) +int ethr_x86_have_cpuid__(void); +void ethr_x86_cpuid__(int *eax, int *ebx, int *ecx, int *edx); +#endif #endif diff --git a/erts/include/internal/ethr_mutex.h b/erts/include/internal/ethr_mutex.h index fadaf1e2a4..a0685ea3c0 100644 --- a/erts/include/internal/ethr_mutex.h +++ b/erts/include/internal/ethr_mutex.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -54,6 +54,10 @@ # endif #endif +#ifndef ETHR_INLINE_MTX_FUNC_NAME_ +# define ETHR_INLINE_MTX_FUNC_NAME_(X) X +#endif + #if defined(ETHR_USE_OWN_RWMTX_IMPL__) || defined(ETHR_USE_OWN_MTX_IMPL__) #ifdef ETHR_DEBUG @@ -505,7 +509,7 @@ void ethr_mutex_lock_wait__(ethr_mutex *, ethr_sint32_t); void ethr_mutex_unlock_wake__(ethr_mutex *, ethr_sint32_t); static ETHR_INLINE int -ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx) { ethr_sint32_t act; int res; @@ -529,7 +533,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx) { ethr_sint32_t act; ETHR_MTX_HARD_DEBUG_FENCE_CHK(mtx); @@ -549,7 +553,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx) { ethr_sint32_t act; ETHR_COMPILER_BARRIER; @@ -574,7 +578,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx) #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_MUTEX_IMPL__) static ETHR_INLINE int -ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx) { int res; res = pthread_mutex_trylock(&mtx->pt_mtx); @@ -584,7 +588,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx) { int res = pthread_mutex_lock(&mtx->pt_mtx); if (res != 0) @@ -592,7 +596,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx) { int res = pthread_mutex_unlock(&mtx->pt_mtx); if (res != 0) @@ -615,7 +619,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx) #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_MUTEX_IMPL__) static ETHR_INLINE int -ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx) { int res = pthread_rwlock_tryrdlock(&rwmtx->pt_rwlock); if (res != 0 && res != EBUSY) @@ -624,7 +628,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx) 
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx) { int res = pthread_rwlock_rdlock(&rwmtx->pt_rwlock); if (res != 0) @@ -632,7 +636,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx) { int res = pthread_rwlock_unlock(&rwmtx->pt_rwlock); if (res != 0) @@ -640,7 +644,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx) } static ETHR_INLINE int -ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx) { int res = pthread_rwlock_trywrlock(&rwmtx->pt_rwlock); if (res != 0 && res != EBUSY) @@ -649,7 +653,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx) { int res = pthread_rwlock_wrlock(&rwmtx->pt_rwlock); if (res != 0) @@ -657,7 +661,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx) } static ETHR_INLINE void -ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwunlock)(ethr_rwmutex *rwmtx) +ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rwunlock)(ethr_rwmutex *rwmtx) { int res = pthread_rwlock_unlock(&rwmtx->pt_rwlock); if (res != 0) diff --git a/erts/include/internal/ethr_optimized_fallbacks.h b/erts/include/internal/ethr_optimized_fallbacks.h index 8e04692856..45399d18e6 100644 --- a/erts/include/internal/ethr_optimized_fallbacks.h +++ b/erts/include/internal/ethr_optimized_fallbacks.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. 
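The ethr_mutex.h hunks above route every inline lock operation through a new ETHR_INLINE_MTX_FUNC_NAME_ macro, which defaults to the identity mapping. The usual purpose of such a hook is to let the translation unit that emits out-of-line versions of these functions rename the inline bodies instead of colliding with them; a hedged sketch of that use (the __impl suffix is illustrative and not taken from the patch):

    /* In a .c file that wants callable, non-inline mutex functions: */
    #define ETHR_INLINE_MTX_FUNC_NAME_(X) X ## __impl
    #include "ethr_mutex.h"
    /* The header now defines ethr_mutex_lock__impl() etc., leaving the
     * unsuffixed names free for exported wrapper functions. */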
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -18,152 +18,172 @@ */ /* - * Description: "Optimized" fallbacks used when native ops are missing + * Description: Optimized fallbacks used when native ops are missing * Author: Rickard Green */ #ifndef ETHR_OPTIMIZED_FALLBACKS_H__ #define ETHR_OPTIMIZED_FALLBACKS_H__ -#ifdef ETHR_HAVE_NATIVE_ATOMICS -#define ETHR_HAVE_OPTIMIZED_ATOMIC_OPS 1 +#if defined(ETHR_HAVE_NATIVE_SPINLOCKS) + +#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__) + +static ETHR_INLINE int +ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock) +{ + return 0; +} + #endif -#ifdef ETHR_HAVE_NATIVE_SPINLOCKS -#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1 #elif defined(ETHR_HAVE_PTHREAD_SPIN_LOCK) -/* --- Optimized spinlocks using pthread spinlocks -------------------------- */ -#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1 +/* --- Native spinlocks using pthread spinlocks -------------------------- */ +#define ETHR_HAVE_NATIVE_SPINLOCKS 1 + +#define ETHR_NATIVE_SPINLOCK_IMPL "pthread" -typedef pthread_spinlock_t ethr_opt_spinlock_t; +typedef pthread_spinlock_t ethr_native_spinlock_t; #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__) -static ETHR_INLINE int -ethr_opt_spinlock_init(ethr_opt_spinlock_t *lock) +static ETHR_INLINE void +ethr_native_spinlock_init(ethr_native_spinlock_t *lock) { - return pthread_spin_init((pthread_spinlock_t *) lock, 0); + int err = pthread_spin_init((pthread_spinlock_t *) lock, 0); + if (err) + ETHR_FATAL_ERROR__(err); } static ETHR_INLINE int -ethr_opt_spinlock_destroy(ethr_opt_spinlock_t *lock) +ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock) { return pthread_spin_destroy((pthread_spinlock_t *) lock); } - -static ETHR_INLINE int -ethr_opt_spin_unlock(ethr_opt_spinlock_t *lock) +static ETHR_INLINE void +ethr_native_spin_unlock(ethr_native_spinlock_t *lock) { - return pthread_spin_unlock((pthread_spinlock_t *) lock); + int err = pthread_spin_unlock((pthread_spinlock_t *) lock); + if (err) + ETHR_FATAL_ERROR__(err); } -static ETHR_INLINE int -ethr_opt_spin_lock(ethr_opt_spinlock_t *lock) +static ETHR_INLINE void +ethr_native_spin_lock(ethr_native_spinlock_t *lock) { - return pthread_spin_lock((pthread_spinlock_t *) lock); + int err = pthread_spin_lock((pthread_spinlock_t *) lock); + if (err) + ETHR_FATAL_ERROR__(err); } #endif -#elif defined(ETHR_HAVE_NATIVE_ATOMICS) +#elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) /* --- Native spinlocks using native atomics -------------------------------- */ #define ETHR_HAVE_NATIVE_SPINLOCKS 1 -#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1 - -#if defined(ETHR_HAVE_NATIVE_ATOMIC32) -typedef ethr_native_atomic32_t ethr_native_spinlock_t; -# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X -#elif defined(ETHR_HAVE_NATIVE_ATOMIC64) -typedef ethr_native_atomic64_t ethr_native_spinlock_t; -# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X -#else -# error "Missing native atomic implementation" -#endif + +#define ETHR_NATIVE_SPINLOCK_IMPL "native-atomics" + +typedef ethr_atomic32_t ethr_native_spinlock_t; #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__) +#undef ETHR_NSPN_AOP__ +#define ETHR_NSPN_AOP__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X) + static ETHR_INLINE void ethr_native_spinlock_init(ethr_native_spinlock_t *lock) { - ETHR_NATMC_FUNC__(init)(lock, 0); + ETHR_NSPN_AOP__(init)(lock, 0); +} + +static ETHR_INLINE int 
+ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock) +{ + return ETHR_NSPN_AOP__(read)(lock) == 0 ? 0 : EBUSY; } static ETHR_INLINE void ethr_native_spin_unlock(ethr_native_spinlock_t *lock) { - ETHR_COMPILER_BARRIER; - ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) == 1); - ETHR_NATMC_FUNC__(set_relb)(lock, 0); + ETHR_ASSERT(ETHR_NSPN_AOP__(read)(lock) == 1); + ETHR_NSPN_AOP__(set_relb)(lock, 0); } static ETHR_INLINE void ethr_native_spin_lock(ethr_native_spinlock_t *lock) { - while (ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, 1, 0) != 0) { - while (ETHR_NATMC_FUNC__(read)(lock) != 0) + while (ETHR_NSPN_AOP__(cmpxchg_acqb)(lock, 1, 0) != 0) { + while (ETHR_NSPN_AOP__(read)(lock) != 0) ETHR_SPIN_BODY; } ETHR_COMPILER_BARRIER; } -#endif +#undef ETHR_NSPN_AOP__ -#undef ETHR_NATMC_FUNC__ +#endif #endif -#ifdef ETHR_HAVE_NATIVE_RWSPINLOCKS -#define ETHR_HAVE_OPTIMIZED_RWSPINLOCKS 1 -#elif defined(ETHR_HAVE_NATIVE_ATOMICS) +#if defined(ETHR_HAVE_NATIVE_RWSPINLOCKS) + +#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__) + +static ETHR_INLINE int +ethr_native_rwlock_destroy(ethr_native_rwlock_t *lock) +{ + return 0; +} + +#endif + +#elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) /* --- Native rwspinlocks using native atomics ------------------------------ */ #define ETHR_HAVE_NATIVE_RWSPINLOCKS 1 -#define ETHR_HAVE_OPTIMIZED_RWSPINLOCKS 1 +#define ETHR_NATIVE_RWSPINLOCK_IMPL "native-atomics" -#if defined(ETHR_HAVE_NATIVE_ATOMIC32) -typedef ethr_native_atomic32_t ethr_native_rwlock_t; -# define ETHR_NAINT_T__ ethr_sint32_t +typedef ethr_atomic32_t ethr_native_rwlock_t; # define ETHR_WLOCK_FLAG__ (((ethr_sint32_t) 1) << 30) -# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X -#elif defined(ETHR_HAVE_NATIVE_ATOMIC64) -typedef ethr_native_atomic64_t ethr_native_rwlock_t; -# define ETHR_NAINT_T__ ethr_sint64_t -# define ETHR_WLOCK_FLAG__ (((ethr_sint64_t) 1) << 62) -# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X -#else -# error "Missing native atomic implementation" -#endif #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__) +#undef ETHR_NRWSPN_AOP__ +#define ETHR_NRWSPN_AOP__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X) + static ETHR_INLINE void ethr_native_rwlock_init(ethr_native_rwlock_t *lock) { - ETHR_NATMC_FUNC__(init)(lock, 0); + ETHR_NRWSPN_AOP__(init)(lock, 0); +} + +static ETHR_INLINE int +ethr_native_rwlock_destroy(ethr_native_rwlock_t *lock) +{ + return ETHR_NRWSPN_AOP__(read)(lock) == 0 ? 
0 : EBUSY; } static ETHR_INLINE void ethr_native_read_unlock(ethr_native_rwlock_t *lock) { - ETHR_COMPILER_BARRIER; -#ifdef DEBUG - ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) >= 0); -#endif - ETHR_NATMC_FUNC__(dec_relb)(lock); + ETHR_ASSERT(ETHR_NRWSPN_AOP__(read)(lock) >= 0); + ETHR_NRWSPN_AOP__(dec_relb)(lock); } static ETHR_INLINE void ethr_native_read_lock(ethr_native_rwlock_t *lock) { - ETHR_NAINT_T__ act, exp = 0; + ethr_sint32_t act, exp = 0; while (1) { - act = ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, exp+1, exp); + act = ETHR_NRWSPN_AOP__(cmpxchg_acqb)(lock, exp+1, exp); if (act == exp) break; + /* Wait for writer to leave */ while (act & ETHR_WLOCK_FLAG__) { ETHR_SPIN_BODY; - act = ETHR_NATMC_FUNC__(read)(lock); + act = ETHR_NRWSPN_AOP__(read)(lock); } exp = act; } @@ -173,36 +193,37 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock) static ETHR_INLINE void ethr_native_write_unlock(ethr_native_rwlock_t *lock) { - ETHR_COMPILER_BARRIER; - ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) == ETHR_WLOCK_FLAG__); - ETHR_NATMC_FUNC__(set_relb)(lock, 0); + ETHR_ASSERT(ETHR_NRWSPN_AOP__(read)(lock) == ETHR_WLOCK_FLAG__); + ETHR_NRWSPN_AOP__(set_relb)(lock, 0); } static ETHR_INLINE void ethr_native_write_lock(ethr_native_rwlock_t *lock) { - ETHR_NAINT_T__ act, exp = 0; + ethr_sint32_t act, exp = 0; while (1) { - act = ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, exp|ETHR_WLOCK_FLAG__, exp); + act = ETHR_NRWSPN_AOP__(cmpxchg_acqb)(lock, exp|ETHR_WLOCK_FLAG__, exp); if (act == exp) break; - ETHR_SPIN_BODY; - exp = act & ~ETHR_WLOCK_FLAG__; + /* Wait for writer to leave */ + while (act & ETHR_WLOCK_FLAG__) { + ETHR_SPIN_BODY; + act = ETHR_NRWSPN_AOP__(read)(lock); + } + exp = act; } act |= ETHR_WLOCK_FLAG__; /* Wait for readers to leave */ while (act != ETHR_WLOCK_FLAG__) { ETHR_SPIN_BODY; - act = ETHR_NATMC_FUNC__(read_acqb)(lock); + act = ETHR_NRWSPN_AOP__(read_acqb)(lock); } ETHR_COMPILER_BARRIER; } -#endif +#undef ETHR_NRWSPN_AOP__ -#undef ETHR_NAINT_T__ -#undef ETHR_NATMC_FUNC__ -#undef ETHR_WLOCK_FLAG__ +#endif #endif diff --git a/erts/include/internal/ethread.h b/erts/include/internal/ethread.h index 4cd95faf6a..8ad0ded144 100644 --- a/erts/include/internal/ethread.h +++ b/erts/include/internal/ethread.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2004-2010. All Rights Reserved. + * Copyright Ericsson AB 2004-2011. All Rights Reserved. 
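In the ethr_optimized_fallbacks.h hunks above, the native spinlock and rwspinlock are now plain ethr_atomic32_t values driven through the ethr_atomic32_* API. For the rwspinlock the encoding is: the low bits count active readers, ETHR_WLOCK_FLAG__ (bit 30) marks a writer, the lock is free when the word reads 0 and write-locked when it reads exactly ETHR_WLOCK_FLAG__. A compact sketch of a non-spinning read-lock attempt on that encoding; ethr_native_tryread_lock is an illustrative helper, the patch itself only defines the spinning lock/unlock operations:

    /* Illustrative only: try to take a read lock once, without spinning. */
    static ETHR_INLINE int
    ethr_native_tryread_lock(ethr_native_rwlock_t *lock)
    {
        ethr_sint32_t exp, act;
        exp = ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_read)(lock);
        if (exp & ETHR_WLOCK_FLAG__)
            return EBUSY;   /* a writer holds (or is taking) the lock */
        /* succeed only if no writer appeared and the reader count is unchanged */
        act = ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_cmpxchg_acqb)(lock, exp + 1, exp);
        return act == exp ? 0 : EBUSY;
    }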
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -33,10 +33,6 @@ #include <stdlib.h> #include "erl_errno.h" -#undef ETHR_HAVE_OPTIMIZED_ATOMIC_OPS -#undef ETHR_HAVE_OPTIMIZED_SPINLOCK -#undef ETHR_HAVE_OPTIMIZED_RWSPINLOCK - #if defined(DEBUG) # define ETHR_DEBUG #endif @@ -68,7 +64,7 @@ #endif /* Assume 64-byte cache line size */ -#define ETHR_CACHE_LINE_SIZE ((ethr_uint_t) 64) +#define ETHR_CACHE_LINE_SIZE 64 #define ETHR_CACHE_LINE_MASK (ETHR_CACHE_LINE_SIZE - 1) #define ETHR_CACHE_LINE_ALIGN_SIZE(SZ) \ @@ -251,13 +247,90 @@ typedef ethr_sint64_t ethr_sint_t; typedef ethr_uint64_t ethr_uint_t; #endif -/* __builtin_expect() is needed by both native atomics code - * and the fallback code */ -#if !defined(__GNUC__) || (__GNUC__ < 2) || (__GNUC__ == 2 && __GNUC_MINOR__ < 96) +#if defined(ETHR_SIZEOF___INT128_T) && ETHR_SIZEOF___INT128_T == 16 +#define ETHR_HAVE_INT128_T +typedef __int128_t ethr_sint128_t; +typedef __uint128_t ethr_uint128_t; +#endif + +#define ETHR_FATAL_ERROR__(ERR) \ + ethr_fatal_error__(__FILE__, __LINE__, __func__, (ERR)) + +ETHR_PROTO_NORETURN__ ethr_fatal_error__(const char *file, + int line, + const char *func, + int err); + +#if !defined(__GNUC__) +# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) 0 +#elif !defined(__GNUC_MINOR__) +# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \ + ((__GNUC__ << 24) >= (((MAJ) << 24) | ((MIN) << 12) | (PL))) +#elif !defined(__GNUC_PATCHLEVEL__) +# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \ + (((__GNUC__ << 24) | (__GNUC_MINOR__ << 12)) >= (((MAJ) << 24) | ((MIN) << 12) | (PL))) +#else +# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \ + (((__GNUC__ << 24) | (__GNUC_MINOR__ << 12) | __GNUC_PATCHLEVEL__) >= (((MAJ) << 24) | ((MIN) << 12) | (PL))) +#endif + +#if !ETHR_AT_LEAST_GCC_VSN__(2, 96, 0) #define __builtin_expect(X, Y) (X) #endif -/* For CPU-optimised atomics, spinlocks, and rwlocks. */ +#if ETHR_AT_LEAST_GCC_VSN__(3, 1, 1) +# define ETHR_CHOOSE_EXPR __builtin_choose_expr +#else +# define ETHR_CHOOSE_EXPR(B, E1, E2) ((B) ? 
(E1) : (E2)) +#endif + +#if ((defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))) \ + || (defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_AMD64)))) +# define ETHR_X86_RUNTIME_CONF__ + +# define ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__ \ + (__builtin_expect(ethr_runtime__.conf.have_dw_cmpxchg != 0, 1)) +# define ETHR_X86_RUNTIME_CONF_HAVE_NO_DW_CMPXCHG__ \ + (__builtin_expect(ethr_runtime__.conf.have_dw_cmpxchg == 0, 0)) +# define ETHR_X86_RUNTIME_CONF_HAVE_SSE2__ \ + (__builtin_expect(ethr_runtime__.conf.have_sse2 != 0, 1)) +# define ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__ \ + (__builtin_expect(ethr_runtime__.conf.have_sse2 == 0, 0)) +#endif + +#if (defined(__GNUC__) \ + && !defined(ETHR_PPC_HAVE_LWSYNC) \ + && !defined(ETHR_PPC_HAVE_NO_LWSYNC) \ + && (defined(__powerpc__) || defined(__ppc__) || defined(__powerpc64__))) +# define ETHR_PPC_RUNTIME_CONF__ + +# define ETHR_PPC_RUNTIME_CONF_HAVE_LWSYNC__ \ + (__builtin_expect(ethr_runtime__.conf.have_lwsync != 0, 1)) +# define ETHR_PPC_RUNTIME_CONF_HAVE_NO_LWSYNC__ \ + (__builtin_expect(ethr_runtime__.conf.have_lwsync == 0, 0)) +#endif + +typedef struct { +#if defined(ETHR_X86_RUNTIME_CONF__) + int have_dw_cmpxchg; + int have_sse2; +#endif +#if defined(ETHR_PPC_RUNTIME_CONF__) + int have_lwsync; +#endif + int dummy; +} ethr_runtime_conf_t; + + +typedef union { + ethr_runtime_conf_t conf; + char pad__[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_runtime_conf_t))+ETHR_CACHE_LINE_SIZE]; +} ethr_runtime_t; + + +extern ethr_runtime_t ethr_runtime__; + +/* For native CPU-optimised atomics, spinlocks, and rwlocks. */ #if !defined(ETHR_DISABLE_NATIVE_IMPLS) # if defined(__GNUC__) # if defined(ETHR_PREFER_GCC_NATIVE_IMPLS) @@ -265,7 +338,7 @@ typedef ethr_uint64_t ethr_uint_t; # elif defined(ETHR_PREFER_LIBATOMIC_OPS_NATIVE_IMPLS) # include "libatomic_ops/ethread.h" # endif -# ifndef ETHR_HAVE_NATIVE_ATOMICS +# if !defined(ETHR_HAVE_NATIVE_ATOMIC32) && !defined(ETHR_HAVE_NATIVE_ATOMIC64) # if ETHR_SIZEOF_PTR == 4 # if defined(__i386__) # include "i386/ethread.h" @@ -283,8 +356,10 @@ typedef ethr_uint64_t ethr_uint_t; # include "sparc64/ethread.h" # endif # endif +#if 0 # include "gcc/ethread.h" # include "libatomic_ops/ethread.h" +#endif # endif # elif defined(ETHR_HAVE_LIBATOMIC_OPS) # include "libatomic_ops/ethread.h" @@ -293,10 +368,9 @@ typedef ethr_uint64_t ethr_uint_t; # endif #endif /* !ETHR_DISABLE_NATIVE_IMPLS */ +#include "ethr_atomics.h" /* The atomics API */ + #if defined(__GNUC__) -# ifndef ETHR_COMPILER_BARRIER -# define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory") -# endif # ifndef ETHR_SPIN_BODY # if defined(__i386__) || defined(__x86_64__) # define ETHR_SPIN_BODY __asm__ __volatile__("rep;nop" : : : "memory") @@ -309,11 +383,6 @@ typedef ethr_uint64_t ethr_uint_t; # endif # endif #elif defined(ETHR_WIN32_THREADS) -# ifndef ETHR_COMPILER_BARRIER -# include <intrin.h> -# pragma intrinsic(_ReadWriteBarrier) -# define ETHR_COMPILER_BARRIER _ReadWriteBarrier() -# endif # ifndef ETHR_SPIN_BODY # define ETHR_SPIN_BODY do {YieldProcessor();ETHR_COMPILER_BARRIER;} while(0) # endif @@ -321,43 +390,6 @@ typedef ethr_uint64_t ethr_uint_t; #define ETHR_YIELD_AFTER_BUSY_LOOPS 50 -#ifndef ETHR_HAVE_NATIVE_ATOMICS -/* - * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only, - * i.e. when our lock based atomic fallback is used, a noop is sufficient. 
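The ETHR_AT_LEAST_GCC_VSN__ macro added in the ethread.h hunk packs (major, minor, patchlevel) into one integer as (MAJ << 24) | (MIN << 12) | PL before comparing, so a whole compiler version is ordered with a single >=; the extra #elif branches only drop fields that old preprocessors do not define. A quick worked example as a comment:

    /* GCC 4.5.2:        (4 << 24) | (5 << 12) | 2 == 67108864 + 20480 + 2 == 67129346
     * threshold 3.1.1:  (3 << 24) | (1 << 12) | 1 == 50331648 +  4096 + 1 == 50335745
     * 67129346 >= 50335745, so ETHR_AT_LEAST_GCC_VSN__(3, 1, 1) is true and
     * ETHR_CHOOSE_EXPR maps to __builtin_choose_expr on that compiler. */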
- */ -#define ETHR_MEMORY_BARRIER do { } while (0) -#define ETHR_WRITE_MEMORY_BARRIER do { } while (0) -#define ETHR_READ_MEMORY_BARRIER do { } while (0) -#define ETHR_READ_DEPEND_MEMORY_BARRIER do { } while (0) -#endif - -#ifndef ETHR_WRITE_MEMORY_BARRIER -# define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMORY_BARRIER -# define ETHR_WRITE_MEMORY_BARRIER_IS_FULL -#endif -#ifndef ETHR_READ_MEMORY_BARRIER -# define ETHR_READ_MEMORY_BARRIER ETHR_MEMORY_BARRIER -# define ETHR_READ_MEMORY_BARRIER_IS_FULL -#endif -#ifndef ETHR_READ_DEPEND_MEMORY_BARRIER -# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER -# define ETHR_READ_DEPEND_MEMORY_BARRIER_IS_COMPILER_BARRIER -#endif - -#define ETHR_FATAL_ERROR__(ERR) \ - ethr_fatal_error__(__FILE__, __LINE__, __func__, (ERR)) - -ETHR_PROTO_NORETURN__ ethr_fatal_error__(const char *file, - int line, - const char *func, - int err); - -void ethr_compiler_barrier_fallback(void); -#ifndef ETHR_COMPILER_BARRIER -# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback() -#endif - #ifndef ETHR_SPIN_BODY # define ETHR_SPIN_BODY ETHR_COMPILER_BARRIER #endif @@ -460,8 +492,6 @@ void ethr_compiler_barrier(void); #if defined(ETHR_HAVE_NATIVE_SPINLOCKS) typedef ethr_native_spinlock_t ethr_spinlock_t; -#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS) -typedef ethr_opt_spinlock_t ethr_spinlock_t; #elif defined(__WIN32__) typedef CRITICAL_SECTION ethr_spinlock_t; #else @@ -483,8 +513,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spinlock_init)(ethr_spinlock_t *lock) #ifdef ETHR_HAVE_NATIVE_SPINLOCKS ethr_native_spinlock_init(lock); return 0; -#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS) - return ethr_opt_spinlock_init((ethr_opt_spinlock_t *) lock); #elif defined(__WIN32__) if (!InitializeCriticalSectionAndSpinCount((CRITICAL_SECTION *) lock, INT_MAX)) return ethr_win_get_errno__(); @@ -498,9 +526,7 @@ static ETHR_INLINE int ETHR_INLINE_FUNC_NAME_(ethr_spinlock_destroy)(ethr_spinlock_t *lock) { #ifdef ETHR_HAVE_NATIVE_SPINLOCKS - return 0; -#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS) - return ethr_opt_spinlock_destroy((ethr_opt_spinlock_t *) lock); + return ethr_native_spinlock_destroy(lock); #elif defined(__WIN32__) DeleteCriticalSection((CRITICAL_SECTION *) lock); return 0; @@ -514,10 +540,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_unlock)(ethr_spinlock_t *lock) { #ifdef ETHR_HAVE_NATIVE_SPINLOCKS ethr_native_spin_unlock(lock); -#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS) - int err = ethr_opt_spin_unlock((ethr_opt_spinlock_t *) lock); - if (err) - ETHR_FATAL_ERROR__(err); #elif defined(__WIN32__) LeaveCriticalSection((CRITICAL_SECTION *) lock); #else @@ -532,10 +554,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_lock)(ethr_spinlock_t *lock) { #ifdef ETHR_HAVE_NATIVE_SPINLOCKS ethr_native_spin_lock(lock); -#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS) - int err = ethr_opt_spin_lock((ethr_opt_spinlock_t *) lock); - if (err) - ETHR_FATAL_ERROR__(err); #elif defined(__WIN32__) EnterCriticalSection((CRITICAL_SECTION *) lock); #else @@ -547,8 +565,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_lock)(ethr_spinlock_t *lock) #endif /* ETHR_TRY_INLINE_FUNCS */ -#include "ethr_atomics.h" - typedef struct ethr_ts_event_ ethr_ts_event; /* Needed by ethr_mutex.h */ #if defined(ETHR_WIN32_THREADS) diff --git a/erts/include/internal/ethread_header_config.h.in b/erts/include/internal/ethread_header_config.h.in index f394d790d2..dd3599f86d 100644 --- a/erts/include/internal/ethread_header_config.h.in +++ b/erts/include/internal/ethread_header_config.h.in @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson 
AB 2004-2010. All Rights Reserved. + * Copyright Ericsson AB 2004-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -32,6 +32,9 @@ /* Define to the size of __int64 */ #undef ETHR_SIZEOF___INT64 +/* Define to the size of __int128_t */ +#undef ETHR_SIZEOF___INT128_T + /* Define if bigendian */ #undef ETHR_BIGENDIAN @@ -69,8 +72,44 @@ /* Define if you have a linux futex implementation. */ #undef ETHR_HAVE_LINUX_FUTEX -/* Define if you have gcc atomic operations */ -#undef ETHR_HAVE_GCC_ATOMIC_OPS +/* Define if x86/x86_64 out of order instructions should be synchronized */ +#undef ETHR_X86_OUT_OF_ORDER + +/* Define if only run in Sparc TSO mode */ +#undef ETHR_SPARC_TSO + +/* Define if only run in Sparc PSO, or TSO mode */ +#undef ETHR_SPARC_PSO + +/* Define if run in Sparc RMO, PSO, or TSO mode */ +#undef ETHR_SPARC_RMO + +/* Define if you have __sync_add_and_fetch() for 32-bit integers */ +#undef ETHR_HAVE___SYNC_ADD_AND_FETCH32 + +/* Define if you have __sync_add_and_fetch() for 64-bit integers */ +#undef ETHR_HAVE___SYNC_ADD_AND_FETCH64 + +/* Define if you have __sync_fetch_and_and() for 32-bit integers */ +#undef ETHR_HAVE___SYNC_FETCH_AND_AND32 + +/* Define if you have __sync_fetch_and_and() for 64-bit integers */ +#undef ETHR_HAVE___SYNC_FETCH_AND_AND64 + +/* Define if you have __sync_fetch_and_or() for 32-bit integers */ +#undef ETHR_HAVE___SYNC_FETCH_AND_OR32 + +/* Define if you have __sync_fetch_and_or() for 64-bit integers */ +#undef ETHR_HAVE___SYNC_FETCH_AND_OR64 + +/* Define if you have __sync_val_compare_and_swap() for 32-bit integers */ +#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32 + +/* Define if you have __sync_val_compare_and_swap() for 64-bit integers */ +#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64 + +/* Define if you have __sync_val_compare_and_swap() for 128-bit integers */ +#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128 /* Define if you prefer gcc native ethread implementations */ #undef ETHR_PREFER_GCC_NATIVE_IMPLS @@ -90,8 +129,14 @@ /* Define if sched_yield() returns an int. */ #undef ETHR_SCHED_YIELD_RET_INT -/* Define if you want compatibilty with x86 processors before pentium4. */ -#undef ETHR_PRE_PENTIUM4_COMPAT +/* Define if you use a gcc that supports -msse2 and understand sse2 specific asm statements */ +#undef ETHR_GCC_HAVE_SSE2_ASM_SUPPORT + +/* Define if you use a gcc that supports the double word cmpxchg instruction */ +#undef ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT + +/* Define if you get a register shortage with cmpxchg8b and position independent code */ +#undef ETHR_CMPXCHG8B_REGISTER_SHORTAGE /* Define if you have the pthread_rwlockattr_setkind_np() function. 
*/ #undef ETHR_HAVE_PTHREAD_RWLOCKATTR_SETKIND_NP @@ -115,23 +160,74 @@ /* Define to the size of AO_t if libatomic_ops is used */ #undef ETHR_SIZEOF_AO_T +/* Define if you have _InterlockedAnd() */ +#undef ETHR_HAVE__INTERLOCKEDAND + +/* Define if you have _InterlockedAnd64() */ +#undef ETHR_HAVE__INTERLOCKEDAND64 + +/* Define if you have _InterlockedCompareExchange() */ +#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE + /* Define if you have _InterlockedCompareExchange64() */ #undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64 +/* Define if you have _InterlockedCompareExchange64_acq() */ +#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ + +/* Define if you have _InterlockedCompareExchange64_rel() */ +#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL + +/* Define if you have _InterlockedCompareExchange_acq() */ +#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ + +/* Define if you have _InterlockedCompareExchange_rel() */ +#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL + +/* Define if you have _InterlockedDecrement() */ +#undef ETHR_HAVE__INTERLOCKEDDECREMENT + /* Define if you have _InterlockedDecrement64() */ #undef ETHR_HAVE__INTERLOCKEDDECREMENT64 -/* Define if you have _InterlockedIncrement64() */ -#undef ETHR_HAVE__INTERLOCKEDINCREMENT64 +/* Define if you have _InterlockedDecrement64_rel() */ +#undef ETHR_HAVE__INTERLOCKEDDECREMENT64_REL -/* Define if you have _InterlockedExchangeAdd64() */ -#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64 +/* Define if you have _InterlockedDecrement_rel() */ +#undef ETHR_HAVE__INTERLOCKEDDECREMENT_REL + +/* Define if you have _InterlockedExchange() */ +#undef ETHR_HAVE__INTERLOCKEDEXCHANGE /* Define if you have _InterlockedExchange64() */ #undef ETHR_HAVE__INTERLOCKEDEXCHANGE64 -/* Define if you have _InterlockedAnd64() */ -#undef ETHR_HAVE__INTERLOCKEDAND64 +/* Define if you have _InterlockedExchangeAdd() */ +#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD + +/* Define if you have _InterlockedExchangeAdd64() */ +#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64 + +/* Define if you have _InterlockedExchangeAdd64_acq() */ +#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ + +/* Define if you have _InterlockedExchangeAdd_acq() */ +#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ + +/* Define if you have _InterlockedIncrement() */ +#undef ETHR_HAVE__INTERLOCKEDINCREMENT + +/* Define if you have _InterlockedIncrement64() */ +#undef ETHR_HAVE__INTERLOCKEDINCREMENT64 + +/* Define if you have _InterlockedIncrement64_acq() */ +#undef ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ + +/* Define if you have _InterlockedIncrement_acq() */ +#undef ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ + +/* Define if you have _InterlockedOr() */ +#undef ETHR_HAVE__INTERLOCKEDOR /* Define if you have _InterlockedOr64() */ #undef ETHR_HAVE__INTERLOCKEDOR64 diff --git a/erts/include/internal/gcc/ethr_atomic.h b/erts/include/internal/gcc/ethr_atomic.h index 16935084b1..f598f8537b 100644 --- a/erts/include/internal/gcc/ethr_atomic.h +++ b/erts/include/internal/gcc/ethr_atomic.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -25,11 +25,15 @@ #undef ETHR_INCLUDE_ATOMIC_IMPL__ #if !defined(ETHR_GCC_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__) #define ETHR_GCC_ATOMIC32_H__ -#define ETHR_INCLUDE_ATOMIC_IMPL__ 4 +#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32) +# define ETHR_INCLUDE_ATOMIC_IMPL__ 4 +#endif #undef ETHR_ATOMIC_WANT_32BIT_IMPL__ #elif !defined(ETHR_GCC_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__) #define ETHR_GCC_ATOMIC64_H__ -#define ETHR_INCLUDE_ATOMIC_IMPL__ 8 +#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64) +# define ETHR_INCLUDE_ATOMIC_IMPL__ 8 +#endif #undef ETHR_ATOMIC_WANT_64BIT_IMPL__ #endif @@ -45,58 +49,38 @@ # define ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ 1 #endif -#if defined(__x86_64__) || (defined(__i386__) \ - && !defined(ETHR_PRE_PENTIUM4_COMPAT)) -# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 1 -#else -# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 0 -#endif - -/* - * According to the documentation this is what we want: - * #define ETHR_MEMORY_BARRIER __sync_synchronize() - * However, __sync_synchronize() is known to erroneously be - * a noop on at least some platforms with some gcc versions. - * This has suposedly been fixed in some gcc version, but we - * don't know from which version. Therefore, we only use - * it when it has been verified to work. Otherwise - * we use a workaround. - */ -#if defined(__mips__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 2)) -/* __sync_synchronize() has been verified to work here */ -#define ETHR_MEMORY_BARRIER __sync_synchronize() -#define ETHR_READ_DEPEND_MEMORY_BARRIER __sync_synchronize() -#elif defined(__x86_64__) || (defined(__i386__) \ - && !defined(ETHR_PRE_PENTIUM4_COMPAT)) -/* Use fence instructions directly instead of workaround */ -#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory") -#define ETHR_WRITE_MEMORY_BARRIER __asm__ __volatile__("sfence" : : : "memory") -#define ETHR_READ_MEMORY_BARRIER __asm__ __volatile__("lfence" : : : "memory") -#define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("" : : : "memory") -#else -/* Workaround */ -#define ETHR_MEMORY_BARRIER \ -do { \ - volatile ethr_sint32_t x___ = 0; \ - (void) __sync_val_compare_and_swap(&x___, (ethr_sint32_t) 0, (ethr_sint32_t) 1); \ -} while (0) -#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_MEMORY_BARRIER -#endif - -#define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory") - #endif /* ETHR_GCC_ATOMIC_COMMON__ */ #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 #define ETHR_HAVE_NATIVE_ATOMIC32 1 +#define ETHR_NATIVE_ATOMIC32_IMPL "gcc" #define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X #define ETHR_ATMC_T__ ethr_native_atomic32_t #define ETHR_AINT_T__ ethr_sint32_t +#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH32) +# define ETHR_HAVE___SYNC_ADD_AND_FETCH +#endif +#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND32) +# define ETHR_HAVE___SYNC_FETCH_AND_AND +#endif +#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR32) +# define ETHR_HAVE___SYNC_FETCH_AND_OR +#endif #elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8 #define ETHR_HAVE_NATIVE_ATOMIC64 1 +#define ETHR_NATIVE_ATOMIC64_IMPL "gcc" #define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X #define ETHR_ATMC_T__ ethr_native_atomic64_t #define ETHR_AINT_T__ ethr_sint64_t +#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH64) +# define ETHR_HAVE___SYNC_ADD_AND_FETCH +#endif +#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND64) 
+# define ETHR_HAVE___SYNC_FETCH_AND_AND +#endif +#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR64) +# define ETHR_HAVE___SYNC_FETCH_AND_OR +#endif #else #error "Unsupported integer size" #endif @@ -108,183 +92,120 @@ typedef struct { #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ * ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var) { return (ETHR_AINT_T__ *) &var->counter; } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) -{ #if ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ - var->counter = value; + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1 #else - /* - * Unfortunately no __sync_store() or similar exist in the gcc atomic - * op interface. We therefore have to simulate it this way... - */ - ETHR_AINT_T__ act = 0, exp; - do { - exp = act; - act = __sync_val_compare_and_swap(&var->counter, exp, value); - } while (act != exp); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1 #endif -} static ETHR_INLINE void -ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) +ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) { - ETHR_NATMC_FUNC__(set)(var, value); + var->counter = value; } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) -{ +#endif /* ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ */ + #if ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ - return var->counter; + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1 #else - /* - * Unfortunately no __sync_fetch() or similar exist in the gcc atomic - * op interface. We therefore have to simulate it this way... 
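The compare-and-swap loop referred to in the removed comment generalizes to any read-modify-write the __sync interface lacks. A minimal standalone sketch (plain C using gcc's __sync builtin, not the ethread code itself) of emulating an exchange this way:

#include <stdint.h>

/* Emulate xchg with __sync_val_compare_and_swap: retry until the swap
 * observes the value currently in memory, then return that old value. */
static int64_t
emulated_xchg64(volatile int64_t *p, int64_t new_val)
{
    int64_t expected = 0, actual = 0;
    do {
        expected = actual;
        actual = __sync_val_compare_and_swap(p, expected, new_val);
    } while (actual != expected);
    return actual; /* previous contents of *p */
}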
- */ - return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 0); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1 #endif -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) -{ - (void) __sync_add_and_fetch(&var->counter, incr); -} static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) +ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) { - return __sync_add_and_fetch(&var->counter, incr); + return var->counter; } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var) -{ - (void) __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 1); -} +#endif /* ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ */ -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var) -{ - (void) __sync_sub_and_fetch(&var->counter, (ETHR_AINT_T__) 1); -} +#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH) -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var) -{ - return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 1); -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var) +ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) { - return __sync_sub_and_fetch(&var->counter, (ETHR_AINT_T__) 1); + return __sync_add_and_fetch(&var->counter, incr); } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) -{ - return __sync_fetch_and_and(&var->counter, mask); -} +#endif -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) -{ - return (ETHR_AINT_T__) __sync_fetch_and_or(&var->counter, mask); -} +#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND) -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ old) -{ - return __sync_val_compare_and_swap(&var->counter, old, new); -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new) +ETHR_NATMC_FUNC__(and_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) { - ETHR_AINT_T__ exp, act = 0; - do { - exp = act; - act = __sync_val_compare_and_swap(&var->counter, exp, new); - } while (act != exp); - return act; + return __sync_fetch_and_and(&var->counter, mask); } -/* - * Atomic ops with at least specified barriers. 
- */ - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var) -{ -#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ - ETHR_AINT_T__ val = var->counter; - ETHR_COMPILER_BARRIER; - return val; -#else - return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 0); #endif -} -static ETHR_INLINE void -ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ -#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ - ETHR_COMPILER_BARRIER; - var->counter = i; +#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB 1 #else - (void) ETHR_NATMC_FUNC__(xchg)(var, i); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB 1 #endif -} static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var) -{ - return ETHR_NATMC_FUNC__(inc_return)(var); -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var) +ETHR_NATMC_FUNC__(or_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) { - ETHR_NATMC_FUNC__(dec)(var); + return (ETHR_AINT_T__) __sync_fetch_and_or(&var->counter, mask); } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var) -{ - return ETHR_NATMC_FUNC__(dec_return)(var); -} +#endif -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ old) -{ - return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old); -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ old) +ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var, + ETHR_AINT_T__ new, + ETHR_AINT_T__ old) { - return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old); + return __sync_val_compare_and_swap(&var->counter, old, new); } -#endif +#endif /* ETHR_TRY_INLINE_FUNCS */ #undef ETHR_NATMC_FUNC__ #undef ETHR_ATMC_T__ #undef ETHR_AINT_T__ #undef ETHR_AINT_SUFFIX__ +#undef ETHR_HAVE___SYNC_ADD_AND_FETCH +#undef ETHR_HAVE___SYNC_FETCH_AND_AND +#undef ETHR_HAVE___SYNC_FETCH_AND_OR #endif diff --git a/erts/include/internal/gcc/ethr_dw_atomic.h b/erts/include/internal/gcc/ethr_dw_atomic.h new file mode 100644 index 0000000000..6736f9c547 --- /dev/null +++ b/erts/include/internal/gcc/ethr_dw_atomic.h @@ -0,0 +1,115 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. 
+ * + * %CopyrightEnd% + */ + +/* + * Description: Native double word atomics using gcc's builtins + * Author: Rickard Green + */ + +#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__ +#ifndef ETHR_GCC_DW_ATOMIC_H__ +# define ETHR_GCC_DW_ATOMIC_H__ +# if ((ETHR_SIZEOF_PTR == 4 \ + && defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64)) \ + || (ETHR_SIZEOF_PTR == 8 \ + && defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128) \ + && defined(ETHR_HAVE_INT128_T))) +# define ETHR_INCLUDE_DW_ATOMIC_IMPL__ +# endif +#endif + +#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__ +# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC +# define ETHR_NATIVE_DW_ATOMIC_IMPL "gcc" + +# if defined(__i386__) || defined(__x86_64__) +/* + * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it will be used + * at runtime in order to determine if native or fallback implementation + * should be used. + */ +# define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \ + ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__ +# endif + +# if ETHR_SIZEOF_PTR == 4 +# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7 +# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t +# elif ETHR_SIZEOF_PTR == 8 +# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf +# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint128_t +# endif + +typedef volatile ETHR_NATIVE_SU_DW_SINT_T * ethr_native_dw_ptr_t; + +/* + * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned + * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is + * not common, and at least some glibc malloc implementations + * only 4 byte align in 32-bit mode. + * + * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte + * aligned memory in 32-bit mode. A malloc implementation that does + * not adhere to these alignment requirements is seriously broken, + * and we wont bother trying to work around it. + * + * Since memory alignment may be off by one word we need to align at + * runtime. We, therefore, need an extra word allocated. + */ +#define ETHR_DW_NATMC_MEM__(VAR) \ + (&var->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__]) +typedef union { + volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint; + volatile ethr_sint_t sint[3]; + volatile char c[ETHR_SIZEOF_PTR*3]; +} ethr_native_dw_atomic_t; + +#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) + +# ifdef ETHR_DEBUG +# define ETHR_DW_DBG_ALIGNED__(PTR) \ + ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0); +# else +# define ETHR_DW_DBG_ALIGNED__(PTR) +# endif + +#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR +static ETHR_INLINE ethr_sint_t * +ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var) +{ + return (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var); +} + + +#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB + +static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T +ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var, + ETHR_NATIVE_SU_DW_SINT_T new, + ETHR_NATIVE_SU_DW_SINT_T old) +{ + ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var); + ETHR_DW_DBG_ALIGNED__(p); + return __sync_val_compare_and_swap(p, old, new); +} + +#endif /* ETHR_TRY_INLINE_FUNCS */ + +#endif /* ETHR_GCC_DW_ATOMIC_H__ */ + diff --git a/erts/include/internal/gcc/ethr_membar.h b/erts/include/internal/gcc/ethr_membar.h new file mode 100644 index 0000000000..7d428fc68e --- /dev/null +++ b/erts/include/internal/gcc/ethr_membar.h @@ -0,0 +1,73 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. 
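On a 32-bit target the "double word" handled by ethr_dw_atomic.h is simply a 64-bit integer, so gcc's 64-bit builtin provides the double word compare-and-swap directly. A hedged, standalone sketch (assuming a compiler that offers the 64-bit __sync builtin; not the ethread code):

#include <stdint.h>

/* Double word CAS on a 32-bit platform: a single 64-bit __sync call. */
static int64_t
dw_cmpxchg64(volatile int64_t *p, int64_t new_val, int64_t expected)
{
    return __sync_val_compare_and_swap(p, expected, new_val);
}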
+ * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + +/* + * Description: Memory barriers when using gcc's builtins + * Author: Rickard Green + */ + +#ifndef ETHR_GCC_MEMBAR_H__ +#define ETHR_GCC_MEMBAR_H__ + +#define ETHR_LoadLoad (1 << 0) +#define ETHR_LoadStore (1 << 1) +#define ETHR_StoreLoad (1 << 2) +#define ETHR_StoreStore (1 << 3) + +/* + * According to the documentation __sync_synchronize() will + * issue a full memory barrier. However, __sync_synchronize() + * is known to erroneously be a noop on at least some + * platforms with some gcc versions. This has suposedly been + * fixed in some gcc version, but we don't know from which + * version. Therefore, we only use it when it has been + * verified to work. Otherwise we use the workaround + * below. + */ + +#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32) +# define ETHR_MB_T__ ethr_sint32_t +#elif defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64) +# define ETHR_MB_T__ ethr_sint64_t +#else +# error "No __sync_val_compare_and_swap" +#endif +#define ETHR_SYNC_SYNCHRONIZE_WORKAROUND__ \ +do { \ + volatile ETHR_MB_T__ x___ = 0; \ + (void) __sync_val_compare_and_swap(&x___, (ETHR_MB_T__) 0, (ETHR_MB_T__) 1); \ +} while (0) + +#define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory") + +#if defined(__mips__) && ETHR_AT_LEAST_GCC_VSN__(4, 2, 0) +# define ETHR_MEMBAR(B) __sync_synchronize() +# define ETHR_READ_DEPEND_MEMORY_BARRIER __sync_synchronize() +#elif ((defined(__powerpc__) || defined(__ppc__)) \ + && ETHR_AT_LEAST_GCC_VSN__(4, 1, 2)) +# define ETHR_MEMBAR(B) __sync_synchronize() +#else /* Use workaround */ +# define ETHR_MEMBAR(B) \ + ETHR_SYNC_SYNCHRONIZE_WORKAROUND__ +# define ETHR_READ_DEPEND_MEMORY_BARRIER \ + ETHR_SYNC_SYNCHRONIZE_WORKAROUND__ +#endif + + +#endif /* ETHR_GCC_MEMBAR_H__ */ diff --git a/erts/include/internal/gcc/ethread.h b/erts/include/internal/gcc/ethread.h index 392a1aa2b2..fcfdc39441 100644 --- a/erts/include/internal/gcc/ethread.h +++ b/erts/include/internal/gcc/ethread.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. 
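The barrier-set flags defined in ethr_membar.h compose with bitwise or, and a full StoreLoad fence is the expensive case. A hedged usage sketch (hypothetical caller, not part of the patch) of a store-then-load handoff that needs such a fence:

static volatile ethr_sint32_t published = 0;
static volatile ethr_sint32_t other_side = 0;

static ethr_sint32_t
publish_then_check(void)
{
    published = 1;
    ETHR_MEMBAR(ETHR_StoreLoad); /* order the store above before the load below */
    return other_side;
}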
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -25,16 +25,24 @@ #ifndef ETHREAD_GCC_H__ #define ETHREAD_GCC_H__ -#if !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_GCC_ATOMIC_OPS) -#define ETHR_HAVE_NATIVE_ATOMICS 1 +#ifndef ETHR_MEMBAR +# include "ethr_membar.h" +#endif + +#if !defined(ETHR_HAVE_NATIVE_ATOMIC32) +# define ETHR_ATOMIC_WANT_32BIT_IMPL__ +# include "ethr_atomic.h" +#endif -#define ETHR_ATOMIC_WANT_32BIT_IMPL__ -#include "ethr_atomic.h" -#if ETHR_SIZEOF_PTR == 8 +#if ETHR_SIZEOF_PTR == 8 && !defined(ETHR_HAVE_NATIVE_ATOMIC64) # define ETHR_ATOMIC_WANT_64BIT_IMPL__ # include "ethr_atomic.h" #endif +#if (!defined(ETHR_HAVE_NATIVE_DW_ATOMIC) \ + && !(ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)) \ + && !(ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC128))) +# include "ethr_dw_atomic.h" #endif #endif diff --git a/erts/include/internal/i386/atomic.h b/erts/include/internal/i386/atomic.h index 4e402f261a..fc1b619935 100644 --- a/erts/include/internal/i386/atomic.h +++ b/erts/include/internal/i386/atomic.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -25,53 +25,42 @@ */ #undef ETHR_INCLUDE_ATOMIC_IMPL__ -#if !defined(ETHR_X86_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__) -#define ETHR_X86_ATOMIC32_H__ -#define ETHR_INCLUDE_ATOMIC_IMPL__ 4 -#undef ETHR_ATOMIC_WANT_32BIT_IMPL__ -#elif !defined(ETHR_X86_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__) -#define ETHR_X86_ATOMIC64_H__ -#define ETHR_INCLUDE_ATOMIC_IMPL__ 8 -#undef ETHR_ATOMIC_WANT_64BIT_IMPL__ +#if !defined(ETHR_X86_ATOMIC32_H__) \ + && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__) +# define ETHR_X86_ATOMIC32_H__ +# define ETHR_INCLUDE_ATOMIC_IMPL__ 4 +# undef ETHR_ATOMIC_WANT_32BIT_IMPL__ +#elif !defined(ETHR_X86_ATOMIC64_H__) \ + && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__) +# define ETHR_X86_ATOMIC64_H__ +# define ETHR_INCLUDE_ATOMIC_IMPL__ 8 +# undef ETHR_ATOMIC_WANT_64BIT_IMPL__ #endif #ifdef ETHR_INCLUDE_ATOMIC_IMPL__ -#ifndef ETHR_X86_ATOMIC_COMMON__ -#define ETHR_X86_ATOMIC_COMMON__ - -#define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1 - -#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT) -#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory") -#define ETHR_WRITE_MEMORY_BARRIER __asm__ __volatile__("sfence" : : : "memory") -#define ETHR_READ_MEMORY_BARRIER __asm__ __volatile__("lfence" : : : "memory") -#define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("" : : : "memory") -#else -#define ETHR_MEMORY_BARRIER \ -do { \ - volatile ethr_sint32_t x___ = 0; \ - __asm__ __volatile__("lock; incl %0" : "=m"(x___) : "m"(x___) : "memory"); \ -} while (0) -#endif - -#endif /* ETHR_X86_ATOMIC_COMMON__ */ - -#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 -#define ETHR_HAVE_NATIVE_ATOMIC32 1 -#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X -#define ETHR_ATMC_T__ ethr_native_atomic32_t -#define ETHR_AINT_T__ ethr_sint32_t -#define ETHR_AINT_SUFFIX__ "l" -#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8 -#define ETHR_HAVE_NATIVE_ATOMIC64 1 -#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X -#define ETHR_ATMC_T__ ethr_native_atomic64_t -#define ETHR_AINT_T__ ethr_sint64_t -#define ETHR_AINT_SUFFIX__ "q" -#else -#error 
"Unsupported integer size" -#endif +# ifndef ETHR_X86_ATOMIC_COMMON__ +# define ETHR_X86_ATOMIC_COMMON__ +# define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1 +# endif /* ETHR_X86_ATOMIC_COMMON__ */ + +# if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_NATIVE_ATOMIC32 1 +# define ETHR_NATIVE_ATOMIC32_IMPL "ethread" +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X +# define ETHR_ATMC_T__ ethr_native_atomic32_t +# define ETHR_AINT_T__ ethr_sint32_t +# define ETHR_AINT_SUFFIX__ "l" +# elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8 +# define ETHR_HAVE_NATIVE_ATOMIC64 1 +# define ETHR_NATIVE_ATOMIC64_IMPL "ethread" +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_ATMC_T__ ethr_native_atomic64_t +# define ETHR_AINT_T__ ethr_sint64_t +# define ETHR_AINT_SUFFIX__ "q" +# else +# error "Unsupported integer size" +# endif /* An atomic is an aligned ETHR_AINT_T__ accessed via locked operations. */ @@ -81,87 +70,28 @@ typedef struct { #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ * ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var) { return (ETHR_AINT_T__ *) &var->counter; } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ - var->counter = i; -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ - var->counter = i; -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) -{ - return var->counter; -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) -{ - __asm__ __volatile__( - "lock; add" ETHR_AINT_SUFFIX__ " %1, %0" - : "=m"(var->counter) - : "ir"(incr), "m"(var->counter)); -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var) -{ - __asm__ __volatile__( - "lock; inc" ETHR_AINT_SUFFIX__ " %0" - : "=m"(var->counter) - : "m"(var->counter)); -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var) -{ - __asm__ __volatile__( - "lock; dec" ETHR_AINT_SUFFIX__ " %0" - : "=m"(var->counter) - : "m"(var->counter)); -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) -{ - ETHR_AINT_T__ tmp; - - tmp = incr; - __asm__ __volatile__( - "lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */ - : "=r"(tmp) - : "m"(var->counter), "0"(tmp)); - /* now tmp is the atomic's previous value */ - return tmp + incr; -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var) -{ - return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) 1); -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var) -{ - return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) -1); -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ old) +ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var, + ETHR_AINT_T__ new, + ETHR_AINT_T__ old) { __asm__ __volatile__( "lock; cmpxchg" ETHR_AINT_SUFFIX__ " %2, %3" @@ -171,110 +101,148 @@ ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, return old; } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ 
mask) -{ - ETHR_AINT_T__ tmp, old; - - tmp = var->counter; - do { - old = tmp; - tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp & mask, tmp); - } while (__builtin_expect(tmp != old, 0)); - /* now tmp is the atomic's previous value */ - return tmp; -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) -{ - ETHR_AINT_T__ tmp, old; - - tmp = var->counter; - do { - old = tmp; - tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp | mask, tmp); - } while (__builtin_expect(tmp != old, 0)); - /* now tmp is the atomic's previous value */ - return tmp; -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val) +ETHR_NATMC_FUNC__(xchg_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val) { ETHR_AINT_T__ tmp = val; __asm__ __volatile__( "xchg" ETHR_AINT_SUFFIX__ " %0, %1" : "=r"(tmp) - : "m"(var->counter), "0"(tmp)); + : "m"(var->counter), "0"(tmp) + : "memory"); /* now tmp is the atomic's previous value */ return tmp; } -/* - * Atomic ops with at least specified barriers. - */ +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1 +#endif -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var) +static ETHR_INLINE void +ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) { - ETHR_AINT_T__ val; -#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT) - val = var->counter; + var->counter = i; +} + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1 #else - val = ETHR_NATMC_FUNC__(add_return)(var, 0); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1 #endif - __asm__ __volatile__("" : : : "memory"); - return val; -} static ETHR_INLINE void ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) { - __asm__ __volatile__("" : : : "memory"); -#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT) - var->counter = i; +#if defined(_M_IX86) + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + (void) ETHR_NATMC_FUNC__(xchg_mb)(var, i); + else +#endif /* _M_IX86 */ + { + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + var->counter = i; + } +} + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB 1 #else - (void) ETHR_NATMC_FUNC__(xchg)(var, i); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB 1 #endif + +static ETHR_INLINE void +ETHR_NATMC_FUNC__(set_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) +{ + (void) ETHR_NATMC_FUNC__(xchg_mb)(var, i); } +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var) +ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) { - ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var); - __asm__ __volatile__("" : : : "memory"); - return res; + return var->counter; } +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB 1 +#endif + static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var) +ETHR_NATMC_FUNC__(add_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) { - __asm__ __volatile__("" : : : "memory"); - ETHR_NATMC_FUNC__(dec)(var); -} + __asm__ __volatile__( + "lock; add" ETHR_AINT_SUFFIX__ " %1, %0" + : 
"=m"(var->counter) + : "ir"(incr), "m"(var->counter) + : "memory"); +} -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var) +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB 1 +#endif + +static ETHR_INLINE void +ETHR_NATMC_FUNC__(inc_mb)(ETHR_ATMC_T__ *var) { - __asm__ __volatile__("" : : : "memory"); - return ETHR_NATMC_FUNC__(dec_return)(var); + __asm__ __volatile__( + "lock; inc" ETHR_AINT_SUFFIX__ " %0" + : "=m"(var->counter) + : "m"(var->counter) + : "memory"); } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ old) +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB 1 +#endif + +static ETHR_INLINE void +ETHR_NATMC_FUNC__(dec_mb)(ETHR_ATMC_T__ *var) { - return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old); + __asm__ __volatile__( + "lock; dec" ETHR_AINT_SUFFIX__ " %0" + : "=m"(var->counter) + : "m"(var->counter) + : "memory"); } +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ old) +ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) { - return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old); + ETHR_AINT_T__ tmp; + + tmp = incr; + __asm__ __volatile__( + "lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */ + : "=r"(tmp) + : "m"(var->counter), "0"(tmp) + : "memory"); + /* now tmp is the atomic's previous value */ + return tmp + incr; } #endif /* ETHR_TRY_INLINE_FUNCS */ diff --git a/erts/include/internal/i386/ethr_dw_atomic.h b/erts/include/internal/i386/ethr_dw_atomic.h new file mode 100644 index 0000000000..9fb89bbe43 --- /dev/null +++ b/erts/include/internal/i386/ethr_dw_atomic.h @@ -0,0 +1,278 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + +/* + * Description: Native double word atomics for x86/x86_64 + * Author: Rickard Green + */ + +#ifndef ETHR_X86_DW_ATOMIC_H__ +#define ETHR_X86_DW_ATOMIC_H__ + +#ifdef ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT + +#define ETHR_HAVE_NATIVE_DW_ATOMIC +#define ETHR_NATIVE_DW_ATOMIC_IMPL "ethread" + +/* + * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it will be used + * at runtime in order to determine if native or fallback implementation + * should be used. 
+ */ +#define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \ + ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__ + +#if ETHR_SIZEOF_PTR == 4 +typedef volatile ethr_sint64_t * ethr_native_dw_ptr_t; +# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7 +# define ETHR_DW_CMPXCHG_SFX__ "8b" +# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t +#else +#ifdef ETHR_HAVE_INT128_T +# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint128_t +typedef volatile ethr_sint128_t * ethr_native_dw_ptr_t; +#else +typedef struct { + ethr_sint64_t sint64[2]; +} ethr_native_sint128_t__; +typedef volatile ethr_native_sint128_t__ * ethr_native_dw_ptr_t; +#endif +# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf +# define ETHR_DW_CMPXCHG_SFX__ "16b" +#endif + +/* + * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned + * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is + * not common, and at least some glibc malloc implementations + * only 4 byte align in 32-bit mode. + * + * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte + * aligned memory in 32-bit mode. A malloc implementation that does + * not adhere to these alignment requirements is seriously broken, + * and we wont bother trying to work around it. + * + * Since memory alignment may be off by one word we need to align at + * runtime. We, therefore, need an extra word allocated. + */ +#define ETHR_DW_NATMC_MEM__(VAR) \ + (&var->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__]) +typedef union { +#ifdef ETHR_NATIVE_SU_DW_SINT_T + volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint; +#endif + volatile ethr_sint_t sint[3]; + volatile char c[ETHR_SIZEOF_PTR*3]; +} ethr_native_dw_atomic_t; + + +#if (defined(ETHR_TRY_INLINE_FUNCS) \ + || defined(ETHR_ATOMIC_IMPL__) \ + || defined(ETHR_X86_SSE2_ASM_C__)) \ + && ETHR_SIZEOF_PTR == 4 \ + && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT) +ethr_sint64_t +ethr_sse2_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var); +void +ethr_sse2_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var, + ethr_sint64_t val); +#endif + +#if (defined(ETHR_TRY_INLINE_FUNCS) \ + || defined(ETHR_ATOMIC_IMPL__) \ + || defined(ETHR_X86_SSE2_ASM_C__)) +# ifdef ETHR_DEBUG +# define ETHR_DW_DBG_ALIGNED__(PTR) \ + ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0); +# else +# define ETHR_DW_DBG_ALIGNED__(PTR) +# endif +#endif + +#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) + +#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR +static ETHR_INLINE ethr_sint_t * +ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var) +{ + return (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var); +} + +#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__ +/* + * When position independent code is used in 32-bit mode, the EBX register + * is used for storage of global offset table address, and we may not + * use it as input or output in an asm. We need to save and restore the + * EBX register explicitly (for some reason gcc doesn't provide this + * service to us). 
+ */ +# define ETHR_NO_CLOBBER_EBX__ 1 +#else +# define ETHR_NO_CLOBBER_EBX__ 0 +#endif + +#if ETHR_NO_CLOBBER_EBX__ && !defined(ETHR_CMPXCHG8B_REGISTER_SHORTAGE) +/* When no optimization is on, we'll run into a register shortage */ +# if defined(ETHR_DEBUG) || defined(DEBUG) || defined(VALGRIND) \ + || defined(GCOV) || defined(PURIFY) || defined(PURECOV) +# define ETHR_CMPXCHG8B_REGISTER_SHORTAGE 1 +# else +# define ETHR_CMPXCHG8B_REGISTER_SHORTAGE 0 +# endif +#endif + + +#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB + +static ETHR_INLINE int +ethr_native_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var, + ethr_sint_t *new, + ethr_sint_t *xchg) +{ + ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var); + char xchgd; + + ETHR_DW_DBG_ALIGNED__(p); + + __asm__ __volatile__( +#if ETHR_NO_CLOBBER_EBX__ + "pushl %%ebx\n\t" +# if ETHR_CMPXCHG8B_REGISTER_SHORTAGE + "movl (%7), %%ebx\n\t" + "movl 4(%7), %%ecx\n\t" +# else + "movl %8, %%ebx\n\t" +# endif +#endif + "lock; cmpxchg" ETHR_DW_CMPXCHG_SFX__ " %0\n\t" + "setz %3\n\t" +#if ETHR_NO_CLOBBER_EBX__ + "popl %%ebx\n\t" +#endif + : "=m"(*p), "=d"(xchg[1]), "=a"(xchg[0]), "=c"(xchgd) + : "m"(*p), "1"(xchg[1]), "2"(xchg[0]), +#if ETHR_NO_CLOBBER_EBX__ +# if ETHR_CMPXCHG8B_REGISTER_SHORTAGE + "3"(new) +# else + "3"(new[1]), + "r"(new[0]) +# endif +#else + "3"(new[1]), + "b"(new[0]) +#endif + : "cc", "memory"); + + return (int) xchgd; +} + +#undef ETHR_NO_CLOBBER_EBX__ + +#if ETHR_SIZEOF_PTR == 4 && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT) + +typedef union { + ethr_sint64_t sint64; + ethr_sint_t sint[2]; +} ethr_dw_atomic_no_sse2_convert_t; + +#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ + +static ETHR_INLINE ethr_sint64_t +ethr_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var) +{ + if (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__) + return ethr_sse2_native_su_dw_atomic_read(var); + else { + ethr_sint_t new[2]; + ethr_dw_atomic_no_sse2_convert_t xchg; + new[0] = new[1] = xchg.sint[0] = xchg.sint[1] = 0x83838383; + (void) ethr_native_dw_atomic_cmpxchg_mb(var, new, xchg.sint); + return xchg.sint64; + } +} + +#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET + +static ETHR_INLINE void +ethr_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var, + ethr_sint64_t val) +{ + if (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__) + ethr_sse2_native_su_dw_atomic_set(var, val); + else { + ethr_sint_t xchg[2] = {0, 0}; + ethr_dw_atomic_no_sse2_convert_t new; + new.sint64 = val; + while (!ethr_native_dw_atomic_cmpxchg_mb(var, new.sint, xchg)); + } +} + +#endif /* ETHR_SIZEOF_PTR == 4 */ + +#endif /* ETHR_TRY_INLINE_FUNCS */ + +#if defined(ETHR_X86_SSE2_ASM_C__) \ + && ETHR_SIZEOF_PTR == 4 \ + && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT) + +/* + * 8-byte aligned loads and stores of 64-bit values are atomic from + * pentium and forward. An ordinary volatile load or store in 32-bit + * mode generates two 32-bit operations (at least with gcc-4.1.2 using + * -msse2). In order to guarantee one 64-bit load/store operation + * from/to memory we load/store via an xmm register using movq. + * + * Load/store can be achieved using cmpxchg8b, however, using movq is + * much faster. Unfortunately we cannot do the same thing in 64-bit + * mode; instead, we have to do loads and stores via cmpxchg16b. + * + * We do not inline these, but instead compile these into a separate + * object file using -msse2. This since we don't want to use -msse2 for + * the whole system. 
If we detect sse2 support (pentium4 and forward) + * at runtime, we use them; otherwise, we fall back to using cmpxchg8b + * for loads and stores. This way the binary can be moved between + * processors with and without sse2 support. + */ + +ethr_sint64_t +ethr_sse2_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var) +{ + ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var); + ethr_sint64_t val; + ETHR_DW_DBG_ALIGNED__(p); + __asm__ __volatile__("movq %1, %0\n\t" : "=x"(val) : "m"(*p) : "memory"); + return val; +} + +void +ethr_sse2_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var, + ethr_sint64_t val) +{ + ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var); + ETHR_DW_DBG_ALIGNED__(p); + __asm__ __volatile__("movq %1, %0\n\t" : "=m"(*p) : "x"(val) : "memory"); +} + +#endif /* ETHR_X86_SSE2_ASM_C__ */ + +#endif /* ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT */ + +#endif /* ETHR_X86_DW_ATOMIC_H__ */ + diff --git a/erts/include/internal/i386/ethr_membar.h b/erts/include/internal/i386/ethr_membar.h new file mode 100644 index 0000000000..92d9de7f3f --- /dev/null +++ b/erts/include/internal/i386/ethr_membar.h @@ -0,0 +1,114 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + +/* + * Description: Memory barriers for x86/x86-64 + * Author: Rickard Green + */ + +#ifndef ETHR_X86_MEMBAR_H__ +#define ETHR_X86_MEMBAR_H__ + +#define ETHR_LoadLoad (1 << 0) +#define ETHR_LoadStore (1 << 1) +#define ETHR_StoreLoad (1 << 2) +#define ETHR_StoreStore (1 << 3) + +#define ETHR_NO_SSE2_MEMORY_BARRIER__ \ +do { \ + volatile ethr_sint32_t x__ = 0; \ + __asm__ __volatile__ ("lock; orl $0x0, %0\n\t" \ + : "=m"(x__) \ + : "m"(x__) \ + : "memory"); \ +} while (0) + +static __inline__ void +ethr_cfence__(void) +{ + __asm__ __volatile__ ("" : : : "memory"); +} + +static __inline__ void +ethr_mfence__(void) +{ +#if ETHR_SIZEOF_PTR == 4 + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + ETHR_NO_SSE2_MEMORY_BARRIER__; + else +#endif + __asm__ __volatile__ ("mfence\n\t" : : : "memory"); +} + +static __inline__ void +ethr_sfence__(void) +{ +#if ETHR_SIZEOF_PTR == 4 + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + ETHR_NO_SSE2_MEMORY_BARRIER__; + else +#endif + __asm__ __volatile__ ("sfence\n\t" : : : "memory"); +} + +static __inline__ void +ethr_lfence__(void) +{ +#if ETHR_SIZEOF_PTR == 4 + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + ETHR_NO_SSE2_MEMORY_BARRIER__; + else +#endif + __asm__ __volatile__ ("lfence\n\t" : : : "memory"); +} + +#define ETHR_X86_OUT_OF_ORDER_MEMBAR(B) \ + ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \ + ethr_sfence__(), \ + ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \ + ethr_lfence__(), \ + ethr_mfence__())) + +#ifdef ETHR_X86_OUT_OF_ORDER + +#define ETHR_MEMBAR(B) \ + ETHR_X86_OUT_OF_ORDER_MEMBAR((B)) + +#else /* !ETHR_X86_OUT_OF_ORDER (the default) */ + +/* + * We assume that only stores before loads may be reordered. 
That is, + * we assume that *no* instructions like these are used: + * - CLFLUSH, + * - streaming stores executed with non-temporal move, + * - string operations, or + * - other instructions which aren't LoadLoad, LoadStore, and StoreStore + * ordered by themselves + * If such instructions are used, either insert memory barriers + * using ETHR_X86_OUT_OF_ORDER_MEMBAR() at appropriate places, or + * define ETHR_X86_OUT_OF_ORDER. For more info see Intel 64 and IA-32 + * Architectures Software Developer's Manual; Vol 3A; Chapter 8.2.2. + */ + +#define ETHR_MEMBAR(B) \ + ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_mfence__(), ethr_cfence__()) + +#endif /* !ETHR_X86_OUT_OF_ORDER */ + +#endif /* ETHR_X86_MEMBAR_H__ */ diff --git a/erts/include/internal/i386/ethread.h b/erts/include/internal/i386/ethread.h index b5a17caefb..80e4dc7b99 100644 --- a/erts/include/internal/i386/ethread.h +++ b/erts/include/internal/i386/ethread.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -24,17 +24,15 @@ #ifndef ETHREAD_I386_ETHREAD_H #define ETHREAD_I386_ETHREAD_H +#include "ethr_membar.h" #define ETHR_ATOMIC_WANT_32BIT_IMPL__ #include "atomic.h" #if ETHR_SIZEOF_PTR == 8 # define ETHR_ATOMIC_WANT_64BIT_IMPL__ # include "atomic.h" #endif +#include "ethr_dw_atomic.h" #include "spinlock.h" #include "rwlock.h" -#define ETHR_HAVE_NATIVE_ATOMICS 1 -#define ETHR_HAVE_NATIVE_SPINLOCKS 1 -#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1 - #endif /* ETHREAD_I386_ETHREAD_H */ diff --git a/erts/include/internal/i386/rwlock.h b/erts/include/internal/i386/rwlock.h index be47f459ce..1a8cd7da0c 100644 --- a/erts/include/internal/i386/rwlock.h +++ b/erts/include/internal/i386/rwlock.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -26,6 +26,9 @@ #ifndef ETHREAD_I386_RWLOCK_H #define ETHREAD_I386_RWLOCK_H +#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1 +#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread" + /* XXX: describe the algorithm */ typedef struct { volatile int lock; diff --git a/erts/include/internal/i386/spinlock.h b/erts/include/internal/i386/spinlock.h index 0325324895..a84fba91b1 100644 --- a/erts/include/internal/i386/spinlock.h +++ b/erts/include/internal/i386/spinlock.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -24,6 +24,9 @@ #ifndef ETHREAD_I386_SPINLOCK_H #define ETHREAD_I386_SPINLOCK_H +#define ETHR_HAVE_NATIVE_SPINLOCKS 1 +#define ETHR_NATIVE_SPINLOCK_IMPL "ethread" + /* A spinlock is the low byte of an aligned 32-bit integer. * A non-zero value means that the lock is locked. */ @@ -46,16 +49,20 @@ ethr_native_spin_unlock(ethr_native_spinlock_t *lock) * On i386 this needs to be a locked operation * to avoid Pentium Pro errata 66 and 92. 
*/ -#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT) - __asm__ __volatile__("" : : : "memory"); - *(unsigned char*)&lock->lock = 0; -#else - char tmp = 0; - __asm__ __volatile__( - "xchgb %b0, %1" - : "=q"(tmp), "=m"(lock->lock) - : "0"(tmp) : "memory"); +#if !defined(__x86_64__) + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) { + char tmp = 0; + __asm__ __volatile__( + "xchgb %b0, %1" + : "=q"(tmp), "=m"(lock->lock) + : "0"(tmp) : "memory"); + } + else #endif + { + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + *(unsigned char*)&lock->lock = 0; + } } static ETHR_INLINE int diff --git a/erts/include/internal/libatomic_ops/ethr_atomic.h b/erts/include/internal/libatomic_ops/ethr_atomic.h index 93bc06036f..fb1288c330 100644 --- a/erts/include/internal/libatomic_ops/ethr_atomic.h +++ b/erts/include/internal/libatomic_ops/ethr_atomic.h @@ -25,16 +25,6 @@ #ifndef ETHR_LIBATOMIC_OPS_ATOMIC_H__ #define ETHR_LIBATOMIC_OPS_ATOMIC_H__ -#if !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_LIBATOMIC_OPS) -#define ETHR_HAVE_NATIVE_ATOMICS 1 - -#if (defined(__i386__) && !defined(ETHR_PRE_PENTIUM4_COMPAT)) \ - || defined(__x86_64__) -#define AO_USE_PENTIUM4_INSTRS -#endif - -#include "atomic_ops.h" - /* * libatomic_ops can be downloaded from: * http://www.hpl.hp.com/research/linux/atomic_ops/ @@ -46,21 +36,18 @@ * - AO_store() * - AO_compare_and_swap() * - * The `AO_t' type also have to be at least as large as the `void *' type. */ -#if ETHR_SIZEOF_AO_T < ETHR_SIZEOF_PTR -#error The AO_t type is too small -#endif - #if ETHR_SIZEOF_AO_T == 4 #define ETHR_HAVE_NATIVE_ATOMIC32 1 +#define ETHR_NATIVE_ATOMIC32_IMPL "libatomic_ops" #define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X #define ETHR_ATMC_T__ ethr_native_atomic32_t #define ETHR_AINT_T__ ethr_sint32_t #define ETHR_AINT_SUFFIX__ "l" #elif ETHR_SIZEOF_AO_T == 8 #define ETHR_HAVE_NATIVE_ATOMIC64 1 +#define ETHR_NATIVE_ATOMIC64_IMPL "libatomic_ops" #define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X #define ETHR_ATMC_T__ ethr_native_atomic64_t #define ETHR_AINT_T__ ethr_sint64_t @@ -69,61 +56,29 @@ #error "Unsupported integer size" #endif -#if ETHR_SIZEOF_AO_T == 8 -typedef union { - volatile AO_t counter; - ethr_sint32_t sint32[2]; -} ETHR_ATMC_T__; -#else typedef struct { volatile AO_t counter; } ETHR_ATMC_T__; -#endif -#define ETHR_MEMORY_BARRIER AO_nop_full() -#ifdef AO_HAVE_nop_write -# define ETHR_WRITE_MEMORY_BARRIER AO_nop_write() -#else -# define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMORY_BARRIER -#endif -#ifdef AO_HAVE_nop_read -# define ETHR_READ_MEMORY_BARRIER AO_nop_read() -#else -# define ETHR_READ_MEMORY_BARRIER ETHR_MEMORY_BARRIER -#endif -#ifdef AO_NO_DD_ORDERING -# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_READ_MEMORY_BARRIER +#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1 #else -# define ETHR_READ_DEPEND_MEMORY_BARRIER AO_compiler_barrier() +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1 #endif -#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) - static ETHR_INLINE ETHR_AINT_T__ * ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var) { return (ETHR_AINT_T__ *) &var->counter; } -#if ETHR_SIZEOF_AO_T == 8 -/* - * We also need to provide an ethr_native_atomic32_addr(), since - * this 64-bit implementation will be used implementing 32-bit - * native atomics. 
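As the comment above notes, the libatomic_ops layer only needs AO_load(), AO_store(), and AO_compare_and_swap(); richer operations can be built on top of the CAS primitive. A hedged sketch (not the ethread code) of a fetch-and-add emulated that way:

#include <atomic_ops.h>

/* Emulate fetch-and-add using only the required CAS primitive. */
static AO_t
ao_fetch_and_add_emulated(volatile AO_t *p, AO_t incr)
{
    AO_t old;
    do {
        old = AO_load(p);
    } while (!AO_compare_and_swap(p, old, old + incr));
    return old; /* value before the addition */
}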
- */ - -static ETHR_INLINE ethr_sint32_t * -ethr_native_atomic32_addr(ETHR_ATMC_T__ *var) -{ - ETHR_ASSERT(((void *) &var->sint32[0]) == ((void *) &var->counter)); -#ifdef ETHR_BIGENDIAN - return &var->sint32[1]; +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1 #else - return &var->sint32[0]; +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1 #endif -} - -#endif /* ETHR_SIZEOF_AO_T == 8 */ static ETHR_INLINE void ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) @@ -131,197 +86,198 @@ ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) AO_store(&var->counter, (AO_t) value); } +#ifdef AO_HAVE_store_release + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1 +#endif + static ETHR_INLINE void -ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) +ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) { - ETHR_NATMC_FUNC__(set)(var, value); + AO_store_release(&var->counter, (AO_t) value); } +#endif + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) { return (ETHR_AINT_T__) AO_load(&var->counter); } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) -{ -#ifdef AO_HAVE_fetch_and_add_full - return ((ETHR_AINT_T__) AO_fetch_and_add_full(&var->counter, (AO_t) incr)) + incr; +#ifdef AO_HAVE_load_acquire + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB 1 #else - while (1) { - AO_t exp = AO_load(&var->counter); - AO_t new = exp + (AO_t) incr; - if (AO_compare_and_swap_full(&var->counter, exp, new)) - return (ETHR_AINT_T__) new; - } +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB 1 #endif -} -static ETHR_INLINE void -ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) +static ETHR_INLINE ETHR_AINT_T__ +ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var) { - (void) ETHR_NATMC_FUNC__(add_return)(var, incr); + return (ETHR_AINT_T__) AO_load_acquire(&var->counter); } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var) -{ -#ifdef AO_HAVE_fetch_and_add1_full - return ((ETHR_AINT_T__) AO_fetch_and_add1_full(&var->counter)) + 1; -#else - return ETHR_NATMC_FUNC__(add_return)(var, 1); #endif -} -static ETHR_INLINE void -ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var) -{ - (void) ETHR_NATMC_FUNC__(inc_return)(var); -} +#ifdef AO_HAVE_fetch_and_add -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var) -{ -#ifdef AO_HAVE_fetch_and_sub1_full - return ((ETHR_AINT_T__) AO_fetch_and_sub1_full(&var->counter)) - 1; +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1 #else - return ETHR_NATMC_FUNC__(add_return)(var, -1); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN 1 #endif -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var) -{ - (void) ETHR_NATMC_FUNC__(dec_return)(var); -} static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) +ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) { - while (1) { - AO_t exp = AO_load(&var->counter); - AO_t new = exp & ((AO_t) mask); - if (AO_compare_and_swap_full(&var->counter, exp, new)) - return (ETHR_AINT_T__) exp; - } + return ((ETHR_AINT_T__) AO_fetch_and_add(&var->counter, (AO_t) 
incr)) + incr; } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) -{ - while (1) { - AO_t exp = AO_load(&var->counter); - AO_t new = exp | ((AO_t) mask); - if (AO_compare_and_swap_full(&var->counter, exp, new)) - return (ETHR_AINT_T__) exp; - } -} +#endif -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ exp) -{ - ETHR_AINT_T__ act; - do { - if (AO_compare_and_swap_full(&var->counter, (AO_t) exp, (AO_t) new)) - return exp; - act = (ETHR_AINT_T__) AO_load(&var->counter); - } while (act == exp); - return act; -} +#ifdef AO_HAVE_fetch_and_add1 + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new) +ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var) { - while (1) { - AO_t exp = AO_load(&var->counter); - if (AO_compare_and_swap_full(&var->counter, exp, (AO_t) new)) - return (ETHR_AINT_T__) exp; - } + return ((ETHR_AINT_T__) AO_fetch_and_add1(&var->counter)) + 1; } -/* - * Atomic ops with at least specified barriers. - */ +#endif -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var) -{ -#ifdef AO_HAVE_load_acquire - return (ETHR_AINT_T__) AO_load_acquire(&var->counter); +#ifdef AO_HAVE_fetch_and_add1_acquire + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1 #else - ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var); - ETHR_MEMORY_BARRIER; - return res; +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB 1 #endif -} static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var) { -#ifdef AO_HAVE_fetch_and_add1_acquire return ((ETHR_AINT_T__) AO_fetch_and_add1_acquire(&var->counter)) + 1; +} + +#endif + +#ifdef AO_HAVE_fetch_and_sub1 + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN 1 #else - ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(add_return)(var, 1); - return res; +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN 1 #endif -} -static ETHR_INLINE void -ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value) +static ETHR_INLINE ETHR_AINT_T__ +ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var) { -#ifdef AO_HAVE_store_release - AO_store_release(&var->counter, (AO_t) value); + return ((ETHR_AINT_T__) AO_fetch_and_sub1(&var->counter)) - 1; +} + +#endif + +#ifdef AO_HAVE_fetch_and_sub1_release + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB 1 #else - ETHR_MEMORY_BARRIER; - ETHR_NATMC_FUNC__(set)(var, value); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB 1 #endif -} static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var) { -#ifdef AO_HAVE_fetch_and_sub1_release return ((ETHR_AINT_T__) AO_fetch_and_sub1_release(&var->counter)) - 1; +} + +#endif + +#ifdef AO_HAVE_compare_and_swap + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1 #else - return ETHR_NATMC_FUNC__(dec_return)(var); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG 1 #endif -} -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var) +static ETHR_INLINE ETHR_AINT_T__ +ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, + ETHR_AINT_T__ new, + ETHR_AINT_T__ exp) { - (void) ETHR_NATMC_FUNC__(dec_return_relb)(var); + ETHR_AINT_T__ act; + do { + if 
(AO_compare_and_swap(&var->counter, (AO_t) exp, (AO_t) new)) + return exp; + act = (ETHR_AINT_T__) AO_load(&var->counter); + } while (act == exp); + return act; } +#endif + +#ifdef AO_HAVE_compare_and_swap_acquire + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ exp) { -#ifdef AO_HAVE_compare_and_swap_acquire ETHR_AINT_T__ act; do { if (AO_compare_and_swap_acquire(&var->counter, (AO_t) exp, (AO_t) new)) return exp; +#ifdef AO_HAVE_load_acquire + act = (ETHR_AINT_T__) AO_load_acquire(&var->counter); +#else act = (ETHR_AINT_T__) AO_load(&var->counter); +#endif } while (act == exp); +#ifndef AO_HAVE_load_acquire AO_nop_full(); +#endif return act; +} + +#endif + +#ifdef AO_HAVE_compare_and_swap_release + +#if ETHR_SIZEOF_AO_T == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB 1 #else - ETHR_AINT_T__ act = ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp); - return act; +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB 1 #endif -} static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ exp) { -#ifdef AO_HAVE_compare_and_swap_release ETHR_AINT_T__ act; do { if (AO_compare_and_swap_release(&var->counter, (AO_t) exp, (AO_t) new)) @@ -329,11 +285,9 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, act = (ETHR_AINT_T__) AO_load(&var->counter); } while (act == exp); return act; -#else - return ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp); -#endif } +#endif #endif /* ETHR_TRY_INLINE_FUNCS */ @@ -341,6 +295,4 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, #undef ETHR_ATMC_T__ #undef ETHR_AINT_T__ -#endif /* !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_LIBATOMIC_OPS) */ - #endif /* ETHR_LIBATOMIC_OPS_ATOMIC_H__ */ diff --git a/erts/include/internal/libatomic_ops/ethr_membar.h b/erts/include/internal/libatomic_ops/ethr_membar.h new file mode 100644 index 0000000000..b8530a0094 --- /dev/null +++ b/erts/include/internal/libatomic_ops/ethr_membar.h @@ -0,0 +1,75 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. 
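AO_compare_and_swap() only reports success or failure; it does not hand back the value found in memory. The cmpxchg wrappers above therefore recover that value themselves: on failure the variable is re-read, and the loop retries as long as the re-read still shows the expected value. A compact restatement of that pattern, with an illustrative name and assuming atomic_ops.h is included:

/* Sketch of the value-recovery loop used by the cmpxchg wrappers above. */
static AO_t
sketch_ao_cmpxchg(volatile AO_t *counter, AO_t new, AO_t exp)
{
    AO_t act;
    do {
        if (AO_compare_and_swap(counter, exp, new))
            return exp;             /* swapped; old value was exp              */
        act = AO_load(counter);     /* failed; observe the current value       */
    } while (act == exp);           /* still exp? the failure was stale; retry */
    return act;
}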
+ * + * %CopyrightEnd% + */ + +/* + * Description: Memory barriers when using libatomic_ops + * Author: Rickard Green + */ + +#ifndef ETHR_LIBATOMIC_OPS_MEMBAR_H__ +#define ETHR_LIBATOMIC_OPS_MEMBAR_H__ + +#define ETHR_LoadLoad (1 << 0) +#define ETHR_LoadStore (1 << 1) +#define ETHR_StoreLoad (1 << 2) +#define ETHR_StoreStore (1 << 3) + +#ifndef AO_HAVE_nop_full +# error "No AO_nop_full()" +#endif + +static __inline__ void +ethr_mb__(void) +{ + AO_nop_full(); +} + +static __inline__ void +ethr_rb__(void) +{ +#ifdef AO_HAVE_nop_read + AO_nop_read(); +#else + AO_nop_full(); +#endif +} + +static __inline__ void +ethr_wb__(void) +{ +#ifdef AO_HAVE_nop_write + AO_nop_write(); +#else + AO_nop_full(); +#endif +} + +#define ETHR_MEMBAR(B) \ + ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \ + ethr_wb__(), \ + ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \ + ethr_rb__(), \ + ethr_mb__())) + +#define ETHR_COMPILER_BARRIER AO_compiler_barrier() +#ifdef AO_NO_DD_ORDERING +# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_READ_MEMORY_BARRIER +#endif + +#endif /* ETHR_LIBATOMIC_OPS_MEMBAR_H__ */ diff --git a/erts/include/internal/libatomic_ops/ethread.h b/erts/include/internal/libatomic_ops/ethread.h index ee73ba73bc..e1fdd588bb 100644 --- a/erts/include/internal/libatomic_ops/ethread.h +++ b/erts/include/internal/libatomic_ops/ethread.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -25,6 +25,18 @@ #ifndef ETHREAD_LIBATOMIC_OPS_H__ #define ETHREAD_LIBATOMIC_OPS_H__ +#if (defined(ETHR_HAVE_LIBATOMIC_OPS) \ + && ((ETHR_SIZEOF_AO_T == 4 && !defined(ETHR_HAVE_NATIVE_ATOMIC32)) \ + || (ETHR_SIZEOF_AO_T == 8 && !defined(ETHR_HAVE_NATIVE_ATOMIC64)))) + +#if defined(__x86_64__) +#define AO_USE_PENTIUM4_INSTRS +#endif + +#include "atomic_ops.h" +#include "ethr_membar.h" #include "ethr_atomic.h" #endif + +#endif diff --git a/erts/include/internal/ppc32/atomic.h b/erts/include/internal/ppc32/atomic.h index 522f433649..6001620677 100644 --- a/erts/include/internal/ppc32/atomic.h +++ b/erts/include/internal/ppc32/atomic.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -29,27 +29,31 @@ #define ETHREAD_PPC_ATOMIC_H #define ETHR_HAVE_NATIVE_ATOMIC32 1 +#define ETHR_NATIVE_ATOMIC32_IMPL "ethread" typedef struct { volatile ethr_sint32_t counter; } ethr_native_atomic32_t; -#define ETHR_MEMORY_BARRIER __asm__ __volatile__("sync" : : : "memory") - #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1 + static ETHR_INLINE ethr_sint32_t * ethr_native_atomic32_addr(ethr_native_atomic32_t *var) { return (ethr_sint32_t *) &var->counter; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1 + static ETHR_INLINE void -ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i) +ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i) { var->counter = i; } -#define ethr_native_atomic32_set(v, i) ethr_native_atomic32_init((v), (i)) + +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1 static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_read(ethr_native_atomic32_t *var) @@ -57,57 +61,68 @@ ethr_native_atomic32_read(ethr_native_atomic32_t *var) return var->counter; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr) { ethr_sint32_t tmp; __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%1\n\t" "add %0,%2,%0\n\t" "stwcx. %0,0,%1\n\t" "bne- 1b\n\t" - "isync" : "=&r"(tmp) : "r"(&var->counter), "r"(incr) : "cc", "memory"); return tmp; } -static ETHR_INLINE void -ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr) +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB 1 + +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_add_return_acqb(ethr_native_atomic32_t *var, ethr_sint32_t incr) { - /* XXX: could use weaker version here w/o eieio+isync */ - (void)ethr_native_atomic32_add_return(var, incr); + ethr_sint32_t res; + res = ethr_native_atomic32_add_return(var, incr); + __asm__ __volatile("isync\n\t" : : : "memory"); + return res; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var) { ethr_sint32_t tmp; __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%1\n\t" "addic %0,%0,1\n\t" /* due to addi's (rA|0) behaviour */ "stwcx. %0,0,%1\n\t" "bne- 1b\n\t" - "isync" : "=&r"(tmp) : "r"(&var->counter) : "cc", "memory"); return tmp; } -static ETHR_INLINE void -ethr_native_atomic32_inc(ethr_native_atomic32_t *var) +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1 + +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var) { - /* XXX: could use weaker version here w/o eieio+isync */ - (void)ethr_native_atomic32_inc_return(var); + ethr_sint32_t res; + res = ethr_native_atomic32_inc_return(var); + __asm__ __volatile("isync\n\t" : : : "memory"); + return res; } + + +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN 1 static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var) @@ -115,82 +130,120 @@ ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var) ethr_sint32_t tmp; __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%1\n\t" "addic %0,%0,-1\n\t" "stwcx. 
%0,0,%1\n\t" "bne- 1b\n\t" - "isync" : "=&r"(tmp) : "r"(&var->counter) : "cc", "memory"); return tmp; } -static ETHR_INLINE void -ethr_native_atomic32_dec(ethr_native_atomic32_t *var) +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB 1 + +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_dec_return_acqb(ethr_native_atomic32_t *var) { - /* XXX: could use weaker version here w/o eieio+isync */ - (void)ethr_native_atomic32_dec_return(var); + ethr_sint32_t res; + res = ethr_native_atomic32_dec_return(var); + __asm__ __volatile("isync\n\t" : : : "memory"); + return res; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask) { ethr_sint32_t old, new; __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%2\n\t" "and %1,%0,%3\n\t" "stwcx. %1,0,%2\n\t" "bne- 1b\n\t" - "isync" : "=&r"(old), "=&r"(new) : "r"(&var->counter), "r"(mask) : "cc", "memory"); return old; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB 1 + +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_and_retold_acqb(ethr_native_atomic32_t *var, ethr_sint32_t mask) +{ + ethr_sint32_t res; + res = ethr_native_atomic32_and_retold(var, mask); + __asm__ __volatile("isync\n\t" : : : "memory"); + return res; +} + +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask) { ethr_sint32_t old, new; __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%2\n\t" "or %1,%0,%3\n\t" "stwcx. %1,0,%2\n\t" "bne- 1b\n\t" - "isync" : "=&r"(old), "=&r"(new) : "r"(&var->counter), "r"(mask) : "cc", "memory"); return old; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB 1 + +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_or_retold_acqb(ethr_native_atomic32_t *var, ethr_sint32_t mask) +{ + ethr_sint32_t res; + res = ethr_native_atomic32_or_retold(var, mask); + __asm__ __volatile("isync\n\t" : : : "memory"); + return res; +} + + +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val) { ethr_sint32_t tmp; __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%1\n\t" "stwcx. %2,0,%1\n\t" "bne- 1b\n\t" - "isync" : "=&r"(tmp) : "r"(&var->counter), "r"(val) : "cc", "memory"); return tmp; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB 1 + +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_xchg_acqb(ethr_native_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + res = ethr_native_atomic32_xchg(var, val); + __asm__ __volatile("isync\n\t" : : : "memory"); + return res; +} + +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var, ethr_sint32_t new, @@ -199,14 +252,12 @@ ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var, ethr_sint32_t old; __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%2\n\t" "cmpw 0,%0,%3\n\t" "bne 2f\n\t" "stwcx. %1,0,%2\n\t" "bne- 1b\n\t" - "isync\n" "2:" : "=&r"(old) : "r"(new), "r"(&var->counter), "r"(expected) @@ -215,25 +266,30 @@ ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var, return old; } -/* - * Atomic ops with at least specified barriers. 
- */ +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1 -static ETHR_INLINE long -ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var) +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var, + ethr_sint32_t new, + ethr_sint32_t expected) { - long res = ethr_native_atomic32_read(var); - ETHR_MEMORY_BARRIER; - return res; -} + ethr_sint32_t old; -#define ethr_native_atomic32_set_relb ethr_native_atomic32_xchg -#define ethr_native_atomic32_inc_return_acqb ethr_native_atomic32_inc_return -#define ethr_native_atomic32_dec_relb ethr_native_atomic32_dec_return -#define ethr_native_atomic32_dec_return_relb ethr_native_atomic32_dec_return + __asm__ __volatile__( + "1:\t" + "lwarx %0,0,%2\n\t" + "cmpw 0,%0,%3\n\t" + "bne 2f\n\t" + "stwcx. %1,0,%2\n\t" + "bne- 1b\n\t" + "isync\n" + "2:" + : "=&r"(old) + : "r"(new), "r"(&var->counter), "r"(expected) + : "cc", "memory"); -#define ethr_native_atomic32_cmpxchg_acqb ethr_native_atomic32_cmpxchg -#define ethr_native_atomic32_cmpxchg_relb ethr_native_atomic32_cmpxchg + return old; +} #endif /* ETHR_TRY_INLINE_FUNCS */ diff --git a/erts/include/internal/ppc32/ethr_membar.h b/erts/include/internal/ppc32/ethr_membar.h new file mode 100644 index 0000000000..ff5cc86bfb --- /dev/null +++ b/erts/include/internal/ppc32/ethr_membar.h @@ -0,0 +1,63 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + +/* + * Description: Memory barriers for PowerPC + * Author: Rickard Green + */ + +#ifndef ETHR_PPC_MEMBAR_H__ +#define ETHR_PPC_MEMBAR_H__ + +#define ETHR_LoadLoad (1 << 0) +#define ETHR_LoadStore (1 << 1) +#define ETHR_StoreLoad (1 << 2) +#define ETHR_StoreStore (1 << 3) + +static __inline__ void +ethr_lwsync__(void) +{ +#ifdef ETHR_PPC_HAVE_NO_LWSYNC + __asm__ __volatile__ ("sync\n\t" : : : "memory"); +#else +#ifndef ETHR_PPC_HAVE_LWSYNC + if (ETHR_PPC_RUNTIME_CONF_HAVE_NO_LWSYNC__) + __asm__ __volatile__ ("sync\n\t" : : : "memory"); + else +#endif + __asm__ __volatile__ ("lwsync\n\t" : : : "memory"); +#endif +} + +static __inline__ void +ethr_sync__(void) +{ + __asm__ __volatile__ ("sync\n\t" : : : "memory"); +} + +/* + * According to the "memory barrier intstructions" section of + * http://www.ibm.com/developerworks/systems/articles/powerpc.html + * we want to use sync when a StoreLoad is needed and lwsync for + * everything else. 
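Given that mapping, the expensive sync is only paid for when the requested set includes StoreLoad; every other combination is satisfied by the cheaper lwsync. A usage sketch, assuming ETHR_CHOOSE_EXPR (defined elsewhere in the ethread headers) selects one branch at compile time:

static __inline__ void
sketch_ppc_membar_usage(void)
{
    ETHR_MEMBAR(ETHR_LoadLoad);                  /* lwsync                 */
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); /* lwsync (release style) */
    ETHR_MEMBAR(ETHR_StoreLoad);                 /* sync (full barrier)    */
}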
+ */ +#define ETHR_MEMBAR(B) \ + ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_sync__(), ethr_lwsync__()) + +#endif diff --git a/erts/include/internal/ppc32/ethread.h b/erts/include/internal/ppc32/ethread.h index 3b619e9d01..e41c83c5da 100644 --- a/erts/include/internal/ppc32/ethread.h +++ b/erts/include/internal/ppc32/ethread.h @@ -24,12 +24,9 @@ #ifndef ETHREAD_PPC32_ETHREAD_H #define ETHREAD_PPC32_ETHREAD_H +#include "ethr_membar.h" #include "atomic.h" #include "spinlock.h" #include "rwlock.h" -#define ETHR_HAVE_NATIVE_ATOMICS 1 -#define ETHR_HAVE_NATIVE_SPINLOCKS 1 -#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1 - #endif /* ETHREAD_PPC32_ETHREAD_H */ diff --git a/erts/include/internal/ppc32/rwlock.h b/erts/include/internal/ppc32/rwlock.h index 19ec26ab68..311f000b69 100644 --- a/erts/include/internal/ppc32/rwlock.h +++ b/erts/include/internal/ppc32/rwlock.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -23,12 +23,14 @@ * * Based on the examples in Appendix E of Motorola's * "Programming Environments Manual For 32-Bit Implementations - * of the PowerPC Architecture". Uses eieio instead of sync - * in the unlock sequence, as suggested in the manual. + * of the PowerPC Architecture". */ #ifndef ETHREAD_PPC_RWLOCK_H #define ETHREAD_PPC_RWLOCK_H +#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1 +#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread" + /* Unlocked if zero, read-locked if negative, write-locked if +1. */ typedef struct { volatile int lock; @@ -47,9 +49,10 @@ ethr_native_read_unlock(ethr_native_rwlock_t *lock) { int tmp; - /* this is eieio + ethr_native_atomic_inc() - isync */ + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + + /* this is ethr_native_atomic_inc() - isync */ __asm__ __volatile__( - "eieio\n\t" "1:\t" "lwarx %0,0,%1\n\t" "addic %0,%0,1\n\t" @@ -105,7 +108,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock) static ETHR_INLINE void ethr_native_write_unlock(ethr_native_rwlock_t *lock) { - __asm__ __volatile__("eieio" : : : "memory"); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); lock->lock = 0; } diff --git a/erts/include/internal/ppc32/spinlock.h b/erts/include/internal/ppc32/spinlock.h index c8460a3e8a..4c95ec9efb 100644 --- a/erts/include/internal/ppc32/spinlock.h +++ b/erts/include/internal/ppc32/spinlock.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -23,12 +23,14 @@ * * Based on the examples in Appendix E of Motorola's * "Programming Environments Manual For 32-Bit Implementations - * of the PowerPC Architecture". Uses eieio instead of sync - * in the unlock sequence, as suggested in the manual. + * of the PowerPC Architecture". */ #ifndef ETHREAD_PPC_SPINLOCK_H #define ETHREAD_PPC_SPINLOCK_H +#define ETHR_HAVE_NATIVE_SPINLOCKS 1 +#define ETHR_NATIVE_SPINLOCK_IMPL "ethread" + /* Unlocked if zero, locked if non-zero. 
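The unlock paths above now request LoadStore|StoreStore before clearing the lock word, so neither loads nor stores made inside the critical section can sink below the releasing store; eieio was dropped because, for ordinary cacheable memory, it orders stores only. A hypothetical caller, assuming the lock functions from this header:

static void
sketch_update_counter(ethr_native_spinlock_t *lck, volatile int *shared)
{
    ethr_native_spin_lock(lck);
    (*shared)++;                   /* protected read-modify-write          */
    ethr_native_spin_unlock(lck);  /* lwsync via ETHR_MEMBAR, then store 0 */
}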
*/ typedef struct { volatile unsigned int lock; @@ -45,7 +47,7 @@ ethr_native_spinlock_init(ethr_native_spinlock_t *lock) static ETHR_INLINE void ethr_native_spin_unlock(ethr_native_spinlock_t *lock) { - __asm__ __volatile__("eieio" : : : "memory"); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); lock->lock = 0; } diff --git a/erts/include/internal/pthread/ethr_event.h b/erts/include/internal/pthread/ethr_event.h index 4c29b28536..d0a77990cc 100644 --- a/erts/include/internal/pthread/ethr_event.h +++ b/erts/include/internal/pthread/ethr_event.h @@ -21,7 +21,7 @@ * Author: Rickard Green */ -#if defined(ETHR_HAVE_LINUX_FUTEX) && defined(ETHR_HAVE_NATIVE_ATOMICS) +#if defined(ETHR_HAVE_LINUX_FUTEX) && defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) /* --- Linux futex implementation of ethread events ------------------------- */ #define ETHR_LINUX_FUTEX_IMPL__ @@ -62,8 +62,7 @@ static void ETHR_INLINE ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e) { ethr_sint32_t val; - ETHR_MEMORY_BARRIER; - val = ethr_atomic32_xchg(&e->futex, ETHR_EVENT_ON__); + val = ethr_atomic32_xchg_mb(&e->futex, ETHR_EVENT_ON__); if (val == ETHR_EVENT_OFF_WAITER__) { int res = ETHR_FUTEX__(&e->futex, ETHR_FUTEX_WAKE__, 1); if (res != 0) @@ -99,8 +98,7 @@ static void ETHR_INLINE ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e) { ethr_sint32_t val; - ETHR_MEMORY_BARRIER; - val = ethr_atomic32_xchg(&e->state, ETHR_EVENT_ON__); + val = ethr_atomic32_xchg_mb(&e->state, ETHR_EVENT_ON__); if (val == ETHR_EVENT_OFF_WAITER__) { int res = pthread_mutex_lock(&e->mtx); if (res != 0) diff --git a/erts/include/internal/sparc32/atomic.h b/erts/include/internal/sparc32/atomic.h index c297522ab1..fe1daaa9cf 100644 --- a/erts/include/internal/sparc32/atomic.h +++ b/erts/include/internal/sparc32/atomic.h @@ -35,23 +35,16 @@ #ifdef ETHR_INCLUDE_ATOMIC_IMPL__ -#ifndef ETHR_SPARC_V9_ATOMIC_COMMON__ -#define ETHR_SPARC_V9_ATOMIC_COMMON__ - -#define ETHR_MEMORY_BARRIER \ - __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreLoad|#StoreStore\n" \ - : : : "memory") - -#endif /* ETHR_SPARC_V9_ATOMIC_COMMON__ */ - #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 #define ETHR_HAVE_NATIVE_ATOMIC32 1 +#define ETHR_NATIVE_ATOMIC32_IMPL "ethread" #define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X #define ETHR_ATMC_T__ ethr_native_atomic32_t #define ETHR_AINT_T__ ethr_sint32_t #define ETHR_CAS__ "cas" #elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8 #define ETHR_HAVE_NATIVE_ATOMIC64 1 +#define ETHR_NATIVE_ATOMIC64_IMPL "ethread" #define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X #define ETHR_ATMC_T__ ethr_native_atomic64_t #define ETHR_AINT_T__ ethr_sint64_t @@ -66,17 +59,23 @@ typedef struct { #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ * ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var) { return (ETHR_AINT_T__ *) &var->counter; } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ - var->counter = i; -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1 +#endif static ETHR_INLINE void ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) @@ -84,182 +83,49 @@ ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) var->counter = i; } +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1 +#else +# 
define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) { return var->counter; } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) -{ - ETHR_AINT_T__ old, tmp; - - __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory"); - do { - old = var->counter; - tmp = old+incr; - __asm__ __volatile__( - ETHR_CAS__ " [%2], %1, %0" - : "=&r"(tmp) - : "r"(old), "r"(&var->counter), "0"(tmp) - : "memory"); - } while (__builtin_expect(old != tmp, 0)); - __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory"); - return old+incr; -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) -{ - (void)ETHR_NATMC_FUNC__(add_return)(var, incr); -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var) -{ - return ETHR_NATMC_FUNC__(add_return)(var, 1); -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var) -{ - (void)ETHR_NATMC_FUNC__(add_return)(var, 1); -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var) -{ - return ETHR_NATMC_FUNC__(add_return)(var, -1); -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var) -{ - (void)ETHR_NATMC_FUNC__(add_return)(var, -1); -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) -{ - ETHR_AINT_T__ old, tmp; - - __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory"); - do { - old = var->counter; - tmp = old & mask; - __asm__ __volatile__( - ETHR_CAS__ " [%2], %1, %0" - : "=&r"(tmp) - : "r"(old), "r"(&var->counter), "0"(tmp) - : "memory"); - } while (__builtin_expect(old != tmp, 0)); - __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory"); - return old; -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) -{ - ETHR_AINT_T__ old, tmp; - - __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory"); - do { - old = var->counter; - tmp = old | mask; - __asm__ __volatile__( - ETHR_CAS__ " [%2], %1, %0" - : "=&r"(tmp) - : "r"(old), "r"(&var->counter), "0"(tmp) - : "memory"); - } while (__builtin_expect(old != tmp, 0)); - __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory"); - return old; -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val) -{ - ETHR_AINT_T__ old, new; - - __asm__ __volatile__("membar #LoadLoad|#StoreLoad" : : : "memory"); - do { - old = var->counter; - new = val; - __asm__ __volatile__( - ETHR_CAS__ " [%2], %1, %0" - : "=&r"(new) - : "r"(old), "r"(&var->counter), "0"(new) - : "memory"); - } while (__builtin_expect(old != new, 0)); - __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory"); - return old; -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG 1 +#endif static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old) { - __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory"); __asm__ __volatile__( ETHR_CAS__ " [%2], %1, %0" : "=&r"(new) : "r"(old), "r"(&var->counter), "0"(new) : "memory"); - __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory"); return new; } -/* - * Atomic ops with at least specified barriers. 
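The cmpxchg above can simply return new after the inline asm because the SPARC cas/casx instruction always deposits the previous memory contents in its destination register; the register bound to new therefore holds the value that was actually observed, whether or not the swap happened. The explicit membars around the instruction were dropped since callers now request ordering through ETHR_MEMBAR. A hypothetical caller that uses the returned value to detect whether its swap took effect:

static int
sketch_try_claim(ethr_native_atomic32_t *flag)
{
    /* attempt a 0 -> 1 transition; nonzero means this caller won the race */
    return ethr_native_atomic32_cmpxchg(flag, 1, 0) == 0;
}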
- */ - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var) -{ - ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var); - __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory"); - return res; -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ - __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory"); - ETHR_NATMC_FUNC__(set)(var, i); -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var) -{ - ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var); - return res; -} - -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var) -{ - ETHR_NATMC_FUNC__(dec)(var); -} - -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var) -{ - return ETHR_NATMC_FUNC__(dec_return)(var); -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old) { ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(cmpxchg)(var, new, old); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); return res; } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old) -{ - return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old); -} - #endif /* ETHR_TRY_INLINE_FUNCS */ #undef ETHR_NATMC_FUNC__ diff --git a/erts/include/internal/sparc32/ethr_membar.h b/erts/include/internal/sparc32/ethr_membar.h new file mode 100644 index 0000000000..6eb0c5a1d6 --- /dev/null +++ b/erts/include/internal/sparc32/ethr_membar.h @@ -0,0 +1,115 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. 
+ * + * %CopyrightEnd% + */ + +/* + * Description: Memory barriers for sparc-v9 + * Author: Rickard Green + */ + +#ifndef ETHR_SPARC_V9_MEMBAR_H__ +#define ETHR_SPARC_V9_MEMBAR_H__ + +#if defined(ETHR_SPARC_TSO) +/* --- Total Store Order ------------------------------------------------ */ + +#define ETHR_LoadLoad 0 +#define ETHR_LoadStore 0 +#define ETHR_StoreLoad 1 +#define ETHR_StoreStore 0 + +static __inline__ void +ethr_cb__(void) +{ + __asm__ __volatile__ ("" : : : "memory"); +} + +static __inline__ void +ethr_StoreLoad__(void) +{ + __asm__ __volatile__ ("membar #StoreLoad\n\t" : : : "memory"); +} + + +#define ETHR_MEMBAR(B) \ + ETHR_CHOOSE_EXPR((B), ethr_StoreLoad__(), ethr_cb__()) + +#elif defined(ETHR_SPARC_PSO) +/* --- Partial Store Order ---------------------------------------------- */ + +#define ETHR_LoadLoad 0 +#define ETHR_LoadStore 0 +#define ETHR_StoreLoad (1 << 0) +#define ETHR_StoreStore (1 << 1) + +static __inline__ void +ethr_cb__(void) +{ + __asm__ __volatile__ ("" : : : "memory"); +} + +static __inline__ void +ethr_StoreLoad__(void) +{ + __asm__ __volatile__ ("membar #StoreLoad\n\t" : : : "memory"); +} + +static __inline__ void +ethr_StoreStore__(void) +{ + __asm__ __volatile__ ("membar #StoreStore\n\t" : : : "memory"); +} + +static __inline__ void +ethr_StoreLoad_StoreStore__(void) +{ + __asm__ __volatile__ ("membar #StoreLoad|StoreStore\n\t" : : : "memory"); +} + +#define ETHR_MEMBAR(B) \ + ETHR_CHOOSE_EXPR( \ + (B) == ETHR_StoreLoad, \ + ethr_StoreLoad__(), \ + ETHR_CHOOSE_EXPR( \ + (B) == ETHR_StoreStore, \ + ethr_StoreStore__(), \ + ETHR_CHOOSE_EXPR( \ + (B) == (ETHR_StoreLoad|ETHR_StoreStore), \ + ethr_StoreLoad_StoreStore__(), \ + ethr_cb__()))) + +#elif defined(ETHR_SPARC_RMO) +/* --- Relaxed Memory Order --------------------------------------------- */ + +# define ETHR_LoadLoad #LoadLoad +# define ETHR_LoadStore #LoadStore +# define ETHR_StoreLoad #StoreLoad +# define ETHR_StoreStore #StoreStore + +# define ETHR_MEMBAR_AUX__(B) \ + __asm__ __volatile__("membar " #B "\n\t" : : : "memory") + +# define ETHR_MEMBAR(B) ETHR_MEMBAR_AUX__(B) + +#else + +# error "No memory order defined" + +#endif + +#endif diff --git a/erts/include/internal/sparc32/ethread.h b/erts/include/internal/sparc32/ethread.h index aea9794390..5ad92d3da7 100644 --- a/erts/include/internal/sparc32/ethread.h +++ b/erts/include/internal/sparc32/ethread.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -24,6 +24,7 @@ #ifndef ETHREAD_SPARC32_ETHREAD_H #define ETHREAD_SPARC32_ETHREAD_H +#include "ethr_membar.h" #define ETHR_ATOMIC_WANT_32BIT_IMPL__ #include "atomic.h" #if ETHR_SIZEOF_PTR == 8 @@ -33,8 +34,4 @@ #include "spinlock.h" #include "rwlock.h" -#define ETHR_HAVE_NATIVE_ATOMICS 1 -#define ETHR_HAVE_NATIVE_SPINLOCKS 1 -#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1 - #endif /* ETHREAD_SPARC32_ETHREAD_H */ diff --git a/erts/include/internal/sparc32/rwlock.h b/erts/include/internal/sparc32/rwlock.h index 465ec96866..8b6f2e9c57 100644 --- a/erts/include/internal/sparc32/rwlock.h +++ b/erts/include/internal/sparc32/rwlock.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -24,6 +24,9 @@ #ifndef ETHREAD_SPARC32_RWLOCK_H #define ETHREAD_SPARC32_RWLOCK_H +#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1 +#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread" + /* Unlocked if zero, read-locked if positive, write-locked if -1. */ typedef struct { volatile int lock; @@ -42,7 +45,7 @@ ethr_native_read_unlock(ethr_native_rwlock_t *lock) { unsigned int old, new; - __asm__ __volatile__("membar #LoadLoad|#StoreLoad"); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); do { old = lock->lock; new = old-1; @@ -70,7 +73,7 @@ ethr_native_read_trylock(ethr_native_rwlock_t *lock) : "r"(old), "r"(&lock->lock), "0"(new) : "memory"); } while (__builtin_expect(old != new, 0)); - __asm__ __volatile__("membar #StoreLoad|#StoreStore"); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); return 1; } @@ -87,7 +90,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock) if (__builtin_expect(ethr_native_read_trylock(lock) != 0, 1)) break; do { - __asm__ __volatile__("membar #LoadLoad"); + ETHR_MEMBAR(ETHR_LoadLoad); } while (ethr_native_read_is_locked(lock)); } } @@ -95,7 +98,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock) static ETHR_INLINE void ethr_native_write_unlock(ethr_native_rwlock_t *lock) { - __asm__ __volatile__("membar #LoadStore|#StoreStore"); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); lock->lock = 0; } @@ -115,7 +118,7 @@ ethr_native_write_trylock(ethr_native_rwlock_t *lock) : "r"(old), "r"(&lock->lock), "0"(new) : "memory"); } while (__builtin_expect(old != new, 0)); - __asm__ __volatile__("membar #StoreLoad|#StoreStore"); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); return 1; } @@ -132,7 +135,7 @@ ethr_native_write_lock(ethr_native_rwlock_t *lock) if (__builtin_expect(ethr_native_write_trylock(lock) != 0, 1)) break; do { - __asm__ __volatile__("membar #LoadLoad"); + ETHR_MEMBAR(ETHR_LoadLoad); } while (ethr_native_write_is_locked(lock)); } } diff --git a/erts/include/internal/sparc32/spinlock.h b/erts/include/internal/sparc32/spinlock.h index 493d514210..d4e36e09cf 100644 --- a/erts/include/internal/sparc32/spinlock.h +++ b/erts/include/internal/sparc32/spinlock.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2005-2010. All Rights Reserved. + * Copyright Ericsson AB 2005-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -24,6 +24,9 @@ #ifndef ETHR_SPARC32_SPINLOCK_H #define ETHR_SPARC32_SPINLOCK_H +#define ETHR_HAVE_NATIVE_SPINLOCKS 1 +#define ETHR_NATIVE_SPINLOCK_IMPL "ethread" + /* Locked with ldstub, so unlocked when 0 and locked when non-zero. 
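In the ETHR_SPARC_RMO branch above, the barrier constants are the bare assembler tokens (#LoadLoad and friends), so the two-step ETHR_MEMBAR()/ETHR_MEMBAR_AUX__() macro stringizes the already-expanded mask straight into the membar mnemonic. A sketch of what a release-style request turns into under that branch (function name illustrative):

static __inline__ void
sketch_rmo_release_barrier(void)
{
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    /* expands, roughly, to:
     *   __asm__ __volatile__("membar #LoadStore|#StoreStore\n\t" : : : "memory");
     */
}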
*/ typedef struct { volatile unsigned char lock; @@ -40,7 +43,7 @@ ethr_native_spinlock_init(ethr_native_spinlock_t *lock) static ETHR_INLINE void ethr_native_spin_unlock(ethr_native_spinlock_t *lock) { - __asm__ __volatile__("membar #LoadStore|#StoreStore"); + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); lock->lock = 0; } @@ -51,10 +54,10 @@ ethr_native_spin_trylock(ethr_native_spinlock_t *lock) __asm__ __volatile__( "ldstub [%1], %0\n\t" - "membar #StoreLoad|#StoreStore" : "=r"(prev) : "r"(&lock->lock) : "memory"); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); return prev == 0; } @@ -71,7 +74,7 @@ ethr_native_spin_lock(ethr_native_spinlock_t *lock) if (__builtin_expect(ethr_native_spin_trylock(lock) != 0, 1)) break; do { - __asm__ __volatile__("membar #LoadLoad"); + ETHR_MEMBAR(ETHR_LoadLoad); } while (ethr_native_spin_is_locked(lock)); } } diff --git a/erts/include/internal/tile/atomic.h b/erts/include/internal/tile/atomic.h index 5697afda25..1f1553c346 100644 --- a/erts/include/internal/tile/atomic.h +++ b/erts/include/internal/tile/atomic.h @@ -25,6 +25,7 @@ #define ETHREAD_TILE_ATOMIC_H #define ETHR_HAVE_NATIVE_ATOMIC32 1 +#define ETHR_NATIVE_ATOMIC32_IMPL "tilera" #include <atomic.h> @@ -34,27 +35,25 @@ typedef struct { volatile ethr_sint32_t counter; } ethr_native_atomic32_t; -#define ETHR_MEMORY_BARRIER __insn_mf() - #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1 + static ETHR_INLINE ethr_sint32_t * ethr_native_atomic32_addr(ethr_native_atomic32_t *var) { return (ethr_sint32_t *) &var->counter; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT 1 + static ETHR_INLINE void ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i) { var->counter = i; } -static ETHR_INLINE void -ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i) -{ - atomic_exchange_acq(&var->counter, i); -} +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1 static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_read(ethr_native_atomic32_t *var) @@ -62,139 +61,80 @@ ethr_native_atomic32_read(ethr_native_atomic32_t *var) return var->counter; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB 1 + +static ETHR_INLINE ethr_sint32_t +ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var) +{ + return atomic_compare_and_exchange_val_acq(&var->counter, + 0x81818181, + 0x81818181); +} + +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD 1 + static ETHR_INLINE void ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr) { - ETHR_MEMORY_BARRIER; atomic_add(&var->counter, incr); - ETHR_MEMORY_BARRIER; -} - +} + +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC 1 + static ETHR_INLINE void ethr_native_atomic32_inc(ethr_native_atomic32_t *var) { - ETHR_MEMORY_BARRIER; atomic_increment(&var->counter); - ETHR_MEMORY_BARRIER; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC 1 + static ETHR_INLINE void ethr_native_atomic32_dec(ethr_native_atomic32_t *var) { - ETHR_MEMORY_BARRIER; atomic_decrement(&var->counter); - ETHR_MEMORY_BARRIER; } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr) { - ethr_sint32_t res; - ETHR_MEMORY_BARRIER; - res = atomic_exchange_and_add(&var->counter, incr) + incr; - ETHR_MEMORY_BARRIER; - return res; + return atomic_exchange_and_add(&var->counter, incr) + incr; } -static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var) -{ - return 
ethr_native_atomic32_add_return(var, 1); -} - -static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var) -{ - return ethr_native_atomic32_add_return(var, -1); -} +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD 1 static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask) { - ethr_sint32_t res; - ETHR_MEMORY_BARRIER; - res = atomic_and_val(&var->counter, mask); - ETHR_MEMORY_BARRIER; - return res; + return atomic_and_val(&var->counter, mask); } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD 1 + static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask) { - ethr_sint32_t res; - ETHR_MEMORY_BARRIER; - res = atomic_or_val(&var->counter, mask); - ETHR_MEMORY_BARRIER; - return res; + return atomic_or_val(&var->counter, mask); } +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB 1 + static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val) +ethr_native_atomic32_xchg_acqb(ethr_native_atomic32_t *var, ethr_sint32_t val) { - ETHR_MEMORY_BARRIER; return atomic_exchange_acq(&var->counter, val); } -static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t expected) -{ - ETHR_MEMORY_BARRIER; - return atomic_compare_and_exchange_val_acq(&var->counter, new, expected); -} - -/* - * Atomic ops with at least specified barriers. - */ - -static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var) -{ - ethr_sint32_t res = ethr_native_atomic32_read(var); - ETHR_MEMORY_BARRIER; - return res; -} - -static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var) -{ - return ethr_native_atomic32_inc_return(var); -} - -static ETHR_INLINE void -ethr_native_atomic32_set_relb(ethr_native_atomic32_t *var, ethr_sint32_t val) -{ - ETHR_MEMORY_BARRIER; - ethr_native_atomic32_set(var, val); -} - -static ETHR_INLINE void -ethr_native_atomic32_dec_relb(ethr_native_atomic32_t *var) -{ - ethr_native_atomic32_dec(var); -} - -static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_dec_return_relb(ethr_native_atomic32_t *var) -{ - return ethr_native_atomic32_dec_return(var); -} +#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1 static ETHR_INLINE ethr_sint32_t ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var, ethr_sint32_t new, - ethr_sint32_t exp) + ethr_sint32_t expected) { - return ethr_native_atomic32_cmpxchg(var, new, exp); -} - -static ETHR_INLINE ethr_sint32_t -ethr_native_atomic32_cmpxchg_relb(ethr_native_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t exp) -{ - return ethr_native_atomic32_cmpxchg(var, new, exp); + return atomic_compare_and_exchange_val_acq(&var->counter, new, expected); } #endif /* ETHR_TRY_INLINE_FUNCS */ diff --git a/erts/include/internal/tile/ethr_membar.h b/erts/include/internal/tile/ethr_membar.h new file mode 100644 index 0000000000..7cb4f3cf9a --- /dev/null +++ b/erts/include/internal/tile/ethr_membar.h @@ -0,0 +1,35 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. 
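The TILE read_acqb above relies on a small trick: a compare-and-exchange whose expected and replacement values are the same arbitrary constant (0x81818181) never changes the variable in a visible way, since a matching value is merely rewritten with itself, yet it always returns the current contents with acquire ordering. A hypothetical reader pairing with a release-style writer, assuming the functions defined in this header:

static int
sketch_is_published(ethr_native_atomic32_t *ready_flag)
{
    /* acquire read: data written before the matching release is visible */
    return ethr_native_atomic32_read_acqb(ready_flag) != 0;
}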
+ * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + +/* + * Description: Memory barriers for TILE64/TILEPro + * Author: Rickard Green + */ + +#ifndef ETHR_TILE_MEMBAR_H__ +#define ETHR_TILE_MEMBAR_H__ + +#define ETHR_LoadLoad (1 << 0) +#define ETHR_LoadStore (1 << 1) +#define ETHR_StoreLoad (1 << 2) +#define ETHR_StoreStore (1 << 3) + +#define ETHR_MEMBAR(B) __insn_mf() + +#endif diff --git a/erts/include/internal/tile/ethread.h b/erts/include/internal/tile/ethread.h index 2de4d42bc6..7f579b50e7 100644 --- a/erts/include/internal/tile/ethread.h +++ b/erts/include/internal/tile/ethread.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2008-2009. All Rights Reserved. + * Copyright Ericsson AB 2008-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -23,8 +23,7 @@ #ifndef ETHREAD_TILE_ETHREAD_H #define ETHREAD_TILE_ETHREAD_H +#include "ethr_membar.h" #include "atomic.h" -#define ETHR_HAVE_NATIVE_ATOMICS 1 - #endif /* ETHREAD_TILE_ETHREAD_H */ diff --git a/erts/include/internal/win/ethr_atomic.h b/erts/include/internal/win/ethr_atomic.h index 60def01a7e..e11f1abf47 100644 --- a/erts/include/internal/win/ethr_atomic.h +++ b/erts/include/internal/win/ethr_atomic.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -24,364 +24,408 @@ #undef ETHR_INCLUDE_ATOMIC_IMPL__ #if !defined(ETHR_WIN_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__) -#define ETHR_WIN_ATOMIC32_H__ -#define ETHR_INCLUDE_ATOMIC_IMPL__ 4 -#undef ETHR_ATOMIC_WANT_32BIT_IMPL__ +# define ETHR_WIN_ATOMIC32_H__ +# if (defined(ETHR_MEMBAR) \ + && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE) \ + && defined(ETHR_HAVE__INTERLOCKEDEXCHANGE)) +# define ETHR_INCLUDE_ATOMIC_IMPL__ 4 +# endif +# undef ETHR_ATOMIC_WANT_32BIT_IMPL__ #elif !defined(ETHR_WIN_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__) -#define ETHR_WIN_ATOMIC64_H__ -#ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64 -/* _InterlockedCompareExchange64() required... 
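On TILE the barrier mask passed to ETHR_MEMBAR() is ignored and every request is served by the full memory fence __insn_mf(), so a narrow store-store request costs the same as a full barrier. Illustrative only:

static void
sketch_tile_membar_usage(void)
{
    ETHR_MEMBAR(ETHR_StoreStore);               /* __insn_mf()      */
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);  /* also __insn_mf() */
}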
*/ -#define ETHR_INCLUDE_ATOMIC_IMPL__ 8 +# define ETHR_WIN_ATOMIC64_H__ +# if (defined(ETHR_MEMBAR) \ + && (defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64) \ + || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ) \ + || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL))) +# define ETHR_INCLUDE_ATOMIC_IMPL__ 8 +# endif +# undef ETHR_ATOMIC_WANT_64BIT_IMPL__ #endif -#undef ETHR_ATOMIC_WANT_64BIT_IMPL__ + +#if !defined(_MSC_VER) || _MSC_VER < 1400 +# undef ETHR_INCLUDE_ATOMIC_IMPL__ #endif #ifdef ETHR_INCLUDE_ATOMIC_IMPL__ -#if defined(_MSC_VER) && _MSC_VER >= 1400 +# ifndef ETHR_WIN_ATOMIC_COMMON__ +# define ETHR_WIN_ATOMIC_COMMON__ + +# if defined(_M_IX86) || defined(_M_AMD64) || defined(_M_IA64) +# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 1 +# else +# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 0 +# endif + +# endif /* ETHR_WIN_ATOMIC_COMMON__ */ + +# if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 + +# define ETHR_HAVE_NATIVE_ATOMIC32 1 +# define ETHR_NATIVE_ATOMIC32_IMPL "windows-interlocked" + +# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT +# define ETHR_WIN_HAVE_DEC +# pragma intrinsic(_InterlockedDecrement) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT_REL +# define ETHR_WIN_HAVE_DEC_REL +# pragma intrinsic(_InterlockedDecrement_rel) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT +# define ETHR_WIN_HAVE_INC +# pragma intrinsic(_InterlockedIncrement) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ +# define ETHR_WIN_HAVE_INC_ACQ +# pragma intrinsic(_InterlockedIncrement_acq) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD +# define ETHR_WIN_HAVE_XCHG_ADD +# pragma intrinsic(_InterlockedExchangeAdd) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ +# define ETHR_WIN_HAVE_XCHG_ADD_ACQ +# pragma intrinsic(_InterlockedExchangeAdd_acq) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE +# define ETHR_WIN_HAVE_XCHG +# pragma intrinsic(_InterlockedExchange) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDAND +# define ETHR_WIN_HAVE_AND +# pragma intrinsic(_InterlockedAnd) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDOR +# define ETHR_WIN_HAVE_OR +# pragma intrinsic(_InterlockedOr) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE +# define ETHR_WIN_HAVE_CMPXCHG +# pragma intrinsic(_InterlockedCompareExchange) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ +# define ETHR_WIN_HAVE_CMPXCHG_ACQ +# pragma intrinsic(_InterlockedCompareExchange_acq) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL +# define ETHR_WIN_HAVE_CMPXCHG_REL +# pragma intrinsic(_InterlockedCompareExchange_rel) +# endif + +# define ETHR_ILCKD__(X) _Interlocked ## X +# define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## _acq +# define ETHR_ILCKD_REL__(X) _Interlocked ## X ## _rel + +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X +# define ETHR_ATMC_T__ ethr_native_atomic32_t +# define ETHR_AINT_T__ ethr_sint32_t + +# elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8 + +# define ETHR_HAVE_NATIVE_ATOMIC64 1 +# define ETHR_NATIVE_ATOMIC64_IMPL "windows-interlocked" + +# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64 +# define ETHR_WIN_HAVE_DEC +# pragma intrinsic(_InterlockedDecrement64) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64_REL +# define ETHR_WIN_HAVE_DEC_REL +# pragma intrinsic(_InterlockedDecrement64_rel) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ +# define ETHR_WIN_HAVE_INC_ACQ +# pragma intrinsic(_InterlockedIncrement64_acq) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64 +# define ETHR_WIN_HAVE_INC +# pragma intrinsic(_InterlockedIncrement64) +# endif +# ifdef 
ETHR_HAVE__INTERLOCKEDEXCHANGEADD64 +# define ETHR_WIN_HAVE_XCHG_ADD +# pragma intrinsic(_InterlockedExchangeAdd64) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ +# define ETHR_WIN_HAVE_XCHG_ADD_ACQ +# pragma intrinsic(_InterlockedExchangeAdd64_acq) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE64 +# define ETHR_WIN_HAVE_XCHG +# pragma intrinsic(_InterlockedExchange64) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDAND64 +# define ETHR_WIN_HAVE_AND +# pragma intrinsic(_InterlockedAnd64) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDOR64 +# define ETHR_WIN_HAVE_OR +# pragma intrinsic(_InterlockedOr64) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64 +# define ETHR_WIN_HAVE_CMPXCHG +# pragma intrinsic(_InterlockedCompareExchange64) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ +# define ETHR_WIN_HAVE_CMPXCHG_ACQ +# pragma intrinsic(_InterlockedCompareExchange64_acq) +# endif +# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL +# define ETHR_WIN_HAVE_CMPXCHG_REL +# pragma intrinsic(_InterlockedCompareExchange64_rel) +# endif + +# define ETHR_ILCKD__(X) _Interlocked ## X ## 64 +# define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64_acq +# define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64_rel + +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_ATMC_T__ ethr_native_atomic64_t +# define ETHR_AINT_T__ ethr_sint64_t + +# else +# error "Unsupported integer size" +# endif -#ifndef ETHR_WIN_ATOMIC_COMMON__ -#define ETHR_WIN_ATOMIC_COMMON__ +typedef struct { + volatile ETHR_AINT_T__ value; +} ETHR_ATMC_T__; -#define ETHR_HAVE_NATIVE_ATOMICS 1 +#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) -#if defined(_M_IX86) || defined(_M_AMD64) || defined(_M_IA64) -# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 1 +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1 #else -# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 0 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1 #endif -#if defined(_M_AMD64) || (defined(_M_IX86) \ - && !defined(ETHR_PRE_PENTIUM4_COMPAT)) -# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 1 -#else -# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 0 -#endif -/* - * No configure test checking for interlocked acquire/release - * versions have been written, yet. It should define - * ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS if, and - * only if, all used interlocked operations with barriers - * exists. - * - * Note, that these are pure optimizations for the itanium - * processor. 
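The set_relb above checks at run time, on 32-bit x86 builds, whether the processor lacks SSE2: without SSE2 there is no mfence-style instruction, so the release store falls back to a locked _InterlockedExchange (which implies a full barrier), while on SSE2-capable hardware a membar followed by a plain store is enough. This mirrors the x86 spin unlock earlier in the diff. A hypothetical publisher using the operation, assuming a reader pairs it with an acquire read:

static void
sketch_publish(ethr_native_atomic32_t *slot, ethr_sint32_t value)
{
    /* ... initialise the data that 'value' refers to ... */
    ethr_native_atomic32_set_relb(slot, value);  /* release store */
}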
- */ +static ETHR_INLINE ETHR_AINT_T__ * +ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var) +{ + return (ETHR_AINT_T__ *) &var->value; +} -#include <intrin.h> -#undef ETHR_COMPILER_BARRIER -#define ETHR_COMPILER_BARRIER _ReadWriteBarrier() -#pragma intrinsic(_ReadWriteBarrier) -#pragma intrinsic(_InterlockedCompareExchange) - -#if defined(_M_AMD64) || (defined(_M_IX86) \ - && !defined(ETHR_PRE_PENTIUM4_COMPAT)) -#include <emmintrin.h> -#include <mmintrin.h> -#pragma intrinsic(_mm_mfence) -#define ETHR_MEMORY_BARRIER _mm_mfence() -#pragma intrinsic(_mm_sfence) -#define ETHR_WRITE_MEMORY_BARRIER _mm_sfence() -#pragma intrinsic(_mm_lfence) -#define ETHR_READ_MEMORY_BARRIER _mm_lfence() -#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER +#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1 #else - -#define ETHR_MEMORY_BARRIER \ -do { \ - volatile long x___ = 0; \ - _InterlockedCompareExchange(&x___, (long) 1, (long) 0); \ -} while (0) - +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1 #endif -#endif /* ETHR_WIN_ATOMIC_COMMON__ */ +static ETHR_INLINE void +ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) +{ + var->value = i; +} #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1 +#endif -#define ETHR_HAVE_NATIVE_ATOMIC32 1 +static ETHR_INLINE void +ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) +{ +#if defined(_M_IX86) + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + (void) ETHR_ILCKD__(Exchange)(&var->value, i); + else +#endif /* _M_IX86 */ + { + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + var->value = i; + } +} -/* - * All used operations available as 32-bit intrinsics - */ +#endif /* ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ */ -#pragma intrinsic(_InterlockedDecrement) -#pragma intrinsic(_InterlockedIncrement) -#pragma intrinsic(_InterlockedExchangeAdd) -#pragma intrinsic(_InterlockedExchange) -#pragma intrinsic(_InterlockedAnd) -#pragma intrinsic(_InterlockedOr) -#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS -#pragma intrinsic(_InterlockedExchangeAdd_acq) -#pragma intrinsic(_InterlockedIncrement_acq) -#pragma intrinsic(_InterlockedDecrement_rel) -#pragma intrinsic(_InterlockedCompareExchange_acq) -#pragma intrinsic(_InterlockedCompareExchange_rel) -#endif +#if defined(ETHR_WIN_HAVE_XCHG) -#define ETHR_ILCKD__(X) _Interlocked ## X -#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS -#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## _acq -#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## _rel +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB 1 #else -#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X -#define ETHR_ILCKD_REL__(X) _Interlocked ## X +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB 1 #endif -#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X -#define ETHR_ATMC_T__ ethr_native_atomic32_t -#define ETHR_AINT_T__ ethr_sint32_t - -#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8 +static ETHR_INLINE void +ETHR_NATMC_FUNC__(set_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) +{ + (void) ETHR_ILCKD__(Exchange)(&var->value, i); +} -#define ETHR_HAVE_NATIVE_ATOMIC64 1 +#endif -/* - * _InterlockedCompareExchange64() is required. The other may not - * be available, but if so, we can generate them. 
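The removed comment refers to the technique, also removed below, of generating any missing 64-bit interlocked operation from _InterlockedCompareExchange64(): read the current value, compute the desired one, and retry the compare-exchange until it succeeds. A minimal sketch of that synthesis (name illustrative, MSVC assumed):

static __forceinline __int64
sketch_interlocked_add64(__int64 volatile *ptr, __int64 incr)
{
    __int64 exp, act = *ptr;
    do {
        exp = act;
        act = _InterlockedCompareExchange64(ptr, exp + incr, exp);
    } while (act != exp);       /* act is the value CompareExchange observed */
    return exp + incr;          /* value after the addition                  */
}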
- */ -#pragma intrinsic(_InterlockedCompareExchange64) +#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ -#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ -#define ETHR_OWN_ILCKD_INIT_VAL__(PTR) *(PTR) +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1 #else -#define ETHR_OWN_ILCKD_INIT_VAL__(PTR) (__int64) 0 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1 #endif -#define ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, PTR, NEW, ACT, EXP, OPS, RET) \ -{ \ - __int64 NEW, ACT, EXP; \ - ACT = ETHR_OWN_ILCKD_INIT_VAL__(PTR); \ - do { \ - EXP = ACT; \ - { OPS; } \ - ACT = _InterlockedCompareExchange64(PTR, NEW, EXP); \ - } while (ACT != EXP); \ - return RET; \ +static ETHR_INLINE ETHR_AINT_T__ +ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) +{ + return var->value; } -#define ETHR_OWN_ILCKD_1_IMPL__(FUNC, NEW, ACT, EXP, OPS, RET) \ -static __forceinline __int64 \ -FUNC(__int64 volatile *ptr) \ -ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, ptr, NEW, ACT, EXP, OPS, RET) - -#define ETHR_OWN_ILCKD_2_IMPL__(FUNC, NEW, ACT, EXP, OPS, ARG, RET) \ -static __forceinline __int64 \ -FUNC(__int64 volatile *ptr, __int64 ARG) \ -ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, ptr, NEW, ACT, EXP, OPS, RET) +#endif /* ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ */ +#if defined(ETHR_WIN_HAVE_XCHG_ADD) -#ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64 -#pragma intrinsic(_InterlockedDecrement64) -#else -ETHR_OWN_ILCKD_1_IMPL__(_InterlockedDecrement64, new, act, exp, - new = act - 1, new) -#endif -#ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64 -#pragma intrinsic(_InterlockedIncrement64) -#else -ETHR_OWN_ILCKD_1_IMPL__(_InterlockedIncrement64, new, act, exp, - new = act + 1, new) -#endif -#ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64 -#pragma intrinsic(_InterlockedExchangeAdd64) -#else -ETHR_OWN_ILCKD_2_IMPL__(_InterlockedExchangeAdd64, new, act, exp, - new = act + arg, arg, act) -#endif -#ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE64 -#pragma intrinsic(_InterlockedExchange64) -#else -ETHR_OWN_ILCKD_2_IMPL__(_InterlockedExchange64, new, act, exp, - new = arg, arg, act) -#endif -#ifdef ETHR_HAVE__INTERLOCKEDAND64 -#pragma intrinsic(_InterlockedAnd64) -#else -ETHR_OWN_ILCKD_2_IMPL__(_InterlockedAnd64, new, act, exp, - new = act & arg, arg, act) -#endif -#ifdef ETHR_HAVE__INTERLOCKEDOR64 -#pragma intrinsic(_InterlockedOr64) +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1 #else -ETHR_OWN_ILCKD_2_IMPL__(_InterlockedOr64, new, act, exp, - new = act | arg, arg, act) -#endif -#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS -#pragma intrinsic(_InterlockedExchangeAdd64_acq) -#pragma intrinsic(_InterlockedIncrement64_acq) -#pragma intrinsic(_InterlockedDecrement64_rel) -#pragma intrinsic(_InterlockedCompareExchange64_acq) -#pragma intrinsic(_InterlockedCompareExchange64_rel) +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1 #endif -#define ETHR_ILCKD__(X) _Interlocked ## X ## 64 -#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS -#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64_acq -#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64_rel -#else -#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64 -#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64 +static ETHR_INLINE ETHR_AINT_T__ +ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) +{ + return ETHR_ILCKD__(ExchangeAdd)(&var->value, i) + i; +} + #endif -#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X -#define ETHR_ATMC_T__ ethr_native_atomic64_t -#define ETHR_AINT_T__ ethr_sint64_t +#if defined(ETHR_WIN_HAVE_INC) +#if 
ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB 1 #else -#error "Unsupported integer size" +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB 1 #endif -typedef struct { - volatile ETHR_AINT_T__ value; -} ETHR_ATMC_T__; - -#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) - -static ETHR_INLINE ETHR_AINT_T__ * -ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var) +static ETHR_INLINE ETHR_AINT_T__ +ETHR_NATMC_FUNC__(inc_return_mb)(ETHR_ATMC_T__ *var) { - return (ETHR_AINT_T__ *) &var->value; + return ETHR_ILCKD__(Increment)(&var->value); } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ -#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ - var->value = i; -#else - (void) ETHR_ILCKD__(Exchange)(&var->value, i); #endif -} -static ETHR_INLINE void -ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ -#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ - var->value = i; +#if defined(ETHR_WIN_HAVE_INC_ACQ) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1 #else - (void) ETHR_ILCKD__(Exchange)(&var->value, i); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB 1 #endif -} static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var) +ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var) { -#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ - return var->value; -#else - return ETHR_ILCKD__(ExchangeAdd)(&var->value, (ETHR_AINT_T__) 0); -#endif + return ETHR_ILCKD_ACQ__(Increment)(&var->value); } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr) -{ - (void) ETHR_ILCKD__(ExchangeAdd)(&var->value, incr); -} +#endif + +#if defined(ETHR_WIN_HAVE_DEC) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) +ETHR_NATMC_FUNC__(dec_return_mb)(ETHR_ATMC_T__ *var) { - return ETHR_ILCKD__(ExchangeAdd)(&var->value, i) + i; + return ETHR_ILCKD__(Decrement)(&var->value); } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var) -{ - (void) ETHR_ILCKD__(Increment)(&var->value); -} +#endif -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var) -{ - (void) ETHR_ILCKD__(Decrement)(&var->value); -} +#if defined(ETHR_WIN_HAVE_DEC_REL) -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var) -{ - return ETHR_ILCKD__(Increment)(&var->value); -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var) +ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var) { - return ETHR_ILCKD__(Decrement)(&var->value); + return ETHR_ILCKD_REL__(Decrement)(&var->value); } +#endif + +#if defined(ETHR_WIN_HAVE_AND) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) +ETHR_NATMC_FUNC__(and_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) { return ETHR_ILCKD__(And)(&var->value, mask); } +#endif + +#if defined(ETHR_WIN_HAVE_OR) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# 
define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) +ETHR_NATMC_FUNC__(or_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask) { return ETHR_ILCKD__(Or)(&var->value, mask); } -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, - ETHR_AINT_T__ new, - ETHR_AINT_T__ old) -{ - return ETHR_ILCKD__(CompareExchange)(&var->value, new, old); -} +#endif +#if defined(ETHR_WIN_HAVE_XCHG) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new) +ETHR_NATMC_FUNC__(xchg_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new) { return ETHR_ILCKD__(Exchange)(&var->value, new); } -/* - * Atomic ops with at least specified barriers. - */ +#endif -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var) -{ -#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ - ETHR_AINT_T__ val = var->value; - ETHR_COMPILER_BARRIER; - return val; +#if defined(ETHR_WIN_HAVE_CMPXCHG) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1 #else - return ETHR_ILCKD_ACQ__(ExchangeAdd)(&var->value, (ETHR_AINT_T__) 0); +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1 #endif -} static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var) +ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var, + ETHR_AINT_T__ new, + ETHR_AINT_T__ old) { - return ETHR_ILCKD_ACQ__(Increment)(&var->value); + return ETHR_ILCKD__(CompareExchange)(&var->value, new, old); } -static ETHR_INLINE void -ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i) -{ -#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ - ETHR_COMPILER_BARRIER; - var->value = i; -#else - (void) ETHR_ILCKD_REL__(Exchange)(&var->value, i); #endif -} -static ETHR_INLINE void -ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var) -{ - (void) ETHR_ILCKD_REL__(Decrement)(&var->value); -} +#if defined(ETHR_WIN_HAVE_CMPXCHG_ACQ) -static ETHR_INLINE ETHR_AINT_T__ -ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var) -{ - return ETHR_ILCKD_REL__(Decrement)(&var->value); -} +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1 +#endif static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, @@ -391,6 +435,16 @@ ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, return ETHR_ILCKD_ACQ__(CompareExchange)(&var->value, new, old); } +#endif + +#if defined(ETHR_WIN_HAVE_CMPXCHG_REL) + +#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4 +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB 1 +#else +# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB 1 +#endif + static ETHR_INLINE ETHR_AINT_T__ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, @@ -399,6 +453,8 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, return ETHR_ILCKD_REL__(CompareExchange)(&var->value, new, old); } +#endif + #endif /* ETHR_TRY_INLINE_FUNCS */ #undef ETHR_ILCKD__ @@ -408,8 +464,17 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, #undef ETHR_ATMC_T__ #undef ETHR_AINT_T__ #undef ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ -#undef ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ - -#endif /* _MSC_VER 
*/ +#undef ETHR_WIN_HAVE_CMPXCHG +#undef ETHR_WIN_HAVE_DEC +#undef ETHR_WIN_HAVE_INC +#undef ETHR_WIN_HAVE_XCHG_ADD +#undef ETHR_WIN_HAVE_XCHG +#undef ETHR_WIN_HAVE_AND +#undef ETHR_WIN_HAVE_OR +#undef ETHR_WIN_HAVE_XCHG_ADD_ACQ +#undef ETHR_WIN_HAVE_INC_ACQ +#undef ETHR_WIN_HAVE_DEC_REL +#undef ETHR_WIN_HAVE_CMPXCHG_ACQ +#undef ETHR_WIN_HAVE_CMPXCHG_REL #endif /* ETHR_INCLUDE_ATOMIC_IMPL__ */ diff --git a/erts/include/internal/win/ethr_dw_atomic.h b/erts/include/internal/win/ethr_dw_atomic.h new file mode 100644 index 0000000000..a3e7ffc3aa --- /dev/null +++ b/erts/include/internal/win/ethr_dw_atomic.h @@ -0,0 +1,154 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + +/* + * Description: Native double word atomics for Windows + * Author: Rickard Green + */ + +#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__ +#ifndef ETHR_X86_DW_ATOMIC_H__ +# define ETHR_X86_DW_ATOMIC_H__ +# if ((ETHR_SIZEOF_PTR == 4 \ + && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64)) \ + || (ETHR_SIZEOF_PTR == 8 \ + && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE128))) +# define ETHR_INCLUDE_DW_ATOMIC_IMPL__ +# endif +#endif + +#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__ + +# if ETHR_SIZEOF_PTR == 4 +# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC +# else +# define ETHR_HAVE_NATIVE_DW_ATOMIC +# endif +# define ETHR_NATIVE_DW_ATOMIC_IMPL "windows-interlocked" + +# if defined(_M_IX86) || defined(_M_AMD64) +/* + * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it will be used + * at runtime in order to determine if the native or the fallback + * implementation should be used. + */ +# define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \ + ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__ +# endif + +# include <intrin.h> +# if ETHR_SIZEOF_PTR == 4 +# pragma intrinsic(_InterlockedCompareExchange64) +# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7 +# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t +# else +# pragma intrinsic(_InterlockedCompareExchange128) +# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf +# endif + +typedef volatile __int64 * ethr_native_dw_ptr_t; + +/* + * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned + * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is + * not common, and at least some glibc malloc implementations + * only provide 4 byte alignment in 32-bit mode. + * + * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte + * aligned memory in 32-bit mode. A malloc implementation that does + * not adhere to these alignment requirements is seriously broken, + * and we won't bother trying to work around it. + * + * Since memory alignment may be off by one word we need to align at + * runtime. We, therefore, need an extra word allocated.
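+ *
+ * As an illustrative sketch only (the concrete address below is made
+ * up): with ETHR_SIZEOF_PTR == 8 the alignment mask is 0xf and three
+ * words (24 bytes) are reserved per variable. If the c[] buffer of
+ * the union below happens to start at the 8 byte aligned address
+ * 0x1008, then ((ethr_uint_t) &c[0]) & ETHR_DW_NATMC_ALIGN_MASK__ is
+ * 0x8, and ETHR_DW_NATMC_MEM__() evaluates to &c[0x8], i.e. address
+ * 0x1010, the first 16 byte aligned address inside the buffer.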
+ */ +#define ETHR_DW_NATMC_MEM__(VAR) \ + (&var->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__]) +typedef union { +#ifdef ETHR_NATIVE_SU_DW_SINT_T + volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint; +#endif + volatile ethr_sint_t sint[3]; + volatile char c[ETHR_SIZEOF_PTR*3]; +} ethr_native_dw_atomic_t; + +#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__) + +#ifdef ETHR_DEBUG +# define ETHR_DW_DBG_ALIGNED__(PTR) \ + ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0); +#else +# define ETHR_DW_DBG_ALIGNED__(PTR) +#endif + +#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR + +static ETHR_INLINE ethr_sint_t * +ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var) +{ + ethr_sint_t *p = (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var); + ETHR_DW_DBG_ALIGNED__(p); + return p; +} + + +#if ETHR_SIZEOF_PTR == 4 + +#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB + +static ETHR_INLINE ethr_sint64_t +ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var, + ethr_sint64_t new, + ethr_sint64_t exp) +{ + ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var); + ETHR_DW_DBG_ALIGNED__(p); + return (ethr_sint64_t) _InterlockedCompareExchange64(p, new, exp); +} + +#elif ETHR_SIZEOF_PTR == 8 + +#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB + +#ifdef ETHR_BIGENDIAN +# define ETHR_WIN_LOW_WORD__ 1 +# define ETHR_WIN_HIGH_WORD__ 0 +#else +# define ETHR_WIN_LOW_WORD__ 0 +# define ETHR_WIN_HIGH_WORD__ 1 +#endif + +static ETHR_INLINE int +ethr_native_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var, + ethr_sint_t *new, + ethr_sint_t *xchg) +{ + ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var); + ETHR_DW_DBG_ALIGNED__(p); + return (int) _InterlockedCompareExchange128(p, + new[ETHR_WIN_HIGH_WORD__], + new[ETHR_WIN_LOW_WORD__], + xchg); +} + +#endif + +#endif /* ETHR_TRY_INLINE_FUNCS */ + +#endif /* ETHR_INCLUDE_DW_ATOMIC_IMPL__ */ diff --git a/erts/include/internal/win/ethr_event.h b/erts/include/internal/win/ethr_event.h index 598816b2c6..6363174a74 100644 --- a/erts/include/internal/win/ethr_event.h +++ b/erts/include/internal/win/ethr_event.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2009-2010. All Rights Reserved. + * Copyright Ericsson AB 2009-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -21,12 +21,12 @@ * Author: Rickard Green */ -#define ETHR_EVENT_OFF_WAITER__ ((long) -1) -#define ETHR_EVENT_OFF__ ((long) 1) -#define ETHR_EVENT_ON__ ((long) 0) +#define ETHR_EVENT_OFF_WAITER__ ((ethr_sint32_t) -1) +#define ETHR_EVENT_OFF__ ((ethr_sint32_t) 1) +#define ETHR_EVENT_ON__ ((ethr_sint32_t) 0) typedef struct { - volatile long state; + ethr_atomic32_t state; HANDLE handle; } ethr_event; @@ -38,7 +38,7 @@ static ETHR_INLINE void ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e) { /* _InterlockedExchange() imply a full memory barrier which is important */ - long state = _InterlockedExchange(&e->state, ETHR_EVENT_ON__); + ethr_sint32_t state = ethr_atomic32_xchg_wb(&e->state, ETHR_EVENT_ON__); if (state == ETHR_EVENT_OFF_WAITER__) { if (!SetEvent(e->handle)) ETHR_FATAL_ERROR__(ethr_win_get_errno__()); @@ -48,8 +48,8 @@ ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e) static ETHR_INLINE void ETHR_INLINE_FUNC_NAME_(ethr_event_reset)(ethr_event *e) { - /* _InterlockedExchange() imply a full memory barrier which is important */ - InterlockedExchange(&e->state, ETHR_EVENT_OFF__); + ethr_atomic32_set(&e->state, ETHR_EVENT_OFF__); + ETHR_MEMORY_BARRIER; } #endif diff --git a/erts/include/internal/win/ethr_membar.h b/erts/include/internal/win/ethr_membar.h new file mode 100644 index 0000000000..8237660b2c --- /dev/null +++ b/erts/include/internal/win/ethr_membar.h @@ -0,0 +1,145 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. 
+ * + * %CopyrightEnd% + */ + +/* + * Description: Memory barriers for Windows + * Author: Rickard Green + */ + +#if (!defined(ETHR_WIN_MEMBAR_H__) \ + && (defined(_MSC_VER) && _MSC_VER >= 1400) \ + && (defined(_M_AMD64) \ + || defined(_M_IA64) \ + || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE))) +#define ETHR_WIN_MEMBAR_H__ + +#define ETHR_LoadLoad (1 << 0) +#define ETHR_LoadStore (1 << 1) +#define ETHR_StoreLoad (1 << 2) +#define ETHR_StoreStore (1 << 3) + +#include <intrin.h> +#undef ETHR_COMPILER_BARRIER +#define ETHR_COMPILER_BARRIER _ReadWriteBarrier() +#pragma intrinsic(_ReadWriteBarrier) + +#pragma intrinsic(_InterlockedCompareExchange) + +#define ETHR_MB_USING_INTERLOCKED__ \ +do { \ + volatile long x___ = 0; \ + (void) _InterlockedCompareExchange(&x___, 2, 1); \ +} while (0) + +#if defined(_M_IA64) + +#define ETHR_MEMBAR(B) __mf() + +#elif defined(_M_AMD64) || defined(_M_IX86) + +#include <emmintrin.h> +#include <mmintrin.h> + +#if ETHR_SIZEOF_PTR == 4 +# define ETHR_NO_SSE2_MB__ ETHR_MB_USING_INTERLOCKED__ +#endif +#pragma intrinsic(_mm_mfence) +#pragma intrinsic(_mm_sfence) +#pragma intrinsic(_mm_lfence) + +static __forceinline void +ethr_cfence__(void) +{ + _ReadWriteBarrier(); +} + +static __forceinline void +ethr_mfence__(void) +{ +#if ETHR_SIZEOF_PTR == 4 + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + ETHR_NO_SSE2_MB__; + else +#endif + _mm_mfence(); +} + +static __forceinline void +ethr_sfence__(void) +{ +#if ETHR_SIZEOF_PTR == 4 + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + ETHR_NO_SSE2_MB__; + else +#endif + _mm_sfence(); +} + +static __forceinline void +ethr_lfence__(void) +{ +#if ETHR_SIZEOF_PTR == 4 + if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) + ETHR_NO_SSE2_MB__; + else +#endif + _mm_lfence(); +} + +#define ETHR_X86_OUT_OF_ORDER_MEMBAR(B) \ + ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \ + ethr_sfence__(), \ + ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \ + ethr_lfence__(), \ + ethr_mfence__())) + +#ifdef ETHR_X86_OUT_OF_ORDER + +#define ETHR_MEMBAR(B) \ + ETHR_X86_OUT_OF_ORDER_MEMBAR((B)) + +#else /* !ETHR_X86_OUT_OF_ORDER (the default) */ + +/* + * We assume that only stores before loads may be reordered. That is, + * we assume that *no* instructions like these are used: + * - CLFLUSH, + * - streaming stores executed with non-temporal move, + * - string operations, or + * - other instructions which aren't LoadLoad, LoadStore, and StoreStore + * ordered by themselves + * If such instructions are used, either insert memory barriers + * using ETHR_X86_OUT_OF_ORDER_MEMBAR() at appropriate places, or + * define ETHR_X86_OUT_OF_ORDER. For more info see Intel 64 and IA-32 + * Architectures Software Developer's Manual; Vol 3A; Chapter 8.2.2. + */ + +#define ETHR_MEMBAR(B) \ + ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_mfence__(), ethr_cfence__()) + +#endif + +#else /* No knowledge about platform; use interlocked fallback */ + +#define ETHR_MEMBAR(B) ETHR_MB_USING_INTERLOCKED__ +#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_MB_USING_INTERLOCKED__ + +#endif + +#endif /* ETHR_WIN_MEMBAR_H__ */ diff --git a/erts/include/internal/win/ethread.h b/erts/include/internal/win/ethread.h index c01b17cf14..8be35e810e 100644 --- a/erts/include/internal/win/ethread.h +++ b/erts/include/internal/win/ethread.h @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -25,11 +25,13 @@ #ifndef ETHREAD_WIN_H__ #define ETHREAD_WIN_H__ +#include "ethr_membar.h" #define ETHR_ATOMIC_WANT_32BIT_IMPL__ #include "ethr_atomic.h" #if ETHR_SIZEOF_PTR == 8 # define ETHR_ATOMIC_WANT_64BIT_IMPL__ # include "ethr_atomic.h" #endif +#include "ethr_dw_atomic.h" #endif diff --git a/erts/lib_src/Makefile.in b/erts/lib_src/Makefile.in index 757b3b24e2..12b8732735 100644 --- a/erts/lib_src/Makefile.in +++ b/erts/lib_src/Makefile.in @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 2004-2010. All Rights Reserved. +# Copyright Ericsson AB 2004-2011. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -65,7 +65,7 @@ TYPE_SUFFIX=.purecov PRE_LD=purecov $(PURECOV_BUILD_OPTIONS) else ifeq ($(TYPE),gcov) -CFLAGS=@DEBUG_CFLAGS@ -fprofile-arcs -ftest-coverage -O0 +CFLAGS=@DEBUG_CFLAGS@ -DGCOV -fprofile-arcs -ftest-coverage -O0 TYPE_SUFFIX=.gcov PRE_LD= else @@ -288,6 +288,10 @@ ETHREAD_LIB_SRC=common/ethr_aux.c \ common/ethr_cbf.c \ $(ETHR_THR_LIB_BASE_DIR)/ethread.c \ $(ETHR_THR_LIB_BASE_DIR)/ethr_event.c +ETHR_X86_SSE2_ASM=@ETHR_X86_SSE2_ASM@ +ifeq ($(ETHR_X86_SSE2_ASM),yes) +ETHREAD_LIB_SRC += pthread/ethr_x86_sse2_asm.c +endif ETHREAD_LIB_NAME=ethread$(TYPE_SUFFIX) ifeq ($(USING_VC),yes) @@ -382,10 +386,8 @@ $(ERTS_LIB): $(ERTS_LIB_OBJS) # Object files # -ifeq ($(TYPE)-@GCC@,debug-yes) -$(r_OBJ_DIR)/ethr_aux.o: common/ethr_aux.c - $(CC) $(THR_DEFS) $(CFLAGS) -Wno-unused-function $(INCLUDES) -c $< -o $@ -endif +$(r_OBJ_DIR)/ethr_x86_sse2_asm.o: pthread/ethr_x86_sse2_asm.c + $(CC) -msse2 $(THR_DEFS) $(CFLAGS) $(INCLUDES) -c $< -o $@ $(r_OBJ_DIR)/%.o: common/%.c $(CC) $(THR_DEFS) $(CFLAGS) $(INCLUDES) -c $< -o $@ diff --git a/erts/lib_src/common/ethr_atomics.c b/erts/lib_src/common/ethr_atomics.c index 94557d904a..5796bdc22e 100644 --- a/erts/lib_src/common/ethr_atomics.c +++ b/erts/lib_src/common/ethr_atomics.c @@ -1,7 +1,16 @@ /* + * --------------- DO NOT EDIT THIS FILE! --------------- + * This file was automatically generated by the + * $ERL_TOP/erts/lib_src/utils/make_atomics_api script. + * If you need to make changes, edit the script and + * regenerate this file. + * --------------- DO NOT EDIT THIS FILE! --------------- + */ + +/* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -18,385 +27,4329 @@ */ /* - * Description: The ethread atomic API + * Description: The ethread atomics API * Author: Rickard Green */ +/* + * This file maps native atomic implementations to ethread + * API atomics. If no native atomic implementation + * is available, a less efficient fallback is used instead. + * The API consists of 32-bit size, word size (pointer size), + * and double word size atomics. 
+ * + * The following atomic operations are implemented for + * 32-bit size and word size atomics: + * - cmpxchg + * - xchg + * - set + * - init + * - add_read + * - read + * - inc_read + * - dec_read + * - add + * - inc + * - dec + * - read_band + * - read_bor + * + * The following atomic operations are implemented for + * double word size atomics: + * - cmpxchg + * - set + * - read + * - init + * + * Apart from a function implementing the atomic operation + * with unspecified memory barrier semantics, there are + * functions implementing each operation with the following + * memory barrier semantics: + * - rb (read barrier) + * - wb (write barrier) + * - acqb (acquire barrier) + * - relb (release barrier) + * - mb (full memory barrier) + * + * We implement all of these operation/barrier + * combinations, regardless of whether they are useful + * or not (some of them are useless). + * + * Double word size atomic functions are of the following + * form: + * ethr_dw_atomic_<OP>[_<BARRIER>] + * + * Word size atomic functions are of the following + * form: + * ethr_atomic_<OP>[_<BARRIER>] + * + * 32-bit size atomic functions are of the following + * form: + * ethr_atomic32_<OP>[_<BARRIER>] + * + * Apart from the operation/barrier functions + * described above, 'addr' functions are also implemented, + * which return the actual memory address used by the + * atomic variable. The 'addr' functions have no barrier + * versions. + * + * The native atomic implementation does not need to + * implement all operation/barrier combinations. + * Functions that have no native implementation will be + * constructed from existing native functionality. These + * functions will perform the wanted operation and will + * produce sufficient memory barriers, but may + * in some cases be less efficient than pure native + * versions. + * + * When we create ethread API operation/barrier functions by + * adding barriers before and after native operations it is + * assumed that: + * - A native read operation begins and ends with a load. + * - A native set operation begins and ends with a store. + * - An init operation begins with either a load or a store, + * and ends with either a load or a store. + * - All other operations begin with a load, and end with + * either a load or a store.
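+ *
+ * As an illustration of the naming scheme described above, a caller
+ * of the word size API could look as follows. This is a sketch only;
+ * the variable name and the cleanup() call are made up and not part
+ * of this file:
+ *
+ *   ethr_atomic_t refc;
+ *
+ *   ethr_atomic_init(&refc, 1);                 (no barrier guarantees)
+ *   ethr_atomic_inc(&refc);                     (no barrier guarantees)
+ *   if (ethr_atomic_dec_read_relb(&refc) == 0)  (release barrier)
+ *       cleanup();
+ *
+ * The corresponding 32-bit calls use the ethr_atomic32_ prefix, and
+ * the double word calls use the ethr_dw_atomic_ prefix and operate
+ * on ethr_dw_sint_t values passed by pointer.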
+ * + * This is the minimum functionality that a native + * implementation needs to provide: + * + * - Functions that need to be implemented: + * + * - ethr_native_[dw_|su_dw_]atomic[BITS]_addr + * - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>] + * (at least one cmpxchg, with any barrier) + * + * - Macros that need to be defined: + * + * A macro informing about the presence of the native + * implementation: + * + * - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS] + * + * A macro naming (a string constant) the implementation: + * + * - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL + * + * Each implemented native atomic function has to + * be accompanied by a defined macro of the following + * form informing about its presence: + * + * - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>] + * + * A (sparc-v9 style) membar macro: + * + * - ETHR_MEMBAR(B) + * + * which takes a combination of the following macros + * or'ed (using |) together: + * + * - ETHR_LoadLoad + * - ETHR_LoadStore + * - ETHR_StoreLoad + * - ETHR_StoreStore + * + */ + + #ifdef HAVE_CONFIG_H #include "config.h" #endif -#define ETHR_INLINE_FUNC_NAME_(X) X ## __ +#define ETHR_TRY_INLINE_FUNCS +#define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X ## __ +#define ETHR_INLINE_ATMC_FUNC_NAME_(X) X ## __ +#define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X ## __ #define ETHR_ATOMIC_IMPL__ #include "ethread.h" #include "ethr_internal.h" -#ifndef ETHR_HAVE_NATIVE_ATOMICS -ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATOMIC_ADDR_BITS]; +#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \ + || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)) +/* + * Spinlock based fallback for atomics used in the absence of a native + * implementation. + */ + +#define ETHR_ATMC_FLLBK_ADDR_BITS 10 +#define ETHR_ATMC_FLLBK_ADDR_SHIFT 6 + +typedef struct { + union { + ethr_spinlock_t lck; + char buf[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_spinlock_t))]; + } u; +} ethr_atomic_protection_t; + +extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS]; + +#define ETHR_ATOMIC_PTR2LCK__(PTR) \ +(&ethr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATMC_FLLBK_ADDR_SHIFT) \ + & ((1 << ETHR_ATMC_FLLBK_ADDR_BITS) - 1))].u.lck) + + +#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS) \ +do { \ + ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP)); \ + ethr_spin_lock(slp__); \ + { EXPS; } \ + ethr_spin_unlock(slp__); \ +} while (0) + +ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS]; + +#endif + + +#if defined(ETHR_AMC_FALLBACK__) + +/* + * Fallback for large sized (word and/or double word size) atomics using + * an "Atomic Modification Counter" based on smaller sized native atomics. + * + * We use a 63-bit modification counter and a one bit exclusive flag. + * If 32-bit native atomics are used, we need two 32-bit native atomics. + * The exclusive flag is the least significant bit, or if multiple atomics + * are used, the least significant bit of the least significant atomic. + * + * When using the AMC fallback, the following is true: + * - Reads of the same atomic variable can be done in parallel. + * - Uncontended reads don't cause any cache line invalidations, + * since no modifications are done. + * - Assuming that the AMC atomic(s) and the integer(s) containing the + * value of the implemented atomic reside in the same cache line, + * modifications will only cause invalidations of one cache line.
+ * + * When using the spinlock based fallback none of the above is true, + * however, the spinlock based fallback consumes less memory. + */ + +# if ETHR_AMC_NO_ATMCS__ != 1 && ETHR_AMC_NO_ATMCS__ != 2 +# error "Not supported" +# endif +# define ETHR_AMC_MAX_TRY_READ__ 10 +# ifdef ETHR_DEBUG +# define ETHR_DBG_CHK_EXCL_STATE(ASP, S) \ +do { \ + ETHR_AMC_SINT_T__ act = ETHR_AMC_ATMC_FUNC__(read)(&(ASP)->atomic[0]); \ + ETHR_ASSERT(act == (S) + 1); \ + ETHR_ASSERT(act & 1); \ +} while (0) +# else +# define ETHR_DBG_CHK_EXCL_STATE(ASP, S) +# endif + +static ETHR_INLINE void +amc_init(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val) +{ + avar[0] = val[0]; + if (dw) + avar[1] = val[1]; +#if ETHR_AMC_NO_ATMCS__ == 2 + ETHR_AMC_ATMC_FUNC__(init)(&amc->atomic[1], 0); #endif + ETHR_AMC_ATMC_FUNC__(init_wb)(&amc->atomic[0], 0); +} + +static ETHR_INLINE ETHR_AMC_SINT_T__ +amc_set_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ prev_state0) +{ + ETHR_AMC_SINT_T__ state0 = prev_state0; + /* Set exclusive flag. */ + while (1) { + ETHR_AMC_SINT_T__ act_state0, new_state0; + while (state0 & 1) { /* Wait until exclusive bit has been cleared */ + ETHR_SPIN_BODY; + state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); + } + /* Try to set exclusive bit */ + new_state0 = state0 + 1; + act_state0 = ETHR_AMC_ATMC_FUNC__(cmpxchg_acqb)(&amc->atomic[0], + new_state0, + state0); + if (state0 == act_state0) + return state0; /* old state0 */ + state0 = act_state0; + } +} + +static ETHR_INLINE void +amc_inc_mc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0) +{ + ETHR_AMC_SINT_T__ state0 = old_state0; + + /* Increment modification counter and reset exclusive flag. */ + + ETHR_DBG_CHK_EXCL_STATE(amc, state0); + + state0 += 2; + + ETHR_ASSERT((state0 & 1) == 0); + +#if ETHR_AMC_NO_ATMCS__ == 2 + if (state0 == 0) { + /* + * state0 wrapped, so we need to increment state1. There is no need + * for atomic inc op, since this is always done while having exclusive + * flag. + */ + ETHR_AMC_SINT_T__ state1 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]); + state1++; + ETHR_AMC_ATMC_FUNC__(set)(&amc->atomic[1], state1); + } +#endif + ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], state0); +} + +static ETHR_INLINE void +amc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0) +{ + ETHR_DBG_CHK_EXCL_STATE(amc, old_state0); + /* + * Reset exclusive flag, but leave modification counter unchanged, + * i.e., restore state to what it was before setting exclusive + * flag. 
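+ * (Not incrementing the modification counter is safe here since the
+ * callers of this function leave the protected value unmodified;
+ * concurrent readers may therefore still trust reads that overlapped
+ * with this exclusive section.)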
+ */ + ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], old_state0); +} + +static ETHR_INLINE void +amc_set(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val) +{ + ETHR_AMC_SINT_T__ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); + + state0 = amc_set_excl(amc, state0); + + avar[0] = val[0]; + if (dw) + avar[1] = val[1]; + + amc_inc_mc_unset_excl(amc, state0); +} + +static ETHR_INLINE int +amc_try_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar, + ethr_sint_t *val, ETHR_AMC_SINT_T__ *state0p) +{ + /* *state0p should contain last read value if aborting */ + ETHR_AMC_SINT_T__ old_state0; +#if ETHR_AMC_NO_ATMCS__ == 2 + ETHR_AMC_SINT_T__ state1; + int abrt; +#endif + + *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]); + if ((*state0p) & 1) + return 0; /* exclusive flag set; abort */ +#if ETHR_AMC_NO_ATMCS__ == 2 + state1 = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[1]); +#else + ETHR_COMPILER_BARRIER; +#endif + + val[0] = avar[0]; + if (dw) + val[1] = avar[1]; + + ETHR_READ_MEMORY_BARRIER; + + /* + * Abort if state has changed (i.e, either the exclusive + * flag is set, or modification counter changed). + */ + old_state0 = *state0p; +#if ETHR_AMC_NO_ATMCS__ == 2 + *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]); + abrt = (old_state0 != *state0p); + abrt |= (state1 != ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1])); + return abrt == 0; +#else + *state0p = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); + return old_state0 == *state0p; +#endif +} + +static ETHR_INLINE void +amc_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val) +{ + ETHR_AMC_SINT_T__ state0; + int i; + +#if ETHR_AMC_MAX_TRY_READ__ == 0 + state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); +#else + for (i = 0; i < ETHR_AMC_MAX_TRY_READ__; i++) { + if (amc_try_read(amc, dw, avar, val, &state0)) + return; /* read success */ + ETHR_SPIN_BODY; + } +#endif + + state0 = amc_set_excl(amc, state0); + + val[0] = avar[0]; + if (dw) + val[1] = avar[1]; + + amc_unset_excl(amc, state0); +} + +static ETHR_INLINE int +amc_cmpxchg(ethr_amc_t *amc, int dw, ethr_sint_t *avar, + ethr_sint_t *new, ethr_sint_t *xchg) +{ + ethr_sint_t val[2]; + ETHR_AMC_SINT_T__ state0; + + if (amc_try_read(amc, dw, avar, val, &state0)) { + if (val[0] != xchg[0] || (dw && val[1] != xchg[1])) { + xchg[0] = val[0]; + if (dw) + xchg[1] = val[1]; + return 0; /* failed */ + } + /* Operation will succeed if not interrupted */ + } + + state0 = amc_set_excl(amc, state0); + + if (xchg[0] != avar[0] || (dw && xchg[1] != avar[1])) { + xchg[0] = avar[0]; + if (dw) + xchg[1] = avar[1]; + + ETHR_DBG_CHK_EXCL_STATE(amc, state0); + + amc_unset_excl(amc, state0); + return 0; /* failed */ + } + + avar[0] = new[0]; + if (dw) + avar[1] = new[1]; + + amc_inc_mc_unset_excl(amc, state0); + return 1; +} + + +#define ETHR_AMC_MODIFICATION_OPS__(AMC, OPS) \ +do { \ + ETHR_AMC_SINT_T__ state0__; \ + state0__ = ETHR_AMC_ATMC_FUNC__(read)(&(AMC)->atomic[0]); \ + state0__ = amc_set_excl((AMC), state0__); \ + { OPS; } \ + amc_inc_mc_unset_excl((AMC), state0__); \ +} while (0) + +#endif /* amc fallback */ + int ethr_init_atomics(void) { -#ifndef ETHR_HAVE_NATIVE_ATOMICS - { - int i; - for (i = 0; i < (1 << ETHR_ATOMIC_ADDR_BITS); i++) { - int res = ethr_spinlock_init(ðr_atomic_protection__[i].u.lck); - if (res != 0) - return res; - } +#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \ + || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)) + int i; + for (i = 0; i < (1 << ETHR_ATMC_FLLBK_ADDR_BITS); i++) { + int res = 
ethr_spinlock_init(&ethr_atomic_protection__[i].u.lck); + if (res != 0) + return res; } #endif return 0; } + +/* ---------- Double word size atomic implementation ---------- */ + + + /* - * --- Pointer size atomics --------------------------------------------------- + * Double word atomics need a runtime test. */ -ethr_sint_t * -ethr_atomic_addr(ethr_atomic_t *var) +int ethr_have_native_dw_atomic(void) +{ + return ethr_have_native_dw_atomic__(); +} + + +/* --- addr() --- */ + +ethr_sint_t *ETHR_DW_ATOMIC_FUNC__(addr)(ethr_dw_atomic_t *var) +{ + ethr_sint_t *res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + res = ethr_dw_atomic_addr__(var); +#elif defined(ETHR_AMC_FALLBACK__) + res = (ethr_sint_t *) ((&var->fallback))->sint; +#else + res = (ethr_sint_t *) (&var->fallback); +#endif + return res; +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +ethr_sint_t *ethr_dw_atomic_addr(ethr_dw_atomic_t *var) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + return ethr_dw_atomic_addr__(var); +} +#endif + + +/* -- cmpxchg() -- */ + + +int ETHR_DW_ATOMIC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + res = ethr_dw_atomic_cmpxchg__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), + { + res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]); + if (res) { + (&var->fallback)->sint[0] = val->sint[0]; + (&var->fallback)->sint[1] = val->sint[1]; + } + else { + old_val->sint[0] = (&var->fallback)->sint[0]; + old_val->sint[1] = (&var->fallback)->sint[1]; + } + }); +#endif + return res; +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +int ethr_dw_atomic_cmpxchg(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + return ethr_dw_atomic_cmpxchg__(var, val, old_val); +} +#endif + +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + res = ethr_dw_atomic_cmpxchg_rb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), + { + res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]); + if (res) { + (&var->fallback)->sint[0] = val->sint[0]; + (&var->fallback)->sint[1] = val->sint[1]; + } + else { + old_val->sint[0] = (&var->fallback)->sint[0]; + old_val->sint[1] = (&var->fallback)->sint[1]; + } + }); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +int ethr_dw_atomic_cmpxchg_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + return ethr_dw_atomic_cmpxchg_rb__(var, val, old_val); +} +#endif + +int
ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + res = ethr_dw_atomic_cmpxchg_wb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), + { + res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]); + if (res) { + (&var->fallback)->sint[0] = val->sint[0]; + (&var->fallback)->sint[1] = val->sint[1]; + } + else { + old_val->sint[0] = (&var->fallback)->sint[0]; + old_val->sint[1] = (&var->fallback)->sint[1]; + } + }); +#endif + return res; +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +int ethr_dw_atomic_cmpxchg_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + return ethr_dw_atomic_cmpxchg_wb__(var, val, old_val); +} +#endif + +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + res = ethr_dw_atomic_cmpxchg_acqb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), + { + res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]); + if (res) { + (&var->fallback)->sint[0] = val->sint[0]; + (&var->fallback)->sint[1] = val->sint[1]; + } + else { + old_val->sint[0] = (&var->fallback)->sint[0]; + old_val->sint[1] = (&var->fallback)->sint[1]; + } + }); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +int ethr_dw_atomic_cmpxchg_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + return ethr_dw_atomic_cmpxchg_acqb__(var, val, old_val); +} +#endif + +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + res = ethr_dw_atomic_cmpxchg_relb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), + { + res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]); + if (res) { + (&var->fallback)->sint[0] = val->sint[0]; + (&var->fallback)->sint[1] = val->sint[1]; + } + else { + old_val->sint[0] = (&var->fallback)->sint[0]; + old_val->sint[1] = (&var->fallback)->sint[1]; + } + }); +#endif + return res; +} + +#ifdef 
ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +int ethr_dw_atomic_cmpxchg_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + return ethr_dw_atomic_cmpxchg_relb__(var, val, old_val); +} +#endif + +int ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + int res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + res = ethr_dw_atomic_cmpxchg_mb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), + { + res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]); + if (res) { + (&var->fallback)->sint[0] = val->sint[0]; + (&var->fallback)->sint[1] = val->sint[1]; + } + else { + old_val->sint[0] = (&var->fallback)->sint[0]; + old_val->sint[1] = (&var->fallback)->sint[1]; + } + }); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +int ethr_dw_atomic_cmpxchg_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + return ethr_dw_atomic_cmpxchg_mb__(var, val, old_val); +} +#endif + + +/* -- set() -- */ + + +void ETHR_DW_ATOMIC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_set__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_set(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_set__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_set_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_set_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_set_rb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) 
+ ethr_dw_atomic_set_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_set_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_set_wb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_set_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_set_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_set_acqb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_set_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_set_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_set_relb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_set_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_set_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_set_mb__(var, val); +} +#endif + + +/* -- read() -- */ + + +void ETHR_DW_ATOMIC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && 
!defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_read__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_read(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_read__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_read_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_read_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_read_rb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_read_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_read_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_read_wb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_read_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_LoadStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_read_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_read_acqb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_read_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else 
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_read_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_read_relb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_read_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_LoadStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_read_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); + ethr_dw_atomic_read_mb__(var, val); +} +#endif + + +/* -- init() -- */ + + +void ETHR_DW_ATOMIC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_init__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_init(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); + ethr_dw_atomic_init__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_init_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_init_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); + ethr_dw_atomic_init_rb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_init_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_init_wb(ethr_dw_atomic_t 
*var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); + ethr_dw_atomic_init_wb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_init_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_init_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); + ethr_dw_atomic_init_acqb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_init_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_init_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); + ethr_dw_atomic_init_relb__(var, val); +} +#endif + +void ETHR_DW_ATOMIC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + ethr_dw_atomic_init_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ +void ethr_dw_atomic_init_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val) +{ + ETHR_ASSERT(var); + ethr_dw_atomic_init_mb__(var, val); +} +#endif + + +/* ---------- Word size atomic implementation ---------- */ + + + + +/* --- addr() --- */ + +ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *var) +{ + ethr_sint_t *res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_addr__(var); +#elif defined(ETHR_AMC_FALLBACK__) + res = (ethr_sint_t *) (var)->sint; +#else + res = (ethr_sint_t *) var; +#endif + return res; +} + + +/* -- cmpxchg() -- */ + + +ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) 
+ res = ethr_atomic_cmpxchg__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + res = old_val; + (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var)); +#endif + return res; +} + +ethr_sint_t ethr_atomic_cmpxchg_rb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_cmpxchg_rb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + res = old_val; + (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint_t ethr_atomic_cmpxchg_wb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_cmpxchg_wb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + res = old_val; + (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var)); +#endif + return res; +} + +ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_cmpxchg_acqb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + res = old_val; + (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_cmpxchg_relb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = old_val; + (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var)); +#endif + return res; +} + +ethr_sint_t ethr_atomic_cmpxchg_mb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_cmpxchg_mb__(var, val, old_val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + res = old_val; + (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? 
(*var = val, old_val) : *var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- xchg() -- */ + + +ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_xchg__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); +#endif + return res; +} + +ethr_sint_t ethr_atomic_xchg_rb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_xchg_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint_t ethr_atomic_xchg_wb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_xchg_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); +#endif + return res; +} + +ethr_sint_t ethr_atomic_xchg_acqb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_xchg_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint_t ethr_atomic_xchg_relb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_xchg_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); +#endif + return res; +} + +ethr_sint_t ethr_atomic_xchg_mb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_xchg_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- set() -- */ + + +void ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t val) +{ + 
ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_set__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_set(&var->amc, 0, &var->sint, &val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic_set_rb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_set_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_set(&var->amc, 0, &var->sint, &val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +void ethr_atomic_set_wb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_set_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + amc_set(&var->amc, 0, &var->sint, &val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic_set_acqb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_set_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_set(&var->amc, 0, &var->sint, &val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +void ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_set_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + amc_set(&var->amc, 0, &var->sint, &val); +#else + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic_set_mb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_set_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + amc_set(&var->amc, 0, &var->sint, &val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + + +/* -- init() -- */ + + +void ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_init__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_init(&var->amc, 0, &var->sint, &val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic_init_rb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_init_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_init(&var->amc, 0, &var->sint, &val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +void ethr_atomic_init_wb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_init_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + 
amc_init(&var->amc, 0, &var->sint, &val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic_init_acqb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_init_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + amc_init(&var->amc, 0, &var->sint, &val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +void ethr_atomic_init_relb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_init_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + amc_init(&var->amc, 0, &var->sint, &val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic_init_mb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_init_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + amc_init(&var->amc, 0, &var->sint, &val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + + +/* -- add_read() -- */ + + +ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_add_read__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); +#endif + return res; +} + +ethr_sint_t ethr_atomic_add_read_rb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_add_read_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint_t ethr_atomic_add_read_wb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_add_read_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); +#endif + return res; +} + +ethr_sint_t ethr_atomic_add_read_acqb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_add_read_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + 
ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint_t ethr_atomic_add_read_relb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_add_read_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); +#endif + return res; +} + +ethr_sint_t ethr_atomic_add_read_mb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_add_read_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- read() -- */ + + +ethr_sint_t ethr_atomic_read(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read__(var); +#elif defined(ETHR_AMC_FALLBACK__) + amc_read(&var->amc, 0, &var->sint, &res); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_rb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_rb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + amc_read(&var->amc, 0, &var->sint, &res); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_wb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_wb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + amc_read(&var->amc, 0, &var->sint, &res); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_acqb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + amc_read(&var->amc, 0, &var->sint, &res); + ETHR_MEMBAR(ETHR_LoadStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_relb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_relb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + 
ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + amc_read(&var->amc, 0, &var->sint, &res); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_mb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_mb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + amc_read(&var->amc, 0, &var->sint, &res); + ETHR_MEMBAR(ETHR_LoadStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#endif + return res; +} + + +/* -- inc_read() -- */ + + +ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_inc_read__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint)); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#endif + return res; +} + +ethr_sint_t ethr_atomic_inc_read_rb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_inc_read_rb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint_t ethr_atomic_inc_read_wb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_inc_read_wb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint)); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#endif + return res; +} + +ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_inc_read_acqb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint_t ethr_atomic_inc_read_relb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_inc_read_relb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint)); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#endif + return res; +} + +ethr_sint_t ethr_atomic_inc_read_mb(ethr_atomic_t *var) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_inc_read_mb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + 
ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- dec_read() -- */ + + +ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *var) { + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_addr__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_dec_read__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint)); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#endif + return res; } -void -ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t i) +ethr_sint_t ethr_atomic_dec_read_rb(ethr_atomic_t *var) { + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic_init__(var, i); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_dec_read_rb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; } -void -ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t i) +ethr_sint_t ethr_atomic_dec_read_wb(ethr_atomic_t *var) { + ethr_sint_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic_set__(var, i); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_dec_read_wb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint)); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#endif + return res; } -ethr_sint_t -ethr_atomic_read(ethr_atomic_t *var) +ethr_sint_t ethr_atomic_dec_read_acqb(ethr_atomic_t *var) { + ethr_sint_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_read__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_dec_read_acqb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; } -ethr_sint_t -ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t incr) +ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *var) { + ethr_sint_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_add_read__(var, incr); -} +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_dec_read_relb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint)); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#endif + return res; +} -ethr_sint_t -ethr_atomic_inc_read(ethr_atomic_t *var) +ethr_sint_t ethr_atomic_dec_read_mb(ethr_atomic_t *var) { + ethr_sint_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_inc_read__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_dec_read_mb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + 
ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; } -ethr_sint_t -ethr_atomic_dec_read(ethr_atomic_t *var) + +/* -- add() -- */ + + +void ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t val) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_dec_read__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_add__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); +#endif + +} + +void ethr_atomic_add_rb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_add_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + } -void -ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t incr) +void ethr_atomic_add_wb(ethr_atomic_t *var, ethr_sint_t val) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic_add__(var, incr); -} - -void -ethr_atomic_inc(ethr_atomic_t *var) +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_add_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); +#endif + +} + +void ethr_atomic_add_acqb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_add_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +void ethr_atomic_add_relb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_add_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); +#endif + +} + +void ethr_atomic_add_mb(ethr_atomic_t *var, ethr_sint_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_add_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + + +/* -- inc() -- */ + + +void ethr_atomic_inc(ethr_atomic_t 
*var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) ethr_atomic_inc__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint)); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#endif + } -void -ethr_atomic_dec(ethr_atomic_t *var) +void ethr_atomic_inc_rb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic_dec__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_inc_rb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + } -ethr_sint_t -ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t mask) +void ethr_atomic_inc_wb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_read_band__(var, mask); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_inc_wb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint)); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#endif + } -ethr_sint_t -ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t mask) +void ethr_atomic_inc_acqb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_read_bor__(var, mask); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_inc_acqb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + } -ethr_sint_t -ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t new) +void ethr_atomic_inc_relb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_xchg__(var, new); -} +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_inc_relb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint)); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#endif + +} -ethr_sint_t -ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t new, ethr_sint_t expected) +void ethr_atomic_inc_mb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_cmpxchg__(var, new, expected); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_inc_mb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + } -ethr_sint_t -ethr_atomic_read_acqb(ethr_atomic_t *var) + +/* -- dec() -- */ + + +void ethr_atomic_dec(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_read_acqb__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_dec__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint)); +#else + 
ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); +#endif + } -ethr_sint_t -ethr_atomic_inc_read_acqb(ethr_atomic_t *var) +void ethr_atomic_dec_rb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_inc_read_acqb__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_dec_rb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + } -void -ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t i) +void ethr_atomic_dec_wb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic_set_relb__(var, i); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_dec_wb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint)); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); +#endif + } -void -ethr_atomic_dec_relb(ethr_atomic_t *var) +void ethr_atomic_dec_acqb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_dec_acqb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +void ethr_atomic_dec_relb(ethr_atomic_t *var) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) ethr_atomic_dec_relb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint)); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); +#endif + } -ethr_sint_t -ethr_atomic_dec_read_relb(ethr_atomic_t *var) +void ethr_atomic_dec_mb(ethr_atomic_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_dec_read_relb__(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + ethr_atomic_dec_mb__(var); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + } -ethr_sint_t -ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t new, ethr_sint_t exp) + +/* -- read_band() -- */ + + +ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t val) { + ethr_sint_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_cmpxchg_acqb__(var, new, exp); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_band__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); +#endif + return res; } -ethr_sint_t -ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t new, ethr_sint_t exp) +ethr_sint_t ethr_atomic_read_band_rb(ethr_atomic_t *var, ethr_sint_t val) { + ethr_sint_t res; 
ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic_cmpxchg_relb__(var, new, exp); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_band_rb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; } +ethr_sint_t ethr_atomic_read_band_wb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_band_wb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_band_acqb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_band_acqb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_band_relb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_band_relb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_band_mb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_band_mb__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- read_bor() -- */ + + +ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = ethr_atomic_read_bor__(var, val); +#elif defined(ETHR_AMC_FALLBACK__) + ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val); +#endif + return res; +} + +ethr_sint_t ethr_atomic_read_bor_rb(ethr_atomic_t *var, ethr_sint_t val) +{ + ethr_sint_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) + res = 
ethr_atomic_read_bor_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+    ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+    ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+    return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_wb(ethr_atomic_t *var, ethr_sint_t val)
+{
+    ethr_sint_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic_read_bor_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+    ETHR_MEMBAR(ETHR_StoreStore);
+    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+#else
+    ETHR_MEMBAR(ETHR_StoreStore);
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+    return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+    ethr_sint_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic_read_bor_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+    return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+    ethr_sint_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic_read_bor_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+#else
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+    return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+    ethr_sint_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic_read_bor_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+    return res;
+}
+
+
+/* ---------- 32-bit atomic implementation ---------- */
+
+
+
+
+/* --- addr() --- */
+
+ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *var)
+{
+    ethr_sint32_t *res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic32_addr__(var);
+
+#else
+    res = (ethr_sint32_t *) var;
+#endif
+    return res;
+}
+
+
+/* -- cmpxchg() -- */
+
+
+ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+    ethr_sint32_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic32_cmpxchg__(var, val, old_val);
+#else
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+    return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_rb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+    ethr_sint32_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic32_cmpxchg_rb__(var, val, old_val);
+#else
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+    ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+    return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_wb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+    ethr_sint32_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic32_cmpxchg_wb__(var, val, old_val);
+#else
+    ETHR_MEMBAR(ETHR_StoreStore);
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+    return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+    ethr_sint32_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic32_cmpxchg_acqb__(var, val, old_val);
+#else
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+    return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+    ethr_sint32_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic32_cmpxchg_relb__(var, val, old_val);
+#else
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+    return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_mb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+    ethr_sint32_t res;
+    ETHR_ASSERT(!ethr_not_inited__);
+    ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+    res = ethr_atomic32_cmpxchg_mb__(var, val, old_val);
+#else
+    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ?
(*var = val, old_val) : *var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- xchg() -- */ + + +ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_xchg__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_xchg_rb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_xchg_rb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_xchg_wb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_xchg_wb__(var, val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_xchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_xchg_acqb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_xchg_relb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_xchg_relb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_xchg_mb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_xchg_mb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- set() -- */ + + +void ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_set__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic32_set_rb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_set_rb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +void ethr_atomic32_set_wb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_set_wb__(var, val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic32_set_acqb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + 
ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_set_acqb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +void ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_set_relb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic32_set_mb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_set_mb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + + +/* -- init() -- */ + + +void ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_init__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic32_init_rb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_init_rb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +void ethr_atomic32_init_wb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_init_wb__(var, val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic32_init_acqb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_init_acqb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +void ethr_atomic32_init_relb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_init_relb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); +#endif + +} + +void ethr_atomic32_init_mb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_init_mb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + + +/* -- add_read() -- */ + + +ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_add_read__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_add_read_rb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_add_read_rb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += 
val; res = *var); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_add_read_wb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_add_read_wb__(var, val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_add_read_acqb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_add_read_acqb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_add_read_relb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_add_read_relb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_add_read_mb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_add_read_mb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- read() -- */ + + +ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_rb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_rb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_wb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_wb__(var); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_acqb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_relb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_relb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_mb(ethr_atomic32_t 
*var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_mb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); +#endif + return res; +} + + +/* -- inc_read() -- */ + + +ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_inc_read__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_inc_read_rb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_inc_read_rb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_inc_read_wb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_inc_read_wb__(var); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_inc_read_acqb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_inc_read_relb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_inc_read_relb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_inc_read_mb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_inc_read_mb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- dec_read() -- */ -/* - * --- 32-bit atomics --------------------------------------------------------- - */ -ethr_sint32_t * -ethr_atomic32_addr(ethr_atomic32_t *var) +ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *var) { + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_addr__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_dec_read__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#endif + return res; } -void -ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t i) +ethr_sint32_t ethr_atomic32_dec_read_rb(ethr_atomic32_t *var) { + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic32_init__(var, i); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_dec_read_rb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif 
+ return res; } -void -ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t i) +ethr_sint32_t ethr_atomic32_dec_read_wb(ethr_atomic32_t *var) { + ethr_sint32_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic32_set__(var, i); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_dec_read_wb__(var); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#endif + return res; } -ethr_sint32_t -ethr_atomic32_read(ethr_atomic32_t *var) +ethr_sint32_t ethr_atomic32_dec_read_acqb(ethr_atomic32_t *var) { + ethr_sint32_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_read__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_dec_read_acqb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; } +ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *var) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_dec_read_relb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); +#endif + return res; +} -ethr_sint32_t -ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t incr) +ethr_sint32_t ethr_atomic32_dec_read_mb(ethr_atomic32_t *var) { + ethr_sint32_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_add_read__(var, incr); -} +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_dec_read_mb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- add() -- */ -ethr_sint32_t -ethr_atomic32_inc_read(ethr_atomic32_t *var) + +void ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t val) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_inc_read__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_add__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); +#endif + } -ethr_sint32_t -ethr_atomic32_dec_read(ethr_atomic32_t *var) +void ethr_atomic32_add_rb(ethr_atomic32_t *var, ethr_sint32_t val) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_dec_read__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_add_rb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + +} + +void ethr_atomic32_add_wb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_add_wb__(var, val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); +#endif + +} + +void ethr_atomic32_add_acqb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_add_acqb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + +void ethr_atomic32_add_relb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + 
ethr_atomic32_add_relb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); +#endif + } -void -ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t incr) +void ethr_atomic32_add_mb(ethr_atomic32_t *var, ethr_sint32_t val) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic32_add__(var, incr); -} - -void -ethr_atomic32_inc(ethr_atomic32_t *var) +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_add_mb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + + +/* -- inc() -- */ + + +void ethr_atomic32_inc(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) ethr_atomic32_inc__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#endif + } -void -ethr_atomic32_dec(ethr_atomic32_t *var) +void ethr_atomic32_inc_rb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic32_dec__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_inc_rb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + } -ethr_sint32_t -ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t mask) +void ethr_atomic32_inc_wb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_read_band__(var, mask); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_inc_wb__(var); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#endif + } -ethr_sint32_t -ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t mask) +void ethr_atomic32_inc_acqb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_read_bor__(var, mask); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_inc_acqb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + } -ethr_sint32_t -ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t new) +void ethr_atomic32_inc_relb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_xchg__(var, new); -} +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_inc_relb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); +#endif -ethr_sint32_t -ethr_atomic32_cmpxchg(ethr_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t expected) +} + +void ethr_atomic32_inc_mb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_cmpxchg__(var, new, expected); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_inc_mb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + } -ethr_sint32_t -ethr_atomic32_read_acqb(ethr_atomic32_t *var) + +/* -- dec() -- */ + + +void ethr_atomic32_dec(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_read_acqb__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_dec__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); +#endif + +} + +void 
ethr_atomic32_dec_rb(ethr_atomic32_t *var) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_dec_rb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + } -ethr_sint32_t -ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var) +void ethr_atomic32_dec_wb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_inc_read_acqb__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_dec_wb__(var); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); +#endif + } -void -ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t i) +void ethr_atomic32_dec_acqb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - ethr_atomic32_set_relb__(var, i); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_dec_acqb__(var); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + } -void -ethr_atomic32_dec_relb(ethr_atomic32_t *var) +void ethr_atomic32_dec_relb(ethr_atomic32_t *var) { ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) ethr_atomic32_dec_relb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); +#endif + +} + +void ethr_atomic32_dec_mb(ethr_atomic32_t *var) +{ + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + ethr_atomic32_dec_mb__(var); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var)); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + +} + + +/* -- read_band() -- */ + + +ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_band__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_band_rb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_band_rb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_band_wb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_band_wb__(var, val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_band_acqb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_band_acqb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_band_relb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + 
ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_band_relb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_band_mb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_band_mb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + +/* -- read_bor() -- */ + + +ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_bor__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_bor_rb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_bor_rb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val); + ETHR_MEMBAR(ETHR_LoadLoad); +#endif + return res; } -ethr_sint32_t -ethr_atomic32_dec_read_relb(ethr_atomic32_t *var) +ethr_sint32_t ethr_atomic32_read_bor_wb(ethr_atomic32_t *var, ethr_sint32_t val) { + ethr_sint32_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_dec_read_relb__(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_bor_wb__(var, val); +#else + ETHR_MEMBAR(ETHR_StoreStore); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val); +#endif + return res; } -ethr_sint32_t -ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t exp) +ethr_sint32_t ethr_atomic32_read_bor_acqb(ethr_atomic32_t *var, ethr_sint32_t val) { + ethr_sint32_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_cmpxchg_acqb__(var, new, exp); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_bor_acqb__(var, val); +#else + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val); + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; } -ethr_sint32_t -ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var, - ethr_sint32_t new, - ethr_sint32_t exp) +ethr_sint32_t ethr_atomic32_read_bor_relb(ethr_atomic32_t *var, ethr_sint32_t val) { + ethr_sint32_t res; ETHR_ASSERT(!ethr_not_inited__); ETHR_ASSERT(var); - return ethr_atomic32_cmpxchg_relb__(var, new, exp); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_bor_relb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val); +#endif + return res; +} + +ethr_sint32_t ethr_atomic32_read_bor_mb(ethr_atomic32_t *var, ethr_sint32_t val) +{ + ethr_sint32_t res; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(var); +#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) + res = ethr_atomic32_read_bor_mb__(var, val); +#else + ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad); + ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val); + 
ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore); +#endif + return res; +} + + + +/* --------- Info functions --------- */ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) +char *zero_ops[] = {NULL}; +#endif + + +static char *native_su_dw_atomic_ops[] = { +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG + "cmpxchg", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RB + "cmpxchg_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_WB + "cmpxchg_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_ACQB + "cmpxchg_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RELB + "cmpxchg_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB + "cmpxchg_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET + "set", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RB + "set_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_WB + "set_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_ACQB + "set_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RELB + "set_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_MB + "set_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ + "read", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RB + "read_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_WB + "read_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_ACQB + "read_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RELB + "read_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_MB + "read_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT + "init", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RB + "init_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_WB + "init_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_ACQB + "init_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RELB + "init_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_MB + "init_mb", +#endif + NULL +}; + +char ** +ethr_native_su_dw_atomic_ops(void) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (!ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + return &zero_ops[0]; +#endif + return &native_su_dw_atomic_ops[0]; +} + + +static char *native_dw_atomic_ops[] = { +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG + "cmpxchg", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RB + "cmpxchg_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_WB + "cmpxchg_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_ACQB + "cmpxchg_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RELB + "cmpxchg_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB + "cmpxchg_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET + "set", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RB + "set_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_WB + "set_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_ACQB + "set_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RELB + "set_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_MB + "set_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ + "read", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RB + "read_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_WB + "read_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_ACQB + "read_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RELB + "read_relb", +#endif +#ifdef 
ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_MB + "read_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT + "init", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RB + "init_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_WB + "init_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_ACQB + "init_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RELB + "init_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_MB + "init_mb", +#endif + NULL +}; + +char ** +ethr_native_dw_atomic_ops(void) +{ + +#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + if (!ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) + return &zero_ops[0]; +#endif + return &native_dw_atomic_ops[0]; +} + + +static char *native_atomic64_ops[] = { +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG + "cmpxchg", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB + "cmpxchg_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB + "cmpxchg_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB + "cmpxchg_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB + "cmpxchg_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB + "cmpxchg_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG + "xchg", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB + "xchg_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB + "xchg_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB + "xchg_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB + "xchg_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB + "xchg_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET + "set", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB + "set_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB + "set_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB + "set_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB + "set_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB + "set_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT + "init", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB + "init_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB + "init_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB + "init_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB + "init_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB + "init_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN + "add_return", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB + "add_return_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB + "add_return_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB + "add_return_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB + "add_return_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB + "add_return_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ + "read", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB + "read_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB + "read_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB + "read_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB + "read_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB + "read_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN + "inc_return", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB + "inc_return_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB + "inc_return_wb", +#endif +#ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB + "inc_return_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB + "inc_return_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB + "inc_return_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN + "dec_return", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB + "dec_return_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB + "dec_return_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB + "dec_return_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB + "dec_return_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB + "dec_return_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD + "add", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB + "add_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB + "add_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB + "add_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB + "add_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB + "add_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC + "inc", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB + "inc_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB + "inc_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB + "inc_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB + "inc_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB + "inc_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC + "dec", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB + "dec_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB + "dec_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB + "dec_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB + "dec_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB + "dec_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD + "and_retold", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB + "and_retold_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB + "and_retold_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB + "and_retold_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB + "and_retold_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB + "and_retold_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD + "or_retold", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB + "or_retold_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB + "or_retold_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB + "or_retold_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB + "or_retold_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB + "or_retold_mb", +#endif + NULL +}; + +char ** +ethr_native_atomic64_ops(void) +{ + + return &native_atomic64_ops[0]; } + +static char *native_atomic32_ops[] = { +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG + "cmpxchg", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB + "cmpxchg_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB + "cmpxchg_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB + "cmpxchg_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB + "cmpxchg_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB + "cmpxchg_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG + "xchg", +#endif +#ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB + "xchg_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB + "xchg_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB + "xchg_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB + "xchg_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB + "xchg_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET + "set", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB + "set_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB + "set_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB + "set_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB + "set_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB + "set_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT + "init", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB + "init_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB + "init_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB + "init_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB + "init_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB + "init_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN + "add_return", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB + "add_return_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB + "add_return_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB + "add_return_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB + "add_return_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB + "add_return_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ + "read", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB + "read_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB + "read_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB + "read_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB + "read_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB + "read_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN + "inc_return", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB + "inc_return_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB + "inc_return_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB + "inc_return_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB + "inc_return_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB + "inc_return_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN + "dec_return", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB + "dec_return_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB + "dec_return_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB + "dec_return_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB + "dec_return_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB + "dec_return_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD + "add", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB + "add_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB + "add_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB + "add_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB + "add_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB + "add_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC + "inc", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB + "inc_rb", +#endif +#ifdef 
ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB + "inc_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB + "inc_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB + "inc_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB + "inc_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC + "dec", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB + "dec_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB + "dec_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB + "dec_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB + "dec_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB + "dec_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD + "and_retold", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB + "and_retold_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB + "and_retold_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB + "and_retold_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB + "and_retold_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB + "and_retold_mb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD + "or_retold", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB + "or_retold_rb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB + "or_retold_wb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB + "or_retold_acqb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB + "or_retold_relb", +#endif +#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB + "or_retold_mb", +#endif + NULL +}; + +char ** +ethr_native_atomic32_ops(void) +{ + + return &native_atomic32_ops[0]; +} diff --git a/erts/lib_src/common/ethr_aux.c b/erts/lib_src/common/ethr_aux.c index 2c3e25a805..521640317e 100644 --- a/erts/lib_src/common/ethr_aux.c +++ b/erts/lib_src/common/ethr_aux.c @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -31,10 +31,6 @@ #define ETHR_INLINE_FUNC_NAME_(X) X ## __ #define ETHR_AUX_IMPL__ -#define ETHR_ATOMIC_IMPL__ /* Needed in order to pull in - native atomic implementations - for optimized fallbacks of - spinlocks and rwspinlocks */ #include "ethread.h" #include "ethr_internal.h" #include <string.h> @@ -75,10 +71,87 @@ static int main_threads; static int init_ts_event_alloc(void); +ethr_runtime_t ethr_runtime__ +#ifdef __GNUC__ +__attribute__ ((aligned (ETHR_CACHE_LINE_SIZE))) +#endif + ; + +#if defined(ETHR_X86_RUNTIME_CONF__) + +/* + * x86/x86_64 specifics shared between windows and + * pthread implementations. + */ + +#define ETHR_IS_X86_VENDOR(V, B, C, D) \ + (sizeof(V) == 13 && is_x86_vendor((V), (B), (C), (D))) + +static ETHR_INLINE int +is_x86_vendor(char *str, int ebx, int ecx, int edx) +{ + return (*((int *) &str[0]) == ebx + && *((int *) &str[sizeof(int)]) == edx + && *((int *) &str[sizeof(int)*2]) == ecx); +} + +static void +x86_init(void) +{ + int eax, ebx, ecx, edx; + + eax = ebx = ecx = edx = 0; + + ethr_x86_cpuid__(&eax, &ebx, &ecx, &edx); + + if (eax > 0 + && (ETHR_IS_X86_VENDOR("GenuineIntel", ebx, ecx, edx) + || ETHR_IS_X86_VENDOR("AuthenticAMD", ebx, ecx, edx))) { + eax = 1; + ethr_x86_cpuid__(&eax, &ebx, &ecx, &edx); + } + else { + /* + * The meaning of the feature flags for this + * vendor have not been verified. 
+ */ + eax = ebx = ecx = edx = 0; + } + + /* + * The feature flags tested below have only been verified + * for vendors checked above. Also note that only these + * feature flags have been verified to have these specific + * meanings. If another feature flag test is introduced, + * it has to be verified to have the same meaning for all + * vendors above. + */ + +#if ETHR_SIZEOF_PTR == 8 + /* bit 13 of ecx is set if we have cmpxchg16b */ + ethr_runtime__.conf.have_dw_cmpxchg = (ecx & (1 << 13)); +#elif ETHR_SIZEOF_PTR == 4 + /* bit 8 of edx is set if we have cmpxchg8b */ + ethr_runtime__.conf.have_dw_cmpxchg = (edx & (1 << 8)); +#else +# error "Not supported" +#endif + /* bit 26 of edx is set if we have sse2 */ + ethr_runtime__.conf.have_sse2 = (edx & (1 << 26)); +} + +#endif /* ETHR_X86_RUNTIME_CONF__ */ + + int ethr_init_common__(ethr_init_data *id) { int res; + +#if defined(ETHR_X86_RUNTIME_CONF__) + x86_init(); +#endif + if (id) { ethr_thr_prepare_func__ = id->thread_create_prepare_func; ethr_thr_parent_func__ = id->thread_create_parent_func; diff --git a/erts/lib_src/common/ethr_mutex.c b/erts/lib_src/common/ethr_mutex.c index 2ddef32dfc..81fd6af80a 100644 --- a/erts/lib_src/common/ethr_mutex.c +++ b/erts/lib_src/common/ethr_mutex.c @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -26,8 +26,9 @@ #include "config.h" #endif -#define ETHR_INLINE_FUNC_NAME_(X) X ## __ +#define ETHR_INLINE_MTX_FUNC_NAME_(X) X ## __ #define ETHR_MUTEX_IMPL__ +#define ETHR_TRY_INLINE_FUNCS #include <limits.h> #include "ethread.h" diff --git a/erts/lib_src/pthread/ethr_x86_sse2_asm.c b/erts/lib_src/pthread/ethr_x86_sse2_asm.c new file mode 100644 index 0000000000..6cbe73cf16 --- /dev/null +++ b/erts/lib_src/pthread/ethr_x86_sse2_asm.c @@ -0,0 +1,31 @@ +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB 2011. All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the "License"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. + * + * Software distributed under the License is distributed on an "AS IS" + * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + * the License for the specific language governing rights and limitations + * under the License. + * + * %CopyrightEnd% + */ + +/* + * Description: sse2 asm:s + * Author: Rickard Green + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +/* ETHR_X86_SSE2_ASM_C__ will trigger asm:s to compile to be included */ +#define ETHR_X86_SSE2_ASM_C__ +#include "ethread.h" diff --git a/erts/lib_src/pthread/ethread.c b/erts/lib_src/pthread/ethread.c index f047104103..ad29249bac 100644 --- a/erts/lib_src/pthread/ethread.c +++ b/erts/lib_src/pthread/ethread.c @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -121,6 +121,98 @@ ethr_ts_event *ethr_get_tse__(void) return pthread_getspecific(ethr_ts_event_key__); } +#if defined(ETHR_PPC_RUNTIME_CONF__) + +static volatile int lwsync_caused_sigill; + +static void +handle_lwsync_sigill(int signum) +{ + lwsync_caused_sigill = 1; +} + +static int +ppc_init__(void) +{ + struct sigaction act, oact; + lwsync_caused_sigill = 0; + + sigemptyset(&act.sa_mask); + act.sa_flags = 0; + act.sa_handler = handle_lwsync_sigill; + if (sigaction(SIGILL, &act, &oact) != 0) + return errno; + + __asm__ __volatile__ ("lwsync\n\t" : : : "memory"); + + act.sa_flags = 0; + act.sa_handler = SIG_DFL; + if (sigaction(SIGILL, &act, &oact) != 0) + return errno; + + ethr_runtime__.conf.have_lwsync = (int) !lwsync_caused_sigill; + return 0; +} + +#endif + +#if defined(ETHR_X86_RUNTIME_CONF__) + +void +ethr_x86_cpuid__(int *eax, int *ebx, int *ecx, int *edx) +{ +#if ETHR_SIZEOF_PTR == 4 + int have_cpuid; + /* + * If it is possible to toggle eflags bit 21, + * we have the cpuid instruction. + */ + __asm__ ("pushf\n\t" + "popl %%eax\n\t" + "movl %%eax, %%ecx\n\t" + "xorl $0x200000, %%eax\n\t" + "pushl %%eax\n\t" + "popf\n\t" + "pushf\n\t" + "popl %%eax\n\t" + "movl $0x0, %0\n\t" + "xorl %%ecx, %%eax\n\t" + "jz no_cpuid\n\t" + "movl $0x1, %0\n\t" + "no_cpuid:\n\t" + : "=r"(have_cpuid) + : + : "%eax", "%ecx", "cc"); + if (!have_cpuid) { + *eax = *ebx = *ecx = *edx = 0; + return; + } +#endif +#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__ + /* + * When position independet code is used in 32-bit mode, the B register + * is used for storage of global offset table address, and we may not + * use it as input or output in an asm. We need to save and restore the + * B register explicitly (for some reason gcc doesn't provide this + * service to us). + */ + __asm__ ("pushl %%ebx\n\t" + "cpuid\n\t" + "movl %%ebx, %1\n\t" + "popl %%ebx\n\t" + : "=a"(*eax), "=r"(*ebx), "=c"(*ecx), "=d"(*edx) + : "0"(*eax) + : "cc"); +#else + __asm__ ("cpuid\n\t" + : "=a"(*eax), "=b"(*ebx), "=c"(*ecx), "=d"(*edx) + : "0"(*eax) + : "cc"); +#endif +} + +#endif /* ETHR_X86_RUNTIME_CONF__ */ + /* * -------------------------------------------------------------------------- * Exported functions @@ -137,6 +229,12 @@ ethr_init(ethr_init_data *id) ethr_not_inited__ = 0; +#if defined(ETHR_PPC_RUNTIME_CONF__) + res = ppc_init__(); + if (res != 0) + goto error; +#endif + res = ethr_init_common__(id); if (res != 0) goto error; @@ -146,6 +244,8 @@ ethr_init(ethr_init_data *id) child_wait_spin_count = 0; res = pthread_key_create(ðr_ts_event_key__, ethr_ts_event_destructor__); + if (res != 0) + goto error; return 0; error: diff --git a/erts/lib_src/utils/make_atomics_api b/erts/lib_src/utils/make_atomics_api new file mode 100755 index 0000000000..f4e71c7618 --- /dev/null +++ b/erts/lib_src/utils/make_atomics_api @@ -0,0 +1,2186 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2011. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. 
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-mode(compile).
+
+%%%-------------------------------------------------------------------
+%%% @author Rickard Green <[email protected]>
+%%% @copyright (C) 2011, Rickard Green
+%%% @doc
+%%% Generation of the ethread atomic API
+%%% @end
+%%% Created : 17 Jan 2011 by Rickard Green <[email protected]>
+%%%-------------------------------------------------------------------
+
+-define(H_FILE, "erts/include/internal/ethr_atomics.h").
+-define(C_FILE, "erts/lib_src/common/ethr_atomics.c").
+
+%% These order constraints are important:
+%% - 'cmpxchg' needs to appear before 'read'
+%% - 'xchg' needs to appear before 'set'
+%% - 'set' needs to appear before 'init'
+%% - 'add_read' needs to appear before 'add', 'inc_read', and 'dec_read'
+%% - 'inc_read' needs to appear before 'inc'
+%% - 'dec_read' needs to appear before 'dec'
+-define(ATOMIC_OPS, [cmpxchg, xchg, set, init, add_read,
+                     read, inc_read, dec_read, add, inc,
+                     dec, read_band, read_bor]).
+
+-define(DW_ATOMIC_OPS, [cmpxchg, set, read, init]).
+-define(DW_FUNC_MACRO, "ETHR_DW_ATOMIC_FUNC__").
+-define(DW_RTCHK_MACRO, "ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__").
+
+%% Barrier versions we implement
+-define(BARRIERS, [none, rb, wb, acqb, relb, mb]).
+
+-define(ATOMIC_SIZES, ["dword", "word", "32"]).
+
+-define(HAVE_NATIVE_ATOMIC, "ETHR_HAVE_ETHR_NATIVE_ATOMIC").
+
+-define(SU_DW_SINT_FIELD, "dw_sint").
+-define(DW_SINT_FIELD, "sint").
+
+%% Fallback
+-define(ETHR_ATMC_FLLBK_ADDR_BITS, "10").
+-define(ETHR_ATMC_FLLBK_ADDR_SHIFT, "6").
+
+-record(atomic_context, {dw,
+                         amc_fallback,
+                         ret_type,
+                         ret_var,
+                         arg1,
+                         arg2,
+                         arg3,
+                         have_native_atomic_ops,
+                         atomic,
+                         atomic_t,
+                         addr_aint_t,
+                         aint_t,
+                         naint_t,
+                         'NATMC',
+                         'ATMC',
+                         unusual_val}).
+
+atomic_context("dword") ->
+    #atomic_context{dw = true,
+                    amc_fallback = true,
+                    ret_type = "int",
+                    ret_var = "res",
+                    arg1 = "var",
+                    arg2 = "val",
+                    arg3 = "old_val",
+                    have_native_atomic_ops = "ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS",
+                    atomic = "ethr_dw_atomic",
+                    atomic_t = "ethr_dw_atomic_t",
+                    addr_aint_t = "ethr_sint_t",
+                    aint_t = "ethr_dw_sint_t",
+                    naint_t = "ETHR_SU_DW_NAINT_T__",
+                    'NATMC' = "DW_NATMC",
+                    'ATMC' = "DW_ATMC",
+                    unusual_val = "ETHR_UNUSUAL_SINT_VAL__"};
+atomic_context(Size) ->
+    {SizeSuffix, HaveSize, AMC} = case Size of
+                                      "word" -> {"", "WORD_SZ", true};
+                                      _ -> {Size, Size++"BIT", false}
+                                  end,
+    AintT = ["ethr_sint", SizeSuffix, "_t"],
+    #atomic_context{dw = false,
+                    amc_fallback = AMC,
+                    ret_type = AintT,
+                    ret_var = "res",
+                    arg1 = "var",
+                    arg2 = "val",
+                    arg3 = "old_val",
+                    have_native_atomic_ops = ["ETHR_HAVE_", HaveSize, "_NATIVE_ATOMIC_OPS"],
+                    atomic = ["ethr_atomic", SizeSuffix],
+                    atomic_t = ["ethr_atomic", SizeSuffix, "_t"],
+                    addr_aint_t = AintT,
+                    aint_t = AintT,
+                    naint_t = ["ETHR_NAINT", SizeSuffix, "_T__"],
+                    'NATMC' = ["NATMC", SizeSuffix],
+                    'ATMC' = ["ATMC", SizeSuffix],
+                    unusual_val = ["ETHR_UNUSUAL_SINT", SizeSuffix, "_VAL__"]}.
+
+-record(op_context, {ret, var, val1, val2}).
+
+-define(POTENTIAL_NBITS, ["64", "32"]).
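The ?ATOMIC_OPS and ?BARRIERS tables above are what drive the generated C API shown earlier in ethr_atomics.c. For orientation only, a minimal caller-side sketch of that 32-bit API (the counter variable and helper names here are illustrative, not part of the commit):

#include "ethread.h"

static ethr_atomic32_t counter;                 /* hypothetical example variable */

void example_setup(void)
{
    ethr_atomic32_init(&counter, 0);            /* op 'init', no barrier */
}

void example_bump(void)
{
    ethr_atomic32_add(&counter, 2);             /* op 'add', no barrier */
}

ethr_sint32_t example_take_ticket(void)
{
    /* op 'inc_read' with the 'acqb' barrier variant: memory accesses after
       this call are not reordered before the increment */
    return ethr_atomic32_inc_read_acqb(&counter);
}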
+ +is_return_op(#atomic_context{dw = false}, add) -> false; +is_return_op(#atomic_context{dw = false}, inc) -> false; +is_return_op(#atomic_context{dw = false}, dec) -> false; +is_return_op(#atomic_context{dw = true}, read) -> false; +is_return_op(_AC, init) -> false; +is_return_op(_AC, set) -> false; +is_return_op(_AC, _OP) -> true. + +native(add_read) -> add_return; +native(inc_read) -> inc_return; +native(dec_read) -> dec_return; +native(read_band) -> and_retold; +native(read_bor) -> or_retold; +native(Op) -> Op. + +op(Op, #op_context{var = Var, val1 = Val1}) when Op == init; Op == set -> + [Var, " = ", Val1]; +op(read, #op_context{ret = Ret, var = Var}) -> + [Ret, " = ", Var]; +op(add_read, OpC) -> + [op(add, OpC), "; ", op(read, OpC)]; +op(add, #op_context{var = Var, val1 = Val1}) -> + [Var, " += ", Val1]; +op(inc, #op_context{var = Var}) -> + ["++(", Var, ")"]; +op(dec, #op_context{var = Var}) -> + ["--(", Var, ")"]; +op(inc_read, #op_context{ret = Ret, var = Var}) -> + [Ret, " = ++(", Var, ")"]; +op(dec_read, #op_context{ret = Ret, var = Var}) -> + [Ret, " = --(", Var, ")"]; +op(read_band, #op_context{var = Var, val1 = Val1} = OpC) -> + [op(read, OpC), "; ", Var, " &= ", Val1]; +op(read_bor, #op_context{var = Var, val1 = Val1} = OpC) -> + [op(read, OpC), "; ", Var, " |= ", Val1]; +op(xchg, OpC) -> + [op(read, OpC), "; ", op(set, OpC)]; +op(cmpxchg, #op_context{ret = Ret, var = Var, val1 = Val1, val2 = Val2}) -> + [Ret, " = (", Var, " == ", Val2, " ? (", Var, " = ", Val1, ", ", Val2, ") : ", Var, ")"]. + +dw_op(Op, #op_context{var = Var, val1 = Val1}) when Op == init; Op == set -> + [Var, "[0] = ", Val1, "[0]; ", Var, "[1] = ", Val1, "[1]"]; +dw_op(read, #op_context{var = Var, val1 = Val1}) -> + [Val1, "[0] = ", Var, "[0]; ", Val1, "[1] = ", Var, "[1]"]; +dw_op(cmpxchg, #op_context{ret = Ret, var = Var, val1 = Val1, val2 = Val2}) -> + [" + { + ", Ret, " = (", Var, "[0] == ", Val2, "[0] && ", Var, "[1] == ", Val2, "[1]); + if (", Ret, ") { + ", Var, "[0] = ", Val1, "[0]; + ", Var, "[1] = ", Val1, "[1]; + } + else { + ", Val2, "[0] = ", Var, "[0]; + ", Val2, "[1] = ", Var, "[1]; + } + }"]. + +op_head_tail(init) -> {undef, undef}; +op_head_tail(set) -> {store, store}; +op_head_tail(read) -> {load, load}; +op_head_tail(_) -> {load, undef}. + +op_barrier_ext(none) -> ""; +op_barrier_ext(Barrier) -> [$_, a2l(Barrier)]. + +op_call(addr, _DW, Ret, Func, Arg1, _Arg2, _Arg3, _TypeCast) -> + [Ret, " ", Func, "(", Arg1, ");"]; +op_call(Op, false, Ret, Func, Arg1, _Arg2, _Arg3, _TypeCast) when Op == read; + Op == inc_read; + Op == inc_return; + Op == dec_read; + Op == dec_return -> + [Ret, " ", Func, "(", Arg1, ");"]; +op_call(Op, false, _Ret, Func, Arg1, _Arg2, _Arg3, _TypeCast) when Op == inc; + Op == dec -> + [Func, "(", Arg1, ");"]; +op_call(Op, false, Ret, Func, Arg1, Arg2, _Arg3, TypeCast) when Op == add_return; + Op == add_read; + Op == read_band; + Op == and_retold; + Op == read_bor; + Op == or_retold; + Op == xchg -> + [Ret, " ", Func, "(", Arg1, ",", TypeCast, " ", Arg2, ");"]; +op_call(cmpxchg, _DW, Ret, Func, Arg1, Arg2, Arg3, TypeCast) -> + [Ret, " ", Func, "(", Arg1, ",", TypeCast, " ", Arg2, ",", TypeCast, " ", Arg3, ");"]; +op_call(_Op, _DW, _Ret, Func, Arg1, Arg2, _Arg3, TypeCast) -> + [Func, "(", Arg1, ",", TypeCast, " ", Arg2, ");"]. 
% set, init, add (!= dw), read (== dw) + +native_op_call(#atomic_context{dw = DW, + ret_var = RetVar, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3, + aint_t = AintT, + 'NATMC' = NATMC, + naint_t = NAintT}, + Op, B, TypeCasts) -> + op_call(Op, + DW, + [RetVar, " =", + case TypeCasts of + true -> [" (", AintT, ")"]; + false -> "" + end], + ["ETHR_", NATMC, "_FUNC__(", opstr(native(Op)), op_barrier_ext(B), ")"], + Arg1, + Arg2, + Arg3, + case TypeCasts of + true -> [" (", NAintT, ")"]; + false -> "" + end). + +simple_fallback(#atomic_context{arg1 = Arg1, + arg2 = Arg2, + 'ATMC' = ATMC}, + init, B) -> %% Also double word + [" ETHR_", ATMC, "_FUNC__(set", op_barrier_ext(B),")(", Arg1, ", ", Arg2, ");\n"]; +simple_fallback(#atomic_context{dw = false, + arg1 = Arg1, + arg2 = Arg2, + 'ATMC' = ATMC}, + set, B) -> + [" (void) ETHR_", ATMC, "_FUNC__(xchg", op_barrier_ext(B),")(", Arg1, ", ", Arg2, ");\n"]; +simple_fallback(#atomic_context{dw = false, + arg1 = Arg1, + arg2 = Arg2, + 'ATMC' = ATMC}, + add, B) -> + [" (void) ETHR_", ATMC, "_FUNC__(add_read", op_barrier_ext(B), ")(", Arg1, ", ", Arg2, ");\n"]; +simple_fallback(#atomic_context{dw = false, + ret_var = RetVar, + arg1 = Arg1, + aint_t = AintT, + 'ATMC' = ATMC}, + inc_read, B) -> + [" ", RetVar, " = ETHR_", ATMC, "_FUNC__(add_read", op_barrier_ext(B), ")(", Arg1, ", (", AintT,") 1);\n"]; +simple_fallback(#atomic_context{dw = false, + ret_var = RetVar, + arg1 = Arg1, + aint_t = AintT, + 'ATMC' = ATMC}, + dec_read, B) -> + [" ", RetVar, " = ETHR_", ATMC, "_FUNC__(add_read", op_barrier_ext(B), ")(", Arg1, ", (", AintT,") -1);\n"]; +simple_fallback(#atomic_context{dw = false, + arg1 = Arg1, + 'ATMC' = ATMC}, + inc, B) -> + [" (void) ETHR_", ATMC, "_FUNC__(inc_read", op_barrier_ext(B), ")(", Arg1, ");\n"]; +simple_fallback(#atomic_context{dw = false, + arg1 = Arg1, + 'ATMC' = ATMC}, + dec, B) -> + [" (void) ETHR_", ATMC, "_FUNC__(dec_read", op_barrier_ext(B), ")(", Arg1, ");\n"]; +simple_fallback(#atomic_context{dw = false, + unusual_val = UnusualVal, + ret_var = RetVar, + arg1 = Arg1, + aint_t = AintT, + 'ATMC' = ATMC}, + read, B) -> + [" ", RetVar, " = ETHR_", ATMC, "_FUNC__(cmpxchg", op_barrier_ext(B), ")(", Arg1, ", (", AintT, ") ", UnusualVal, ", (", AintT,") ", UnusualVal, ");\n"]; +simple_fallback(#atomic_context{dw = true, + unusual_val = UnusualVal, + arg1 = Arg1, + arg2 = Arg2, + aint_t = AintT, + 'ATMC' = ATMC}, + read, B) -> + [" ", AintT, " tmp; + tmp.", ?DW_SINT_FIELD, "[0] = ", UnusualVal, "; + tmp.", ?DW_SINT_FIELD, "[1] = ", UnusualVal, "; + ", Arg2, "->", ?DW_SINT_FIELD, "[0] = ", UnusualVal, "; + ", Arg2, "->", ?DW_SINT_FIELD, "[1] = ", UnusualVal, "; + (void) ETHR_", ATMC, "_FUNC__(cmpxchg", op_barrier_ext(B), ")(", Arg1, ", &tmp, ", Arg2, "); +" + ]; +simple_fallback(_AC, _Op, _B) -> + []. + +func_header(AC, prototype, MacroName, Op, B) -> + [func_header(AC, implementation, MacroName, Op, B), ";"]; +func_header(#atomic_context{'ATMC' = ATMC} = AC, inline_implementation, _MacroName, Op, B) -> + do_func_header(AC, Op, "static ETHR_INLINE ", + ["ETHR_", ATMC, "_FUNC__(", opstr(Op), op_barrier_ext(B), ")"]); +func_header(#atomic_context{atomic = Atomic} = AC, implementation, false, Op, B) -> + do_func_header(AC, Op, "", [Atomic, "_", opstr(Op), op_barrier_ext(B)]); +func_header(AC, implementation, MacroName, Op, B) -> + do_func_header(AC, Op, "", [MacroName, "(", opstr(Op), op_barrier_ext(B), ")"]). 
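simple_fallback/3 above rewrites operations that lack a native implementation in terms of operations that are guaranteed to exist. Roughly, and only as a sketch of the generated body text (function headers omitted, macro names as used in this script), a 32-bit set without a native set, and a read without a native read, come out as:

/* set expressed through xchg; the returned previous value is discarded */
(void) ETHR_ATMC32_FUNC__(xchg)(var, val);

/* read expressed through cmpxchg: compare against a value the variable is
   assumed never to hold, so the exchange (normally) fails and the call
   simply returns the current value */
res = ETHR_ATMC32_FUNC__(cmpxchg)(var,
                                  (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__,
                                  (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);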
+ + +do_func_header(#atomic_context{atomic_t = AtomicT, + addr_aint_t = AddrAintT, + arg1 = Arg1}, + addr, Inline, Func) -> + [Inline, AddrAintT, " *", Func, "(", AtomicT, " *", Arg1, ")"]; +do_func_header(#atomic_context{dw = false, + atomic_t = AtomicT, + aint_t = AintT, + arg1 = Arg1, + arg2 = Arg2}, + Op, Inline, Func) when Op == init; + Op == set; + Op == add -> + [Inline, "void ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " ", Arg2, ")"]; +do_func_header(#atomic_context{dw = false, + atomic_t = AtomicT, + arg1 = Arg1}, + Op, Inline, Func) when Op == inc; + Op == dec -> + [Inline, "void ", Func, "(", AtomicT, " *", Arg1, ")"]; +do_func_header(#atomic_context{dw = false, + atomic_t = AtomicT, + aint_t = AintT, + arg1 = Arg1}, + Op, Inline, Func) when Op == read; + Op == inc_read; + Op == dec_read -> + [Inline, AintT, " ", Func, "(", AtomicT, " *", Arg1, ")"]; +do_func_header(#atomic_context{dw = false, + atomic_t = AtomicT, + aint_t = AintT, + arg1 = Arg1, + arg2 = Arg2}, + Op, Inline, Func) when Op == add_read; + Op == read_band; + Op == read_bor; + Op == xchg -> + [Inline, AintT, " ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " ", Arg2, ")"]; +do_func_header(#atomic_context{dw = false, + atomic_t = AtomicT, + aint_t = AintT, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3}, + cmpxchg, Inline, Func) -> + [Inline, AintT, " ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " ", Arg2, ", ", AintT, " ", Arg3, ")"]; +do_func_header(#atomic_context{dw = true, + atomic_t = AtomicT, + aint_t = AintT, + arg1 = Arg1, + arg2 = Arg2}, + Op, Inline, Func) when Op == init; + Op == set; + Op == read -> + [Inline, "void ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " *", Arg2, ")"]; +do_func_header(#atomic_context{dw = true, + atomic_t = AtomicT, + aint_t = AintT, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3}, + cmpxchg, Inline, Func) -> + [Inline, "int ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " *", Arg2, ", ", AintT, " *", Arg3, ")"]. 
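do_func_header/4 above fixes the calling conventions. For reference, the headers it emits look roughly like this for the 32-bit and double-word flavours (argument names as in #atomic_context{}):

/* single word (32-bit): operands and results are passed by value */
ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *var,
                                    ethr_sint32_t val,
                                    ethr_sint32_t old_val);

/* double word: values are passed by pointer; cmpxchg reports success as an
   int, while read/set/init return void and use the second argument for the
   value */
int  ethr_dw_atomic_cmpxchg(ethr_dw_atomic_t *var,
                            ethr_dw_sint_t *val,
                            ethr_dw_sint_t *old_val);
void ethr_dw_atomic_read(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);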
+ + +xbarriers(_Op, none, _NB) -> + {"", ""}; + +xbarriers(_Op, acqb, NB) when NB == acqb; NB == mb -> + {"", ""}; +xbarriers(Op, acqb, NB) -> + case {op_head_tail(Op), NB} of + {{_, load}, rb} -> {"", "ETHR_MEMBAR(ETHR_LoadStore);"}; + {{_, load}, _} -> {"", "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);"}; + {{_, store}, _} -> {"", "ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);"}; + {_, rb} -> {"", "ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);"}; + _ -> {"", "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);"} + end; + +xbarriers(_Op, relb, NB) when NB == relb; NB == mb -> + {"", ""}; +xbarriers(Op, relb, NB) -> + case {op_head_tail(Op), NB} of + {{store, _}, wb} -> {"ETHR_MEMBAR(ETHR_LoadStore);", ""}; + {{store, _}, _} -> {"ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);", ""}; + {{load, _}, _} -> {"ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);", ""}; + {_, wb} -> {"ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);", ""}; + _ -> {"ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);", ""} + end; + +xbarriers(_Op, wb, NB) when NB == wb; NB == mb -> + {"", ""}; +xbarriers(_Op, wb, _NB) -> + {"ETHR_MEMBAR(ETHR_StoreStore);", ""}; + +xbarriers(_Op, rb, NB) when NB == rb; NB == mb -> + {"", ""}; +xbarriers(_Op, rb, _NB) -> + {"", "ETHR_MEMBAR(ETHR_LoadLoad);"}; + +xbarriers(_Op, mb, mb) -> + {"", ""}; +xbarriers(Op, mb, NB) -> + MB = "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);", + {Head, Tail} = op_head_tail(Op), + PreOp = case {Head, NB} of + {_, relb} -> ""; + {store, wb} -> "ETHR_MEMBAR(ETHR_LoadStore);"; + {store, _} -> "ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);"; + {load, _} -> "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);"; + {_, wb} -> "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);"; + _ -> MB + end, + PostOp = case {Tail, NB} of + {_, acqb} -> ""; + {load, rb} -> "ETHR_MEMBAR(ETHR_LoadStore);"; + {load, _} -> "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);"; + {store, _} -> "ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);"; + {_, rb} -> "ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);"; + _ -> MB + end, + {PreOp, PostOp}. + +try_barrier_order_first(none) -> + [none, rb, wb, acqb, relb]; +try_barrier_order_first(acqb) -> + [acqb, rb, none, mb]; +try_barrier_order_first(relb) -> + [relb, wb, none, mb]; +try_barrier_order_first(rb) -> + [rb, none, mb]; +try_barrier_order_first(wb) -> + [wb, none, mb]; +try_barrier_order_first(mb) -> + [mb, relb, acqb, wb, rb, none]. + +try_barrier_order(B) -> + First = try_barrier_order_first(B), + First ++ (?BARRIERS -- First). + +native_barrier_op(#atomic_context{'NATMC' = NATMC} = AC, If, ExtraDecl, Op, B, NB, TypeCasts) -> + NOpStr = opstr(native(Op)), + CapNOpStr = to_upper(NOpStr), + NBExt = op_barrier_ext(NB), + CapNBExt = to_upper(NBExt), + {PreB, PostB} = xbarriers(Op, B, NB), + [If, " defined(ETHR_HAVE_", NATMC, "_", CapNOpStr, CapNBExt, ")\n", + ExtraDecl, + case PreB of + "" -> ""; + _ -> [" ", PreB, "\n"] + end, + " ", native_op_call(AC, Op, NB, TypeCasts), "\n", + case PostB of + "" -> ""; + _ -> [" ", PostB, "\n"] + end]. + +dw_native_barrier_op(#atomic_context{arg1 = Arg1, arg2 = Arg2, arg3 = Arg3} = AC, If, ExtraDecl, Op, B, NB) -> + native_barrier_op(AC#atomic_context{arg1 = ["&", Arg1, "->native"], + arg2 = [Arg2, "->", ?DW_SINT_FIELD], + arg3 = [Arg3, "->", ?DW_SINT_FIELD]}, + If, ExtraDecl, Op, B, NB, false). 
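xbarriers/3 computes which ETHR_MEMBAR() calls have to be wrapped around a weaker native op so that the requested barrier semantics still hold, and native_barrier_op/7 emits the result. A rough rendering of what that produces for a full-barrier inc_read when only a plain, barrier-free native inc_return exists (compare the fallback bodies in ethr_atomics.c earlier in this diff):

#if defined(ETHR_HAVE_NATMC32_INC_RETURN)
    /* the op starts with a load, so earlier accesses are ordered before it */
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
    /* a full barrier afterwards completes the 'mb' semantics */
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif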
+ +su_dw_native_barrier_op(#atomic_context{dw = true, + naint_t = NAintT, + ret_var = RetVar, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3, + 'NATMC' = NATMC} = AC, If, cmpxchg, B, NB) -> + SU = ["->", ?SU_DW_SINT_FIELD], + TmpVar = "act", + SUArg1 = ["&", Arg1, "->native"], + SUArg2 = [Arg2, SU], + SUArg3 = [Arg3, SU], + ExtraDecl = [" ", NAintT, " ", TmpVar, ";\n"], + [native_barrier_op(AC#atomic_context{dw = false, + ret_var = TmpVar, + arg1 = SUArg1, + arg2 = SUArg2, + arg3 = SUArg3, + 'NATMC' = ["SU_", NATMC]}, + If, ExtraDecl, cmpxchg, B, NB, false), + " ", RetVar, " = (", TmpVar, " == ", SUArg3, "); + ", SUArg3, " = ", TmpVar, "; +" + ]; +su_dw_native_barrier_op(#atomic_context{dw = true, + arg1 = Arg1, + arg2 = Arg2, + 'NATMC' = NATMC} = AC, If, Op, B, NB) -> + SUArg1 = ["&", Arg1, "->native"], + SUArg2 = [Arg2, "->", ?SU_DW_SINT_FIELD], + native_barrier_op(AC#atomic_context{dw = false, + ret_var = SUArg2, + arg1 = SUArg1, + arg2 = SUArg2, + arg3 = not_used, + 'NATMC' = ["SU_", NATMC]}, If, "", Op, B, NB, false). + +cmpxchg_fallback_define(#atomic_context{dw = false, aint_t = AintT} = AC) -> + do_cmpxchg_fallback_define(AC, true, AintT); +cmpxchg_fallback_define(#atomic_context{dw = true, + 'NATMC' = NATMC, + naint_t = NAintT} = AC) -> + ["\n\n#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC)\n", + do_cmpxchg_fallback_define(AC, false, not_used), + "\n\n#elif defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)\n", + do_cmpxchg_fallback_define(AC#atomic_context{'NATMC' = ["SU_", NATMC], + naint_t = NAintT}, + true, + NAintT), + " + +#else +# error \"?!?\" +#endif +"]. + +do_cmpxchg_fallback_define(#atomic_context{'NATMC' = NATMC, + aint_t = AintT, + naint_t = NAintT}, + SU, SUType) -> + + ReadFunc = fun (IF) -> + fun (B) -> + BExt = op_barrier_ext(B), + CapBExt = to_upper(BExt), + [IF, " defined(ETHR_HAVE_", NATMC, "_READ", CapBExt, ")", + case SU of + true -> [" +#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR) \\ + ETHR_", NATMC, "_FUNC__(read", BExt, ")(VAR) +" + ]; + false -> [" +#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, VAL) \\ + ETHR_", NATMC, "_FUNC__(read", BExt, ")(VAR, VAL) +#elif defined(ETHR_HAVE_SU_", NATMC, "_READ", CapBExt, ") +#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, VAL) \\ + VAL.", ?SU_DW_SINT_FIELD, " = ETHR_SU_", NATMC, "_FUNC__(read", BExt, ")(VAR) +" + ] + end] + end + end, + NotDefCMPXCHG = fun (B) -> + CapBExt = to_upper(op_barrier_ext(B)), + ["!defined(ETHR_HAVE_", NATMC, "_CMPXCHG", CapBExt, ")"] + end, + NoneTryBarrierOrder = try_barrier_order(none), + %% First a sanity check + [" +#if (", NotDefCMPXCHG(hd(?BARRIERS)) , + lists:map(fun (B) -> + [" \\ + && ", NotDefCMPXCHG(B)] + end, + tl(?BARRIERS)), ") +# error \"No native cmpxchg() op available\" +#endif + + +/* + * Read op used together with cmpxchg() fallback when no native op present. + */ +", + + %% Read op to use with cmpxchg fallback + (ReadFunc("#if"))(hd(NoneTryBarrierOrder)), + lists:map(ReadFunc("#elif"), tl(NoneTryBarrierOrder)), +"#else +/* + * We have no native read() op; guess zero and then use the + * the atomics actual value returned from cmpxchg(). + */", + case SU of + true -> [" +#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR) \\ + ((", NAintT, ") 0)"]; + false -> [" +#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, VAL) \\ +do { \\ + VAL.", ?DW_SINT_FIELD, "[0] = (ethr_sint_t) 0; \\ + VAL.", ?DW_SINT_FIELD, "[1] = (ethr_sint_t) 0; \\ +} while (0)"] + end, " +#endif +", + + %% The fallback + " +/* + * Native cmpxchg() fallback used when no native op present. 
+ */ +#define ETHR_", NATMC, "_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \\ +do { \\", + case SU of + true -> [" + ", SUType, " AVAL; \\ + ", NAintT, " new__, act__, exp__; \\ + act__ = ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR); \\ + do { \\ + exp__ = act__; \\ + AVAL = (", SUType, ") act__; \\ + { OPS; } \\ + new__ = (", NAintT, ") AVAL; \\ + act__ = CMPXCHG(VAR, new__, exp__); \\ + } while (__builtin_expect(act__ != exp__, 0)); \\"]; + false -> [" + int res__; \\ + ", AintT, " AVAL, exp_act__; \\ + ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, exp_act__); \\ + do { \\ + AVAL.", ?DW_SINT_FIELD, "[0] = exp_act__.", ?DW_SINT_FIELD, "[0]; \\ + AVAL.", ?DW_SINT_FIELD, "[1] = exp_act__.", ?DW_SINT_FIELD, "[1]; \\ + { OPS; } \\ + res__ = CMPXCHG(VAR, AVAL.", ?DW_SINT_FIELD, ", exp_act__.", ?DW_SINT_FIELD, "); \\ + } while (__builtin_expect(res__ == 0, 0)); \\"] + end, " +} while (0) +" + ]. + +cmpxchg_fallbacks(#atomic_context{}, _SUDW, cmpxchg, _B) -> + ""; %% No need for a fallback +cmpxchg_fallbacks(#atomic_context{dw = DW, + ret_var = RetVar, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3, + 'NATMC' = NATMC}, + SUDW, Op, B) -> + Operation = case DW of + false -> + op(Op, #op_context{ret = RetVar, + var = "aval", + val1 = Arg2, + val2 = Arg3}); + true -> + case SUDW of + true -> + op(Op, #op_context{ret = [Arg2, "->", ?SU_DW_SINT_FIELD], + var = "aval", + val1 = [Arg2, "->", ?SU_DW_SINT_FIELD]}); + false -> + dw_op(Op, #op_context{ret = RetVar, + var = ["aval.", ?DW_SINT_FIELD], + val1 = [Arg2, "->", ?DW_SINT_FIELD]}) + end + end, + [lists:map(fun (NB) -> + NativeVar = case DW of + true -> ["&", Arg1, "->native"]; + false -> Arg1 + end, + NBExt = op_barrier_ext(NB), + CapNBExt = to_upper(NBExt), + {PreB, PostB} = xbarriers(cmpxchg, B, NB), + ["#elif defined(ETHR_HAVE_", NATMC, "_CMPXCHG", CapNBExt, ")\n", + case PreB of + "" -> ""; + _ -> [" ", PreB, "\n"] + end, + " ETHR_", NATMC, "_CMPXCHG_FALLBACK__(ETHR_", NATMC, "_FUNC__(cmpxchg", NBExt, "), ", NativeVar, ", aval, ", Operation, ");\n", + case PostB of + "" -> ""; + _ -> [" ", PostB, "\n"] + end] + end, + try_barrier_order(B))]. 
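The fallback macro above synthesizes any modification operation from one native cmpxchg() by reading the current value and retrying until the compare-and-exchange succeeds. As a hedged single-word illustration (identifier names are approximate; the real expansion goes through the AVAL/OPS placeholders and __builtin_expect), an "add incr" operation behaves like:

ethr_sint_t act__, exp__, new__;
act__ = ETHR_NATMC_CMPXCHG_FALLBACK_READ__(var);   /* native read, or a 0 guess   */
do {
    exp__ = act__;                                 /* value we expect to find     */
    new__ = exp__ + incr;                          /* the requested operation     */
    act__ = ETHR_NATMC_FUNC__(cmpxchg_mb)(var, new__, exp__);
} while (act__ != exp__);                          /* lost a race; retry          */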
+ +translate_have_defs(#atomic_context{dw = DW, 'NATMC' = NATMC}) -> + [" +#if !defined(ETHR_", NATMC, "_BITS__) +# error \"Missing native atomic implementation\"", + lists:map(fun (NBits) -> + {HaveInPrefix, + HaveOutPrefix, + HaveInPrefixExtra, + HaveOutPrefixExtra, + NativeTypeCheck} = case NBits of + "dw" -> + {"ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC", + ["ETHR_HAVE_", NATMC], + "ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC", + ["ETHR_HAVE_SU_", NATMC], + "\n#elif defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)"}; + _ -> + {[?HAVE_NATIVE_ATOMIC, NBits], + case DW of + true -> ["ETHR_HAVE_SU_", NATMC]; + false -> ["ETHR_HAVE_", NATMC] + end, + false, + ["ETHR_HAVE_", NATMC], + ["\n#elif ETHR_", NATMC, "_BITS__ == ", NBits]} + end, + [NativeTypeCheck, + lists:map(fun (Op) -> + NOpStr = opstr(native(Op)), + CapNOpStr = to_upper(NOpStr), + lists:map(fun (B) -> + NBExt = op_barrier_ext(B), + CapNBExt = to_upper(NBExt), + HaveOutDef = [HaveOutPrefix, "_", CapNOpStr, CapNBExt], + HaveOutDefExtra = [HaveOutPrefixExtra, "_", CapNOpStr, CapNBExt], + [case DW of + true -> + ["\n# undef ", HaveOutDefExtra]; + false -> + "" + end, " +# undef ", HaveOutDef," +# ifdef ", HaveInPrefix, "_", CapNOpStr, CapNBExt, " +# define ", HaveOutDef, " 1 +# endif", + case HaveInPrefixExtra of + false -> ""; + _ -> [" +# ifdef ", HaveInPrefixExtra, "_", CapNOpStr, CapNBExt, " +# define ", HaveOutDefExtra, " 1 +# endif" + ] + end] + end, + ?BARRIERS) + end, + case DW of + true -> ?DW_ATOMIC_OPS; + false -> ?ATOMIC_OPS + end)] + end, + case DW of + true -> ["dw", "64"]; + false -> ?POTENTIAL_NBITS + end), + " +#else +# error \"Invalid native atomic size\" +#endif +"]. + + + +make_prototypes(#atomic_context{dw = DW, 'ATMC' = ATMC} = AC) -> + MkProt = fun (MacroName) -> + %% addr() is special + [func_header(AC, prototype, MacroName, addr, none), "\n", + lists:map(fun (Op) -> + lists:map(fun (B) -> + [func_header(AC, prototype, MacroName, Op, B), "\n"] + end, + ?BARRIERS) + end, + case DW of + true -> ?DW_ATOMIC_OPS; + false -> ?ATOMIC_OPS + end)] + end, + [" +#ifdef ETHR_NEED_", ATMC, "_PROTOTYPES__ +", + MkProt(false), + case DW of + true -> ["#if defined(", ?DW_RTCHK_MACRO, ")\n", + MkProt(?DW_FUNC_MACRO), + "#endif\n"]; + false -> "" + end, + "#endif /* ETHR_NEED_", ATMC, "_PROTOTYPES__ */\n"]. + +rtchk_fallback_call(Return, #atomic_context{dw = DW, + ret_var = RetVar, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3}, + Op, B) -> + op_call(Op, DW, case Return of + true -> "return"; + false -> [RetVar, " ="] + end, [?DW_FUNC_MACRO, "(", opstr(Op), op_barrier_ext(B), ")"], Arg1, Arg2, Arg3, ""). 
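translate_have_defs/1 above bridges the size-specific feature macros advertised by the native layer to the size-neutral names tested by the generated functions. Roughly, and only as an illustration (assuming the word-size context ends up backed by 64-bit natives), every op/barrier pair produces a block of this shape:

#elif ETHR_NATMC_BITS__ == 64
/* ...one such block per op/barrier combination... */
# undef ETHR_HAVE_NATMC_CMPXCHG_MB
# ifdef ETHR_HAVE_NATIVE_ATOMIC64_CMPXCHG_MB
#  define ETHR_HAVE_NATMC_CMPXCHG_MB 1
# endif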
+ +make_implementations(#atomic_context{dw = DW, + ret_type = RetType, + ret_var = RetVar, + arg1 = Arg1, + addr_aint_t = AddrAintT, + atomic = Atomic, + have_native_atomic_ops = HaveNativeAtomicOps, + 'ATMC' = ATMC, + 'NATMC' = NATMC} = AC) -> + NativeVar = case DW of + true -> ["(&", Arg1, "->native)"]; + false -> Arg1 + end, + RtchkBegin = [" +#if defined(", ?DW_RTCHK_MACRO, ") + if (", ?DW_RTCHK_MACRO, ") { +#endif +"], + RtchkEnd = fun (Return, Operation, Barrier) -> + [" +#if defined(", ?DW_RTCHK_MACRO, ") + } else { ", rtchk_fallback_call(Return, AC, Operation, Barrier), " } +#endif\n" + ] + end, + [" +#if (defined(", HaveNativeAtomicOps, ") \\ + && (defined(ETHR_", ATMC, "_INLINE__) || defined(ETHR_ATOMIC_IMPL__))) +", + translate_have_defs(AC), + cmpxchg_fallback_define(AC), + %% addr() is special + " + + +/* --- addr() --- */ + +", func_header(AC, inline_implementation, false, addr, none), " +{", case DW of + true -> RtchkBegin; + false -> "" + end, " + return (", AddrAintT, " *) ETHR_", NATMC, "_ADDR_FUNC__(", NativeVar, "); +",case DW of + true -> RtchkEnd(true, addr, none); + false -> "" + end, " +} +", + lists:map(fun (Op) -> + OpStr = opstr(Op), + [" + +/* --- ", OpStr, "() --- */ + +", + lists:map(fun (B) -> + TryBarriers = try_barrier_order(B), + [" +", func_header(AC, inline_implementation, false, Op, B), " +{ +", + case is_return_op(AC, Op) of + true -> + [" ", RetType, " ", RetVar, ";\n"]; + _ -> "" + end, + case DW of + true -> + [RtchkBegin, + "\n", + su_dw_native_barrier_op(AC, "#if", Op, B, hd(TryBarriers)), + lists:map(fun (NB) -> + su_dw_native_barrier_op(AC, "#elif", Op, B, NB) + end, + tl(TryBarriers)), + lists:map(fun (NB) -> + dw_native_barrier_op(AC, "#elif", "", Op, B, NB) + end, + TryBarriers), + case simple_fallback(AC, Op, B) of + "" -> + %% No simple fallback available; + %% use cmpxchg() fallbacks... + [cmpxchg_fallbacks(AC#atomic_context{'NATMC' = ["SU_", NATMC]}, true, Op, B), + cmpxchg_fallbacks(AC, false, Op, B), + "#else +#error \"Missing implementation of ", Atomic, "_", opstr(Op), op_barrier_ext(B), "()!\" +#endif +" + ]; + SimpleFallback -> + ["#else\n", SimpleFallback, "#endif\n"] + end, + RtchkEnd(false, Op, B), "\n"]; + false -> + [native_barrier_op(AC, "#if", "", Op, B, hd(TryBarriers), true), + lists:map(fun (NB) -> + native_barrier_op(AC, "#elif", "", Op, B, NB, true) + end, + tl(TryBarriers)), + case simple_fallback(AC, Op, B) of + "" -> + %% No simple fallback available; + %% use cmpxchg() fallbacks... + [cmpxchg_fallbacks(AC, false, Op, B), + "#else +#error \"Missing implementation of ", Atomic, "_", opstr(Op), op_barrier_ext(B), "()!\" +#endif +" + ]; + SimpleFallback -> + ["#else\n", SimpleFallback, "#endif\n"] + end] + end, + case is_return_op(AC, Op) of + true -> + [" return ", RetVar, ";\n"]; + false -> + "" + end, + "}\n"] + end, + ?BARRIERS)] + end, + case DW of + true -> ?DW_ATOMIC_OPS; + false -> ?ATOMIC_OPS + end), + " +#endif /* ETHR_", ATMC, "_INLINE__ */ +" + ]. + +atomic_implementation_comment(AtomicSize) -> + CSz = case AtomicSize of + "dword" -> "Double word size"; + "word" -> "Word size"; + _ -> AtomicSize ++ "-bit" + end, + [" + +/* ---------- ", CSz, " atomic implementation ---------- */ + +" + ]. 
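make_implementations/1 above stitches these pieces together: each generated inline function tries the native variants in try_barrier_order/1 preference order, adding the membars computed by xbarriers/3, and only then falls back. A hedged skeleton of one word-size function (signature, casts and the exact fallback chain are approximate):

static ETHR_INLINE ethr_sint_t
ETHR_ATMC_FUNC__(read_acqb)(ethr_atomic_t *var)
{
    ethr_sint_t res;
#if defined(ETHR_HAVE_NATMC_READ_ACQB)
    res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
#elif defined(ETHR_HAVE_NATMC_READ_RB)
    res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
    ETHR_MEMBAR(ETHR_LoadStore);
#elif defined(ETHR_HAVE_NATMC_READ)
    res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#else
    /* _mb variant, simple fallback, cmpxchg() fallback, or #error */
#endif
    return res;
}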
+ +write_h_file(FileName) -> + {ok, FD} = file:open(FileName, [write, latin1]), + ok = file:write(FD, comments()), + ok = file:write(FD, " +#ifndef ETHR_ATOMICS_H__ +#define ETHR_ATOMICS_H__ +" + ), + ok = file:write(FD, h_top()), + ok = lists:foreach(fun (AtomicSize) -> + AC = atomic_context(AtomicSize), + ok = file:write(FD, + [atomic_implementation_comment(AtomicSize), + make_prototypes(AC), + make_implementations(AC)]) + end, + ?ATOMIC_SIZES), + ok = file:write(FD, " +#endif /* ETHR_ATOMICS_H__ */ +" + ), + ok = file:close(FD). + + +make_native_impl_op(#atomic_context{dw = DW, + atomic = Atomic, + have_native_atomic_ops = HaveNativeAtomicOps, + ret_var = RetVar, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3}, Op, B) -> + ["#if defined(", HaveNativeAtomicOps, ")", + case DW of + true -> [" && !defined(", ?DW_RTCHK_MACRO, ")"]; + false -> "" + end, + "\n", + " ", op_call(Op, DW, [RetVar, " = "], [Atomic, "_", opstr(Op), op_barrier_ext(B), "__"], Arg1, Arg2, Arg3, ""), + "\n"]. + +amc_op_dw_arg(#atomic_context{dw = false}) -> + "0"; +amc_op_dw_arg(#atomic_context{dw = true}) -> + "1". + +amc_op_arg_prefix(#atomic_context{dw = false}) -> + "&"; +amc_op_arg_prefix(#atomic_context{dw = true}) -> + "". + +amc_sint_arg(#atomic_context{dw = DW, arg2 = Arg}, arg2) -> + amc_sint_arg(DW, Arg); +amc_sint_arg(#atomic_context{dw = DW, arg3 = Arg}, arg3) -> + amc_sint_arg(DW, Arg); +amc_sint_arg(#atomic_context{dw = DW, ret_var = Arg}, ret_var) -> + amc_sint_arg(DW, Arg); +amc_sint_arg(true, Arg) -> + [Arg, "->" ?DW_SINT_FIELD]; +amc_sint_arg(false, Arg) -> + ["&", Arg]. + +amc_op_call(#atomic_context{arg1 = Arg1} = AC, init) -> + [" amc_init(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ");\n"]; +amc_op_call(#atomic_context{arg1 = Arg1} = AC, set) -> + [" amc_set(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ");\n"]; +amc_op_call(#atomic_context{dw = false, arg1 = Arg1} = AC, read) -> + [" amc_read(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, ret_var), ");\n"]; +amc_op_call(#atomic_context{dw = true, arg1 = Arg1} = AC, read) -> + [" amc_read(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ");\n"]; +amc_op_call(#atomic_context{dw = false, arg1 = Arg1, arg3 = Arg3, ret_var = RetVar} = AC, cmpxchg) -> + [" ", RetVar, " = ", Arg3, "; + (void) amc_cmpxchg(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ", ", amc_sint_arg(AC, ret_var), ");\n"]; +amc_op_call(#atomic_context{dw = true, arg1 = Arg1, ret_var = RetVar} = AC, cmpxchg) -> + [" ", RetVar, " = amc_cmpxchg(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ", ", amc_sint_arg(AC, arg3), ");\n"]; +amc_op_call(#atomic_context{dw = DW, arg1 = Arg1, arg2 = Arg2, arg3 = Arg3, ret_var = RetVar}, Op) -> + OpCtxt = #op_context{ret = RetVar, var = [Arg1,"->sint"], val1 = Arg2, val2 = Arg3}, + OpStr = case DW of + true -> dw_op(Op, OpCtxt); + false -> op(Op, OpCtxt) + end, + [" ETHR_AMC_MODIFICATION_OPS__(&", Arg1, "->amc, ", OpStr, ");\n"]. 
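One calling-convention detail in amc_op_call/2 above is worth spelling out: ethr_*_cmpxchg() must return the previously stored value, whereas amc_cmpxchg() (defined in the AMC fallback included by c_top()) returns a success flag and reports the old value through its last argument. The single-word clause therefore seeds the return variable with the caller's expected value before the call. A hedged sketch of the emitted statements (identifier names assumed):

res = exp;                                   /* expected value, also the out-buffer */
(void) amc_cmpxchg(&var->amc, 0,             /* 0 selects the single-word variant   */
                   &var->sint, &new_val, &res);
/* res now holds the previously stored value, as the ethread API requires */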
+ +make_amc_fallback_op(#atomic_context{amc_fallback = false}, _Op, _B) -> + ""; +make_amc_fallback_op(#atomic_context{amc_fallback = true} = AC, Op, B) -> + NB = case Op of + read -> rb; + _ -> none + end, + {PreB, PostB} = xbarriers(Op, B, NB), + ["#elif defined(ETHR_AMC_FALLBACK__)\n", + case PreB of + "" -> ""; + _ -> [" ", PreB, "\n"] + end, + amc_op_call(AC, Op), + case PostB of + "" -> ""; + _ -> [" ", PostB, "\n"] + end]. + +make_locked_fallback_op(#atomic_context{dw = DW, + ret_var = RetVar, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3}, Op, B) -> + OpStr = case DW of + true -> + dw_op(Op, #op_context{ret = RetVar, + var = [Arg1, "->" ?DW_SINT_FIELD], + val1 = [Arg2, "->" ?DW_SINT_FIELD], + val2 = [Arg3, "->" ?DW_SINT_FIELD]}); + false -> + op(Op, #op_context{ret = RetVar, + var = ["*", Arg1], + val1 = Arg2, + val2 = Arg3}) + end, + {PreB, PostB} = xbarriers(Op, B, none), + ["#else\n", + case PreB of + "" -> ""; + _ -> [" ", PreB, "\n"] + end, + [" ETHR_ATOMIC_OP_FALLBACK_IMPL__(", Arg1, ", ", OpStr, ");\n"], + case PostB of + "" -> ""; + _ -> [" ", PostB, "\n"] + end, + "#endif\n"]. + +make_symbol_to_fallback_impl(#atomic_context{dw = true, + atomic = Atomic, + arg1 = Arg1, + arg2 = Arg2, + arg3 = Arg3} = AC, + Op, B) -> + [" +#ifdef ", ?DW_RTCHK_MACRO, " +", func_header(AC, implementation, false, Op, B), " +{", + case Op of + init -> ""; + _ -> ["\n ETHR_ASSERT(!ethr_not_inited__);"] + end, " + ETHR_ASSERT(", Arg1, "); + ", op_call(Op, true, "return", [Atomic, "_", opstr(Op), op_barrier_ext(B), "__"], Arg1, Arg2, Arg3, ""), " +} +#endif +" + ]; +make_symbol_to_fallback_impl(_, _, _) -> + "". + +make_symbol_implementations(#atomic_context{dw = DW, + amc_fallback = AMC, + ret_type = RetType, + addr_aint_t = AddrAintT, + ret_var = RetVar, + arg1 = Arg1} = AC) -> + FallbackVar = case DW of + true -> ["(&", Arg1, "->fallback)"]; + false -> Arg1 + end, + [" +", + case DW of + true -> [" +/* + * Double word atomics need runtime test. + */ + +int ethr_have_native_dw_atomic(void) +{ + return ethr_have_native_dw_atomic__(); +} + "]; + false -> "" + end, " + +/* --- addr() --- */ + +", func_header(AC, implementation, + case DW of + true -> ?DW_FUNC_MACRO; + false -> false + end, addr, none), " +{ + ", AddrAintT, " *", RetVar, "; + ETHR_ASSERT(!ethr_not_inited__); + ETHR_ASSERT(", Arg1, "); +", make_native_impl_op(AC, addr, none), + case AMC of + true -> ["#elif defined(ETHR_AMC_FALLBACK__) + ", RetVar ," = (", AddrAintT, " *) (", FallbackVar, ")->sint;"]; + false -> "" + end, " +#else + ", RetVar, " = (", AddrAintT, " *) ", FallbackVar, "; +#endif + return ", RetVar, "; +} +", + make_symbol_to_fallback_impl(AC, addr, none), + lists:map(fun (Op) -> + [" + +/* -- ", opstr(Op), "() -- */ + +", + lists:map(fun (B) -> + ["\n", + func_header(AC, implementation, + case DW of + true -> ?DW_FUNC_MACRO; + false -> false + end, Op, B), + "\n{\n", + case is_return_op(AC, Op) of + true -> [" ", RetType, " ", RetVar, ";\n"]; + false -> "" + end, + case Op of + init -> ""; + _ -> [" ETHR_ASSERT(!ethr_not_inited__);\n"] + end, + [" ETHR_ASSERT(", Arg1, ");\n"], + make_native_impl_op(AC, Op, B), + make_amc_fallback_op(AC#atomic_context{arg1 = FallbackVar}, Op, B), + make_locked_fallback_op(AC#atomic_context{arg1 = FallbackVar}, Op, B), + case is_return_op(AC, Op) of + true -> [" return ", RetVar, ";" + ]; + false -> + "" + end, + "\n}\n", + make_symbol_to_fallback_impl(AC, Op, B)] + end, + ?BARRIERS)] + end, + case DW of + true -> ?DW_ATOMIC_OPS; + false -> ?ATOMIC_OPS + end)]. 
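Taken together, make_symbol_implementations/1 gives every exported symbol the same three-way structure: the native-backed inline version when one exists, otherwise the AMC fallback, otherwise the spinlock-protected fallback. A hedged sketch of one word-size symbol (the assertions match the generator; the operation bodies are illustrative):

ethr_sint_t
ethr_atomic_inc_read(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res = ethr_atomic_inc_read__(var);            /* inlined native-backed version */
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
#endif
    return res;
}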
+ +make_info_functions() -> + [" + + +/* --------- Info functions --------- */ + +#if defined(", ?DW_RTCHK_MACRO, ") +char *zero_ops[] = {NULL}; +#endif +", + [lists:map(fun (NBits) -> + {DW, Bits} = case NBits of + "su_dw" -> {"su_dw_", ""}; + "dw" -> {"dw_", ""}; + _ -> {"", NBits} + end, + [" + +static char *native_", DW, "atomic", Bits, "_ops[] = {", + lists:map(fun (Op) -> + NOpStr = opstr(native(Op)), + CapNOpStr = to_upper(NOpStr), + lists:map(fun (B) -> + HaveNative = case NBits of + "dw" -> + "ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC"; + "su_dw" -> + "ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC"; + _ -> + [?HAVE_NATIVE_ATOMIC, NBits] + end, + NBExt = op_barrier_ext(B), + CapNBExt = to_upper(NBExt), + [" +#ifdef ", HaveNative, "_", CapNOpStr, CapNBExt, " + \"", NOpStr, NBExt, "\", +#endif" + ] + end, + ?BARRIERS) + end, + case NBits of + "dw" -> ?DW_ATOMIC_OPS; + "su_dw" -> ?DW_ATOMIC_OPS; + _ -> ?ATOMIC_OPS + end), " + NULL +}; + +char ** +ethr_native_", DW, "atomic", Bits, "_ops(void) +{ +", + case DW of + "" -> ""; + _ -> [" +#if defined(", ?DW_RTCHK_MACRO, ") + if (!", ?DW_RTCHK_MACRO, ") + return &zero_ops[0]; +#endif" + ] + end, " + return &native_", DW, "atomic", Bits, "_ops[0]; +} +" + ] + end, ["su_dw", "dw" | ?POTENTIAL_NBITS])]]. + +write_c_file(FileName) -> + {ok, FD} = file:open(FileName, [write, latin1]), + ok = file:write(FD, comments()), + ok = file:write(FD, c_top()), + lists:foreach(fun (AtomicSize) -> + ok = file:write(FD, + [atomic_implementation_comment(AtomicSize), + make_symbol_implementations(atomic_context(AtomicSize))]) + end, + ?ATOMIC_SIZES), + ok = file:write(FD, make_info_functions()). + + +main([]) -> + case os:getenv("ERL_TOP") of + false -> + io:format("$ERL_TOP not set!~n", []), + halt(1); + ErlTop -> + HFile = filename:join(ErlTop, ?H_FILE), + WHFile = fun () -> + write_h_file(HFile) + end, + CFile = filename:join(ErlTop, ?C_FILE), + WCFile = fun () -> + write_c_file(CFile) + end, + case erlang:system_info(schedulers_online) of + 1 -> + WHFile(), + WCFile(); + _ -> + {HPid, HMon} = spawn_monitor(WHFile), + {CPid, CMon} = spawn_monitor(WCFile), + receive + {'DOWN', HMon, process, HPid, HReason} -> + normal = HReason + end, + receive + {'DOWN', CMon, process, CPid, CReason} -> + normal = CReason + end + end, + io:format("Wrote: ~s~n", [HFile]), + io:format("Wrote: ~s~n", [CFile]), + init:stop() + end. + +a2l(A) -> + atom_to_list(A). + +opstr(A) -> + a2l(A). + +to_upper([]) -> + []; +to_upper([C|Cs]) when is_list(C) -> + [to_upper(C)|to_upper(Cs)]; +to_upper([C|Cs]) when is_integer(C), 97 =< C, C =< 122 -> + [C-32|to_upper(Cs)]; +to_upper([C|Cs]) -> + [C|to_upper(Cs)]. + + +comments() -> + Years = case erlang:date() of + {2011, _, _} -> "2011"; + {Y, _, _} -> "2011-"++integer_to_list(Y) + end, + ["/* + * --------------- DO NOT EDIT THIS FILE! --------------- + * This file was automatically generated by the + * \$ERL_TOP/erts/lib_src/utils/make_atomics_api script. + * If you need to make changes, edit the script and + * regenerate this file. + * --------------- DO NOT EDIT THIS FILE! --------------- + */ + +/* + * %CopyrightBegin% + * + * Copyright Ericsson AB ", Years, ". All Rights Reserved. + * + * The contents of this file are subject to the Erlang Public License, + * Version 1.1, (the \"License\"); you may not use this file except in + * compliance with the License. You should have received a copy of the + * Erlang Public License along with this software. If not, it can be + * retrieved online at http://www.erlang.org/. 
+ *
+ * Software distributed under the License is distributed on an \"AS IS\"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: The ethread atomics API
+ * Author: Rickard Green
+ */
+
+/*
+ * This file maps native atomic implementations to ethread
+ * API atomics. If no native atomic implementation
+ * is available, a less efficient fallback is used instead.
+ * The API consists of 32-bit size, word size (pointer size),
+ * and double word size atomics.
+ *
+ * The following atomic operations are implemented for
+ * 32-bit size and word size atomics:
+",
+    lists:map(fun (Op) ->
+                      [" * - ", opstr(Op), "\n"]
+              end,
+              ?ATOMIC_OPS),
+    " *
+ * The following atomic operations are implemented for
+ * double word size atomics:
+",
+    lists:map(fun (Op) ->
+                      [" * - ", opstr(Op), "\n"]
+              end,
+              ?DW_ATOMIC_OPS),
+    " *
+ * Apart from a function implementing the atomic operation
+ * with unspecified memory barrier semantics, there are
+ * functions implementing each operation with the following
+ * memory barrier semantics:
+",
+    lists:map(fun (none) ->
+                      "";
+                  (rb) ->
+                      [" * - rb (read barrier)\n"];
+                  (wb) ->
+                      [" * - wb (write barrier)\n"];
+                  (acqb) ->
+                      [" * - acqb (acquire barrier)\n"];
+                  (relb) ->
+                      [" * - relb (release barrier)\n"];
+                  (mb) ->
+                      [" * - mb (full memory barrier)\n"];
+                  (B) ->
+                      [" * - ", a2l(B), "\n"]
+              end,
+              ?BARRIERS),
+    " *
+ * We implement all of these operation/barrier
+ * combinations, regardless of whether they are useful
+ * or not (some of them are useless).
+ *
+ * Double word size atomic functions have the following
+ * form:
+ *   ethr_dw_atomic_<OP>[_<BARRIER>]
+ *
+ * Word size atomic functions have the following
+ * form:
+ *   ethr_atomic_<OP>[_<BARRIER>]
+ *
+ * 32-bit size atomic functions have the following
+ * form:
+ *   ethr_atomic32_<OP>[_<BARRIER>]
+ *
+ * Apart from the operation/barrier functions
+ * described above, 'addr' functions are also implemented;
+ * they return the actual memory address used by the
+ * atomic variable. The 'addr' functions have no barrier
+ * versions.
+ *
+ * The native atomic implementation does not need to
+ * implement all operation/barrier combinations.
+ * Functions that have no native implementation will be
+ * constructed from existing native functionality. These
+ * functions will perform the requested operation and will
+ * produce sufficient memory barriers, but may
+ * in some cases be less efficient than pure native
+ * versions.
+ *
+ * When we create ethread API operation/barrier functions by
+ * adding barriers before and after native operations it is
+ * assumed that:
+ * - A native read operation begins and ends with a load.
+ * - A native set operation begins and ends with a store.
+ * - An init operation begins with either a load or a store,
+ *   and ends with either a load or a store.
+ * - All other operations begin with a load, and end with
+ *   either a load or a store.
+ * + * This is the minimum functionality that a native + * implementation needs to provide: + * + * - Functions that need to be implemented: + * + * - ethr_native_[dw_|su_dw_]atomic[BITS]_addr + * - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>] + * (at least one cmpxchg of optional barrier) + * + * - Macros that needs to be defined: + * + * A macro informing about the presence of the native + * implementation: + * + * - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS] + * + * A macro naming (a string constant) the implementation: + * + * - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL + * + * Each implemented native atomic function has to + * be accompanied by a defined macro on the following + * form informing about its presence: + * + * - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>] + * + * A (sparc-v9 style) membar macro: + * + * - ETHR_MEMBAR(B) + * + * Which takes a combination of the following macros + * or:ed (using |) together: + * + * - ETHR_LoadLoad + * - ETHR_LoadStore + * - ETHR_StoreLoad + * - ETHR_StoreStore + * + */ +" + ]. + +h_top() -> + [" +#undef ETHR_AMC_FALLBACK__ +#undef ETHR_AMC_NO_ATMCS__ +#undef ETHR_AMC_ATMC_T__ +#undef ETHR_AMC_ATMC_FUNC__ + +/* -- 32-bit atomics -- */ + +#undef ETHR_NAINT32_T__ +#undef ETHR_NATMC32_FUNC__ +#undef ETHR_NATMC32_ADDR_FUNC__ +#undef ETHR_NATMC32_BITS__ +#if defined(ETHR_HAVE_NATIVE_ATOMIC32) +# define ETHR_NEED_NATMC32_ADDR +# define ETHR_NATMC32_ADDR_FUNC__ ethr_native_atomic32_addr +typedef ethr_native_atomic32_t ethr_atomic32_t; +# define ETHR_NAINT32_T__ ethr_sint32_t +# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X +# define ETHR_NATMC32_BITS__ 32 +#elif defined(ETHR_HAVE_NATIVE_ATOMIC64) +# define ETHR_NEED_NATMC64_ADDR +#ifdef ETHR_BIGENDIAN +# define ETHR_NATMC32_ADDR_FUNC__(VAR) \\ + (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1) +#else +# define ETHR_NATMC32_ADDR_FUNC__(VAR) \\ + ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) +#endif +typedef ethr_native_atomic64_t ethr_atomic32_t; +# define ETHR_NAINT32_T__ ethr_sint64_t +# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_NATMC32_BITS__ 64 +#else +/* + * No native atomics usable for 32-bits atomics :( + * Use fallback... 
+ */ +typedef ethr_sint32_t ethr_atomic32_t; +#endif + +#undef ETHR_ATMC32_INLINE__ +#ifdef ETHR_NATMC32_BITS__ +# ifdef ETHR_TRY_INLINE_FUNCS +# define ETHR_ATMC32_INLINE__ +# endif +# define ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS +#endif + +#if !defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__) +# define ETHR_NEED_ATMC32_PROTOTYPES__ +#endif + +#ifndef ETHR_INLINE_ATMC32_FUNC_NAME_ +# define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X +#endif + +#undef ETHR_ATMC32_FUNC__ +#define ETHR_ATMC32_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X) + + +/* -- Word size atomics -- */ + +#undef ETHR_NEED_NATMC32_ADDR +#undef ETHR_NEED_NATMC64_ADDR + +#undef ETHR_NAINT_T__ +#undef ETHR_NATMC_FUNC__ +#undef ETHR_NATMC_ADDR_FUNC__ +#undef ETHR_NATMC_BITS__ +#if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC64) +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC64_ADDR +# endif +# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr +typedef ethr_native_atomic64_t ethr_atomic_t; +# define ETHR_NAINT_T__ ethr_sint64_t +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_NATMC_BITS__ 64 +#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC32) +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC32_ADDR +# endif +# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic32_addr +typedef ethr_native_atomic32_t ethr_atomic_t; +# define ETHR_NAINT_T__ ethr_sint32_t +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X +# define ETHR_NATMC_BITS__ 32 +#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64) +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC64_ADDR +# endif +#ifdef ETHR_BIGENDIAN +# define ETHR_NATMC_ADDR_FUNC__(VAR) \\ + (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1) +#else +# define ETHR_NATMC_ADDR_FUNC__(VAR) \\ + ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) +#endif +typedef ethr_native_atomic64_t ethr_atomic_t; +# define ETHR_NATMC_T__ ethr_native_atomic64_t +# define ETHR_NAINT_T__ ethr_sint64_t +# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_NATMC_BITS__ 64 +#else +/* + * No native atomics usable for pointer size atomics :( + * Use fallback... 
+ */ + +# if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) +# define ETHR_AMC_FALLBACK__ +# define ETHR_AMC_NO_ATMCS__ 2 +# define ETHR_AMC_SINT_T__ ethr_sint32_t +# define ETHR_AMC_ATMC_T__ ethr_atomic32_t +# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X) +typedef struct { + ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__]; +} ethr_amc_t; +typedef struct { + ethr_amc_t amc; + ethr_sint_t sint; +} ethr_atomic_t; +# else /* locked fallback */ +typedef ethr_sint_t ethr_atomic_t; +# endif +#endif + +#undef ETHR_ATMC_INLINE__ +#ifdef ETHR_NATMC_BITS__ +# ifdef ETHR_TRY_INLINE_FUNCS +# define ETHR_ATMC_INLINE__ +# endif +# define ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS +#endif + +#if !defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__) +# define ETHR_NEED_ATMC_PROTOTYPES__ +#endif + +#ifndef ETHR_INLINE_ATMC_FUNC_NAME_ +# define ETHR_INLINE_ATMC_FUNC_NAME_(X) X +#endif + +#undef ETHR_ATMC_FUNC__ +#define ETHR_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X) + +/* -- Double word atomics -- */ + +#undef ETHR_SU_DW_NAINT_T__ +#undef ETHR_SU_DW_NATMC_FUNC__ +#undef ETHR_SU_DW_NATMC_ADDR_FUNC__ +#undef ETHR_DW_NATMC_FUNC__ +#undef ETHR_DW_NATMC_ADDR_FUNC__ +#undef ETHR_DW_NATMC_BITS__ +#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC) +# define ETHR_NEED_DW_NATMC_ADDR +# define ETHR_DW_NATMC_ADDR_FUNC__ ethr_native_dw_atomic_addr +# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_dw_atomic_t +# define ETHR_DW_NATMC_FUNC__(X) ethr_native_dw_atomic_ ## X +# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_su_dw_atomic_ ## X +# if ETHR_SIZEOF_PTR == 8 +# define ETHR_DW_NATMC_BITS__ 128 +# elif ETHR_SIZEOF_PTR == 4 +# define ETHR_DW_NATMC_BITS__ 64 +# else +# error \"Word size not supported\" +# endif +# ifdef ETHR_NATIVE_SU_DW_SINT_T +# define ETHR_SU_DW_NAINT_T__ ETHR_NATIVE_SU_DW_SINT_T +# endif +#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64) +# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC +# ifndef ETHR_NEED_NATMC64_ADDR +# define ETHR_NEED_NATMC64_ADDR +# endif +# define ETHR_DW_NATMC_ADDR_FUNC__(VAR) \\ + ((ethr_dw_sint_t *) ethr_native_atomic64_addr((VAR))) +# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_atomic64_t +# define ETHR_SU_DW_NAINT_T__ ethr_sint64_t +# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_atomic64_ ## X +# define ETHR_DW_NATMC_BITS__ 64 +#endif + +#if defined(", ?DW_RTCHK_MACRO, ") +#define ", ?DW_FUNC_MACRO, "(X) ethr_dw_atomic_ ## X ## _fallback__ +#else +#define ", ?DW_FUNC_MACRO, "(X) ethr_dw_atomic_ ## X +#endif + +#if !defined(ETHR_DW_NATMC_BITS__) || defined(", ?DW_RTCHK_MACRO, ") +# define ETHR_NEED_DW_FALLBACK__ +#endif + +#if defined(ETHR_NEED_DW_FALLBACK__) +/* + * No native atomics usable for double word atomics :( + * Use fallback... 
+ */
+
+# ifndef ETHR_AMC_FALLBACK__
+#  if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+#   define ETHR_AMC_FALLBACK__
+#   define ETHR_AMC_NO_ATMCS__ 1
+#   define ETHR_AMC_SINT_T__ ethr_sint_t
+#   define ETHR_AMC_ATMC_T__ ethr_atomic_t
+#   define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X)
+#  elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+#   define ETHR_AMC_FALLBACK__
+#   define ETHR_AMC_NO_ATMCS__ 2
+#   define ETHR_AMC_SINT_T__ ethr_sint32_t
+#   define ETHR_AMC_ATMC_T__ ethr_atomic32_t
+#   define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+#  endif
+#  ifdef ETHR_AMC_FALLBACK__
+typedef struct {
+    ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__];
+} ethr_amc_t;
+#  endif
+# endif
+
+typedef struct {
+#ifdef ETHR_AMC_FALLBACK__
+    ethr_amc_t amc;
+#endif
+    ethr_sint_t sint[2];
+} ethr_dw_atomic_fallback_t;
+
+#endif
+
+typedef union {
+#ifdef ETHR_NATIVE_DW_ATOMIC_T__
+    ETHR_NATIVE_DW_ATOMIC_T__ native;
+#endif
+#ifdef ETHR_NEED_DW_FALLBACK__
+    ethr_dw_atomic_fallback_t fallback;
+#endif
+    ethr_sint_t sint[2];
+} ethr_dw_atomic_t;
+
+typedef union {
+#ifdef ETHR_SU_DW_NAINT_T__
+    ETHR_SU_DW_NAINT_T__ ", ?SU_DW_SINT_FIELD, ";
+#endif
+    ethr_sint_t ", ?DW_SINT_FIELD, "[2];
+} ethr_dw_sint_t;
+
+#ifdef ETHR_BIGENDIAN
+# define ETHR_DW_SINT_LOW_WORD 1
+# define ETHR_DW_SINT_HIGH_WORD 0
+#else
+# define ETHR_DW_SINT_LOW_WORD 0
+# define ETHR_DW_SINT_HIGH_WORD 1
+#endif
+
+#undef ETHR_DW_ATMC_INLINE__
+#ifdef ETHR_DW_NATMC_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_DW_ATMC_INLINE__
+# endif
+# define ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_DW_ATMC_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_DW_ATMC_FUNC_NAME_
+# define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_DW_ATMC_FUNC__
+#define ETHR_DW_ATMC_FUNC__(X) ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_dw_atomic_ ## X)
+
+#if defined(ETHR_NEED_DW_ATMC_PROTOTYPES__)
+int ethr_have_native_dw_atomic(void);
+#endif
+#if defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+static ETHR_INLINE int
+ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_have_native_dw_atomic)(void)
+{
+#if defined(", ?DW_RTCHK_MACRO, ")
+    return ", ?DW_RTCHK_MACRO, ";
+#elif defined(ETHR_DW_NATMC_BITS__)
+    return 1;
+#else
+    return 0;
+#endif
+}
+#endif
+
+/* -- Misc -- */
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+/*
+ * Unusual values are used by read() fallbacks implemented via cmpxchg().
+ * We want to use an unusual value in the hope that it is more efficient
+ * not to match the value in memory.
+ *
+ * - Negative integer values are probably more unusual.
+ * - Very large absolute integer values are probably more unusual.
+ * - Odd pointers are probably more unusual (only char pointers can be odd).
+ */ +# define ETHR_UNUSUAL_SINT32_VAL__ ((ethr_sint32_t) 0x81818181) +# if ETHR_SIZEOF_PTR == 4 +# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) ETHR_UNUSUAL_SINT32_VAL__) +# elif ETHR_SIZEOF_PTR == 8 +# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) 0x8181818181818181L) +# else +# error \"Word size not supported\" +# endif +# if defined(ETHR_NEED_DW_NATMC_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR) +# error \"No ethr_native_dw_atomic_addr() available\" +# endif +# if defined(ETHR_NEED_NATMC32_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR) +# error \"No ethr_native_atomic32_addr() available\" +# endif +# if defined(ETHR_NEED_NATMC64_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR) +# error \"No ethr_native_atomic64_addr() available\" +# endif +#endif + +#if defined(__GNUC__) +# ifndef ETHR_COMPILER_BARRIER +# define ETHR_COMPILER_BARRIER __asm__ __volatile__(\"\" : : : \"memory\") +# endif +#elif defined(ETHR_WIN32_THREADS) +# ifndef ETHR_COMPILER_BARRIER +# include <intrin.h> +# pragma intrinsic(_ReadWriteBarrier) +# define ETHR_COMPILER_BARRIER _ReadWriteBarrier() +# endif +#endif + +void ethr_compiler_barrier_fallback(void); +#ifndef ETHR_COMPILER_BARRIER +# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback() +#endif + +int ethr_init_atomics(void); + +/* info */ +char **ethr_native_atomic32_ops(void); +char **ethr_native_atomic64_ops(void); +char **ethr_native_dw_atomic_ops(void); +char **ethr_native_su_dw_atomic_ops(void); + +#if !defined(ETHR_DW_NATMC_BITS__) && !defined(ETHR_NATMC_BITS__) && !defined(ETHR_NATMC32_BITS__) +/* + * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only, + * i.e. when no native atomic implementation exist and only our lock + * based atomic fallback is used, a noop is sufficient. + */ +# undef ETHR_MEMORY_BARRIER +# undef ETHR_WRITE_MEMORY_BARRIER +# undef ETHR_READ_MEMORY_BARRIER +# undef ETHR_READ_DEPEND_MEMORY_BARRIER +# undef ETHR_MEMBAR +# define ETHR_MEMBAR(B) do { } while (0) +#endif + +#ifndef ETHR_MEMBAR +# error \"No ETHR_MEMBAR defined\" +#endif + +#define ETHR_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore) +#define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMBAR(ETHR_StoreStore) +#define ETHR_READ_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad) +#ifdef ETHR_READ_DEPEND_MEMORY_BARRIER +# undef ETHR_ORDERED_READ_DEPEND +#else +# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER +# define ETHR_ORDERED_READ_DEPEND +#endif +"]. + +c_top() -> + [" + +#ifdef HAVE_CONFIG_H +#include \"config.h\" +#endif + +#define ETHR_TRY_INLINE_FUNCS +#define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X ## __ +#define ETHR_INLINE_ATMC_FUNC_NAME_(X) X ## __ +#define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X ## __ +#define ETHR_ATOMIC_IMPL__ + +#include \"ethread.h\" +#include \"ethr_internal.h\" + +#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \\ + || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)) +/* + * Spinlock based fallback for atomics used in absence of a native + * implementation. 
+ */ + +#define ETHR_ATMC_FLLBK_ADDR_BITS ", ?ETHR_ATMC_FLLBK_ADDR_BITS, " +#define ETHR_ATMC_FLLBK_ADDR_SHIFT ", ?ETHR_ATMC_FLLBK_ADDR_SHIFT, " + +typedef struct { + union { + ethr_spinlock_t lck; + char buf[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_spinlock_t))]; + } u; +} ethr_atomic_protection_t; + +extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS]; + +#define ETHR_ATOMIC_PTR2LCK__(PTR) \\ +(ðr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATMC_FLLBK_ADDR_SHIFT) \\ + & ((1 << ETHR_ATMC_FLLBK_ADDR_BITS) - 1))].u.lck) + + +#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS) \\ +do { \\ + ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP)); \\ + ethr_spin_lock(slp__); \\ + { EXPS; } \\ + ethr_spin_unlock(slp__); \\ +} while (0) + +ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS]; + +#endif + +", make_amc_fallback(), " + +int +ethr_init_atomics(void) +{ +#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \\ + || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)) + int i; + for (i = 0; i < (1 << ETHR_ATMC_FLLBK_ADDR_BITS); i++) { + int res = ethr_spinlock_init(ðr_atomic_protection__[i].u.lck); + if (res != 0) + return res; + } +#endif + return 0; +} +"]. + +make_amc_fallback() -> + [" +#if defined(ETHR_AMC_FALLBACK__) + +/* + * Fallback for large sized (word and/or double word size) atomics using + * an \"Atomic Modification Counter\" based on smaller sized native atomics. + * + * We use a 63-bit modification counter and a one bit exclusive flag. + * If 32-bit native atomics are used, we need two 32-bit native atomics. + * The exclusive flag is the least significant bit, or if multiple atomics + * are used, the least significant bit of the least significant atomic. + * + * When using the AMC fallback the following is true: + * - Reads of the same atomic variable can be done in parallel. + * - Uncontended reads doesn't cause any cache line invalidations, + * since no modifications are done. + * - Assuming that the AMC atomic(s) and the integer(s) containing the + * value of the implemented atomic resides in the same cache line, + * modifications will only cause invalidations of one cache line. + * + * When using the spinlock based fallback none of the above is true, + * however, the spinlock based fallback consumes less memory. + */ + +# if ETHR_AMC_NO_ATMCS__ != 1 && ETHR_AMC_NO_ATMCS__ != 2 +# error \"Not supported\" +# endif +# define ETHR_AMC_MAX_TRY_READ__ 10 +# ifdef ETHR_DEBUG +# define ETHR_DBG_CHK_EXCL_STATE(ASP, S) \\ +do { \\ + ETHR_AMC_SINT_T__ act = ETHR_AMC_ATMC_FUNC__(read)(&(ASP)->atomic[0]); \\ + ETHR_ASSERT(act == (S) + 1); \\ + ETHR_ASSERT(act & 1); \\ +} while (0) +# else +# define ETHR_DBG_CHK_EXCL_STATE(ASP, S) +# endif + +static ETHR_INLINE void +amc_init(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val) +{ + avar[0] = val[0]; + if (dw) + avar[1] = val[1]; +#if ETHR_AMC_NO_ATMCS__ == 2 + ETHR_AMC_ATMC_FUNC__(init)(&amc->atomic[1], 0); +#endif + ETHR_AMC_ATMC_FUNC__(init_wb)(&amc->atomic[0], 0); +} + +static ETHR_INLINE ETHR_AMC_SINT_T__ +amc_set_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ prev_state0) +{ + ETHR_AMC_SINT_T__ state0 = prev_state0; + /* Set exclusive flag. 
*/ + while (1) { + ETHR_AMC_SINT_T__ act_state0, new_state0; + while (state0 & 1) { /* Wait until exclusive bit has been cleared */ + ETHR_SPIN_BODY; + state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); + } + /* Try to set exclusive bit */ + new_state0 = state0 + 1; + act_state0 = ETHR_AMC_ATMC_FUNC__(cmpxchg_acqb)(&amc->atomic[0], + new_state0, + state0); + if (state0 == act_state0) + return state0; /* old state0 */ + state0 = act_state0; + } +} + +static ETHR_INLINE void +amc_inc_mc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0) +{ + ETHR_AMC_SINT_T__ state0 = old_state0; + + /* Increment modification counter and reset exclusive flag. */ + + ETHR_DBG_CHK_EXCL_STATE(amc, state0); + + state0 += 2; + + ETHR_ASSERT((state0 & 1) == 0); + +#if ETHR_AMC_NO_ATMCS__ == 2 + if (state0 == 0) { + /* + * state0 wrapped, so we need to increment state1. There is no need + * for atomic inc op, since this is always done while having exclusive + * flag. + */ + ETHR_AMC_SINT_T__ state1 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]); + state1++; + ETHR_AMC_ATMC_FUNC__(set)(&amc->atomic[1], state1); + } +#endif + ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], state0); +} + +static ETHR_INLINE void +amc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0) +{ + ETHR_DBG_CHK_EXCL_STATE(amc, old_state0); + /* + * Reset exclusive flag, but leave modification counter unchanged, + * i.e., restore state to what it was before setting exclusive + * flag. + */ + ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], old_state0); +} + +static ETHR_INLINE void +amc_set(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val) +{ + ETHR_AMC_SINT_T__ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); + + state0 = amc_set_excl(amc, state0); + + avar[0] = val[0]; + if (dw) + avar[1] = val[1]; + + amc_inc_mc_unset_excl(amc, state0); +} + +static ETHR_INLINE int +amc_try_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar, + ethr_sint_t *val, ETHR_AMC_SINT_T__ *state0p) +{ + /* *state0p should contain last read value if aborting */ + ETHR_AMC_SINT_T__ old_state0; +#if ETHR_AMC_NO_ATMCS__ == 2 + ETHR_AMC_SINT_T__ state1; + int abrt; +#endif + + *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]); + if ((*state0p) & 1) + return 0; /* exclusive flag set; abort */ +#if ETHR_AMC_NO_ATMCS__ == 2 + state1 = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[1]); +#else + ETHR_COMPILER_BARRIER; +#endif + + val[0] = avar[0]; + if (dw) + val[1] = avar[1]; + + ETHR_READ_MEMORY_BARRIER; + + /* + * Abort if state has changed (i.e, either the exclusive + * flag is set, or modification counter changed). 
+ */ + old_state0 = *state0p; +#if ETHR_AMC_NO_ATMCS__ == 2 + *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]); + abrt = (old_state0 != *state0p); + abrt |= (state1 != ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1])); + return abrt == 0; +#else + *state0p = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); + return old_state0 == *state0p; +#endif +} + +static ETHR_INLINE void +amc_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val) +{ + ETHR_AMC_SINT_T__ state0; + int i; + +#if ETHR_AMC_MAX_TRY_READ__ == 0 + state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]); +#else + for (i = 0; i < ETHR_AMC_MAX_TRY_READ__; i++) { + if (amc_try_read(amc, dw, avar, val, &state0)) + return; /* read success */ + ETHR_SPIN_BODY; + } +#endif + + state0 = amc_set_excl(amc, state0); + + val[0] = avar[0]; + if (dw) + val[1] = avar[1]; + + amc_unset_excl(amc, state0); +} + +static ETHR_INLINE int +amc_cmpxchg(ethr_amc_t *amc, int dw, ethr_sint_t *avar, + ethr_sint_t *new, ethr_sint_t *xchg) +{ + ethr_sint_t val[2]; + ETHR_AMC_SINT_T__ state0; + + if (amc_try_read(amc, dw, avar, val, &state0)) { + if (val[0] != xchg[0] || (dw && val[1] != xchg[1])) { + xchg[0] = val[0]; + if (dw) + xchg[1] = val[1]; + return 0; /* failed */ + } + /* Operation will succeed if not interrupted */ + } + + state0 = amc_set_excl(amc, state0); + + if (xchg[0] != avar[0] || (dw && xchg[1] != avar[1])) { + xchg[0] = avar[0]; + if (dw) + xchg[1] = avar[1]; + + ETHR_DBG_CHK_EXCL_STATE(amc, state0); + + amc_unset_excl(amc, state0); + return 0; /* failed */ + } + + avar[0] = new[0]; + if (dw) + avar[1] = new[1]; + + amc_inc_mc_unset_excl(amc, state0); + return 1; +} + + +#define ETHR_AMC_MODIFICATION_OPS__(AMC, OPS) \\ +do { \\ + ETHR_AMC_SINT_T__ state0__; \\ + state0__ = ETHR_AMC_ATMC_FUNC__(read)(&(AMC)->atomic[0]); \\ + state0__ = amc_set_excl((AMC), state0__); \\ + { OPS; } \\ + amc_inc_mc_unset_excl((AMC), state0__); \\ +} while (0) + +#endif /* amc fallback */ +"]. + diff --git a/erts/lib_src/win/ethr_event.c b/erts/lib_src/win/ethr_event.c index 68f093f49c..bc2f635c26 100644 --- a/erts/lib_src/win/ethr_event.c +++ b/erts/lib_src/win/ethr_event.c @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2009-2010. All Rights Reserved. + * Copyright Ericsson AB 2009-2011. All Rights Reserved. 
* * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -28,13 +28,10 @@ /* --- Windows implementation of thread events ------------------------------ */ -#pragma intrinsic(_InterlockedExchangeAdd) -#pragma intrinsic(_InterlockedCompareExchange) - int ethr_event_init(ethr_event *e) { - e->state = ETHR_EVENT_OFF__; + ethr_atomic32_init(&e->state, ETHR_EVENT_OFF__); e->handle = CreateEvent(NULL, FALSE, FALSE, NULL); if (e->handle == INVALID_HANDLE_VALUE) return ethr_win_get_errno__(); @@ -63,7 +60,6 @@ ethr_event_reset(ethr_event *e) static ETHR_INLINE int wait(ethr_event *e, int spincount) { - LONG state; DWORD code; int sc, res, until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS; @@ -73,13 +69,9 @@ wait(ethr_event *e, int spincount) sc = spincount; while (1) { - long on; + ethr_sint32_t state; while (1) { -#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ - state = e->state; -#else - state = _InterlockedExchangeAdd(&e->state, (LONG) 0); -#endif + state = ethr_atomic32_read(&e->state); if (state == ETHR_EVENT_ON__) return 0; if (sc == 0) @@ -95,9 +87,9 @@ wait(ethr_event *e, int spincount) } if (state != ETHR_EVENT_OFF_WAITER__) { - state = _InterlockedCompareExchange(&e->state, - ETHR_EVENT_OFF_WAITER__, - ETHR_EVENT_OFF__); + state = ethr_atomic32_cmpxchg(&e->state, + ETHR_EVENT_OFF_WAITER__, + ETHR_EVENT_OFF__); if (state == ETHR_EVENT_ON__) return 0; ETHR_ASSERT(state == ETHR_EVENT_OFF__); diff --git a/erts/lib_src/win/ethread.c b/erts/lib_src/win/ethread.c index 789a360b11..3abda6de4c 100644 --- a/erts/lib_src/win/ethread.c +++ b/erts/lib_src/win/ethread.c @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2010. All Rights Reserved. + * Copyright Ericsson AB 2010-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -35,6 +35,7 @@ #include <winerror.h> #include <stdio.h> #include <limits.h> +#include <intrin.h> #define ETHR_INLINE_FUNC_NAME_(X) X ## __ #define ETHREAD_IMPL__ @@ -158,6 +159,25 @@ ethr_abort__(void) #endif } +#if defined(ETHR_X86_RUNTIME_CONF__) + +#pragma intrinsic(__cpuid) + +void +ethr_x86_cpuid__(int *eax, int *ebx, int *ecx, int *edx) +{ + int CPUInfo[4]; + + __cpuid(CPUInfo, *eax); + + *eax = CPUInfo[0]; + *ebx = CPUInfo[1]; + *ecx = CPUInfo[2]; + *edx = CPUInfo[3]; +} + +#endif /* ETHR_X86_RUNTIME_CONF__ */ + /* * ---------------------------------------------------------------------------- * Exported functions diff --git a/erts/test/ethread_SUITE.erl b/erts/test/ethread_SUITE.erl index 71d8c1c679..4206bebfe7 100644 --- a/erts/test/ethread_SUITE.erl +++ b/erts/test/ethread_SUITE.erl @@ -47,14 +47,29 @@ spinlock/1, rwspinlock/1, rwmutex/1, - atomic/1]). + atomic/1, + dw_atomic_massage/1]). -include_lib("test_server/include/test_server.hrl"). -tests() -> - [create_join_thread, equal_tids, mutex, try_lock_mutex, - cond_wait, broadcast, detached_thread, - max_threads, tsd, spinlock, rwspinlock, rwmutex, atomic]. +tests() -> + [create_join_thread, + equal_tids, + mutex, + try_lock_mutex, + cond_wait, + broadcast, + detached_thread, + max_threads, + tsd, + spinlock, + rwspinlock, + rwmutex, + atomic, + dw_atomic_massage]. + +all(doc) -> []; +all(suite) -> tests(). suite() -> [{ct_hooks,[ts_install_cth]}]. @@ -211,6 +226,13 @@ atomic(suite) -> atomic(Config) -> run_case(Config, "atomic", ""). 
+dw_atomic_massage(doc) -> + ["Massage double word atomics"]; +dw_atomic_massage(suite) -> + []; +dw_atomic_massage(Config) -> + run_case(Config, "dw_atomic_massage", ""). + %% %% %% Auxiliary functions diff --git a/erts/test/ethread_SUITE_data/ethread_tests.c b/erts/test/ethread_SUITE_data/ethread_tests.c index 0b59ff5aa6..7e7e133d6c 100644 --- a/erts/test/ethread_SUITE_data/ethread_tests.c +++ b/erts/test/ethread_SUITE_data/ethread_tests.c @@ -1,7 +1,7 @@ /* * %CopyrightBegin% * - * Copyright Ericsson AB 2004-2010. All Rights Reserved. + * Copyright Ericsson AB 2004-2011. All Rights Reserved. * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in @@ -1310,6 +1310,9 @@ rwmutex_test(void) * Tests atomics. */ +#define AT_AINT32_MAX 0x7fffffff +#define AT_AINT32_MIN 0x80000000 + #define AT_THREADS 4 #define AT_ITER 10000 @@ -1320,12 +1323,428 @@ static ethr_atomic_t at_go; static ethr_atomic_t at_done; static ethr_atomic_t at_data; +#define AT_TEST_INIT(T, A, B) \ +do { \ + ethr_ ## A ## _init ## B(&A, 17); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 17); \ +} while (0) + +#define AT_TEST_SET(T, A, B) \ +do { \ + ethr_ ## A ## _set ## B(&A, 4711); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \ +} while (0) + +#define AT_TEST_XCHG(T, A, B) \ +do { \ + ethr_ ## A ## _set ## B(&A, 4711); \ + ASSERT(ethr_ ## A ## _xchg ## B(&A, 17) == 4711); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 17); \ +} while (0) + +#define AT_TEST_CMPXCHG(T, A, B) \ +do { \ + ethr_ ## A ## _set ## B(&A, 4711); \ + ASSERT(ethr_ ## A ## _cmpxchg ## B(&A, 17, 33) == 4711); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \ + ASSERT(ethr_ ## A ## _cmpxchg ## B(&A, 17, 4711) == 4711); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 17); \ +} while (0) + +#define AT_TEST_ADD_READ(T, A, B) \ +do { \ + T var_ = AT_AINT32_MAX; \ + var_ += 4711; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \ + ASSERT(ethr_ ## A ## _add_read ## B(&A, 4711) == var_); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + var_ = AT_AINT32_MIN; \ + var_ -= 4711; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \ + ASSERT(ethr_ ## A ## _add_read ## B(&A, -4711) == var_); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + ethr_ ## A ## _set ## B(&A, 4711); \ + ASSERT(ethr_ ## A ## _add_read ## B(&A, 10) == 4721); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 4721); \ +} while (0) + +#define AT_TEST_ADD(T, A, B) \ +do { \ + T var_ = AT_AINT32_MAX; \ + var_ += 4711; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \ + ethr_ ## A ## _add ## B(&A, 4711); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + var_ = AT_AINT32_MIN; \ + var_ -= 4711; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \ + ethr_ ## A ## _add ## B(&A, -4711); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + ethr_ ## A ## _set ## B(&A, 11); \ + ethr_ ## A ## _add ## B(&A, 4700); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \ +} while (0) + +#define AT_TEST_INC_READ(T, A, B) \ +do { \ + T var_ = AT_AINT32_MAX; \ + var_++; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \ + ASSERT(ethr_ ## A ## _inc_read ## B(&A) == var_); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + ethr_ ## A ## _set ## B(&A, 4710); \ + ASSERT(ethr_ ## A ## _inc_read ## B(&A) == 4711); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \ +} while (0) + +#define AT_TEST_DEC_READ(T, A, B) \ +do { \ + T var_ = AT_AINT32_MIN; \ + var_--; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \ + ASSERT(ethr_ ## A ## _dec_read 
## B(&A) == var_); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + ethr_ ## A ## _set ## B(&A, 17); \ + ASSERT(ethr_ ## A ## _dec_read ## B(&A) == 16); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 16); \ +} while (0) + + +#define AT_TEST_INC(T, A, B) \ +do { \ + T var_ = AT_AINT32_MAX; \ + var_++; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \ + ethr_ ## A ## _inc ## B(&A); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + ethr_ ## A ## _set ## B(&A, 4710); \ + ethr_ ## A ## _inc ## B(&A); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \ +} while (0) + +#define AT_TEST_DEC(T, A, B) \ +do { \ + T var_ = AT_AINT32_MIN; \ + var_--; \ + ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \ + ethr_ ## A ## _dec ## B(&A); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \ + ethr_ ## A ## _set ## B(&A, 17); \ + ethr_ ## A ## _dec ## B(&A); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 16); \ +} while (0) + +#define AT_TEST_READ_BAND(T, A, B) \ +do { \ + ethr_ ## A ## _set ## B(&A, 0x13131313); \ + ASSERT(ethr_ ## A ## _read_band ## B(&A, 0x31313131) == 0x13131313); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 0x11111111); \ +} while (0) + +#define AT_TEST_READ_BOR(T, A, B) \ +do { \ + ethr_ ## A ## _set ## B(&A, 0x11111111); \ + ASSERT(ethr_ ## A ## _read_bor ## B(&A, 0x23232323) == 0x11111111); \ + ASSERT(ethr_ ## A ## _read ## B(&A) == 0x33333333); \ +} while (0) + + +static void +atomic_basic_test(void) +{ + /* + * Verify that each op does what it is expected + * to do for at least one input. + */ + ethr_atomic32_t atomic32; + ethr_atomic_t atomic; + + print_line("AT_AINT32_MAX=%d",AT_AINT32_MAX); + print_line("AT_AINT32_MIN=%d",AT_AINT32_MIN); + + AT_TEST_INIT(ethr_sint32_t, atomic32, ); + AT_TEST_SET(ethr_sint32_t, atomic32, ); + AT_TEST_XCHG(ethr_sint32_t, atomic32, ); + AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, ); + AT_TEST_ADD_READ(ethr_sint32_t, atomic32, ); + AT_TEST_ADD(ethr_sint32_t, atomic32, ); + AT_TEST_INC_READ(ethr_sint32_t, atomic32, ); + AT_TEST_DEC_READ(ethr_sint32_t, atomic32, ); + AT_TEST_INC(ethr_sint32_t, atomic32, ); + AT_TEST_DEC(ethr_sint32_t, atomic32, ); + AT_TEST_READ_BAND(ethr_sint32_t, atomic32, ); + AT_TEST_READ_BOR(ethr_sint32_t, atomic32, ); + + AT_TEST_INIT(ethr_sint32_t, atomic32, _acqb); + AT_TEST_SET(ethr_sint32_t, atomic32, _acqb); + AT_TEST_XCHG(ethr_sint32_t, atomic32, _acqb); + AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _acqb); + AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _acqb); + AT_TEST_ADD(ethr_sint32_t, atomic32, _acqb); + AT_TEST_INC_READ(ethr_sint32_t, atomic32, _acqb); + AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _acqb); + AT_TEST_INC(ethr_sint32_t, atomic32, _acqb); + AT_TEST_DEC(ethr_sint32_t, atomic32, _acqb); + AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _acqb); + AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _acqb); + + AT_TEST_INIT(ethr_sint32_t, atomic32, _relb); + AT_TEST_SET(ethr_sint32_t, atomic32, _relb); + AT_TEST_XCHG(ethr_sint32_t, atomic32, _relb); + AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _relb); + AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _relb); + AT_TEST_ADD(ethr_sint32_t, atomic32, _relb); + AT_TEST_INC_READ(ethr_sint32_t, atomic32, _relb); + AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _relb); + AT_TEST_INC(ethr_sint32_t, atomic32, _relb); + AT_TEST_DEC(ethr_sint32_t, atomic32, _relb); + AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _relb); + AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _relb); + + AT_TEST_INIT(ethr_sint32_t, atomic32, _rb); + AT_TEST_SET(ethr_sint32_t, atomic32, _rb); + AT_TEST_XCHG(ethr_sint32_t, atomic32, _rb); + 
AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _rb); + AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _rb); + AT_TEST_ADD(ethr_sint32_t, atomic32, _rb); + AT_TEST_INC_READ(ethr_sint32_t, atomic32, _rb); + AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _rb); + AT_TEST_INC(ethr_sint32_t, atomic32, _rb); + AT_TEST_DEC(ethr_sint32_t, atomic32, _rb); + AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _rb); + AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _rb); + + AT_TEST_INIT(ethr_sint32_t, atomic32, _wb); + AT_TEST_SET(ethr_sint32_t, atomic32, _wb); + AT_TEST_XCHG(ethr_sint32_t, atomic32, _wb); + AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _wb); + AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _wb); + AT_TEST_ADD(ethr_sint32_t, atomic32, _wb); + AT_TEST_INC_READ(ethr_sint32_t, atomic32, _wb); + AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _wb); + AT_TEST_INC(ethr_sint32_t, atomic32, _wb); + AT_TEST_DEC(ethr_sint32_t, atomic32, _wb); + AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _wb); + AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _wb); + + AT_TEST_INIT(ethr_sint32_t, atomic32, _mb); + AT_TEST_SET(ethr_sint32_t, atomic32, _mb); + AT_TEST_XCHG(ethr_sint32_t, atomic32, _mb); + AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _mb); + AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _mb); + AT_TEST_ADD(ethr_sint32_t, atomic32, _mb); + AT_TEST_INC_READ(ethr_sint32_t, atomic32, _mb); + AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _mb); + AT_TEST_INC(ethr_sint32_t, atomic32, _mb); + AT_TEST_DEC(ethr_sint32_t, atomic32, _mb); + AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _mb); + AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _mb); + + AT_TEST_INIT(ethr_sint_t, atomic, ); + AT_TEST_SET(ethr_sint_t, atomic, ); + AT_TEST_XCHG(ethr_sint_t, atomic, ); + AT_TEST_CMPXCHG(ethr_sint_t, atomic, ); + AT_TEST_ADD_READ(ethr_sint_t, atomic, ); + AT_TEST_ADD(ethr_sint_t, atomic, ); + AT_TEST_INC_READ(ethr_sint_t, atomic, ); + AT_TEST_DEC_READ(ethr_sint_t, atomic, ); + AT_TEST_INC(ethr_sint_t, atomic, ); + AT_TEST_DEC(ethr_sint_t, atomic, ); + AT_TEST_READ_BAND(ethr_sint_t, atomic, ); + AT_TEST_READ_BOR(ethr_sint_t, atomic, ); + + AT_TEST_INIT(ethr_sint_t, atomic, _acqb); + AT_TEST_SET(ethr_sint_t, atomic, _acqb); + AT_TEST_XCHG(ethr_sint_t, atomic, _acqb); + AT_TEST_CMPXCHG(ethr_sint_t, atomic, _acqb); + AT_TEST_ADD_READ(ethr_sint_t, atomic, _acqb); + AT_TEST_ADD(ethr_sint_t, atomic, _acqb); + AT_TEST_INC_READ(ethr_sint_t, atomic, _acqb); + AT_TEST_DEC_READ(ethr_sint_t, atomic, _acqb); + AT_TEST_INC(ethr_sint_t, atomic, _acqb); + AT_TEST_DEC(ethr_sint_t, atomic, _acqb); + AT_TEST_READ_BAND(ethr_sint_t, atomic, _acqb); + AT_TEST_READ_BOR(ethr_sint_t, atomic, _acqb); + + AT_TEST_INIT(ethr_sint_t, atomic, _relb); + AT_TEST_SET(ethr_sint_t, atomic, _relb); + AT_TEST_XCHG(ethr_sint_t, atomic, _relb); + AT_TEST_CMPXCHG(ethr_sint_t, atomic, _relb); + AT_TEST_ADD_READ(ethr_sint_t, atomic, _relb); + AT_TEST_ADD(ethr_sint_t, atomic, _relb); + AT_TEST_INC_READ(ethr_sint_t, atomic, _relb); + AT_TEST_DEC_READ(ethr_sint_t, atomic, _relb); + AT_TEST_INC(ethr_sint_t, atomic, _relb); + AT_TEST_DEC(ethr_sint_t, atomic, _relb); + AT_TEST_READ_BAND(ethr_sint_t, atomic, _relb); + AT_TEST_READ_BOR(ethr_sint_t, atomic, _relb); + + AT_TEST_INIT(ethr_sint_t, atomic, _rb); + AT_TEST_SET(ethr_sint_t, atomic, _rb); + AT_TEST_XCHG(ethr_sint_t, atomic, _rb); + AT_TEST_CMPXCHG(ethr_sint_t, atomic, _rb); + AT_TEST_ADD_READ(ethr_sint_t, atomic, _rb); + AT_TEST_ADD(ethr_sint_t, atomic, _rb); + AT_TEST_INC_READ(ethr_sint_t, atomic, _rb); + AT_TEST_DEC_READ(ethr_sint_t, atomic, _rb); + AT_TEST_INC(ethr_sint_t, 
atomic, _rb); + AT_TEST_DEC(ethr_sint_t, atomic, _rb); + AT_TEST_READ_BAND(ethr_sint_t, atomic, _rb); + AT_TEST_READ_BOR(ethr_sint_t, atomic, _rb); + + AT_TEST_INIT(ethr_sint_t, atomic, _wb); + AT_TEST_SET(ethr_sint_t, atomic, _wb); + AT_TEST_XCHG(ethr_sint_t, atomic, _wb); + AT_TEST_CMPXCHG(ethr_sint_t, atomic, _wb); + AT_TEST_ADD_READ(ethr_sint_t, atomic, _wb); + AT_TEST_ADD(ethr_sint_t, atomic, _wb); + AT_TEST_INC_READ(ethr_sint_t, atomic, _wb); + AT_TEST_DEC_READ(ethr_sint_t, atomic, _wb); + AT_TEST_INC(ethr_sint_t, atomic, _wb); + AT_TEST_DEC(ethr_sint_t, atomic, _wb); + AT_TEST_READ_BAND(ethr_sint_t, atomic, _wb); + AT_TEST_READ_BOR(ethr_sint_t, atomic, _wb); + + AT_TEST_INIT(ethr_sint_t, atomic, _mb); + AT_TEST_SET(ethr_sint_t, atomic, _mb); + AT_TEST_XCHG(ethr_sint_t, atomic, _mb); + AT_TEST_CMPXCHG(ethr_sint_t, atomic, _mb); + AT_TEST_ADD_READ(ethr_sint_t, atomic, _mb); + AT_TEST_ADD(ethr_sint_t, atomic, _mb); + AT_TEST_INC_READ(ethr_sint_t, atomic, _mb); + AT_TEST_DEC_READ(ethr_sint_t, atomic, _mb); + AT_TEST_INC(ethr_sint_t, atomic, _mb); + AT_TEST_DEC(ethr_sint_t, atomic, _mb); + AT_TEST_READ_BAND(ethr_sint_t, atomic, _mb); + AT_TEST_READ_BOR(ethr_sint_t, atomic, _mb); + + /* Double word */ + { + ethr_dw_atomic_t dw_atomic; + ethr_dw_sint_t dw0, dw1; + dw0.sint[0] = 4711; + dw0.sint[1] = 4712; + + /* init */ + ethr_dw_atomic_init(&dw_atomic, &dw0); + ethr_dw_atomic_read(&dw_atomic, &dw1); + ETHR_ASSERT(dw1.sint[0] == 4711); + ETHR_ASSERT(dw1.sint[1] == 4712); + + /* set */ + dw0.sint[0] = 42; + dw0.sint[1] = ~((ethr_sint_t) 0); + ethr_dw_atomic_set(&dw_atomic, &dw0); + ethr_dw_atomic_read(&dw_atomic, &dw1); + ASSERT(dw1.sint[0] == 42); + ASSERT(dw1.sint[1] == ~((ethr_sint_t) 0)); + + /* cmpxchg */ + dw0.sint[0] = 17; + dw0.sint[1] = 18; + dw1.sint[0] = 19; + dw1.sint[1] = 20; + ASSERT(!ethr_dw_atomic_cmpxchg(&dw_atomic, &dw1, &dw0)); + ethr_dw_atomic_read(&dw_atomic, &dw0); + ASSERT(dw0.sint[0] == 42); + ASSERT(dw0.sint[1] == ~((ethr_sint_t) 0)); + + ASSERT(ethr_dw_atomic_cmpxchg(&dw_atomic, &dw1, &dw0)); + + ethr_dw_atomic_read(&dw_atomic, &dw0); + ASSERT(dw0.sint[0] == 19); + ASSERT(dw0.sint[1] == 20); + } +} + + +#define AT_DW_MIN 12 +#define AT_DW_MAX 42 +#define AT_DW_THREADS (AT_DW_MAX - AT_DW_MIN + 1) + +#define AT_DW_LOOPS 200000 +#define AT_DW_R_LOOPS 10 + +ethr_dw_atomic_t at_dw_atomic; + +void +at_dw_valid(ethr_dw_sint_t *dw) +{ + int i; + char c; + char *cp; + + ASSERT(dw->sint[0] == dw->sint[1]); + + cp = (char *) &dw->sint[0]; + c = cp[0]; + + ASSERT(AT_DW_MIN <= c && c <= AT_DW_MAX); + + for (i = 0; i < sizeof(ethr_sint_t); i++) + ASSERT(c == cp[i]); +} + +void * +at_dw_thr(void *vval) +{ + int l, r; + ethr_sint_t val = (ethr_sint_t) vval; + ethr_dw_sint_t dw; + ethr_dw_sint_t my_dw; + + my_dw.sint[0] = val; + my_dw.sint[1] = val; + + ethr_dw_atomic_set(&at_dw_atomic, &my_dw); + for (l = 0; l < AT_DW_LOOPS; l++) { + for (r = 0; r < AT_DW_R_LOOPS; r++) { + ethr_dw_atomic_read(&at_dw_atomic, &dw); + at_dw_valid(&dw); + } + ethr_dw_atomic_set(&at_dw_atomic, &my_dw); + for (r = 0; r < AT_DW_R_LOOPS; r++) { + ethr_dw_atomic_read(&at_dw_atomic, &dw); + at_dw_valid(&dw); + } + dw.sint[0] = 0; + dw.sint[1] = 0; + while (1) { + if (ethr_dw_atomic_cmpxchg(&at_dw_atomic, &my_dw, &dw)) + break; + } + } +} + +static void +dw_atomic_massage_test(void) +{ + int i, res; + ethr_tid tid[AT_DW_THREADS]; + ethr_thr_opts thr_opts = ETHR_THR_OPTS_DEFAULT_INITER; + ethr_dw_sint_t dw; + + dw.sint[0] = dw.sint[1] = 0; + + ethr_dw_atomic_init(&at_dw_atomic, &dw); + + for (i = AT_DW_MIN; i 
<= AT_DW_MAX; i++) { + ethr_sint_t val; + memset(&val, i, sizeof(ethr_sint_t)); + res = ethr_thr_create(&tid[i-AT_DW_MIN], at_dw_thr, (void *) val, &thr_opts); + ASSERT(res == 0); + } + for (i = AT_DW_MIN; i <= AT_DW_MAX; i++) { + res = ethr_thr_join(tid[i-AT_DW_MIN], NULL); + ASSERT(res == 0); + } +} + void * at_thread(void *unused) { int i; long val, go; - val = ethr_atomic_inc_read(&at_ready); ASSERT(val > 0); @@ -1373,7 +1792,6 @@ at_thread(void *unused) return NULL; } - static void atomic_test(void) { @@ -1382,6 +1800,8 @@ atomic_test(void) ethr_tid tid[AT_THREADS]; ethr_thr_opts thr_opts = ETHR_THR_OPTS_DEFAULT_INITER; + atomic_basic_test(); + #if ETHR_SIZEOF_PTR > 4 at_rm_val = ((long) 1) << 57; at_set_val = ((long) 1) << 60; @@ -1493,6 +1913,8 @@ main(int argc, char *argv[]) rwmutex_test(); else if (strcmp(testcase, "atomic") == 0) atomic_test(); + else if (strcmp(testcase, "dw_atomic_massage") == 0) + dw_atomic_massage_test(); else skip("Test case \"%s\" not implemented yet", testcase); |
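A closing observation on the massage test above: because each writer stores a double word whose two halves consist of the same repeated byte, at_dw_valid() detects torn accesses without any extra bookkeeping. For example (illustrative values, 32-bit words assumed):

/* Thread A repeatedly writes {0x2a2a2a2a, 0x2a2a2a2a} and thread B
 * {0x17171717, 0x17171717}.  A torn (non-atomic) double word update
 * could then be observed as: */
dw.sint[0] = 0x2a2a2a2a;   /* half from thread A */
dw.sint[1] = 0x17171717;   /* half from thread B */
/* ...which fails at_dw_valid()'s ASSERT(dw->sint[0] == dw->sint[1]). */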