author     Rickard Green <[email protected]>    2011-07-08 13:56:10 +0200
committer  Rickard Green <[email protected]>    2011-07-08 13:56:10 +0200
commit     c6bc815813f57fb7dfffe704c31a4124a0fe755e (patch)
tree       6d142c893ee4e9f800b094bba26537147cf5b2de /erts
parent     5c62fdc1914000f3da921c82a82b6dc30783db53 (diff)
parent     0204e80cba378dfc1140a7f98d96705d470bddde (diff)
Merge branch 'rickard/atomics-api/OTP-9014' into major
* rickard/atomics-api/OTP-9014:
  Use new atomic API in runtime system
  Improve ethread atomics
Diffstat (limited to 'erts')
-rw-r--r-- erts/aclocal.m4 | 434
-rw-r--r-- erts/emulator/beam/atom.c | 8
-rw-r--r-- erts/emulator/beam/beam_bp.c | 12
-rw-r--r-- erts/emulator/beam/beam_bp.h | 6
-rw-r--r-- erts/emulator/beam/bif.c | 18
-rw-r--r-- erts/emulator/beam/break.c | 7
-rw-r--r-- erts/emulator/beam/dist.c | 16
-rw-r--r-- erts/emulator/beam/dist.h | 4
-rw-r--r-- erts/emulator/beam/erl_bif_ddll.c | 8
-rw-r--r-- erts/emulator/beam/erl_bif_info.c | 92
-rw-r--r-- erts/emulator/beam/erl_bits.c | 10
-rw-r--r-- erts/emulator/beam/erl_cpu_topology.c | 4
-rw-r--r-- erts/emulator/beam/erl_db.c | 56
-rw-r--r-- erts/emulator/beam/erl_db.h | 8
-rw-r--r-- erts/emulator/beam/erl_db_hash.c | 101
-rw-r--r-- erts/emulator/beam/erl_db_tree.c | 24
-rw-r--r-- erts/emulator/beam/erl_db_util.c | 9
-rw-r--r-- erts/emulator/beam/erl_init.c | 16
-rw-r--r-- erts/emulator/beam/erl_monitors.c | 12
-rw-r--r-- erts/emulator/beam/erl_node_tables.c | 6
-rw-r--r-- erts/emulator/beam/erl_port_task.c | 49
-rw-r--r-- erts/emulator/beam/erl_port_task.h | 8
-rw-r--r-- erts/emulator/beam/erl_process.c | 257
-rw-r--r-- erts/emulator/beam/erl_process.h | 4
-rw-r--r-- erts/emulator/beam/erl_process_dump.c | 2
-rw-r--r-- erts/emulator/beam/erl_process_lock.c | 15
-rw-r--r-- erts/emulator/beam/erl_process_lock.h | 22
-rw-r--r-- erts/emulator/beam/erl_smp.h | 973
-rw-r--r-- erts/emulator/beam/erl_threads.h | 927
-rw-r--r-- erts/emulator/beam/erl_time.h | 4
-rw-r--r-- erts/emulator/beam/global.h | 25
-rw-r--r-- erts/emulator/beam/io.c | 42
-rw-r--r-- erts/emulator/beam/safe_hash.c | 10
-rw-r--r-- erts/emulator/beam/sys.h | 47
-rw-r--r-- erts/emulator/beam/time.c | 4
-rw-r--r-- erts/emulator/beam/utils.c | 24
-rw-r--r-- erts/emulator/hipe/hipe_native_bif.c | 4
-rw-r--r-- erts/emulator/sys/common/erl_check_io.c | 28
-rw-r--r-- erts/emulator/sys/common/erl_poll.c | 94
-rw-r--r-- erts/emulator/sys/unix/sys.c | 38
-rw-r--r-- erts/emulator/sys/win32/erl_poll.c | 62
-rw-r--r-- erts/emulator/sys/win32/sys.c | 49
-rw-r--r-- erts/emulator/sys/win32/sys_interrupt.c | 4
-rw-r--r-- erts/include/internal/ethr_atomics.h | 8932
-rw-r--r-- erts/include/internal/ethr_internal.h | 6
-rw-r--r-- erts/include/internal/ethr_mutex.h | 30
-rw-r--r-- erts/include/internal/ethr_optimized_fallbacks.h | 175
-rw-r--r-- erts/include/internal/ethread.h | 162
-rw-r--r-- erts/include/internal/ethread_header_config.h.in | 118
-rw-r--r-- erts/include/internal/gcc/ethr_atomic.h | 251
-rw-r--r-- erts/include/internal/gcc/ethr_dw_atomic.h | 115
-rw-r--r-- erts/include/internal/gcc/ethr_membar.h | 73
-rw-r--r-- erts/include/internal/gcc/ethread.h | 20
-rw-r--r-- erts/include/internal/i386/atomic.h | 334
-rw-r--r-- erts/include/internal/i386/ethr_dw_atomic.h | 278
-rw-r--r-- erts/include/internal/i386/ethr_membar.h | 114
-rw-r--r-- erts/include/internal/i386/ethread.h | 8
-rw-r--r-- erts/include/internal/i386/rwlock.h | 5
-rw-r--r-- erts/include/internal/i386/spinlock.h | 27
-rw-r--r-- erts/include/internal/libatomic_ops/ethr_atomic.h | 298
-rw-r--r-- erts/include/internal/libatomic_ops/ethr_membar.h | 75
-rw-r--r-- erts/include/internal/libatomic_ops/ethread.h | 14
-rw-r--r-- erts/include/internal/ppc32/atomic.h | 148
-rw-r--r-- erts/include/internal/ppc32/ethr_membar.h | 63
-rw-r--r-- erts/include/internal/ppc32/ethread.h | 5
-rw-r--r-- erts/include/internal/ppc32/rwlock.h | 15
-rw-r--r-- erts/include/internal/ppc32/spinlock.h | 10
-rw-r--r-- erts/include/internal/pthread/ethr_event.h | 8
-rw-r--r-- erts/include/internal/sparc32/atomic.h | 194
-rw-r--r-- erts/include/internal/sparc32/ethr_membar.h | 115
-rw-r--r-- erts/include/internal/sparc32/ethread.h | 7
-rw-r--r-- erts/include/internal/sparc32/rwlock.h | 17
-rw-r--r-- erts/include/internal/sparc32/spinlock.h | 11
-rw-r--r-- erts/include/internal/tile/atomic.h | 136
-rw-r--r-- erts/include/internal/tile/ethr_membar.h | 35
-rw-r--r-- erts/include/internal/tile/ethread.h | 5
-rw-r--r-- erts/include/internal/win/ethr_atomic.h | 595
-rw-r--r-- erts/include/internal/win/ethr_dw_atomic.h | 154
-rw-r--r-- erts/include/internal/win/ethr_event.h | 16
-rw-r--r-- erts/include/internal/win/ethr_membar.h | 145
-rw-r--r-- erts/include/internal/win/ethread.h | 4
-rw-r--r-- erts/lib_src/Makefile.in | 14
-rw-r--r-- erts/lib_src/common/ethr_atomics.c | 4259
-rw-r--r-- erts/lib_src/common/ethr_aux.c | 83
-rw-r--r-- erts/lib_src/common/ethr_mutex.c | 5
-rw-r--r-- erts/lib_src/pthread/ethr_x86_sse2_asm.c | 31
-rw-r--r-- erts/lib_src/pthread/ethread.c | 102
-rwxr-xr-x erts/lib_src/utils/make_atomics_api | 2186
-rw-r--r-- erts/lib_src/win/ethr_event.c | 22
-rw-r--r-- erts/lib_src/win/ethread.c | 22
-rw-r--r-- erts/test/ethread_SUITE.erl | 32
-rw-r--r-- erts/test/ethread_SUITE_data/ethread_tests.c | 428
92 files changed, 19909 insertions(+), 3536 deletions(-)
diff --git a/erts/aclocal.m4 b/erts/aclocal.m4
index b64380e817..3ebea66d30 100644
--- a/erts/aclocal.m4
+++ b/erts/aclocal.m4
@@ -679,6 +679,55 @@ AC_SUBST(ERTS_INTERNAL_X_LIBS)
])
+AC_DEFUN(ETHR_CHK_SYNC_OP,
+[
+ AC_MSG_CHECKING([for $3-bit $1()])
+ case "$2" in
+ "1") sync_call="$1(&var);";;
+ "2") sync_call="$1(&var, ($4) 0);";;
+ "3") sync_call="$1(&var, ($4) 0, ($4) 0);";;
+ esac
+ have_sync_op=no
+ AC_TRY_LINK([],
+ [
+ $4 res;
+ volatile $4 var;
+ res = $sync_call
+ ],
+ [have_sync_op=yes])
+ test $have_sync_op = yes && $5
+ AC_MSG_RESULT([$have_sync_op])
+])
+
+AC_DEFUN(ETHR_CHK_INTERLOCKED,
+[
+ ilckd="$1"
+ AC_MSG_CHECKING([for ${ilckd}()])
+ case "$2" in
+ "1") ilckd_call="${ilckd}(var);";;
+ "2") ilckd_call="${ilckd}(var, ($3) 0);";;
+ "3") ilckd_call="${ilckd}(var, ($3) 0, ($3) 0);";;
+ "4") ilckd_call="${ilckd}(var, ($3) 0, ($3) 0, arr);";;
+ esac
+ have_interlocked_op=no
+ AC_TRY_LINK(
+ [
+ #define WIN32_LEAN_AND_MEAN
+ #include <windows.h>
+ #include <intrin.h>
+ ],
+ [
+ volatile $3 *var;
+ volatile $3 arr[2];
+
+ $ilckd_call
+ return 0;
+ ],
+ [have_interlocked_op=yes])
+ test $have_interlocked_op = yes && $4
+ AC_MSG_RESULT([$have_interlocked_op])
+])
+
dnl ----------------------------------------------------------------------
dnl
dnl ERL_FIND_ETHR_LIB
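
The two macros above factor out what used to be a separate hand-written AC_TRY_LINK per operation (see the removed blocks in the next hunk). ETHR_CHK_SYNC_OP(op, nargs, size, type, action) link-tests one gcc __sync builtin at one operand size; for the 32-bit __sync_add_and_fetch check further down, the generated test body is roughly the following (a sketch, assuming int was detected as the 32-bit type; the variable is never initialized because the probe only has to link, not run):

    int main(void)
    {
        int res;
        volatile int var;
        res = __sync_add_and_fetch(&var, (int) 0);
        return 0;
    }

If the link succeeds, the action argument runs and defines ETHR_HAVE___SYNC_ADD_AND_FETCH32.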
@@ -750,121 +799,41 @@ case "$THR_LIB_NAME" in
AC_DEFINE(ETHR_WIN32_THREADS, 1, [Define if you have win32 threads])
- have_ilckd=no
- AC_MSG_CHECKING([for _InterlockedCompareExchange64()])
- AC_TRY_LINK([
- #define WIN32_LEAN_AND_MEAN
- #include <windows.h>
- ],
- [
- volatile __int64 *var;
- _InterlockedCompareExchange64(var, (__int64) 1, (__int64) 0);
- return 0;
- ],
- have_ilckd=yes)
- AC_MSG_RESULT([$have_ilckd])
- test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64, 1, [Define if you have _InterlockedCompareExchange64()])
-
- AC_CHECK_SIZEOF(void *)
- case "$ac_cv_sizeof_void_p-$have_ilckd" in
- 8-no)
- ethr_have_native_atomics=no
- ethr_have_native_spinlock=no;;
- *)
- ethr_have_native_atomics=yes
- ethr_have_native_spinlock=yes;;
- esac
-
- have_ilckd=no
- AC_MSG_CHECKING([for _InterlockedDecrement64()])
- AC_TRY_LINK([
- #define WIN32_LEAN_AND_MEAN
- #include <windows.h>
- ],
- [
- volatile __int64 *var;
- _InterlockedDecrement64(var);
- return 0;
- ],
- have_ilckd=yes)
- AC_MSG_RESULT([$have_ilckd])
- test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDDECREMENT64, 1, [Define if you have _InterlockedDecrement64()])
-
- have_ilckd=no
- AC_MSG_CHECKING([for _InterlockedIncrement64()])
- AC_TRY_LINK([
- #define WIN32_LEAN_AND_MEAN
- #include <windows.h>
- ],
- [
- volatile __int64 *var;
- _InterlockedIncrement64(var);
- return 0;
- ],
- have_ilckd=yes)
- AC_MSG_RESULT([$have_ilckd])
- test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDINCREMENT64, 1, [Define if you have _InterlockedIncrement64()])
-
- have_ilckd=no
- AC_MSG_CHECKING([for _InterlockedExchangeAdd64()])
- AC_TRY_LINK([
- #define WIN32_LEAN_AND_MEAN
- #include <windows.h>
- ],
- [
- volatile __int64 *var;
- _InterlockedExchangeAdd64(var, (__int64) 1);
- return 0;
- ],
- have_ilckd=yes)
- AC_MSG_RESULT([$have_ilckd])
- test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDEXCHANGEADD64, 1, [Define if you have _InterlockedExchangeAdd64()])
-
- have_ilckd=no
- AC_MSG_CHECKING([for _InterlockedExchange64()])
- AC_TRY_LINK([
- #define WIN32_LEAN_AND_MEAN
- #include <windows.h>
- ],
- [
- volatile __int64 *var;
- _InterlockedExchange64(var, (__int64) 1);
- return 0;
- ],
- have_ilckd=yes)
- AC_MSG_RESULT([$have_ilckd])
- test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDEXCHANGE64, 1, [Define if you have _InterlockedExchange64()])
-
- have_ilckd=no
- AC_MSG_CHECKING([for _InterlockedAnd64()])
- AC_TRY_LINK([
- #define WIN32_LEAN_AND_MEAN
- #include <windows.h>
- ],
- [
- volatile __int64 *var;
- _InterlockedAnd64(var, (__int64) 1);
- return 0;
- ],
- have_ilckd=yes)
- AC_MSG_RESULT([$have_ilckd])
- test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDAND64, 1, [Define if you have _InterlockedAnd64()])
-
- have_ilckd=no
- AC_MSG_CHECKING([for _InterlockedOr64()])
- AC_TRY_LINK([
- #define WIN32_LEAN_AND_MEAN
- #include <windows.h>
- ],
- [
- volatile __int64 *var;
- _InterlockedOr64(var, (__int64) 1);
- return 0;
- ],
- have_ilckd=yes)
- AC_MSG_RESULT([$have_ilckd])
- test $have_ilckd = yes && AC_DEFINE(ETHR_HAVE__INTERLOCKEDOR64, 1, [Define if you have _InterlockedOr64()])
-
+ ETHR_CHK_INTERLOCKED([_InterlockedDecrement], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT, 1, [Define if you have _InterlockedDecrement()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedDecrement_rel], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT_REL, 1, [Define if you have _InterlockedDecrement_rel()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedIncrement], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT, 1, [Define if you have _InterlockedIncrement()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedIncrement_acq], [1], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ, 1, [Define if you have _InterlockedIncrement_acq()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD, 1, [Define if you have _InterlockedExchangeAdd()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd_acq], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ, 1, [Define if you have _InterlockedExchangeAdd_acq()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedAnd], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDAND, 1, [Define if you have _InterlockedAnd()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedOr], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDOR, 1, [Define if you have _InterlockedOr()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedExchange], [2], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGE, 1, [Define if you have _InterlockedExchange()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange], [3], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE, 1, [Define if you have _InterlockedCompareExchange()]))
+ test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes
+ ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange_acq], [3], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ, 1, [Define if you have _InterlockedCompareExchange_acq()]))
+ test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes
+ ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange_rel], [3], [long], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL, 1, [Define if you have _InterlockedCompareExchange_rel()]))
+ test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes
+
+ ETHR_CHK_INTERLOCKED([_InterlockedDecrement64], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT64, 1, [Define if you have _InterlockedDecrement64()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedDecrement64_rel], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDDECREMENT64_REL, 1, [Define if you have _InterlockedDecrement64_rel()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedIncrement64], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT64, 1, [Define if you have _InterlockedIncrement64()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedIncrement64_acq], [1], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ, 1, [Define if you have _InterlockedIncrement64_acq()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD64, 1, [Define if you have _InterlockedExchangeAdd64()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedExchangeAdd64_acq], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ, 1, [Define if you have _InterlockedExchangeAdd64_acq()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedAnd64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDAND64, 1, [Define if you have _InterlockedAnd64()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedOr64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDOR64, 1, [Define if you have _InterlockedOr64()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedExchange64], [2], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDEXCHANGE64, 1, [Define if you have _InterlockedExchange64()]))
+ ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange64], [3], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64, 1, [Define if you have _InterlockedCompareExchange64()]))
+ test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes
+ ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange64_acq], [3], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ, 1, [Define if you have _InterlockedCompareExchange64_acq()]))
+ test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes
+ ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange64_rel], [3], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL, 1, [Define if you have _InterlockedCompareExchange64_rel()]))
+ test "$have_interlocked_op" = "yes" && ethr_have_native_atomics=yes
+
+ ETHR_CHK_INTERLOCKED([_InterlockedCompareExchange128], [4], [__int64], AC_DEFINE_UNQUOTED(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE128, 1, [Define if you have _InterlockedCompareExchange128()]))
+
+ test "$ethr_have_native_atomics" = "yes" && ethr_have_native_spinlock=yes
;;
pthread)
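
With the checks above, every Interlocked intrinsic that links gets its own ETHR_HAVE__INTERLOCKED* define, including the _acq/_rel variants that only exist on some Windows targets (notably Itanium), so the win32 atomics headers can pick the weakest barrier available. A minimal sketch of how a detected intrinsic might sit behind such a define (the wrapper name is illustrative, not the actual ethread API):

    #define WIN32_LEAN_AND_MEAN
    #include <windows.h>
    #include <intrin.h>

    #ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE
    /* CAS on a 32-bit (long) cell; returns the previous value */
    static __inline long
    cas32(volatile long *var, long new_val, long exp_val)
    {
        return _InterlockedCompareExchange(var, new_val, exp_val);
    }
    #endif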
@@ -1100,35 +1069,51 @@ case "$THR_LIB_NAME" in
AC_MSG_RESULT([$linux_futex])
test $linux_futex = yes && AC_DEFINE(ETHR_HAVE_LINUX_FUTEX, 1, [Define if you have a linux futex implementation.])
- AC_MSG_CHECKING([for GCC atomic operations])
- ethr_have_gcc_atomic_ops=no
- AC_TRY_LINK([],
- [
- long res;
- volatile long val;
- res = __sync_val_compare_and_swap(&val, (long) 1, (long) 0);
- res = __sync_add_and_fetch(&val, (long) 1);
- res = __sync_sub_and_fetch(&val, (long) 1);
- res = __sync_fetch_and_and(&val, (long) 1);
- res = __sync_fetch_and_or(&val, (long) 1);
- ],
- [ethr_have_native_atomics=yes
- ethr_have_gcc_atomic_ops=yes])
- AC_MSG_RESULT([$ethr_have_gcc_atomic_ops])
- test $ethr_have_gcc_atomic_ops = yes && AC_DEFINE(ETHR_HAVE_GCC_ATOMIC_OPS, 1, [Define if you have gcc atomic operations])
+ AC_CHECK_SIZEOF(int)
+ AC_CHECK_SIZEOF(long)
+ AC_CHECK_SIZEOF(long long)
+ AC_CHECK_SIZEOF(__int128_t)
+
+ if test "$ac_cv_sizeof_int" = "4"; then
+ int32="int"
+ elif test "$ac_cv_sizeof_long" = "4"; then
+ int32="long"
+ elif test "$ac_cv_sizeof_long_long" = "4"; then
+ int32="long long"
+ else
+ AC_MSG_ERROR([No 32-bit type found])
+ fi
- case "$host_cpu" in
- sun4u | sparc64 | sun4v)
- ethr_have_native_atomics=yes;;
- i86pc | i*86 | x86_64 | amd64)
- ethr_have_native_atomics=yes;;
- macppc | ppc | "Power Macintosh")
- ethr_have_native_atomics=yes;;
- tile)
- ethr_have_native_atomics=yes;;
- *)
- ;;
- esac
+ if test "$ac_cv_sizeof_int" = "8"; then
+ int64="int"
+ elif test "$ac_cv_sizeof_long" = "8"; then
+ int64="long"
+ elif test "$ac_cv_sizeof_long_long" = "8"; then
+ int64="long long"
+ else
+ AC_MSG_ERROR([No 64-bit type found])
+ fi
+
+ int128=no
+ if test "$ac_cv_sizeof___int128_t" = "16"; then
+ int128="__int128_t"
+ fi
+
+ ETHR_CHK_SYNC_OP([__sync_val_compare_and_swap], [3], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32, 1, [Define if you have __sync_val_compare_and_swap() for 32-bit integers]))
+ test "$have_sync_op" = "yes" && ethr_have_native_atomics=yes
+ ETHR_CHK_SYNC_OP([__sync_add_and_fetch], [2], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_ADD_AND_FETCH32, 1, [Define if you have __sync_add_and_fetch() for 32-bit integers]))
+ ETHR_CHK_SYNC_OP([__sync_fetch_and_and], [2], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_AND32, 1, [Define if you have __sync_fetch_and_and() for 32-bit integers]))
+ ETHR_CHK_SYNC_OP([__sync_fetch_and_or], [2], [32], [$int32], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_OR32, 1, [Define if you have __sync_fetch_and_or() for 32-bit integers]))
+
+ ETHR_CHK_SYNC_OP([__sync_val_compare_and_swap], [3], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64, 1, [Define if you have __sync_val_compare_and_swap() for 64-bit integers]))
+ test "$have_sync_op" = "yes" && ethr_have_native_atomics=yes
+ ETHR_CHK_SYNC_OP([__sync_add_and_fetch], [2], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_ADD_AND_FETCH64, 1, [Define if you have __sync_add_and_fetch() for 64-bit integers]))
+ ETHR_CHK_SYNC_OP([__sync_fetch_and_and], [2], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_AND64, 1, [Define if you have __sync_fetch_and_and() for 64-bit integers]))
+ ETHR_CHK_SYNC_OP([__sync_fetch_and_or], [2], [64], [$int64], AC_DEFINE(ETHR_HAVE___SYNC_FETCH_AND_OR64, 1, [Define if you have __sync_fetch_and_or() for 64-bit integers]))
+
+ if test $int128 != no; then
+ ETHR_CHK_SYNC_OP([__sync_val_compare_and_swap], [3], [128], [$int128], AC_DEFINE(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128, 1, [Define if you have __sync_val_compare_and_swap() for 128-bit integers]))
+ fi
AC_MSG_CHECKING([for a usable libatomic_ops implementation])
case "x$with_libatomic_ops" in
@@ -1175,6 +1160,34 @@ case "$THR_LIB_NAME" in
AC_MSG_ERROR([No usable libatomic_ops implementation found])
fi
+ case "$host_cpu" in
+ sparc | sun4u | sparc64 | sun4v)
+ case "$with_sparc_memory_order" in
+ "TSO")
+ AC_DEFINE(ETHR_SPARC_TSO, 1, [Define if only run in Sparc TSO mode]);;
+ "PSO")
+ AC_DEFINE(ETHR_SPARC_PSO, 1, [Define if only run in Sparc PSO, or TSO mode]);;
+ "RMO"|"")
+ AC_DEFINE(ETHR_SPARC_RMO, 1, [Define if run in Sparc RMO, PSO, or TSO mode]);;
+ *)
+ AC_MSG_ERROR([Unsupported Sparc memory order: $with_sparc_memory_order]);;
+ esac
+ ethr_have_native_atomics=yes;;
+ i86pc | i*86 | x86_64 | amd64)
+ if test "$enable_x86_out_of_order" = "yes"; then
+ AC_DEFINE(ETHR_X86_OUT_OF_ORDER, 1, [Define if x86/x86_64 out of order instructions should be synchronized])
+ fi
+ ethr_have_native_atomics=yes;;
+ macppc | ppc | "Power Macintosh")
+ ethr_have_native_atomics=yes;;
+ tile)
+ ethr_have_native_atomics=yes;;
+ *)
+ ;;
+ esac
+
+ test "$ethr_have_native_atomics" = "yes" && ethr_have_native_spinlock=yes
+
dnl Restore LIBS
LIBS=$saved_libs
dnl restore CPPFLAGS
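
The new --with-sparc-memory-order option lets a build promise it will only run in TSO or PSO mode instead of the worst-case RMO default; the stronger the promised model, the fewer membar instructions the sparc32 barrier code has to emit. A sketch of the kind of choice the ETHR_SPARC_* defines drive (illustrative only; the real selection lives in the new sparc32/ethr_membar.h):

    /* Under TSO, store-store ordering is implicit; under PSO/RMO it
     * needs an explicit membar. (Illustrative macro, not ethread's.) */
    #if defined(ETHR_SPARC_TSO)
    #define MEMBAR_STORESTORE() do { } while (0)
    #else /* ETHR_SPARC_PSO or ETHR_SPARC_RMO */
    #define MEMBAR_STORESTORE() \
        __asm__ __volatile__("membar #StoreStore" : : : "memory")
    #endif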
@@ -1210,6 +1223,8 @@ AC_CHECK_SIZEOF(long long)
AC_DEFINE_UNQUOTED(ETHR_SIZEOF_LONG_LONG, $ac_cv_sizeof_long_long, [Define to the size of long long])
AC_CHECK_SIZEOF(__int64)
AC_DEFINE_UNQUOTED(ETHR_SIZEOF___INT64, $ac_cv_sizeof___int64, [Define to the size of __int64])
+AC_CHECK_SIZEOF(__int128_t)
+AC_DEFINE_UNQUOTED(ETHR_SIZEOF___INT128_T, $ac_cv_sizeof___int128_t, [Define to the size of __int128_t])
case X$erl_xcomp_bigendian in
@@ -1232,6 +1247,10 @@ AC_ARG_ENABLE(native-ethr-impls,
*) disable_native_ethr_impls=no ;;
esac ], disable_native_ethr_impls=no)
+AC_ARG_ENABLE(x86-out-of-order,
+ AS_HELP_STRING([--enable-x86-out-of-order],
+ [enable x86/x86_64 out of order support (default disabled)]))
+
test "X$disable_native_ethr_impls" = "Xyes" &&
AC_DEFINE(ETHR_DISABLE_NATIVE_IMPLS, 1, [Define if you want to disable native ethread implementations])
@@ -1250,55 +1269,101 @@ AC_ARG_WITH(libatomic_ops,
AS_HELP_STRING([--with-libatomic_ops=PATH],
[specify and prefer usage of libatomic_ops in the ethread library]))
-AC_ARG_ENABLE(ethread-pre-pentium4-compatibility,
- AS_HELP_STRING([--enable-ethread-pre-pentium4-compatibility],
- [enable compatibility with x86 processors before pentium 4 (back to 486) in the ethread library]),
-[
- case "$enable_ethread_pre_pentium4_compatibility" in
- yes|no) ;;
- *) enable_ethread_pre_pentium4_compatibility=check;;
- esac
-],
-[enable_ethread_pre_pentium4_compatibility=check])
-
-test "$cross_compiling" != "yes" || enable_ethread_pre_pentium4_compatibility=no
-
-case "$enable_ethread_pre_pentium4_compatibility-$host_cpu" in
- check-i86pc | check-i*86)
- AC_MSG_CHECKING([whether pre pentium 4 compatibility should forced])
- AC_RUN_IFELSE([
-#if defined(__GNUC__)
-# if defined(ETHR_PREFER_LIBATOMIC_OPS_NATIVE_IMPLS)
-# define CHECK_LIBATOMIC_OPS__
-# else
-# define CHECK_GCC_ASM__
-# endif
-#elif defined(ETHR_HAVE_LIBATOMIC_OPS)
-# define CHECK_LIBATOMIC_OPS__
+AC_ARG_WITH(sparc_memory_order,
+ AS_HELP_STRING([--with-sparc-memory-order=TSO|PSO|RMO],
+ [specify sparc memory order (defaults to RMO)]))
+
+ETHR_X86_SSE2_ASM=no
+case "$GCC-$ac_cv_sizeof_void_p-$host_cpu" in
+ yes-4-i86pc | yes-4-i*86 | yes-4-x86_64 | yes-4-amd64)
+ AC_MSG_CHECKING([for gcc sse2 asm support])
+ save_CFLAGS="$CFLAGS"
+ CFLAGS="$CFLAGS -msse2"
+ gcc_sse2_asm=no
+ AC_TRY_COMPILE([],
+ [
+ long long x, *y;
+ __asm__ __volatile__("movq %1, %0\n\t" : "=x"(x) : "m"(*y) : "memory");
+ ],
+ [gcc_sse2_asm=yes])
+ CFLAGS="$save_CFLAGS"
+ AC_MSG_RESULT([$gcc_sse2_asm])
+ if test "$gcc_sse2_asm" = "yes"; then
+ AC_DEFINE(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT, 1, [Define if you use a gcc that supports -msse2 and understand sse2 specific asm statements])
+ ETHR_X86_SSE2_ASM=yes
+ fi
+ ;;
+ *)
+ ;;
+esac
+
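
The probe above matters because on 32-bit x86 an ordinary 64-bit load or store compiles to two 32-bit memory operations and is therefore not atomic, while an 8-byte movq through an SSE2 register is. ETHR_X86_SSE2_ASM is also what enables building lib_src/pthread/ethr_x86_sse2_asm.c from the diffstat. A sketch of the resulting idea (illustrative, not the actual ethread code; assumes SSE2 is present at runtime):

    /* Atomic 64-bit read on IA-32 via an xmm register (gcc, -msse2) */
    static __inline__ long long
    su_dw_read(volatile long long *p)
    {
        long long val;
        __asm__ __volatile__("movq %1, %0\n\t"
                             : "=x"(val)   /* "x": SSE register */
                             : "m"(*p)
                             : "memory");
        return val;
    }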
+case "$GCC-$host_cpu" in
+ yes-i86pc | yes-i*86 | yes-x86_64 | yes-amd64)
+ gcc_dw_cmpxchg_asm=no
+ AC_MSG_CHECKING([for gcc double word cmpxchg asm support])
+ AC_TRY_COMPILE([],
+ [
+ char xchgd;
+ long new[2], xchg[2], *p;
+ __asm__ __volatile__(
+#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__
+ "pushl %%ebx\n\t"
+ "movl %8, %%ebx\n\t"
#endif
-#if defined(CHECK_LIBATOMIC_OPS__)
-#include "atomic_ops.h"
+#if ETHR_SIZEOF_PTR == 4
+ "lock; cmpxchg8b %0\n\t"
+#else
+ "lock; cmpxchg16b %0\n\t"
#endif
-int main(void)
-{
-#if defined(CHECK_GCC_ASM__)
- __asm__ __volatile__("mfence" : : : "memory");
-#elif defined(CHECK_LIBATOMIC_OPS__)
- AO_nop_full();
+ "setz %3\n\t"
+#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__
+ "popl %%ebx\n\t"
#endif
- return 0;
-}
+ : "=m"(*p), "=d"(xchg[1]), "=a"(xchg[0]), "=c"(xchgd)
+ : "m"(*p), "1"(xchg[1]), "2"(xchg[0]), "3"(new[1]),
+#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__
+ "r"(new[0])
+#else
+ "b"(new[0])
+#endif
+ : "cc", "memory");
+
],
- [enable_ethread_pre_pentium4_compatibility=no],
- [enable_ethread_pre_pentium4_compatibility=yes],
- [enable_ethread_pre_pentium4_compatibility=no])
- AC_MSG_RESULT([$enable_ethread_pre_pentium4_compatibility]);;
+ [gcc_dw_cmpxchg_asm=yes])
+ if test $gcc_dw_cmpxchg_asm = no && test $ac_cv_sizeof_void_p = 4; then
+ AC_TRY_COMPILE([],
+ [
+ char xchgd;
+ long new[2], xchg[2], *p;
+#if !defined(__PIC__) || !__PIC__
+# error nope
+#endif
+ __asm__ __volatile__(
+ "pushl %%ebx\n\t"
+ "movl (%7), %%ebx\n\t"
+ "movl 4(%7), %%ecx\n\t"
+ "lock; cmpxchg8b %0\n\t"
+ "setz %3\n\t"
+ "popl %%ebx\n\t"
+ : "=m"(*p), "=d"(xchg[1]), "=a"(xchg[0]), "=c"(xchgd)
+ : "m"(*p), "1"(xchg[1]), "2"(xchg[0]), "3"(new)
+ : "cc", "memory");
+
+ ],
+ [gcc_dw_cmpxchg_asm=yes])
+ if test "$gcc_dw_cmpxchg_asm" = "yes"; then
+ AC_DEFINE(ETHR_CMPXCHG8B_REGISTER_SHORTAGE, 1, [Define if you get a register shortage with cmpxchg8b and position independent code])
+ fi
+ fi
+ AC_MSG_RESULT([$gcc_dw_cmpxchg_asm])
+ if test "$gcc_dw_cmpxchg_asm" = "yes"; then
+ AC_DEFINE(ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT, 1, [Define if you use a gcc that supports the double word cmpxchg instruction])
+ fi;;
*)
;;
esac
-test $enable_ethread_pre_pentium4_compatibility = yes &&
- AC_DEFINE(ETHR_PRE_PENTIUM4_COMPAT, 1, [Define if you want compatibilty with x86 processors before pentium4.])
+
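
The removed block was the --enable-ethread-pre-pentium4-compatibility machinery (a runtime mfence test); barriers are now selected per architecture in the new *_membar.h headers instead. The added probes check for a double-pointer-word CAS: lock cmpxchg8b on IA-32 and lock cmpxchg16b on x86_64. The complication is 32-bit PIC code, where %ebx is reserved for the GOT pointer: the first probe saves and restores %ebx around the instruction, and if even that fails to compile, the second probe loads %ebx/%ecx indirectly and defines ETHR_CMPXCHG8B_REGISTER_SHORTAGE. The instruction itself behaves like this (a sketch for the non-PIC IA-32 case, not the actual ethread code):

    /* 8-byte CAS via lock cmpxchg8b: expected value in edx:eax,
     * replacement in ecx:ebx; returns nonzero on success, and on
     * failure leaves the current memory contents in xchg[]. */
    static __inline__ char
    dw_cas(volatile long long *p, long *xchg, long *new_val)
    {
        char ok;
        __asm__ __volatile__("lock; cmpxchg8b %0\n\t"
                             "setz %1\n\t"
                             : "+m"(*p), "=q"(ok),
                               "+d"(xchg[1]), "+a"(xchg[0])
                             : "c"(new_val[1]), "b"(new_val[0])
                             : "cc", "memory");
        return ok;
    }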
AC_DEFINE(ETHR_HAVE_ETHREAD_DEFINES, 1, \
[Define if you have all ethread defines])
@@ -1309,6 +1374,7 @@ AC_SUBST(ETHR_LIB_NAME)
AC_SUBST(ETHR_DEFS)
AC_SUBST(ETHR_THR_LIB_BASE)
AC_SUBST(ETHR_THR_LIB_BASE_DIR)
+AC_SUBST(ETHR_X86_SSE2_ASM)
])
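
Everything from here on is the "Use new atomic API in runtime system" half of the merge. The old erts_smp_atomic_* operations implied a full barrier whether the caller needed one or not; in the new API every call site names its ordering requirement with a suffix -- _nob (no barrier), _rb/_wb (read/write barrier), _acqb (acquire), _relb (release), _mb (full barrier) -- and value-returning combinations get explicit names such as inc_read. A sketch of the pattern on a hypothetical counter (all four calls appear in the diffs below):

    erts_smp_atomic_t counter;

    void example(void)
    {
        erts_aint_t val;
        erts_smp_atomic_init_nob(&counter, 0);         /* no ordering needed */
        erts_smp_atomic_inc_nob(&counter);             /* plain atomic inc */
        val = erts_smp_atomic_inc_read_relb(&counter); /* inc, return new,
                                                          release barrier */
        val = erts_smp_atomic_read_acqb(&counter);     /* acquire read */
        (void) val;
    }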
diff --git a/erts/emulator/beam/atom.c b/erts/emulator/beam/atom.c
index b97705ed96..d7c7f117cf 100644
--- a/erts/emulator/beam/atom.c
+++ b/erts/emulator/beam/atom.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 1996-2010. All Rights Reserved.
+ * Copyright Ericsson AB 1996-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -75,7 +75,7 @@ void atom_info(int to, void *to_arg)
index_info(to, to_arg, &erts_atom_table);
#ifdef ERTS_ATOM_PUT_OPS_STAT
erts_print(to, to_arg, "atom_put_ops: %ld\n",
- erts_smp_atomic_read(&atom_put_ops));
+ erts_smp_atomic_read_nob(&atom_put_ops));
#endif
if (lock)
@@ -213,7 +213,7 @@ am_atom_put(const char* name, int len)
len = MAX_ATOM_LENGTH;
}
#ifdef ERTS_ATOM_PUT_OPS_STAT
- erts_smp_atomic_inc(&atom_put_ops);
+ erts_smp_atomic_inc_nob(&atom_put_ops);
#endif
a.len = len;
a.name = (byte*)name;
@@ -309,7 +309,7 @@ init_atom_table(void)
rwmtx_opt.lived = ERTS_SMP_RWMTX_LONG_LIVED;
#ifdef ERTS_ATOM_PUT_OPS_STAT
- erts_smp_atomic_init(&atom_put_ops, 0);
+ erts_smp_atomic_init_nob(&atom_put_ops, 0);
#endif
erts_smp_rwmtx_init_opt(&atom_table_lock, &rwmtx_opt, "atom_tab");
diff --git a/erts/emulator/beam/beam_bp.c b/erts/emulator/beam/beam_bp.c
index 31910888d1..773baad01f 100644
--- a/erts/emulator/beam/beam_bp.c
+++ b/erts/emulator/beam/beam_bp.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2000-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2000-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -408,7 +408,7 @@ erts_is_count_break(BeamInstr *pc, Sint *count_ret) {
if (bdc) {
if (count_ret) {
- *count_ret = (Sint) erts_smp_atomic_read(&bdc->acount);
+ *count_ret = (Sint) erts_smp_atomic_read_nob(&bdc->acount);
}
return !0;
}
@@ -958,17 +958,17 @@ static int set_function_break(Module *modp, BeamInstr *pc, int bif,
if (break_op == (BeamInstr) BeamOp(op_i_count_breakpoint)) {
if (count_op == erts_break_stop) {
- count = erts_smp_atomic_read(&bdc->acount);
+ count = erts_smp_atomic_read_nob(&bdc->acount);
if (count >= 0) {
while(1) {
- res = erts_smp_atomic_cmpxchg(&bdc->acount, -count - 1, count);
+ res = erts_smp_atomic_cmpxchg_nob(&bdc->acount, -count - 1, count);
if ((res == count) || count < 0) break;
count = res;
}
}
} else {
/* Reset call counter */
- erts_smp_atomic_set(&bdc->acount, 0);
+ erts_smp_atomic_set_nob(&bdc->acount, 0);
}
} else if (break_op == (BeamInstr) BeamOp(op_i_time_breakpoint)) {
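
The loop above is a plain compare-and-swap retry: stopping a call counter means atomically replacing a non-negative count c with -c-1 (the sign encodes "stopped" while preserving the value), and the cmpxchg returns the previous contents, so a mismatch tells the loser exactly which value to retry with. The bare pattern (a sketch using the API as in the hunk above):

    /* Flip a non-negative counter to -c-1; give up if another
     * thread already made it negative. */
    void stop_counter(erts_smp_atomic_t *acount)
    {
        erts_aint_t count = erts_smp_atomic_read_nob(acount);
        while (count >= 0) {
            erts_aint_t res =
                erts_smp_atomic_cmpxchg_nob(acount, -count - 1, count);
            if (res == count)
                break;       /* we won the race */
            count = res;     /* we lost; retry with the value seen */
        }
    }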
@@ -1097,7 +1097,7 @@ static int set_function_break(Module *modp, BeamInstr *pc, int bif,
}
} else if (break_op == (BeamInstr) BeamOp(op_i_count_breakpoint)) {
BpDataCount *bdc = (BpDataCount *) bd;
- erts_smp_atomic_init(&bdc->acount, 0);
+ erts_smp_atomic_init_nob(&bdc->acount, 0);
}
if (bif == BREAK_IS_ERL) {
diff --git a/erts/emulator/beam/beam_bp.h b/erts/emulator/beam/beam_bp.h
index bd8a7249a7..2ec5818688 100644
--- a/erts/emulator/beam/beam_bp.h
+++ b/erts/emulator/beam/beam_bp.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2000-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2000-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -165,8 +165,8 @@ do { \
bdc = (BpDataCount *) bdc->next; \
ASSERT(bdc); \
bds[ix] = (BpData *) bdc; \
- count = erts_smp_atomic_read(&bdc->acount); \
- if (count >= 0) erts_smp_atomic_inc(&bdc->acount); \
+ count = erts_smp_atomic_read_nob(&bdc->acount); \
+ if (count >= 0) erts_smp_atomic_inc_nob(&bdc->acount); \
*(instr_result) = bdc->orig_instr; \
} while (0)
diff --git a/erts/emulator/beam/bif.c b/erts/emulator/beam/bif.c
index 68b3350d7f..98dde066fc 100644
--- a/erts/emulator/beam/bif.c
+++ b/erts/emulator/beam/bif.c
@@ -811,7 +811,7 @@ BIF_RETTYPE spawn_opt_1(BIF_ALIST_1)
so.min_heap_size = H_MIN_SIZE;
so.min_vheap_size = BIN_VH_MIN_SIZE;
so.priority = PRIORITY_NORMAL;
- so.max_gen_gcs = (Uint16) erts_smp_atomic32_read(&erts_max_gen_gcs);
+ so.max_gen_gcs = (Uint16) erts_smp_atomic32_read_nob(&erts_max_gen_gcs);
so.scheduler = 0;
/*
@@ -3417,10 +3417,10 @@ BIF_RETTYPE ports_0(BIF_ALIST_0)
erts_smp_mtx_lock(&ports_snapshot_mtx); /* One snapshot at a time */
- erts_smp_atomic_set(&erts_dead_ports_ptr,
- (erts_aint_t) (port_buf + erts_max_ports));
+ erts_smp_atomic_set_nob(&erts_dead_ports_ptr,
+ (erts_aint_t) (port_buf + erts_max_ports));
- next_ss = erts_smp_atomic32_inctest(&erts_ports_snapshot);
+ next_ss = erts_smp_atomic32_inc_read_relb(&erts_ports_snapshot);
for (i = erts_max_ports-1; i >= 0; i--) {
Port* prt = &erts_port[i];
@@ -3434,8 +3434,8 @@ BIF_RETTYPE ports_0(BIF_ALIST_0)
erts_smp_port_state_unlock(prt);
}
- dead_ports = (Eterm*)erts_smp_atomic_xchg(&erts_dead_ports_ptr,
- (erts_aint_t) NULL);
+ dead_ports = (Eterm*)erts_smp_atomic_xchg_nob(&erts_dead_ports_ptr,
+ (erts_aint_t) NULL);
erts_smp_mtx_unlock(&ports_snapshot_mtx);
ASSERT(pp <= dead_ports);
@@ -3942,8 +3942,8 @@ BIF_RETTYPE system_flag_2(BIF_ALIST_2)
goto error;
}
nval = (n > (Sint) ((Uint16) -1)) ? ((Uint16) -1) : ((Uint16) n);
- oval = (Uint) erts_smp_atomic32_xchg(&erts_max_gen_gcs,
- (erts_aint32_t) nval);
+ oval = (Uint) erts_smp_atomic32_xchg_nob(&erts_max_gen_gcs,
+ (erts_aint32_t) nval);
BIF_RET(make_small(oval));
} else if (BIF_ARG_1 == am_min_heap_size) {
int oval = H_MIN_SIZE;
@@ -4286,7 +4286,7 @@ void erts_init_bif(void)
erts_smp_spinlock_init(&make_ref_lock, "make_ref");
erts_smp_mtx_init(&ports_snapshot_mtx, "ports_snapshot");
- erts_smp_atomic_init(&erts_dead_ports_ptr, (erts_aint_t) NULL);
+ erts_smp_atomic_init_nob(&erts_dead_ports_ptr, (erts_aint_t) NULL);
/*
* bif_return_trap/1 is a hidden BIF that bifs that need to
diff --git a/erts/emulator/beam/break.c b/erts/emulator/beam/break.c
index b8889e6206..432b3d0780 100644
--- a/erts/emulator/beam/break.c
+++ b/erts/emulator/beam/break.c
@@ -626,7 +626,7 @@ bin_check(void)
erts_printf("%p orig_size: %bpd, norefs = %bpd\n",
bp->val,
bp->val->orig_size,
- erts_smp_atomic_read(&bp->val->refc));
+ erts_smp_atomic_read_nob(&bp->val->refc));
}
}
if (printed) {
@@ -650,7 +650,7 @@ erl_crash_dump_v(char *file, int line, char* fmt, va_list args)
char dumpnamebuf[MAXPATHLEN];
char* dumpname;
- if (ERTS_IS_CRASH_DUMPING)
+ if (ERTS_SOMEONE_IS_CRASH_DUMPING)
return;
/* Wait for all threads to block. If all threads haven't blocked
@@ -667,7 +667,8 @@ erl_crash_dump_v(char *file, int line, char* fmt, va_list args)
/* Allow us to pass certain places without locking... */
#ifdef ERTS_SMP
- erts_smp_atomic_inc(&erts_writing_erl_crash_dump);
+ erts_smp_atomic32_set_mb(&erts_writing_erl_crash_dump, 1);
+ erts_smp_tsd_set(erts_is_crash_dumping_key, (void *) 1);
#else
erts_writing_erl_crash_dump = 1;
#endif
diff --git a/erts/emulator/beam/dist.c b/erts/emulator/beam/dist.c
index b1cdd0660a..ad042ec088 100644
--- a/erts/emulator/beam/dist.c
+++ b/erts/emulator/beam/dist.c
@@ -128,8 +128,8 @@ delete_cache(ErtsAtomCache *cache)
{
if (cache) {
erts_free(ERTS_ALC_T_DCACHE, (void *) cache);
- ASSERT(erts_smp_atomic_read(&no_caches) > 0);
- erts_smp_atomic_dec(&no_caches);
+ ASSERT(erts_smp_atomic_read_nob(&no_caches) > 0);
+ erts_smp_atomic_dec_nob(&no_caches);
}
}
@@ -147,7 +147,7 @@ create_cache(DistEntry *dep)
dep->cache = cp = (ErtsAtomCache*) erts_alloc(ERTS_ALC_T_DCACHE,
sizeof(ErtsAtomCache));
- erts_smp_atomic_inc(&no_caches);
+ erts_smp_atomic_inc_nob(&no_caches);
for (i = 0; i < sizeof(cp->in_arr)/sizeof(cp->in_arr[0]); i++) {
cp->in_arr[i] = THE_NON_VALUE;
cp->out_arr[i] = THE_NON_VALUE;
@@ -156,7 +156,7 @@ create_cache(DistEntry *dep)
Uint erts_dist_cache_size(void)
{
- return (Uint) erts_smp_atomic_read(&no_caches)*sizeof(ErtsAtomCache);
+ return (Uint) erts_smp_atomic_read_mb(&no_caches)*sizeof(ErtsAtomCache);
}
static ErtsProcList *
@@ -444,7 +444,7 @@ int erts_do_net_exits(DistEntry *dep, Eterm reason)
ErtsMonitor *monitors;
Uint32 flags;
- erts_smp_atomic_set(&dep->dist_cmd_scheduled, 1);
+ erts_smp_atomic_set_mb(&dep->dist_cmd_scheduled, 1);
erts_smp_de_rwlock(dep);
ERTS_SMP_LC_ASSERT(is_internal_port(dep->cid)
@@ -510,7 +510,7 @@ void init_dist(void)
{
init_nodes_monitors();
- erts_smp_atomic_init(&no_caches, 0);
+ erts_smp_atomic_init_nob(&no_caches, 0);
/* Lookup/Install all references to trap functions */
dsend2_trap = trap_function(am_dsend,2);
@@ -596,7 +596,7 @@ static void clear_dist_entry(DistEntry *dep)
suspendees = get_suspended_on_de(dep, ERTS_DE_QFLGS_ALL);
erts_smp_mtx_unlock(&dep->qlock);
- erts_smp_atomic_set(&dep->dist_cmd_scheduled, 0);
+ erts_smp_atomic_set_nob(&dep->dist_cmd_scheduled, 0);
dep->send = NULL;
erts_smp_de_rwunlock(dep);
@@ -1775,7 +1775,7 @@ erts_dist_command(Port *prt, int reds_limit)
erts_refc_inc(&dep->refc, 1); /* Otherwise dist_entry might be
removed if port command fails */
- erts_smp_atomic_xchg(&dep->dist_cmd_scheduled, 0);
+ erts_smp_atomic_set_mb(&dep->dist_cmd_scheduled, 0);
erts_smp_de_rlock(dep);
flags = dep->flags;
diff --git a/erts/emulator/beam/dist.h b/erts/emulator/beam/dist.h
index 695a4fc3fe..845151c895 100644
--- a/erts/emulator/beam/dist.h
+++ b/erts/emulator/beam/dist.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 1996-2010. All Rights Reserved.
+ * Copyright Ericsson AB 1996-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -203,7 +203,7 @@ void erts_schedule_dist_command(Port *prt, DistEntry *dist_entry)
id = dep->cid;
}
- if (!erts_smp_atomic_xchg(&dep->dist_cmd_scheduled, 1)) {
+ if (!erts_smp_atomic_xchg_mb(&dep->dist_cmd_scheduled, 1)) {
(void) erts_port_task_schedule(id,
&dep->dist_cmd,
ERTS_PORT_TASK_DIST_CMD,
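
The two dist changes above show why the explicit suffixes pay off. In erts_dist_command (dist.c, above) the old erts_smp_atomic_xchg ignored its return value and was only there for its implied barrier, so it becomes erts_smp_atomic_set_mb; here in erts_schedule_dist_command the returned old value gates the scheduling decision, so a real exchange is kept and spelled erts_smp_atomic_xchg_mb. The idiom is a test-and-set (a sketch; schedule_work is a hypothetical stand-in for erts_port_task_schedule):

    extern void schedule_work(void);

    void maybe_schedule(erts_smp_atomic_t *scheduled)
    {
        /* only the thread that flips 0 -> 1 schedules the work */
        if (!erts_smp_atomic_xchg_mb(scheduled, 1))
            schedule_work();
        /* nonzero old value: already scheduled by someone else */
    }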
diff --git a/erts/emulator/beam/erl_bif_ddll.c b/erts/emulator/beam/erl_bif_ddll.c
index 9631fb50db..d714eacd06 100644
--- a/erts/emulator/beam/erl_bif_ddll.c
+++ b/erts/emulator/beam/erl_bif_ddll.c
@@ -369,7 +369,7 @@ BIF_RETTYPE erl_ddll_try_load_3(Process *p, Eterm path_term,
if (!(prt->status & FREE_PORT_FLAGS) &&
prt->drv_ptr->handle == dh) {
#if DDLL_SMP
- erts_smp_atomic_inc(&prt->refc);
+ erts_smp_atomic_inc_nob(&prt->refc);
/* Extremely rare spinlock */
while(prt->status & ERTS_PORT_SFLG_INITIALIZING) {
erts_smp_port_state_unlock(prt);
@@ -597,7 +597,7 @@ done:
if (!(prt->status & FREE_PORT_FLAGS)
&& prt->drv_ptr->handle == dh) {
#if DDLL_SMP
- erts_smp_atomic_inc(&prt->refc);
+ erts_smp_atomic_inc_nob(&prt->refc);
/* Extremely rare spinlock */
while(prt->status & ERTS_PORT_SFLG_INITIALIZING) {
erts_smp_port_state_unlock(prt);
@@ -1054,7 +1054,7 @@ void erts_ddll_proc_dead(Process *p, ErtsProcLocks plocks)
if (!(prt->status & FREE_PORT_FLAGS) &&
prt->drv_ptr->handle == dh) {
#if DDLL_SMP
- erts_smp_atomic_inc(&prt->refc);
+ erts_smp_atomic_inc_nob(&prt->refc);
while(prt->status & ERTS_PORT_SFLG_INITIALIZING) {
erts_smp_port_state_unlock(prt);
erts_smp_port_state_lock(prt);
@@ -1602,7 +1602,7 @@ static int do_load_driver_entry(DE_Handle *dh, char *path, char *name)
erts_sys_ddll_close(dh->handle);
return ERL_DE_LOAD_ERROR_BAD_NAME;
}
- erts_smp_atomic_init(&(dh->refc), (erts_aint_t) 0);
+ erts_smp_atomic_init_nob(&(dh->refc), (erts_aint_t) 0);
dh->port_count = 0;
dh->full_path = erts_alloc(ERTS_ALC_T_DDLL_HANDLE, sys_strlen(path) + 1);
sys_strcpy(dh->full_path, path);
diff --git a/erts/emulator/beam/erl_bif_info.c b/erts/emulator/beam/erl_bif_info.c
index f264bf44df..6a74596f76 100644
--- a/erts/emulator/beam/erl_bif_info.c
+++ b/erts/emulator/beam/erl_bif_info.c
@@ -135,7 +135,7 @@ bld_bin_list(Uint **hpp, Uint *szp, ErlOffHeap* oh)
if (szp)
*szp += 4+2;
if (hpp) {
- Uint refc = (Uint) erts_smp_atomic_read(&pb->val->refc);
+ Uint refc = (Uint) erts_smp_atomic_read_nob(&pb->val->refc);
tuple = TUPLE3(*hpp, val, orig_size, make_small(refc));
res = CONS(*hpp + 4, tuple, res);
*hpp += 4+2;
@@ -2026,7 +2026,7 @@ BIF_RETTYPE system_info_1(BIF_ALIST_1)
res = TUPLE2(hp, am_sequential_tracer, val);
BIF_RET(res);
} else if (BIF_ARG_1 == am_garbage_collection){
- Uint val = (Uint) erts_smp_atomic32_read(&erts_max_gen_gcs);
+ Uint val = (Uint) erts_smp_atomic32_read_nob(&erts_max_gen_gcs);
Eterm tup;
hp = HAlloc(BIF_P, 3+2 + 3+2 + 3+2);
@@ -2041,7 +2041,7 @@ BIF_RETTYPE system_info_1(BIF_ALIST_1)
BIF_RET(res);
} else if (BIF_ARG_1 == am_fullsweep_after){
- Uint val = (Uint) erts_smp_atomic32_read(&erts_max_gen_gcs);
+ Uint val = (Uint) erts_smp_atomic32_read_nob(&erts_max_gen_gcs);
hp = HAlloc(BIF_P, 3);
res = TUPLE2(hp, am_fullsweep_after, make_small(val));
BIF_RET(res);
@@ -2545,6 +2545,70 @@ BIF_RETTYPE system_info_1(BIF_ALIST_1)
hp = hsz ? HAlloc(BIF_P, hsz) : NULL;
res = erts_bld_uint(&hp, NULL, erts_dist_buf_busy_limit);
BIF_RET(res);
+ } else if (ERTS_IS_ATOM_STR("print_ethread_info", BIF_ARG_1)) {
+ int i;
+ char **str;
+#ifdef ETHR_NATIVE_ATOMIC32_IMPL
+ erts_printf("32-bit native atomics: %s\n",
+ ETHR_NATIVE_ATOMIC32_IMPL);
+ str = ethr_native_atomic32_ops();
+ for (i = 0; str[i]; i++)
+ erts_printf("ethr_native_atomic32_%s()\n", str[i]);
+#endif
+#ifdef ETHR_NATIVE_ATOMIC64_IMPL
+ erts_printf("64-bit native atomics: %s\n",
+ ETHR_NATIVE_ATOMIC64_IMPL);
+ str = ethr_native_atomic64_ops();
+ for (i = 0; str[i]; i++)
+ erts_printf("ethr_native_atomic64_%s()\n", str[i]);
+#endif
+#ifdef ETHR_NATIVE_DW_ATOMIC_IMPL
+ if (ethr_have_native_dw_atomic()) {
+ erts_printf("Double word native atomics: %s\n",
+ ETHR_NATIVE_DW_ATOMIC_IMPL);
+ str = ethr_native_dw_atomic_ops();
+ for (i = 0; str[i]; i++)
+ erts_printf("ethr_native_dw_atomic_%s()\n", str[i]);
+ str = ethr_native_su_dw_atomic_ops();
+ for (i = 0; str[i]; i++)
+ erts_printf("ethr_native_su_dw_atomic_%s()\n", str[i]);
+ }
+#endif
+#ifdef ETHR_NATIVE_SPINLOCK_IMPL
+ erts_printf("Native spin-locks: %s\n", ETHR_NATIVE_SPINLOCK_IMPL);
+#endif
+#ifdef ETHR_NATIVE_RWSPINLOCK_IMPL
+ erts_printf("Native rwspin-locks: %s\n", ETHR_NATIVE_RWSPINLOCK_IMPL);
+#endif
+#ifdef ETHR_X86_RUNTIME_CONF_HAVE_SSE2__
+ erts_printf("SSE2 support: %s\n", (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__
+ ? "yes" : "no"));
+#endif
+#ifdef ETHR_X86_OUT_OF_ORDER
+ erts_printf("x86"
+#ifdef ARCH_64
+ "_64"
+#endif
+ " out of order\n");
+#endif
+#ifdef ETHR_SPARC_TSO
+ erts_printf("Sparc TSO\n");
+#endif
+#ifdef ETHR_SPARC_PSO
+ erts_printf("Sparc PSO\n");
+#endif
+#ifdef ETHR_SPARC_RMO
+ erts_printf("Sparc RMO\n");
+#endif
+#if defined(ETHR_PPC_HAVE_LWSYNC)
+ erts_printf("Have lwsync instruction: yes\n");
+#elif defined(ETHR_PPC_HAVE_NO_LWSYNC)
+ erts_printf("Have lwsync instruction: no\n");
+#elif defined(ETHR_PPC_RUNTIME_CONF_HAVE_LWSYNC__)
+ erts_printf("Have lwsync instruction: %s (runtime test)\n",
+ ETHR_PPC_RUNTIME_CONF_HAVE_LWSYNC__ ? "yes" : "no");
+#endif
+ BIF_RET(am_true);
}
BIF_ERROR(BIF_P, BADARG);
@@ -2845,7 +2909,7 @@ fun_info_2(Process* p, Eterm fun, Eterm what)
}
break;
case am_refc:
- val = erts_make_integer(erts_smp_atomic_read(&funp->fe->refc), p);
+ val = erts_make_integer(erts_smp_atomic_read_nob(&funp->fe->refc), p);
hp = HAlloc(p, 3);
break;
case am_arity:
@@ -3065,8 +3129,8 @@ BIF_RETTYPE statistics_1(BIF_ALIST_1)
Eterm r1, r2;
Eterm in, out;
Uint hsz = 9;
- Uint bytes_in = (Uint) erts_smp_atomic_read(&erts_bytes_in);
- Uint bytes_out = (Uint) erts_smp_atomic_read(&erts_bytes_out);
+ Uint bytes_in = (Uint) erts_smp_atomic_read_nob(&erts_bytes_in);
+ Uint bytes_out = (Uint) erts_smp_atomic_read_nob(&erts_bytes_out);
(void) erts_bld_uint(NULL, &hsz, bytes_in);
(void) erts_bld_uint(NULL, &hsz, bytes_out);
@@ -3139,7 +3203,7 @@ BIF_RETTYPE erts_debug_get_internal_state_1(BIF_ALIST_1)
* NOTE: Only supposed to be used for testing, and debugging.
*/
- if (!erts_smp_atomic_read(&available_internal_state)) {
+ if (!erts_smp_atomic_read_nob(&available_internal_state)) {
BIF_ERROR(BIF_P, EXC_UNDEF);
}
@@ -3437,7 +3501,7 @@ BIF_RETTYPE erts_debug_set_internal_state_2(BIF_ALIST_2)
if (ERTS_IS_ATOM_STR("available_internal_state", BIF_ARG_1)
&& (BIF_ARG_2 == am_true || BIF_ARG_2 == am_false)) {
erts_aint_t on = (erts_aint_t) (BIF_ARG_2 == am_true);
- erts_aint_t prev_on = erts_smp_atomic_xchg(&available_internal_state, on);
+ erts_aint_t prev_on = erts_smp_atomic_xchg_nob(&available_internal_state, on);
if (on) {
erts_dsprintf_buf_t *dsbufp = erts_create_logger_dsbuf();
erts_dsprintf(dsbufp, "Process %T ", BIF_P->id);
@@ -3453,7 +3517,7 @@ BIF_RETTYPE erts_debug_set_internal_state_2(BIF_ALIST_2)
BIF_RET(prev_on ? am_true : am_false);
}
- if (!erts_smp_atomic_read(&available_internal_state)) {
+ if (!erts_smp_atomic_read_nob(&available_internal_state)) {
BIF_ERROR(BIF_P, EXC_UNDEF);
}
@@ -3634,14 +3698,14 @@ BIF_RETTYPE erts_debug_set_internal_state_2(BIF_ALIST_2)
}
else if (ERTS_IS_ATOM_STR("hipe_test_reschedule_suspend", BIF_ARG_1)) {
/* Used by hipe test suites */
- erts_aint_t flag = erts_smp_atomic_read(&hipe_test_reschedule_flag);
+ erts_aint_t flag = erts_smp_atomic_read_nob(&hipe_test_reschedule_flag);
if (!flag && BIF_ARG_2 != am_false) {
- erts_smp_atomic_set(&hipe_test_reschedule_flag, 1);
+ erts_smp_atomic_set_nob(&hipe_test_reschedule_flag, 1);
erts_suspend(BIF_P, ERTS_PROC_LOCK_MAIN, NULL);
ERTS_BIF_YIELD2(bif_export[BIF_erts_debug_set_internal_state_2],
BIF_P, BIF_ARG_1, BIF_ARG_2);
}
- erts_smp_atomic_set(&hipe_test_reschedule_flag, !flag);
+ erts_smp_atomic_set_nob(&hipe_test_reschedule_flag, !flag);
BIF_RET(NIL);
}
else if (ERTS_IS_ATOM_STR("hipe_test_reschedule_resume", BIF_ARG_1)) {
@@ -3951,8 +4015,8 @@ BIF_RETTYPE erts_debug_lock_counters_1(BIF_ALIST_1)
void
erts_bif_info_init(void)
{
- erts_smp_atomic_init(&available_internal_state, 0);
- erts_smp_atomic_init(&hipe_test_reschedule_flag, 0);
+ erts_smp_atomic_init_nob(&available_internal_state, 0);
+ erts_smp_atomic_init_nob(&hipe_test_reschedule_flag, 0);
process_info_init();
}
diff --git a/erts/emulator/beam/erl_bits.c b/erts/emulator/beam/erl_bits.c
index e56084b9cb..326a5c136b 100644
--- a/erts/emulator/beam/erl_bits.c
+++ b/erts/emulator/beam/erl_bits.c
@@ -76,14 +76,12 @@ struct erl_bits_state ErlBitsState;
#define byte_buf (ErlBitsState.byte_buf_)
#define byte_buf_len (ErlBitsState.byte_buf_len_)
-#ifdef ERTS_SMP
static erts_smp_atomic_t bits_bufs_size;
-#endif
Uint
erts_bits_bufs_size(void)
{
- return 0;
+ return (Uint) erts_smp_atomic_read_nob(&bits_bufs_size);
}
#if !defined(ERTS_SMP)
@@ -109,8 +107,8 @@ erts_bits_destroy_state(ERL_BITS_PROTO_0)
void
erts_init_bits(void)
{
+ erts_smp_atomic_init_nob(&bits_bufs_size, 0);
#if defined(ERTS_SMP)
- erts_smp_atomic_init(&bits_bufs_size, 0);
/* erl_process.c calls erts_bits_init_state() on all state instances */
#else
ERL_BITS_DECLARE_STATEP;
@@ -713,9 +711,7 @@ static void
ERTS_INLINE need_byte_buf(ERL_BITS_PROTO_1(int need))
{
if (byte_buf_len < need) {
-#ifdef ERTS_SMP
- erts_smp_atomic_add(&bits_bufs_size, need - byte_buf_len);
-#endif
+ erts_smp_atomic_add_nob(&bits_bufs_size, need - byte_buf_len);
byte_buf_len = need;
byte_buf = erts_realloc(ERTS_ALC_T_BITS_BUF, byte_buf, byte_buf_len);
}
diff --git a/erts/emulator/beam/erl_cpu_topology.c b/erts/emulator/beam/erl_cpu_topology.c
index bcf8bcf270..03c0ef904a 100644
--- a/erts/emulator/beam/erl_cpu_topology.c
+++ b/erts/emulator/beam/erl_cpu_topology.c
@@ -487,7 +487,7 @@ erts_sched_check_cpu_bind_post_suspend(ErtsSchedulerData *esdp)
/* Make sure we check if we should bind to a cpu or not... */
if (esdp->run_queue->flags & ERTS_RUNQ_FLG_SHARED_RUNQ)
- erts_smp_atomic32_set(&esdp->chk_cpu_bind, 1);
+ erts_smp_atomic32_set_nob(&esdp->chk_cpu_bind, 1);
else
esdp->run_queue->flags |= ERTS_RUNQ_FLG_CHK_CPU_BIND;
}
@@ -503,7 +503,7 @@ erts_sched_check_cpu_bind(ErtsSchedulerData *esdp)
erts_cpu_groups_callback_call_t *cgcc;
#ifdef ERTS_SMP
if (erts_common_run_queue)
- erts_smp_atomic32_set(&esdp->chk_cpu_bind, 0);
+ erts_smp_atomic32_set_nob(&esdp->chk_cpu_bind, 0);
else {
esdp->run_queue->flags &= ~ERTS_RUNQ_FLG_CHK_CPU_BIND;
}
diff --git a/erts/emulator/beam/erl_db.c b/erts/emulator/beam/erl_db.c
index e0a6aa05c6..7d731b6a94 100644
--- a/erts/emulator/beam/erl_db.c
+++ b/erts/emulator/beam/erl_db.c
@@ -224,21 +224,21 @@ static void
free_dbtable(DbTable* tb)
{
#ifdef HARDDEBUG
- if (erts_smp_atomic_read(&tb->common.memory_size) != sizeof(DbTable)) {
+ if (erts_smp_atomic_read_nob(&tb->common.memory_size) != sizeof(DbTable)) {
erts_fprintf(stderr, "ets: free_dbtable memory remain=%ld fix=%x\n",
- erts_smp_atomic_read(&tb->common.memory_size)-sizeof(DbTable),
+ erts_smp_atomic_read_nob(&tb->common.memory_size)-sizeof(DbTable),
tb->common.fixations);
}
erts_fprintf(stderr, "ets: free_dbtable(%T) deleted!!!\r\n",
tb->common.id);
erts_fprintf(stderr, "ets: free_dbtable: meta_pid_to_tab common.memory_size = %ld\n",
- erts_smp_atomic_read(&meta_pid_to_tab->common.memory_size));
+ erts_smp_atomic_read_nob(&meta_pid_to_tab->common.memory_size));
print_table(ERTS_PRINT_STDOUT, NULL, 1, meta_pid_to_tab);
erts_fprintf(stderr, "ets: free_dbtable: meta_pid_to_fixed_tab common.memory_size = %ld\n",
- erts_smp_atomic_read(&meta_pid_to_fixed_tab->common.memory_size));
+ erts_smp_atomic_read_nob(&meta_pid_to_fixed_tab->common.memory_size));
print_table(ERTS_PRINT_STDOUT, NULL, 1, meta_pid_to_fixed_tab);
#endif
#ifdef ERTS_SMP
@@ -248,6 +248,7 @@ free_dbtable(DbTable* tb)
ASSERT(is_immed(tb->common.heir_data));
erts_db_free(ERTS_ALC_T_DB_TABLE, tb, (void *) tb, sizeof(DbTable));
ERTS_ETS_MISC_MEM_ADD(-sizeof(DbTable));
+ ERTS_THR_MEMORY_BARRIER;
}
#ifdef ERTS_SMP
@@ -1417,12 +1418,12 @@ BIF_RETTYPE ets_new_2(BIF_ALIST_2)
{
DbTable init_tb;
- erts_smp_atomic_init(&init_tb.common.memory_size, 0);
+ erts_smp_atomic_init_nob(&init_tb.common.memory_size, 0);
tb = (DbTable*) erts_db_alloc(ERTS_ALC_T_DB_TABLE,
&init_tb, sizeof(DbTable));
ERTS_ETS_MISC_MEM_ADD(sizeof(DbTable));
- erts_smp_atomic_init(&tb->common.memory_size,
- erts_smp_atomic_read(&init_tb.common.memory_size));
+ erts_smp_atomic_init_nob(&tb->common.memory_size,
+ erts_smp_atomic_read_nob(&init_tb.common.memory_size));
}
tb->common.meth = meth;
@@ -1439,7 +1440,7 @@ BIF_RETTYPE ets_new_2(BIF_ALIST_2)
tb->common.owner = BIF_P->id;
set_heir(BIF_P, tb, heir, heir_data);
- erts_smp_atomic_init(&tb->common.nitems, 0);
+ erts_smp_atomic_init_nob(&tb->common.nitems, 0);
tb->common.fixations = NULL;
tb->common.compress = is_compressed;
@@ -1505,9 +1506,9 @@ BIF_RETTYPE ets_new_2(BIF_ALIST_2)
BIF_ARG_1, BIF_ARG_2, ret, BIF_P->id,
BIF_P->initial[0], BIF_P->initial[1], BIF_P->initial[2]);
erts_fprintf(stderr, "ets: new: meta_pid_to_tab common.memory_size = %ld\n",
- erts_smp_atomic_read(&meta_pid_to_tab->common.memory_size));
+ erts_smp_atomic_read_nob(&meta_pid_to_tab->common.memory_size));
erts_fprintf(stderr, "ets: new: meta_pid_to_fixed_tab common.memory_size = %ld\n",
- erts_smp_atomic_read(&meta_pid_to_fixed_tab->common.memory_size));
+ erts_smp_atomic_read_nob(&meta_pid_to_fixed_tab->common.memory_size));
#endif
UseTmpHeap(3,BIF_P);
@@ -1995,7 +1996,7 @@ BIF_RETTYPE ets_select_delete_2(BIF_ALIST_2)
if ((tb = db_get_table(BIF_P, BIF_ARG_1, DB_WRITE, LCK_WRITE)) == NULL) {
BIF_ERROR(BIF_P, BADARG);
}
- nitems = erts_smp_atomic_read(&tb->common.nitems);
+ nitems = erts_smp_atomic_read_nob(&tb->common.nitems);
tb->common.meth->db_delete_all_objects(BIF_P, tb);
db_unlock(tb, LCK_WRITE);
BIF_RET(erts_make_integer(nitems,BIF_P));
@@ -2789,7 +2790,7 @@ void init_db(void)
}
#endif
- erts_smp_atomic_init(&erts_ets_misc_mem_size, 0);
+ erts_smp_atomic_init_nob(&erts_ets_misc_mem_size, 0);
db_initialize_util();
if (user_requested_db_max_tabs < DB_DEF_MAX_TABS)
@@ -2831,13 +2832,13 @@ void init_db(void)
/*TT*/
/* Create meta table invertion. */
- erts_smp_atomic_init(&init_tb.common.memory_size, 0);
+ erts_smp_atomic_init_nob(&init_tb.common.memory_size, 0);
meta_pid_to_tab = (DbTable*) erts_db_alloc(ERTS_ALC_T_DB_TABLE,
&init_tb,
sizeof(DbTable));
ERTS_ETS_MISC_MEM_ADD(sizeof(DbTable));
- erts_smp_atomic_init(&meta_pid_to_tab->common.memory_size,
- erts_smp_atomic_read(&init_tb.common.memory_size));
+ erts_smp_atomic_init_nob(&meta_pid_to_tab->common.memory_size,
+ erts_smp_atomic_read_nob(&init_tb.common.memory_size));
meta_pid_to_tab->common.id = NIL;
meta_pid_to_tab->common.the_name = am_true;
@@ -2850,7 +2851,7 @@ void init_db(void)
#endif
meta_pid_to_tab->common.keypos = 1;
meta_pid_to_tab->common.owner = NIL;
- erts_smp_atomic_init(&meta_pid_to_tab->common.nitems, 0);
+ erts_smp_atomic_init_nob(&meta_pid_to_tab->common.nitems, 0);
meta_pid_to_tab->common.slot = -1;
meta_pid_to_tab->common.meth = &db_hash;
meta_pid_to_tab->common.compress = 0;
@@ -2863,13 +2864,13 @@ void init_db(void)
erl_exit(1,"Unable to create ets metadata tables.");
}
- erts_smp_atomic_set(&init_tb.common.memory_size, 0);
+ erts_smp_atomic_set_nob(&init_tb.common.memory_size, 0);
meta_pid_to_fixed_tab = (DbTable*) erts_db_alloc(ERTS_ALC_T_DB_TABLE,
&init_tb,
sizeof(DbTable));
ERTS_ETS_MISC_MEM_ADD(sizeof(DbTable));
- erts_smp_atomic_init(&meta_pid_to_fixed_tab->common.memory_size,
- erts_smp_atomic_read(&init_tb.common.memory_size));
+ erts_smp_atomic_init_nob(&meta_pid_to_fixed_tab->common.memory_size,
+ erts_smp_atomic_read_nob(&init_tb.common.memory_size));
meta_pid_to_fixed_tab->common.id = NIL;
meta_pid_to_fixed_tab->common.the_name = am_true;
@@ -2882,7 +2883,7 @@ void init_db(void)
#endif
meta_pid_to_fixed_tab->common.keypos = 1;
meta_pid_to_fixed_tab->common.owner = NIL;
- erts_smp_atomic_init(&meta_pid_to_fixed_tab->common.nitems, 0);
+ erts_smp_atomic_init_nob(&meta_pid_to_fixed_tab->common.nitems, 0);
meta_pid_to_fixed_tab->common.slot = -1;
meta_pid_to_fixed_tab->common.meth = &db_hash;
meta_pid_to_fixed_tab->common.compress = 0;
@@ -3421,7 +3422,7 @@ static void unfix_table_locked(Process* p, DbTable* tb,
unlocked:
if (!IS_FIXED(tb) && IS_HASH_TABLE(tb->common.status)
- && erts_smp_atomic_read(&tb->hash.fixdel) != (erts_aint_t)NULL) {
+ && erts_smp_atomic_read_nob(&tb->hash.fixdel) != (erts_aint_t)NULL) {
#ifdef ERTS_SMP
if (*kind_p == LCK_READ && tb->common.is_thread_safe) {
/* Must have write lock while purging pseudo-deleted (OTP-8166) */
@@ -3606,7 +3607,7 @@ static Eterm table_info(Process* p, DbTable* tb, Eterm What)
Eterm ret = THE_NON_VALUE;
if (What == am_size) {
- ret = make_small(erts_smp_atomic_read(&tb->common.nitems));
+ ret = make_small(erts_smp_atomic_read_nob(&tb->common.nitems));
} else if (What == am_type) {
if (tb->common.status & DB_SET) {
ret = am_set;
@@ -3619,7 +3620,7 @@ static Eterm table_info(Process* p, DbTable* tb, Eterm What)
ret = am_bag;
}
} else if (What == am_memory) {
- Uint words = (Uint) ((erts_smp_atomic_read(&tb->common.memory_size)
+ Uint words = (Uint) ((erts_smp_atomic_read_nob(&tb->common.memory_size)
+ sizeof(Uint)
- 1)
/ sizeof(Uint));
@@ -3716,7 +3717,7 @@ static Eterm table_info(Process* p, DbTable* tb, Eterm What)
std_dev_exp = make_float(hp);
PUT_DOUBLE(f, hp);
hp += FLOAT_SIZE_OBJECT;
- ret = TUPLE6(hp, make_small(erts_smp_atomic_read(&tb->hash.nactive)),
+ ret = TUPLE6(hp, make_small(erts_smp_atomic_read_nob(&tb->hash.nactive)),
avg, std_dev_real, std_dev_exp,
make_small(stats.min_chain_len),
make_small(stats.max_chain_len));
@@ -3735,9 +3736,9 @@ static void print_table(int to, void *to_arg, int show, DbTable* tb)
tb->common.meth->db_print(to, to_arg, show, tb);
- erts_print(to, to_arg, "Objects: %d\n", (int)erts_smp_atomic_read(&tb->common.nitems));
+ erts_print(to, to_arg, "Objects: %d\n", (int)erts_smp_atomic_read_nob(&tb->common.nitems));
erts_print(to, to_arg, "Words: %bpu\n",
- (UWord) ((erts_smp_atomic_read(&tb->common.memory_size)
+ (Uint) ((erts_smp_atomic_read_nob(&tb->common.memory_size)
+ sizeof(Uint)
- 1)
/ sizeof(Uint)));
@@ -3763,8 +3764,9 @@ void db_info(int to, void *to_arg, int show) /* Called by break handler */
Uint
erts_get_ets_misc_mem_size(void)
{
+ ERTS_THR_MEMORY_BARRIER;
/* Memory not allocated in ets_alloc */
- return (Uint) erts_smp_atomic_read(&erts_ets_misc_mem_size);
+ return (Uint) erts_smp_atomic_read_nob(&erts_ets_misc_mem_size);
}
/* SMP Note: May only be used when system is locked */
diff --git a/erts/emulator/beam/erl_db.h b/erts/emulator/beam/erl_db.h
index e0bdebcb01..2e5deaf338 100644
--- a/erts/emulator/beam/erl_db.h
+++ b/erts/emulator/beam/erl_db.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 1996-2010. All Rights Reserved.
+ * Copyright Ericsson AB 1996-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -86,11 +86,11 @@ do { \
erts_aint_t sz__ = (((erts_aint_t) (ALLOC_SZ)) \
- ((erts_aint_t) (FREE_SZ))); \
ASSERT((TAB)); \
- erts_smp_atomic_add(&(TAB)->common.memory_size, sz__); \
+ erts_smp_atomic_add_nob(&(TAB)->common.memory_size, sz__); \
} while (0)
#define ERTS_ETS_MISC_MEM_ADD(SZ) \
- erts_smp_atomic_add(&erts_ets_misc_mem_size, (SZ));
+ erts_smp_atomic_add_nob(&erts_ets_misc_mem_size, (SZ));
ERTS_GLB_INLINE void *erts_db_alloc(ErtsAlcType_t type,
DbTable *tab,
@@ -227,7 +227,7 @@ erts_db_free(ErtsAlcType_t type, DbTable *tab, void *ptr, Uint size)
ERTS_DB_ALC_MEM_UPDATE_(tab, size, 0);
ASSERT(((void *) tab) != ptr
- || erts_smp_atomic_read(&tab->common.memory_size) == 0);
+ || erts_smp_atomic_read_nob(&tab->common.memory_size) == 0);
erts_free(type, ptr);
}
diff --git a/erts/emulator/beam/erl_db_hash.c b/erts/emulator/beam/erl_db_hash.c
index fdc82c8b88..e3380a57b2 100644
--- a/erts/emulator/beam/erl_db_hash.c
+++ b/erts/emulator/beam/erl_db_hash.c
@@ -105,9 +105,13 @@
#define NSEG_2 256 /* Size of second segment table */
#define NSEG_INC 128 /* Number of segments to grow after that */
-#define SEGTAB(tb) ((struct segment**)erts_smp_atomic_read_acqb(&(tb)->segtab))
-#define NACTIVE(tb) ((int)erts_smp_atomic_read(&(tb)->nactive))
-#define NITEMS(tb) ((int)erts_smp_atomic_read(&(tb)->common.nitems))
+#ifdef ETHR_ORDERED_READ_DEPEND
+#define SEGTAB(tb) ((struct segment**)erts_smp_atomic_read_nob(&(tb)->segtab))
+#else
+#define SEGTAB(tb) ((struct segment**)erts_smp_atomic_read_rb(&(tb)->segtab))
+#endif
+#define NACTIVE(tb) ((int)erts_smp_atomic_read_nob(&(tb)->nactive))
+#define NITEMS(tb) ((int)erts_smp_atomic_read_nob(&(tb)->common.nitems))
#define BUCKET(tb, i) SEGTAB(tb)[(i) >> SEGSZ_EXP]->buckets[(i) & SEGSZ_MASK]
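
SEGTAB reads the segment-table pointer and then immediately dereferences it. On almost every architecture that data dependency alone orders the two loads, which is what ETHR_ORDERED_READ_DEPEND advertises, so a plain read suffices; where the guarantee is missing (DEC Alpha is the classic case) the _rb variant inserts a real read barrier. For this to work, the writer must publish the new table with at least release/write ordering after initializing it, along the lines of (an illustrative sketch, not the actual grow code):

    /* Publish a freshly initialized segment table. All stores that
     * fill new_tab[] happen before the release store, so a reader's
     * dependent load through SEGTAB() sees initialized memory. */
    static void publish_segtab(DbTableHash *tb, struct segment **new_tab)
    {
        erts_smp_atomic_set_relb(&tb->segtab, (erts_aint_t) new_tab);
    }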
@@ -123,10 +127,10 @@
static ERTS_INLINE Uint hash_to_ix(DbTableHash* tb, HashValue hval)
{
Uint mask = erts_smp_atomic_read_acqb(&tb->szm);
- Uint ix = hval & mask;
- if (ix >= erts_smp_atomic_read(&tb->nactive)) {
+ Uint ix = hval & mask;
+ if (ix >= erts_smp_atomic_read_nob(&tb->nactive)) {
ix &= mask>>1;
- ASSERT(ix < erts_smp_atomic_read(&tb->nactive));
+ ASSERT(ix < erts_smp_atomic_read_nob(&tb->nactive));
}
return ix;
}
@@ -141,14 +145,14 @@ static ERTS_INLINE void add_fixed_deletion(DbTableHash* tb, int ix)
(DbTable *) tb,
sizeof(FixedDeletion));
ERTS_ETS_MISC_MEM_ADD(sizeof(FixedDeletion));
- fixd->slot = ix;
- was_next = erts_smp_atomic_read(&tb->fixdel);
+ fixd->slot = ix;
+ was_next = erts_smp_atomic_read_acqb(&tb->fixdel);
do { /* Lockless atomic insertion in linked list: */
exp_next = was_next;
fixd->next = (FixedDeletion*) exp_next;
- was_next = erts_smp_atomic_cmpxchg(&tb->fixdel,
- (erts_aint_t) fixd,
- exp_next);
+ was_next = erts_smp_atomic_cmpxchg_relb(&tb->fixdel,
+ (erts_aint_t) fixd,
+ exp_next);
}while (was_next != exp_next);
}
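
add_fixed_deletion above is a lock-free push onto a singly linked stack (the fixdel list): read the current head with acquire ordering, link the new cell in front of it, then try to swing the head with a releasing cmpxchg, retrying with whatever head the cmpxchg reported on failure. Reduced to its bare pattern (a sketch with simplified types):

    typedef struct cell { struct cell *next; } Cell;

    void push(erts_smp_atomic_t *head, Cell *c)
    {
        erts_aint_t was = erts_smp_atomic_read_acqb(head);
        erts_aint_t expected;
        do {
            expected = was;
            c->next = (Cell *) expected;       /* link before publishing */
            was = erts_smp_atomic_cmpxchg_relb(head,
                                               (erts_aint_t) c,
                                               expected);
        } while (was != expected);             /* lost a race: retry */
    }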
@@ -540,22 +544,24 @@ static void restore_fixdel(DbTableHash* tb, FixedDeletion* fixdel)
{
/*int tries = 0;*/
DEBUG_WAIT();
- if (erts_smp_atomic_cmpxchg(&tb->fixdel, (erts_aint_t)fixdel,
- (erts_aint_t)NULL) != (erts_aint_t)NULL) {
+ if (erts_smp_atomic_cmpxchg_relb(&tb->fixdel,
+ (erts_aint_t) fixdel,
+ (erts_aint_t) NULL) != (erts_aint_t) NULL) {
/* Oboy, must join lists */
FixedDeletion* last = fixdel;
erts_aint_t was_tail;
erts_aint_t exp_tail;
- while (last->next != NULL) last = last->next;
- was_tail = erts_smp_atomic_read(&tb->fixdel);
+ while (last->next != NULL) last = last->next;
+ was_tail = erts_smp_atomic_read_acqb(&tb->fixdel);
do { /* Lockless atomic list insertion */
exp_tail = was_tail;
last->next = (FixedDeletion*) exp_tail;
/*++tries;*/
DEBUG_WAIT();
- was_tail = erts_smp_atomic_cmpxchg(&tb->fixdel, (erts_aint_t)fixdel,
- exp_tail);
+ was_tail = erts_smp_atomic_cmpxchg_relb(&tb->fixdel,
+ (erts_aint_t) fixdel,
+ exp_tail);
}while (was_tail != exp_tail);
}
/*erts_fprintf(stderr,"erl_db_hash: restore_fixdel tries=%d\r\n", tries);*/
@@ -572,7 +578,8 @@ void db_unfix_table_hash(DbTableHash *tb)
|| (erts_smp_lc_rwmtx_is_rlocked(&tb->common.rwlock)
&& !tb->common.is_thread_safe));
restart:
- fixdel = (FixedDeletion*) erts_smp_atomic_xchg(&tb->fixdel, (erts_aint_t)NULL);
+ fixdel = (FixedDeletion*) erts_smp_atomic_xchg_acqb(&tb->fixdel,
+ (erts_aint_t) NULL);
while (fixdel != NULL) {
FixedDeletion *fx = fixdel;
int ix = fx->slot;
@@ -639,14 +646,14 @@ int db_create_hash(Process *p, DbTable *tbl)
{
DbTableHash *tb = &tbl->hash;
- erts_smp_atomic_init(&tb->szm, SEGSZ_MASK);
- erts_smp_atomic_init(&tb->nactive, SEGSZ);
- erts_smp_atomic_init(&tb->fixdel, (erts_aint_t)NULL);
- erts_smp_atomic_init(&tb->segtab, (erts_aint_t) alloc_ext_seg(tb,0,NULL)->segtab);
+ erts_smp_atomic_init_nob(&tb->szm, SEGSZ_MASK);
+ erts_smp_atomic_init_nob(&tb->nactive, SEGSZ);
+ erts_smp_atomic_init_nob(&tb->fixdel, (erts_aint_t)NULL);
+ erts_smp_atomic_init_nob(&tb->segtab, (erts_aint_t) alloc_ext_seg(tb,0,NULL)->segtab);
tb->nsegs = NSEG_1;
tb->nslots = SEGSZ;
- erts_smp_atomic_init(&tb->is_resizing, 0);
+ erts_smp_atomic_init_nob(&tb->is_resizing, 0);
#ifdef ERTS_SMP
if (tb->common.type & DB_FINE_LOCKED) {
erts_smp_rwmtx_opt_t rwmtx_opt = ERTS_SMP_RWMTX_OPT_DEFAULT_INITER;
@@ -663,7 +670,7 @@ int db_create_hash(Process *p, DbTable *tbl)
/* This important property is needed to guarantee that the buckets
* involved in a grow/shrink operation are protected by the same lock:
*/
- ASSERT(erts_smp_atomic_read(&tb->nactive) % DB_HASH_LOCK_CNT == 0);
+ ASSERT(erts_smp_atomic_read_nob(&tb->nactive) % DB_HASH_LOCK_CNT == 0);
}
else { /* coarse locking */
tb->locks = NULL;
@@ -783,7 +790,7 @@ int db_put_hash(DbTable *tbl, Eterm obj, int key_clash_fail)
if (tb->common.status & DB_SET) {
HashDbTerm* bnext = b->next;
if (b->hvalue == INVALID_HASH) {
- erts_smp_atomic_inc(&tb->common.nitems);
+ erts_smp_atomic_inc_nob(&tb->common.nitems);
}
else if (key_clash_fail) {
ret = DB_ERROR_BADKEY;
@@ -811,7 +818,7 @@ int db_put_hash(DbTable *tbl, Eterm obj, int key_clash_fail)
do {
if (db_eq(&tb->common,obj,&q->dbterm)) {
if (q->hvalue == INVALID_HASH) {
- erts_smp_atomic_inc(&tb->common.nitems);
+ erts_smp_atomic_inc_nob(&tb->common.nitems);
q->hvalue = hval;
if (q != b) { /* must move to preserve key insertion order */
*qp = q->next;
@@ -832,7 +839,7 @@ Lnew:
q->hvalue = hval;
q->next = b;
*bp = q;
- nitems = erts_smp_atomic_inctest(&tb->common.nitems);
+ nitems = erts_smp_atomic_inc_read_nob(&tb->common.nitems);
WUNLOCK_HASH(lck);
{
int nactive = NACTIVE(tb);
@@ -1069,7 +1076,7 @@ int db_erase_bag_exact2(DbTable *tbl, Eterm key, Eterm value)
EQ(value, b->dbterm.tpl[2])) {
*bp = b->next;
free_term(tb, b);
- erts_smp_atomic_dec(&tb->common.nitems);
+ erts_smp_atomic_dec_nob(&tb->common.nitems);
b = *bp;
break;
}
@@ -1128,7 +1135,7 @@ int db_erase_hash(DbTable *tbl, Eterm key, Eterm *ret)
}
WUNLOCK_HASH(lck);
if (nitems_diff) {
- erts_smp_atomic_add(&tb->common.nitems, nitems_diff);
+ erts_smp_atomic_add_nob(&tb->common.nitems, nitems_diff);
try_shrink(tb);
}
*ret = am_true;
@@ -1187,7 +1194,7 @@ static int db_erase_object_hash(DbTable *tbl, Eterm object, Eterm *ret)
}
WUNLOCK_HASH(lck);
if (nitems_diff) {
- erts_smp_atomic_add(&tb->common.nitems, nitems_diff);
+ erts_smp_atomic_add_nob(&tb->common.nitems, nitems_diff);
try_shrink(tb);
}
*ret = am_true;
@@ -1798,7 +1805,7 @@ static int db_select_delete_hash(Process *p,
free_term(tb, del);
did_erase = 1;
}
- erts_smp_atomic_dec(&tb->common.nitems);
+ erts_smp_atomic_dec_nob(&tb->common.nitems);
++got;
}
--num_left;
@@ -1909,7 +1916,7 @@ static int db_select_delete_continue_hash(Process *p,
free_term(tb, del);
did_erase = 1;
}
- erts_smp_atomic_dec(&tb->common.nitems);
+ erts_smp_atomic_dec_nob(&tb->common.nitems);
++got;
}
@@ -2064,7 +2071,7 @@ int db_mark_all_deleted_hash(DbTable *tbl)
}while(list != NULL);
}
}
- erts_smp_atomic_set(&tb->common.nitems, 0);
+ erts_smp_atomic_set_nob(&tb->common.nitems, 0);
return DB_ERROR_NONE;
}
@@ -2115,7 +2122,7 @@ static int db_free_table_continue_hash(DbTable *tbl)
{
DbTableHash *tb = &tbl->hash;
int done;
- FixedDeletion* fixdel = (FixedDeletion*) erts_smp_atomic_read(&tb->fixdel);
+ FixedDeletion* fixdel = (FixedDeletion*) erts_smp_atomic_read_acqb(&tb->fixdel);
ERTS_SMP_LC_ASSERT(IS_TAB_WLOCKED(tb));
done = 0;
@@ -2129,11 +2136,11 @@ static int db_free_table_continue_hash(DbTable *tbl)
sizeof(FixedDeletion));
ERTS_ETS_MISC_MEM_ADD(-sizeof(FixedDeletion));
if (++done >= 2*DELETE_RECORD_LIMIT) {
- erts_smp_atomic_set(&tb->fixdel, (erts_aint_t)fixdel);
+ erts_smp_atomic_set_relb(&tb->fixdel, (erts_aint_t)fixdel);
return 0; /* Not done */
}
}
- erts_smp_atomic_set(&tb->fixdel, (erts_aint_t)NULL);
+ erts_smp_atomic_set_relb(&tb->fixdel, (erts_aint_t)NULL);
done /= 2;
while(tb->nslots != 0) {
@@ -2157,7 +2164,7 @@ static int db_free_table_continue_hash(DbTable *tbl)
tb->locks = NULL;
}
#endif
- ASSERT(erts_smp_atomic_read(&tb->common.memory_size) == sizeof(DbTable));
+ ASSERT(erts_smp_atomic_read_nob(&tb->common.memory_size) == sizeof(DbTable));
return 1; /* Done */
}
@@ -2350,7 +2357,7 @@ static int alloc_seg(DbTableHash *tb)
struct ext_segment* eseg;
eseg = (struct ext_segment*) SEGTAB(tb)[seg_ix-1];
MY_ASSERT(eseg!=NULL && eseg->s.is_ext_segment);
- erts_smp_atomic_set_relb(&tb->segtab, (erts_aint_t) eseg->segtab);
+ erts_smp_atomic_set_wb(&tb->segtab, (erts_aint_t) eseg->segtab);
tb->nsegs = eseg->nsegs;
}
ASSERT(seg_ix < tb->nsegs);
@@ -2422,7 +2429,7 @@ static int free_seg(DbTableHash *tb, int free_records)
MY_ASSERT(newtop->s.is_ext_segment);
if (newtop->prev_segtab != NULL) {
/* Time to use a smaller segtab */
- erts_smp_atomic_set_relb(&tb->segtab, (erts_aint_t)newtop->prev_segtab);
+ erts_smp_atomic_set_wb(&tb->segtab, (erts_aint_t)newtop->prev_segtab);
tb->nsegs = seg_ix;
ASSERT(tb->nsegs == EXTSEG(SEGTAB(tb))->nsegs);
}
@@ -2439,7 +2446,7 @@ static int free_seg(DbTableHash *tb, int free_records)
if (seg_ix > 0) {
if (seg_ix < tb->nsegs) SEGTAB(tb)[seg_ix] = NULL;
} else {
- erts_smp_atomic_set_relb(&tb->segtab, (erts_aint_t)NULL);
+ erts_smp_atomic_set_wb(&tb->segtab, (erts_aint_t)NULL);
}
#endif
tb->nslots -= SEGSZ;
@@ -2500,7 +2507,7 @@ static void grow(DbTableHash* tb, int nactive)
int from_ix;
int szm;
- if (erts_smp_atomic_xchg(&tb->is_resizing, 1)) {
+ if (erts_smp_atomic_xchg_acqb(&tb->is_resizing, 1)) {
return; /* already in progress */
}
if (NACTIVE(tb) != nactive) {
@@ -2515,7 +2522,7 @@ static void grow(DbTableHash* tb, int nactive)
}
ASSERT(nactive < tb->nslots);
- szm = erts_smp_atomic_read(&tb->szm);
+ szm = erts_smp_atomic_read_nob(&tb->szm);
if (nactive <= szm) {
from_ix = nactive & (szm >> 1);
} else {
@@ -2532,7 +2539,7 @@ static void grow(DbTableHash* tb, int nactive)
WUNLOCK_HASH(lck);
goto abort;
}
- erts_smp_atomic_inc(&tb->nactive);
+ erts_smp_atomic_inc_nob(&tb->nactive);
if (from_ix == 0) {
erts_smp_atomic_set_relb(&tb->szm, szm);
}
@@ -2577,13 +2584,13 @@ abort:
*/
static void shrink(DbTableHash* tb, int nactive)
{
- if (erts_smp_atomic_xchg(&tb->is_resizing, 1)) {
+ if (erts_smp_atomic_xchg_acqb(&tb->is_resizing, 1)) {
return; /* already in progress */
}
if (NACTIVE(tb) == nactive) {
erts_smp_rwmtx_t* lck;
int src_ix = nactive - 1;
- int low_szm = erts_smp_atomic_read(&tb->szm) >> 1;
+ int low_szm = erts_smp_atomic_read_nob(&tb->szm) >> 1;
int dst_ix = src_ix & low_szm;
ASSERT(dst_ix < src_ix);
@@ -2610,7 +2617,7 @@ static void shrink(DbTableHash* tb, int nactive)
*dst_bp = *src_bp;
*src_bp = NULL;
- erts_smp_atomic_set(&tb->nactive, src_ix);
+ erts_smp_atomic_set_nob(&tb->nactive, src_ix);
if (dst_ix == 0) {
erts_smp_atomic_set_relb(&tb->szm, low_szm);
}
@@ -2746,7 +2753,7 @@ static int db_delete_all_objects_hash(Process* p, DbTable* tbl)
} else {
db_free_table_hash(tbl);
db_create_hash(p, tbl);
- erts_smp_atomic_set(&tbl->hash.common.nitems, 0);
+ erts_smp_atomic_set_nob(&tbl->hash.common.nitems, 0);
}
return 0;
}
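
erl_db_hash.c exercises the new suffixes on a real lock-free structure:
the fixdel list is pushed with an acquire read plus a release cmpxchg,
drained wholesale with an acquire xchg, and the segtab pointer is
published with a write barrier (or read without one where the platform
orders dependent loads, per ETHR_ORDERED_READ_DEPEND). A rough C11
analogue of the push/drain pair; node, head, push and take_all are
invented names, and this is a sketch of the pattern, not the ERTS code:

    #include <stdatomic.h>
    #include <stddef.h>

    struct node { struct node *next; int slot; };

    static _Atomic(struct node *) head;

    /* Mirrors add_fixed_deletion(): link the new node in front of the
     * observed head, then publish it with a release CAS so its fields
     * are visible to whoever later acquires the head. */
    static void push(struct node *n)
    {
        struct node *old = atomic_load_explicit(&head, memory_order_acquire);
        do {
            n->next = old;
        } while (!atomic_compare_exchange_weak_explicit(
                     &head, &old, n,
                     memory_order_release,    /* success: publish n */
                     memory_order_relaxed));  /* failure: old was re-read */
    }

    /* Mirrors db_unfix_table_hash(): detach the whole list at once,
     * acquiring so the drained nodes' contents are safe to read. */
    static struct node *take_all(void)
    {
        return atomic_exchange_explicit(&head, NULL, memory_order_acquire);
    }
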
diff --git a/erts/emulator/beam/erl_db_tree.c b/erts/emulator/beam/erl_db_tree.c
index 9a0ba3a418..c6f0d80e32 100644
--- a/erts/emulator/beam/erl_db_tree.c
+++ b/erts/emulator/beam/erl_db_tree.c
@@ -49,7 +49,7 @@
#include "erl_db_tree.h"
#define GETKEY_WITH_POS(Keypos, Tplp) (*((Tplp) + Keypos))
-#define NITEMS(tb) ((int)erts_smp_atomic_read(&(tb)->common.nitems))
+#define NITEMS(tb) ((int)erts_smp_atomic_read_nob(&(tb)->common.nitems))
/*
** A stack of this size is enough for an AVL tree with more than
@@ -84,7 +84,7 @@
*/
static DbTreeStack* get_static_stack(DbTableTree* tb)
{
- if (!erts_smp_atomic_xchg(&tb->is_stack_busy, 1)) {
+ if (!erts_smp_atomic_xchg_acqb(&tb->is_stack_busy, 1)) {
return &tb->static_stack;
}
return NULL;
@@ -96,7 +96,7 @@ static DbTreeStack* get_static_stack(DbTableTree* tb)
static DbTreeStack* get_any_stack(DbTableTree* tb)
{
DbTreeStack* stack;
- if (!erts_smp_atomic_xchg(&tb->is_stack_busy, 1)) {
+ if (!erts_smp_atomic_xchg_acqb(&tb->is_stack_busy, 1)) {
return &tb->static_stack;
}
stack = erts_db_alloc(ERTS_ALC_T_DB_STK, (DbTable *) tb,
@@ -110,7 +110,7 @@ static DbTreeStack* get_any_stack(DbTableTree* tb)
static void release_stack(DbTableTree* tb, DbTreeStack* stack)
{
if (stack == &tb->static_stack) {
- ASSERT(erts_smp_atomic_read(&tb->is_stack_busy) == 1);
+ ASSERT(erts_smp_atomic_read_nob(&tb->is_stack_busy) == 1);
erts_smp_atomic_set_relb(&tb->is_stack_busy, 0);
}
else {
@@ -478,7 +478,7 @@ int db_create_tree(Process *p, DbTable *tbl)
sizeof(TreeDbTerm *) * STACK_NEED);
tb->static_stack.pos = 0;
tb->static_stack.slot = 0;
- erts_smp_atomic_init(&tb->is_stack_busy, 0);
+ erts_smp_atomic_init_nob(&tb->is_stack_busy, 0);
tb->deletion = 0;
return DB_ERROR_NONE;
}
@@ -613,8 +613,8 @@ static int db_put_tree(DbTable *tbl, Eterm obj, int key_clash_fail)
for (;;)
if (!*this) { /* Found our place */
state = 1;
- if (erts_smp_atomic_inctest(&tb->common.nitems) >= TREE_MAX_ELEMENTS) {
- erts_smp_atomic_dec(&tb->common.nitems);
+ if (erts_smp_atomic_inc_read_nob(&tb->common.nitems) >= TREE_MAX_ELEMENTS) {
+ erts_smp_atomic_dec_nob(&tb->common.nitems);
return DB_ERROR_SYSRES;
}
*this = new_dbterm(tb, obj);
@@ -1583,7 +1583,7 @@ static int db_select_delete_continue_tree(Process *p,
sc.max = 1000;
sc.keypos = tb->common.keypos;
- ASSERT(!erts_smp_atomic_read(&tb->is_stack_busy));
+ ASSERT(!erts_smp_atomic_read_nob(&tb->is_stack_busy));
traverse_backwards(tb, &tb->static_stack, lastkey, NULL, &doit_select_delete, &sc);
BUMP_REDS(p, 1000 - sc.max);
@@ -1774,7 +1774,7 @@ static int db_free_table_continue_tree(DbTable *tbl)
(DbTable *) tb,
(void *) tb->static_stack.array,
sizeof(TreeDbTerm *) * STACK_NEED);
- ASSERT(erts_smp_atomic_read(&tb->common.memory_size)
+ ASSERT(erts_smp_atomic_read_nob(&tb->common.memory_size)
== sizeof(DbTable));
}
return result;
@@ -1784,7 +1784,7 @@ static int db_delete_all_objects_tree(Process* p, DbTable* tbl)
{
db_free_table_tree(tbl);
db_create_tree(p, tbl);
- erts_smp_atomic_set(&tbl->tree.common.nitems, 0);
+ erts_smp_atomic_set_nob(&tbl->tree.common.nitems, 0);
return 0;
}
@@ -1866,7 +1866,7 @@ static TreeDbTerm *linkout_tree(DbTableTree *tb,
tstack[tpos++] = this;
state = delsub(this);
}
- erts_smp_atomic_dec(&tb->common.nitems);
+ erts_smp_atomic_dec_nob(&tb->common.nitems);
break;
}
}
@@ -1933,7 +1933,7 @@ static TreeDbTerm *linkout_object_tree(DbTableTree *tb,
tstack[tpos++] = this;
state = delsub(this);
}
- erts_smp_atomic_dec(&tb->common.nitems);
+ erts_smp_atomic_dec_nob(&tb->common.nitems);
break;
}
}
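
erl_db_tree.c (like grow()/shrink() in erl_db_hash.c above) uses an
exchange with acquire ordering as a try-lock on a shared resource
(is_stack_busy, is_resizing) and a release store to hand it back. A
hedged C11 sketch of that claim/release pairing; busy, try_claim and
release_busy are illustrative names:

    #include <stdatomic.h>
    #include <stdbool.h>

    static atomic_int busy;

    /* Claim succeeds only for the caller that flips 0 -> 1; acquire
     * ordering makes the protected data safe to use afterwards. */
    static bool try_claim(void)
    {
        return atomic_exchange_explicit(&busy, 1, memory_order_acquire) == 0;
    }

    /* Release store publishes all writes made while holding the claim. */
    static void release_busy(void)
    {
        atomic_store_explicit(&busy, 0, memory_order_release);
    }
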
diff --git a/erts/emulator/beam/erl_db_util.c b/erts/emulator/beam/erl_db_util.c
index c3b074f782..f9896a60e6 100644
--- a/erts/emulator/beam/erl_db_util.c
+++ b/erts/emulator/beam/erl_db_util.c
@@ -491,7 +491,7 @@ erts_match_set_release_result(Process* c_p)
/* The trace control word. */
-static erts_smp_atomic_t trace_control_word;
+static erts_smp_atomic32_t trace_control_word;
/* This needs to be here, before the bif table... */
@@ -911,7 +911,7 @@ static void db_free_tmp_uncompressed(DbTerm* obj);
BIF_RETTYPE db_get_trace_control_word_0(Process *p)
{
- Uint32 tcw = (Uint32) erts_smp_atomic_read(&trace_control_word);
+ Uint32 tcw = (Uint32) erts_smp_atomic32_read_acqb(&trace_control_word);
BIF_RET(erts_make_integer((Uint) tcw, p));
}
@@ -924,7 +924,8 @@ BIF_RETTYPE db_set_trace_control_word_1(Process *p, Eterm new)
if (val != ((Uint32)val))
BIF_ERROR(p, BADARG);
- old_tcw = (Uint32) erts_smp_atomic_xchg(&trace_control_word, (erts_aint_t) val);
+ old_tcw = (Uint32) erts_smp_atomic32_xchg_relb(&trace_control_word,
+ (erts_aint32_t) val);
BIF_RET(erts_make_integer((Uint) old_tcw, p));
}
@@ -1249,7 +1250,7 @@ void db_initialize_util(void){
sizeof(DMCGuardBif),
(int (*)(const void *, const void *)) &cmp_guard_bif);
match_pseudo_process_init();
- erts_smp_atomic_init(&trace_control_word, 0);
+ erts_smp_atomic32_init_nob(&trace_control_word, 0);
}
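
Besides the ordering suffixes, erl_db_util.c narrows the trace control
word from a word-sized atomic to the sized 32-bit type. A C11 sketch of
the acquire-read/release-exchange pairing the patch selects (names are
illustrative, not the ERTS API):

    #include <stdatomic.h>
    #include <stdint.h>

    static _Atomic uint32_t trace_control_word;

    /* Acquire read pairs with the release exchange in the setter. */
    static uint32_t tcw_get(void)
    {
        return atomic_load_explicit(&trace_control_word,
                                    memory_order_acquire);
    }

    static uint32_t tcw_set(uint32_t val)
    {
        return atomic_exchange_explicit(&trace_control_word, val,
                                        memory_order_release);
    }
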
diff --git a/erts/emulator/beam/erl_init.c b/erts/emulator/beam/erl_init.c
index 0a57eb6d88..5f3f653e99 100644
--- a/erts/emulator/beam/erl_init.c
+++ b/erts/emulator/beam/erl_init.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 1997-2010. All Rights Reserved.
+ * Copyright Ericsson AB 1997-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -69,7 +69,8 @@ static void erl_init(int ncpu);
#define ERTS_MIN_COMPAT_REL 7
#ifdef ERTS_SMP
-erts_smp_atomic_t erts_writing_erl_crash_dump;
+erts_smp_atomic32_t erts_writing_erl_crash_dump;
+erts_tsd_key_t erts_is_crash_dumping_key;
#else
volatile int erts_writing_erl_crash_dump = 0;
#endif
@@ -323,7 +324,7 @@ init_shared_memory(int argc, char **argv)
#endif
global_gen_gcs = 0;
- global_max_gen_gcs = (Uint16) erts_smp_atomic32_read(&erts_max_gen_gcs);
+ global_max_gen_gcs = (Uint16) erts_smp_atomic32_read_nob(&erts_max_gen_gcs);
global_gc_flags = erts_default_process_flags;
erts_global_offheap.mso = NULL;
@@ -646,12 +647,14 @@ early_init(int *argc, char **argv) /*
erts_lc_init();
#endif
#ifdef ERTS_SMP
- erts_smp_atomic_init(&erts_writing_erl_crash_dump, 0L);
+ erts_smp_atomic32_init_nob(&erts_writing_erl_crash_dump, 0L);
+ erts_tsd_key_create(&erts_is_crash_dumping_key);
#else
erts_writing_erl_crash_dump = 0;
#endif
- erts_smp_atomic32_init(&erts_max_gen_gcs, (erts_aint32_t) ((Uint16) -1));
+ erts_smp_atomic32_init_nob(&erts_max_gen_gcs,
+ (erts_aint32_t) ((Uint16) -1));
erts_pre_init_process();
#if defined(USE_THREADS) && !defined(ERTS_SMP)
@@ -856,7 +859,8 @@ erl_start(int argc, char **argv)
envbufsz = sizeof(envbuf);
if (erts_sys_getenv("ERL_FULLSWEEP_AFTER", envbuf, &envbufsz) == 0) {
Uint16 max_gen_gcs = atoi(envbuf);
- erts_smp_atomic32_set(&erts_max_gen_gcs, (erts_aint32_t) max_gen_gcs);
+ erts_smp_atomic32_set_nob(&erts_max_gen_gcs,
+ (erts_aint32_t) max_gen_gcs);
}
envbufsz = sizeof(envbuf);
diff --git a/erts/emulator/beam/erl_monitors.c b/erts/emulator/beam/erl_monitors.c
index 9751b5d77c..47597f302b 100644
--- a/erts/emulator/beam/erl_monitors.c
+++ b/erts/emulator/beam/erl_monitors.c
@@ -125,7 +125,7 @@ static ErtsMonitor *create_monitor(Uint type, Eterm ref, Eterm pid, Eterm name)
} else {
n = (ErtsMonitor *) erts_alloc(ERTS_ALC_T_MONITOR_LH,
mon_size*sizeof(Uint));
- erts_smp_atomic_add(&tot_link_lh_size, mon_size*sizeof(Uint));
+ erts_smp_atomic_add_nob(&tot_link_lh_size, mon_size*sizeof(Uint));
}
hp = n->heap;
@@ -156,7 +156,7 @@ static ErtsLink *create_link(Uint type, Eterm pid)
} else {
n = (ErtsLink *) erts_alloc(ERTS_ALC_T_NLINK_LH,
lnk_size*sizeof(Uint));
- erts_smp_atomic_add(&tot_link_lh_size, lnk_size*sizeof(Uint));
+ erts_smp_atomic_add_nob(&tot_link_lh_size, lnk_size*sizeof(Uint));
}
hp = n->heap;
@@ -191,13 +191,13 @@ static ErtsSuspendMonitor *create_suspend_monitor(Eterm pid)
void
erts_init_monitors(void)
{
- erts_smp_atomic_init(&tot_link_lh_size, 0);
+ erts_smp_atomic_init_nob(&tot_link_lh_size, 0);
}
Uint
erts_tot_link_lh_size(void)
{
- return (Uint) erts_smp_atomic_read(&tot_link_lh_size);
+ return (Uint) erts_smp_atomic_read_nob(&tot_link_lh_size);
}
void erts_destroy_monitor(ErtsMonitor *mon)
@@ -222,7 +222,7 @@ void erts_destroy_monitor(ErtsMonitor *mon)
erts_free(ERTS_ALC_T_MONITOR_SH, (void *) mon);
} else {
erts_free(ERTS_ALC_T_MONITOR_LH, (void *) mon);
- erts_smp_atomic_add(&tot_link_lh_size, -1*mon_size*sizeof(Uint));
+ erts_smp_atomic_add_nob(&tot_link_lh_size, -1*mon_size*sizeof(Uint));
}
}
@@ -244,7 +244,7 @@ void erts_destroy_link(ErtsLink *lnk)
erts_free(ERTS_ALC_T_NLINK_SH, (void *) lnk);
} else {
erts_free(ERTS_ALC_T_NLINK_LH, (void *) lnk);
- erts_smp_atomic_add(&tot_link_lh_size, -1*lnk_size*sizeof(Uint));
+ erts_smp_atomic_add_nob(&tot_link_lh_size, -1*lnk_size*sizeof(Uint));
}
}
diff --git a/erts/emulator/beam/erl_node_tables.c b/erts/emulator/beam/erl_node_tables.c
index 6daa127d23..af3873995e 100644
--- a/erts/emulator/beam/erl_node_tables.c
+++ b/erts/emulator/beam/erl_node_tables.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2001-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2001-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -118,7 +118,7 @@ dist_table_alloc(void *dep_tmpl)
dep->finalized_out_queue.first = NULL;
dep->finalized_out_queue.last = NULL;
- erts_smp_atomic_init(&dep->dist_cmd_scheduled, 0);
+ erts_smp_atomic_init_nob(&dep->dist_cmd_scheduled, 0);
erts_port_task_handle_init(&dep->dist_cmd);
dep->send = NULL;
dep->cache = NULL;
@@ -767,7 +767,7 @@ void erts_init_node_tables(void)
erts_this_dist_entry->finalized_out_queue.first = NULL;
erts_this_dist_entry->finalized_out_queue.last = NULL;
- erts_smp_atomic_init(&erts_this_dist_entry->dist_cmd_scheduled, 0);
+ erts_smp_atomic_init_nob(&erts_this_dist_entry->dist_cmd_scheduled, 0);
erts_port_task_handle_init(&erts_this_dist_entry->dist_cmd);
erts_this_dist_entry->send = NULL;
erts_this_dist_entry->cache = NULL;
diff --git a/erts/emulator/beam/erl_port_task.c b/erts/emulator/beam/erl_port_task.c
index e6b55c45e4..6aa5161b08 100644
--- a/erts/emulator/beam/erl_port_task.c
+++ b/erts/emulator/beam/erl_port_task.c
@@ -121,7 +121,7 @@ ERTS_SCHED_PREF_QUICK_ALLOC_IMPL(port_taskq,
static ERTS_INLINE ErtsPortTask *
handle2task(ErtsPortTaskHandle *pthp)
{
- return (ErtsPortTask *) erts_smp_atomic_read(pthp);
+ return (ErtsPortTask *) erts_smp_atomic_read_nob(pthp);
}
static ERTS_INLINE void
@@ -129,7 +129,7 @@ reset_handle(ErtsPortTask *ptp)
{
if (ptp->handle) {
ASSERT(ptp == handle2task(ptp->handle));
- erts_smp_atomic_set(ptp->handle, (erts_aint_t) NULL);
+ erts_smp_atomic_set_nob(ptp->handle, (erts_aint_t) NULL);
}
}
@@ -138,7 +138,7 @@ set_handle(ErtsPortTask *ptp, ErtsPortTaskHandle *pthp)
{
ptp->handle = pthp;
if (pthp) {
- erts_smp_atomic_set(pthp, (erts_aint_t) ptp);
+ erts_smp_atomic_set_nob(pthp, (erts_aint_t) ptp);
ASSERT(ptp == handle2task(ptp->handle));
}
}
@@ -479,8 +479,8 @@ erts_port_task_abort(Eterm id, ErtsPortTaskHandle *pthp)
case ERTS_PORT_TASK_INPUT:
case ERTS_PORT_TASK_OUTPUT:
case ERTS_PORT_TASK_EVENT:
- ASSERT(erts_smp_atomic_read(&erts_port_task_outstanding_io_tasks) > 0);
- erts_smp_atomic_dec(&erts_port_task_outstanding_io_tasks);
+ ASSERT(erts_smp_atomic_read_nob(&erts_port_task_outstanding_io_tasks) > 0);
+ erts_smp_atomic_dec_relb(&erts_port_task_outstanding_io_tasks);
break;
default:
break;
@@ -568,7 +568,7 @@ erts_port_task_schedule(Eterm id,
ErtsRunQueue *xrunq = erts_check_emigration_need(runq, ERTS_PORT_PRIO_LEVEL);
if (xrunq) {
/* Port emigrated ... */
- erts_smp_atomic_set(&pp->run_queue, (erts_aint_t) xrunq);
+ erts_smp_atomic_set_nob(&pp->run_queue, (erts_aint_t) xrunq);
erts_smp_runq_unlock(runq);
runq = xrunq;
}
@@ -594,7 +594,7 @@ erts_port_task_schedule(Eterm id,
case ERTS_PORT_TASK_INPUT:
case ERTS_PORT_TASK_OUTPUT:
case ERTS_PORT_TASK_EVENT:
- erts_smp_atomic_inc(&erts_port_task_outstanding_io_tasks);
+ erts_smp_atomic_inc_relb(&erts_port_task_outstanding_io_tasks);
/* Fall through... */
default:
enqueue_task(pp->sched.taskq, ptp);
@@ -662,7 +662,7 @@ erts_port_task_free_port(Port *pp)
pp->status |= ERTS_PORT_SFLG_FREE_SCHEDULED;
erts_may_save_closed_port(pp);
erts_smp_port_state_unlock(pp);
- ERTS_SMP_LC_ASSERT(erts_smp_atomic_read(&pp->refc) > 1);
+ ERTS_SMP_LC_ASSERT(erts_smp_atomic_read_nob(&pp->refc) > 1);
ptp->type = ERTS_PORT_TASK_FREE;
ptp->event = (ErlDrvEvent) -1;
ptp->event_data = NULL;
@@ -684,9 +684,9 @@ erts_port_task_free_port(Port *pp)
erts_may_save_closed_port(pp);
erts_smp_port_state_unlock(pp);
#ifdef ERTS_SMP
- erts_smp_atomic_dec(&pp->refc); /* Not alive */
+ erts_smp_atomic_dec_nob(&pp->refc); /* Not alive */
#endif
- ERTS_SMP_LC_ASSERT(erts_smp_atomic_read(&pp->refc) > 0); /* Lock */
+ ERTS_SMP_LC_ASSERT(erts_smp_atomic_read_nob(&pp->refc) > 0); /* Lock */
handle_remaining_tasks(runq, pp); /* May release runq lock */
ASSERT(!pp->sched.exe_taskq && (!ptqp || !ptqp->first));
pp->sched.taskq = NULL;
@@ -724,7 +724,7 @@ resume_after_block(void *vd)
ErtsPortTaskExeBlockData *d = (ErtsPortTaskExeBlockData *) vd;
erts_smp_runq_lock(d->runq);
if (d->resp)
- *d->resp = (erts_smp_atomic_read(&erts_port_task_outstanding_io_tasks)
+ *d->resp = (erts_smp_atomic_read_nob(&erts_port_task_outstanding_io_tasks)
!= (erts_aint_t) 0);
}
@@ -832,8 +832,8 @@ erts_port_task_execute(ErtsRunQueue *runq, Port **curr_port_pp)
ASSERT(!ptqp->first
&& (!pp->sched.taskq || !pp->sched.taskq->first));
#ifdef ERTS_SMP
- erts_smp_atomic_dec(&pp->refc); /* Not alive */
- ERTS_SMP_LC_ASSERT(erts_smp_atomic_read(&pp->refc) > 0); /* Lock */
+ erts_smp_atomic_dec_nob(&pp->refc); /* Not alive */
+ ERTS_SMP_LC_ASSERT(erts_smp_atomic_read_nob(&pp->refc) > 0); /* Lock */
#else
erts_port_status_bor_set(pp, ERTS_PORT_SFLG_FREE);
#endif
@@ -906,14 +906,16 @@ erts_port_task_execute(ErtsRunQueue *runq, Port **curr_port_pp)
erts_unblock_fpe(fpe_was_unmasked);
if (io_tasks_executed) {
- ASSERT(erts_smp_atomic_read(&erts_port_task_outstanding_io_tasks) >= io_tasks_executed);
- erts_smp_atomic_add(&erts_port_task_outstanding_io_tasks, -1*io_tasks_executed);
+ ASSERT(erts_smp_atomic_read_nob(&erts_port_task_outstanding_io_tasks)
+ >= io_tasks_executed);
+ erts_smp_atomic_add_relb(&erts_port_task_outstanding_io_tasks,
+ -1*io_tasks_executed);
}
*curr_port_pp = NULL;
#ifdef ERTS_SMP
- ASSERT(runq == (ErtsRunQueue *) erts_smp_atomic_read(&pp->run_queue));
+ ASSERT(runq == (ErtsRunQueue *) erts_smp_atomic_read_nob(&pp->run_queue));
#endif
if (!pp->sched.taskq) {
@@ -940,7 +942,7 @@ erts_port_task_execute(ErtsRunQueue *runq, Port **curr_port_pp)
}
else {
/* Port emigrated ... */
- erts_smp_atomic_set(&pp->run_queue, (erts_aint_t) xrunq);
+ erts_smp_atomic_set_nob(&pp->run_queue, (erts_aint_t) xrunq);
enqueue_port(xrunq, pp);
ASSERT(pp->sched.exe_taskq);
pp->sched.exe_taskq = NULL;
@@ -951,7 +953,7 @@ erts_port_task_execute(ErtsRunQueue *runq, Port **curr_port_pp)
port_was_enqueued = 1;
}
- res = (erts_smp_atomic_read(&erts_port_task_outstanding_io_tasks)
+ res = (erts_smp_atomic_read_nob(&erts_port_task_outstanding_io_tasks)
!= (erts_aint_t) 0);
ERTS_PT_CHK_PRES_PORTQ(runq, pp);
@@ -972,13 +974,13 @@ erts_port_task_execute(ErtsRunQueue *runq, Port **curr_port_pp)
{
erts_aint_t refc;
erts_smp_mtx_unlock(pp->lock);
- refc = erts_smp_atomic_dectest(&pp->refc);
+ refc = erts_smp_atomic_dec_read_nob(&pp->refc);
ASSERT(refc >= 0);
if (refc == 0) {
erts_smp_runq_unlock(runq);
erts_port_cleanup(pp); /* Might acquire runq lock */
erts_smp_runq_lock(runq);
- res = (erts_smp_atomic_read(&erts_port_task_outstanding_io_tasks)
+ res = (erts_smp_atomic_read_nob(&erts_port_task_outstanding_io_tasks)
!= (erts_aint_t) 0);
}
}
@@ -1107,12 +1109,12 @@ erts_port_migrate(Port *prt, int *prt_locked,
/* Refuse to migrate to a suspended run queue */
if (to_rq->flags & ERTS_RUNQ_FLG_SUSPENDED)
return ERTS_MIGRATE_FAILED_RUNQ_SUSPENDED;
- if (from_rq != (ErtsRunQueue *) erts_smp_atomic_read(&prt->run_queue))
+ if (from_rq != (ErtsRunQueue *) erts_smp_atomic_read_nob(&prt->run_queue))
return ERTS_MIGRATE_FAILED_RUNQ_CHANGED;
if (!ERTS_PORT_IS_IN_RUNQ(from_rq, prt))
return ERTS_MIGRATE_FAILED_NOT_IN_RUNQ;
dequeue_port(from_rq, prt);
- erts_smp_atomic_set(&prt->run_queue, (erts_aint_t) to_rq);
+ erts_smp_atomic_set_nob(&prt->run_queue, (erts_aint_t) to_rq);
enqueue_port(to_rq, prt);
return ERTS_MIGRATE_SUCCESS;
}
@@ -1125,7 +1127,8 @@ erts_port_migrate(Port *prt, int *prt_locked,
void
erts_port_task_init(void)
{
- erts_smp_atomic_init(&erts_port_task_outstanding_io_tasks, (erts_aint_t) 0);
+ erts_smp_atomic_init_nob(&erts_port_task_outstanding_io_tasks,
+ (erts_aint_t) 0);
init_port_task_alloc();
init_port_taskq_alloc();
}
diff --git a/erts/emulator/beam/erl_port_task.h b/erts/emulator/beam/erl_port_task.h
index 3e2c5f07ab..d7104e1143 100644
--- a/erts/emulator/beam/erl_port_task.h
+++ b/erts/emulator/beam/erl_port_task.h
@@ -79,13 +79,13 @@ ERTS_GLB_INLINE int erts_port_task_have_outstanding_io_tasks(void);
ERTS_GLB_INLINE void
erts_port_task_handle_init(ErtsPortTaskHandle *pthp)
{
- erts_smp_atomic_init(pthp, (erts_aint_t) NULL);
+ erts_smp_atomic_init_nob(pthp, (erts_aint_t) NULL);
}
ERTS_GLB_INLINE int
erts_port_task_is_scheduled(ErtsPortTaskHandle *pthp)
{
- return ((void *) erts_smp_atomic_read(pthp)) != NULL;
+ return ((void *) erts_smp_atomic_read_nob(pthp)) != NULL;
}
ERTS_GLB_INLINE void
@@ -102,8 +102,8 @@ erts_port_task_init_sched(ErtsPortTaskSched *ptsp)
ERTS_GLB_INLINE int
erts_port_task_have_outstanding_io_tasks(void)
{
- ERTS_THR_MEMORY_BARRIER;
- return erts_smp_atomic_read(&erts_port_task_outstanding_io_tasks) != 0;
+ return (erts_smp_atomic_read_acqb(&erts_port_task_outstanding_io_tasks)
+ != 0);
}
#endif /* ERTS_INCLUDE_SCHEDULER_INTERNALS */
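
The erl_port_task changes pair release increments and decrements of the
outstanding-I/O-task counter with the acquire read above, which is what
lets the patch drop the explicit ERTS_THR_MEMORY_BARRIER. A C11 sketch of
that producer/consumer pairing, under invented names:

    #include <stdatomic.h>

    static atomic_long outstanding_io_tasks;

    /* Release on the writers... */
    static void io_task_scheduled(void)
    {
        atomic_fetch_add_explicit(&outstanding_io_tasks, 1,
                                  memory_order_release);
    }

    static void io_task_done(void)
    {
        atomic_fetch_sub_explicit(&outstanding_io_tasks, 1,
                                  memory_order_release);
    }

    /* ...pairs with acquire on the reader, replacing a full barrier. */
    static int have_outstanding_io_tasks(void)
    {
        return atomic_load_explicit(&outstanding_io_tasks,
                                    memory_order_acquire) != 0;
    }
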
diff --git a/erts/emulator/beam/erl_process.c b/erts/emulator/beam/erl_process.c
index 2704359a8f..bbdcf79d00 100644
--- a/erts/emulator/beam/erl_process.c
+++ b/erts/emulator/beam/erl_process.c
@@ -134,15 +134,15 @@ int erts_disable_proc_not_running_opt;
#ifndef DEBUG
#define ERTS_SCHDLR_SSPND_CHNG_SET(VAL, OLD_VAL) \
- erts_smp_atomic32_set(&schdlr_sspnd.changing, (VAL))
+ erts_smp_atomic32_set_nob(&schdlr_sspnd.changing, (VAL))
#else
#define ERTS_SCHDLR_SSPND_CHNG_SET(VAL, OLD_VAL) \
do { \
erts_aint32_t old_val__; \
- old_val__ = erts_smp_atomic32_xchg(&schdlr_sspnd.changing, \
- (VAL)); \
+ old_val__ = erts_smp_atomic32_xchg_nob(&schdlr_sspnd.changing, \
+ (VAL)); \
ASSERT(old_val__ == (OLD_VAL)); \
} while (0)
@@ -158,7 +158,7 @@ static struct {
erts_smp_atomic32_t changing;
erts_smp_atomic32_t active;
struct {
- erts_smp_atomic32_t ongoing;
+ int ongoing;
long wait_active;
ErtsProcList *procs;
} msb; /* Multi Scheduling Block */
@@ -410,7 +410,7 @@ erts_init_process(int ncpu)
init_proclist_alloc();
- erts_smp_atomic32_init(&process_count, 0);
+ erts_smp_atomic32_init_nob(&process_count, 0);
if (erts_use_r9_pids_ports) {
proc_bits = ERTS_R9_PROC_BITS;
@@ -694,8 +694,8 @@ erts_smp_schedule_misc_aux_work(int ignore_self,
erts_smp_mtx_unlock(&misc_aux_work_queues[ix].data.mtx);
ssi = ERTS_SCHED_SLEEP_INFO_IX(ix);
- aux_work = erts_smp_atomic32_bor(&ssi->aux_work,
- ERTS_SSI_AUX_WORK_MISC);
+ aux_work = erts_smp_atomic32_read_bor_nob(&ssi->aux_work,
+ ERTS_SSI_AUX_WORK_MISC);
if ((aux_work & ERTS_SSI_AUX_WORK_MISC) == 0)
erts_sched_poke(ssi);
}
@@ -711,8 +711,8 @@ erts_smp_notify_check_children_needed(void)
erts_aint32_t aux_work;
ErtsSchedulerSleepInfo *ssi;
ssi = ERTS_SCHED_SLEEP_INFO_IX(i);
- aux_work = erts_smp_atomic32_bor(&ssi->aux_work,
- ERTS_SSI_AUX_WORK_CHECK_CHILDREN);
+ aux_work = erts_smp_atomic32_read_bor_nob(&ssi->aux_work,
+ ERTS_SSI_AUX_WORK_CHECK_CHILDREN);
if (!(aux_work & ERTS_SSI_AUX_WORK_CHECK_CHILDREN))
erts_sched_poke(ssi);
}
@@ -727,15 +727,15 @@ blockable_aux_work(ErtsSchedulerData *esdp,
{
if (aux_work & ERTS_SSI_BLOCKABLE_AUX_WORK_MASK) {
if (aux_work & ERTS_SSI_AUX_WORK_MISC) {
- aux_work = erts_smp_atomic32_band(&ssi->aux_work,
- ~ERTS_SSI_AUX_WORK_MISC);
+ aux_work = erts_smp_atomic32_read_band_nob(&ssi->aux_work,
+ ~ERTS_SSI_AUX_WORK_MISC);
aux_work &= ~ERTS_SSI_AUX_WORK_MISC;
handle_misc_aux_work(esdp);
}
#ifdef ERTS_SMP_SCHEDULERS_NEED_TO_CHECK_CHILDREN
if (aux_work & ERTS_SSI_AUX_WORK_CHECK_CHILDREN) {
- aux_work = erts_smp_atomic32_band(&ssi->aux_work,
- ~ERTS_SSI_AUX_WORK_CHECK_CHILDREN);
+ aux_work = erts_smp_atomic32_band_nob(&ssi->aux_work,
+ ~ERTS_SSI_AUX_WORK_CHECK_CHILDREN);
aux_work &= ~ERTS_SSI_AUX_WORK_CHECK_CHILDREN;
erts_check_children();
}
@@ -815,7 +815,7 @@ erts_active_schedulers(void)
static ERTS_INLINE void
clear_sys_scheduling(void)
{
- erts_smp_atomic32_set_relb(&doing_sys_schedule, 0);
+ erts_smp_atomic32_set_mb(&doing_sys_schedule, 0);
}
static ERTS_INLINE int
@@ -882,42 +882,43 @@ sched_active(Uint no, ErtsRunQueue *rq)
static int ERTS_INLINE
ongoing_multi_scheduling_block(void)
{
- return erts_smp_atomic32_read(&schdlr_sspnd.msb.ongoing) != 0;
+ ERTS_SMP_LC_ASSERT(erts_lc_mtx_is_locked(&schdlr_sspnd.mtx));
+ return schdlr_sspnd.msb.ongoing;
}
static ERTS_INLINE void
empty_runq(ErtsRunQueue *rq)
{
- erts_aint32_t oifls = erts_smp_atomic32_band(&rq->info_flags,
- ~ERTS_RUNQ_IFLG_NONEMPTY);
+ erts_aint32_t oifls = erts_smp_atomic32_read_band_nob(&rq->info_flags,
+ ~ERTS_RUNQ_IFLG_NONEMPTY);
if (oifls & ERTS_RUNQ_IFLG_NONEMPTY) {
#ifdef DEBUG
- erts_aint32_t empty = erts_smp_atomic32_read(&no_empty_run_queues);
+ erts_aint32_t empty = erts_smp_atomic32_read_nob(&no_empty_run_queues);
/*
* For a short period of time no_empty_run_queues may have
* been increased twice for a specific run queue.
*/
ASSERT(0 <= empty && empty < 2*erts_no_run_queues);
#endif
- erts_smp_atomic32_inc(&no_empty_run_queues);
+ erts_smp_atomic32_inc_relb(&no_empty_run_queues);
}
}
static ERTS_INLINE void
non_empty_runq(ErtsRunQueue *rq)
{
- erts_aint32_t oifls = erts_smp_atomic32_bor(&rq->info_flags,
- ERTS_RUNQ_IFLG_NONEMPTY);
+ erts_aint32_t oifls = erts_smp_atomic32_read_bor_nob(&rq->info_flags,
+ ERTS_RUNQ_IFLG_NONEMPTY);
if (!(oifls & ERTS_RUNQ_IFLG_NONEMPTY)) {
#ifdef DEBUG
- erts_aint32_t empty = erts_smp_atomic32_read(&no_empty_run_queues);
+ erts_aint32_t empty = erts_smp_atomic32_read_nob(&no_empty_run_queues);
/*
* For a short period of time no_empty_run_queues may have
* been increased twice for a specific run queue.
*/
ASSERT(0 < empty && empty <= 2*erts_no_run_queues);
#endif
- erts_smp_atomic32_dec(&no_empty_run_queues);
+ erts_smp_atomic32_dec_relb(&no_empty_run_queues);
}
}
@@ -930,7 +931,7 @@ sched_prep_spin_wait(ErtsSchedulerSleepInfo *ssi)
erts_aint32_t xflgs = 0;
do {
- oflgs = erts_smp_atomic32_cmpxchg(&ssi->flags, nflgs, xflgs);
+ oflgs = erts_smp_atomic32_cmpxchg_acqb(&ssi->flags, nflgs, xflgs);
if (oflgs == xflgs)
return nflgs;
xflgs = oflgs;
@@ -947,7 +948,7 @@ sched_prep_cont_spin_wait(ErtsSchedulerSleepInfo *ssi)
erts_aint32_t xflgs = ERTS_SSI_FLG_WAITING;
do {
- oflgs = erts_smp_atomic32_cmpxchg(&ssi->flags, nflgs, xflgs);
+ oflgs = erts_smp_atomic32_cmpxchg_acqb(&ssi->flags, nflgs, xflgs);
if (oflgs == xflgs)
return nflgs;
xflgs = oflgs;
@@ -989,7 +990,7 @@ sched_set_sleeptype(ErtsSchedulerSleepInfo *ssi, erts_aint32_t sleep_type)
erts_tse_reset(ssi->event);
while (1) {
- oflgs = erts_smp_atomic32_cmpxchg(&ssi->flags, nflgs, xflgs);
+ oflgs = erts_smp_atomic32_cmpxchg_acqb(&ssi->flags, nflgs, xflgs);
if (oflgs == xflgs)
return nflgs;
if ((oflgs & (ERTS_SSI_FLG_SLEEPING|ERTS_SSI_FLG_WAITING))
@@ -1049,7 +1050,7 @@ scheduler_wait(int *fcalls, ErtsSchedulerData *esdp, ErtsRunQueue *rq)
tse_wait:
#ifdef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
- aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
tse_blockable_aux_work:
aux_work = blockable_aux_work(esdp, ssi, aux_work);
#endif
@@ -1059,7 +1060,7 @@ scheduler_wait(int *fcalls, ErtsSchedulerData *esdp, ErtsRunQueue *rq)
#ifdef ERTS_SCHED_NEED_NONBLOCKABLE_AUX_WORK
#ifndef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
- aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
#endif
nonblockable_aux_work(esdp, ssi, aux_work);
#endif
@@ -1092,7 +1093,7 @@ scheduler_wait(int *fcalls, ErtsSchedulerData *esdp, ErtsRunQueue *rq)
}
#ifdef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
- aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
if (aux_work & ERTS_SSI_BLOCKABLE_AUX_WORK_MASK) {
erts_smp_activity_end(ERTS_ACTIVITY_WAIT, NULL, NULL, NULL);
goto tse_blockable_aux_work;
@@ -1104,7 +1105,7 @@ scheduler_wait(int *fcalls, ErtsSchedulerData *esdp, ErtsRunQueue *rq)
erts_smp_activity_end(ERTS_ACTIVITY_WAIT, NULL, NULL, NULL);
if (flgs & ~ERTS_SSI_FLG_SUSPENDED)
- erts_smp_atomic32_band(&ssi->flags, ERTS_SSI_FLG_SUSPENDED);
+ erts_smp_atomic32_read_band_nob(&ssi->flags, ERTS_SSI_FLG_SUSPENDED);
erts_smp_runq_lock(rq);
sched_active(esdp->no, rq);
@@ -1136,12 +1137,12 @@ scheduler_wait(int *fcalls, ErtsSchedulerData *esdp, ErtsRunQueue *rq)
sys_aux_work:
#ifdef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
- aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
aux_work = blockable_aux_work(esdp, ssi, aux_work);
#endif
#ifdef ERTS_SCHED_NEED_NONBLOCKABLE_AUX_WORK
#ifndef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
- aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
#endif
nonblockable_aux_work(esdp, ssi, aux_work);
#endif
@@ -1239,7 +1240,7 @@ scheduler_wait(int *fcalls, ErtsSchedulerData *esdp, ErtsRunQueue *rq)
sys_locked_woken:
clear_sys_scheduling();
if (flgs & ~ERTS_SSI_FLG_SUSPENDED)
- erts_smp_atomic32_band(&ssi->flags, ERTS_SSI_FLG_SUSPENDED);
+ erts_smp_atomic32_read_band_nob(&ssi->flags, ERTS_SSI_FLG_SUSPENDED);
sched_active_sys(esdp->no, rq);
}
}
@@ -1255,7 +1256,7 @@ ssi_flags_set_wake(ErtsSchedulerSleepInfo *ssi)
erts_aint32_t nflgs = 0;
erts_aint32_t xflgs = ERTS_SSI_FLG_SLEEPING|ERTS_SSI_FLG_WAITING;
while (1) {
- oflgs = erts_smp_atomic32_cmpxchg(&ssi->flags, nflgs, xflgs);
+ oflgs = erts_smp_atomic32_cmpxchg_relb(&ssi->flags, nflgs, xflgs);
if (oflgs == xflgs)
return oflgs;
nflgs = oflgs & ERTS_SSI_FLG_SUSPENDED;
@@ -1298,7 +1299,6 @@ wake_scheduler(ErtsRunQueue *rq, int incq, int one)
erts_smp_spin_unlock(&sl->lock);
- ERTS_THR_MEMORY_BARRIER;
flgs = ssi_flags_set_wake(ssi);
erts_sched_finish_poke(ssi, flgs);
@@ -1344,13 +1344,13 @@ init_no_runqs(int active, int used)
{
erts_aint32_t no_runqs = (erts_aint32_t) (active & ERTS_NO_RUNQS_MASK);
no_runqs |= (erts_aint32_t) ((used & ERTS_NO_RUNQS_MASK) << ERTS_NO_USED_RUNQS_SHIFT);
- erts_smp_atomic32_init(&balance_info.no_runqs, no_runqs);
+ erts_smp_atomic32_init_nob(&balance_info.no_runqs, no_runqs);
}
static ERTS_INLINE void
get_no_runqs(int *active, int *used)
{
- erts_aint32_t no_runqs = erts_smp_atomic32_read(&balance_info.no_runqs);
+ erts_aint32_t no_runqs = erts_smp_atomic32_read_nob(&balance_info.no_runqs);
if (active)
*active = (int) (no_runqs & ERTS_NO_RUNQS_MASK);
if (used)
@@ -1360,11 +1360,12 @@ get_no_runqs(int *active, int *used)
static ERTS_INLINE void
set_no_used_runqs(int used)
{
- erts_aint32_t exp = erts_smp_atomic32_read(&balance_info.no_runqs);
+ erts_aint32_t exp = erts_smp_atomic32_read_nob(&balance_info.no_runqs);
while (1) {
erts_aint32_t act, new;
- new = (used << ERTS_NO_USED_RUNQS_SHIFT) | (exp & ERTS_NO_RUNQS_MASK);
- act = erts_smp_atomic32_cmpxchg(&balance_info.no_runqs, new, exp);
+ new = (used & ERTS_NO_RUNQS_MASK) << ERTS_NO_USED_RUNQS_SHIFT;
+ new |= exp & ERTS_NO_RUNQS_MASK;
+ act = erts_smp_atomic32_cmpxchg_nob(&balance_info.no_runqs, new, exp);
if (act == exp)
break;
exp = act;
@@ -1374,11 +1375,12 @@ set_no_used_runqs(int used)
static ERTS_INLINE void
set_no_active_runqs(int active)
{
- erts_aint32_t exp = erts_smp_atomic32_read(&balance_info.no_runqs);
+ erts_aint32_t exp = erts_smp_atomic32_read_nob(&balance_info.no_runqs);
while (1) {
erts_aint32_t act, new;
- new = (exp & (ERTS_NO_RUNQS_MASK << ERTS_NO_USED_RUNQS_SHIFT)) | active;
- act = erts_smp_atomic32_cmpxchg(&balance_info.no_runqs, new, exp);
+ new = exp & (ERTS_NO_RUNQS_MASK << ERTS_NO_USED_RUNQS_SHIFT);
+ new |= active & ERTS_NO_RUNQS_MASK;
+ act = erts_smp_atomic32_cmpxchg_nob(&balance_info.no_runqs, new, exp);
if (act == exp)
break;
exp = act;
@@ -1388,13 +1390,14 @@ set_no_active_runqs(int active)
static ERTS_INLINE int
try_inc_no_active_runqs(int active)
{
- erts_aint32_t exp = erts_smp_atomic32_read(&balance_info.no_runqs);
+ erts_aint32_t exp = erts_smp_atomic32_read_nob(&balance_info.no_runqs);
if (((exp >> ERTS_NO_USED_RUNQS_SHIFT) & ERTS_NO_RUNQS_MASK) < active)
return 0;
if ((exp & ERTS_NO_RUNQS_MASK) + 1 == active) {
erts_aint32_t new, act;
- new = (exp & ~ERTS_NO_RUNQS_MASK) | active;
- act = erts_smp_atomic32_cmpxchg(&balance_info.no_runqs, new, exp);
+ new = exp & (ERTS_NO_RUNQS_MASK << ERTS_NO_USED_RUNQS_SHIFT);
+ new |= active & ERTS_NO_RUNQS_MASK;
+ act = erts_smp_atomic32_cmpxchg_nob(&balance_info.no_runqs, new, exp);
if (act == exp)
return 1;
}
@@ -1410,7 +1413,7 @@ chk_wake_sched(ErtsRunQueue *crq, int ix, int activate)
if (crq->ix == ix)
return 0;
wrq = ERTS_RUNQ_IX(ix);
- iflgs = erts_smp_atomic32_read(&wrq->info_flags);
+ iflgs = erts_smp_atomic32_read_nob(&wrq->info_flags);
if (!(iflgs & (ERTS_RUNQ_IFLG_SUSPENDED|ERTS_RUNQ_IFLG_NONEMPTY))) {
if (activate) {
if (try_inc_no_active_runqs(ix+1)) {
@@ -1652,15 +1655,15 @@ evacuate_run_queue(ErtsRunQueue *evac_rq, ErtsRunQueue *rq)
erts_smp_runq_lock(evac_rq);
- erts_smp_atomic32_bor(&evac_rq->scheduler->ssi->flags,
- ERTS_SSI_FLG_SUSPENDED);
+ erts_smp_atomic32_read_bor_nob(&evac_rq->scheduler->ssi->flags,
+ ERTS_SSI_FLG_SUSPENDED);
evac_rq->flags &= ~ERTS_RUNQ_FLGS_IMMIGRATE_QMASK;
evac_rq->flags |= (ERTS_RUNQ_FLGS_EMIGRATE_QMASK
| ERTS_RUNQ_FLGS_EVACUATE_QMASK
| ERTS_RUNQ_FLG_SUSPENDED);
- erts_smp_atomic32_bor(&evac_rq->info_flags, ERTS_RUNQ_IFLG_SUSPENDED);
+ erts_smp_atomic32_read_bor_nob(&evac_rq->info_flags, ERTS_RUNQ_IFLG_SUSPENDED);
/*
* Need to set up evacuation paths first since we
* may release the run queue lock on evac_rq
@@ -1909,7 +1912,7 @@ static ERTS_INLINE int
check_possible_steal_victim(ErtsRunQueue *rq, int *rq_lockedp, int vix)
{
ErtsRunQueue *vrq = ERTS_RUNQ_IX(vix);
- erts_aint32_t iflgs = erts_smp_atomic32_read(&vrq->info_flags);
+ erts_aint32_t iflgs = erts_smp_atomic32_read_nob(&vrq->info_flags);
if (iflgs & ERTS_RUNQ_IFLG_NONEMPTY)
return try_steal_task_from_victim(rq, rq_lockedp, vrq);
else
@@ -2061,7 +2064,7 @@ check_balance(ErtsRunQueue *c_rq)
int forced, active, current_active, oowc, half_full_scheds, full_scheds,
mmax_len, blnc_no_rqs, qix, pix, freds_hist_ix;
- if (erts_smp_atomic32_xchg(&balance_info.checking_balance, 1)) {
+ if (erts_smp_atomic32_xchg_nob(&balance_info.checking_balance, 1)) {
c_rq->check_balance_reds = INT_MAX;
return;
}
@@ -2069,7 +2072,7 @@ check_balance(ErtsRunQueue *c_rq)
get_no_runqs(NULL, &blnc_no_rqs);
if (blnc_no_rqs == 1) {
c_rq->check_balance_reds = INT_MAX;
- erts_smp_atomic32_set(&balance_info.checking_balance, 0);
+ erts_smp_atomic32_set_nob(&balance_info.checking_balance, 0);
return;
}
@@ -2077,7 +2080,7 @@ check_balance(ErtsRunQueue *c_rq)
if (balance_info.halftime) {
balance_info.halftime = 0;
- erts_smp_atomic32_set(&balance_info.checking_balance, 0);
+ erts_smp_atomic32_set_nob(&balance_info.checking_balance, 0);
ERTS_FOREACH_RUNQ(rq,
{
if (rq->waiting)
@@ -2111,7 +2114,7 @@ check_balance(ErtsRunQueue *c_rq)
erts_smp_mtx_unlock(&balance_info.update_mtx);
erts_smp_runq_lock(c_rq);
c_rq->check_balance_reds = INT_MAX;
- erts_smp_atomic32_set(&balance_info.checking_balance, 0);
+ erts_smp_atomic32_set_nob(&balance_info.checking_balance, 0);
return;
}
@@ -2456,7 +2459,7 @@ erts_fprintf(stderr, "--------------------------------\n");
set_no_active_runqs(active);
balance_info.halftime = 1;
- erts_smp_atomic32_set(&balance_info.checking_balance, 0);
+ erts_smp_atomic32_set_nob(&balance_info.checking_balance, 0);
/* Write migration paths and reset balance statistics in all queues */
for (qix = 0; qix < blnc_no_rqs; qix++) {
@@ -2598,7 +2601,7 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
erts_alloc_permanent_cache_aligned(ERTS_ALC_T_RUNQS,
sizeof(ErtsAlignedRunQueue) * n);
#ifdef ERTS_SMP
- erts_smp_atomic32_init(&no_empty_run_queues, 0);
+ erts_smp_atomic32_init_nob(&no_empty_run_queues, 0);
#endif
erts_no_run_queues = n;
@@ -2608,7 +2611,7 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
ErtsRunQueue *rq = ERTS_RUNQ_IX(ix);
rq->ix = ix;
- erts_smp_atomic32_init(&rq->info_flags, ERTS_RUNQ_IFLG_NONEMPTY);
+ erts_smp_atomic32_init_nob(&rq->info_flags, ERTS_RUNQ_IFLG_NONEMPTY);
/* make sure that the "extra" id corresponds to the scheduler's
* id if the esdp->no <-> ix+1 mapping changes.
@@ -2701,9 +2704,9 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
ssi->next = NULL;
ssi->prev = NULL;
#endif
- erts_smp_atomic32_init(&ssi->flags, 0);
+ erts_smp_atomic32_init_nob(&ssi->flags, 0);
ssi->event = NULL; /* initialized in sched_thread_func */
- erts_smp_atomic32_init(&ssi->aux_work, 0);
+ erts_smp_atomic32_init_nob(&ssi->aux_work, 0);
}
#endif
@@ -2747,7 +2750,7 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
}
#ifdef ERTS_SMP
- erts_smp_atomic32_init(&esdp->chk_cpu_bind, 0);
+ erts_smp_atomic32_init_nob(&esdp->chk_cpu_bind, 0);
#endif
}
@@ -2755,11 +2758,11 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
erts_smp_mtx_init(&schdlr_sspnd.mtx, "schdlr_sspnd");
erts_smp_cnd_init(&schdlr_sspnd.cnd);
- erts_smp_atomic32_init(&schdlr_sspnd.changing, 0);
+ erts_smp_atomic32_init_nob(&schdlr_sspnd.changing, 0);
schdlr_sspnd.online = no_schedulers_online;
schdlr_sspnd.curr_online = no_schedulers;
- erts_smp_atomic32_init(&schdlr_sspnd.msb.ongoing, 0);
- erts_smp_atomic32_init(&schdlr_sspnd.active, no_schedulers);
+ schdlr_sspnd.msb.ongoing = 0;
+ erts_smp_atomic32_init_nob(&schdlr_sspnd.active, no_schedulers);
schdlr_sspnd.msb.procs = NULL;
init_no_runqs(no_schedulers,
erts_common_run_queue ? 1 : no_schedulers_online);
@@ -2768,7 +2771,7 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
balance_info.forced_check_balance = 0;
balance_info.halftime = 1;
balance_info.full_reds_history_index = 0;
- erts_smp_atomic32_init(&balance_info.checking_balance, 0);
+ erts_smp_atomic32_init_nob(&balance_info.checking_balance, 0);
balance_info.prev_rise.active_runqs = 0;
balance_info.prev_rise.max_len = 0;
balance_info.prev_rise.reds = 0;
@@ -2777,8 +2780,8 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
if (no_schedulers_online < no_schedulers) {
if (erts_common_run_queue) {
for (ix = no_schedulers_online; ix < no_schedulers; ix++)
- erts_smp_atomic32_bor(&ERTS_SCHED_SLEEP_INFO_IX(ix)->flags,
- ERTS_SSI_FLG_SUSPENDED);
+ erts_smp_atomic32_read_bor_nob(&ERTS_SCHED_SLEEP_INFO_IX(ix)->flags,
+ ERTS_SSI_FLG_SUSPENDED);
}
else {
for (ix = no_schedulers_online; ix < erts_no_run_queues; ix++)
@@ -2792,7 +2795,7 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
ERTS_SCHDLR_SSPND_CHNG_SET((ERTS_SCHDLR_SSPND_CHNG_ONLN
| ERTS_SCHDLR_SSPND_CHNG_WAITER), 0);
- erts_smp_atomic32_init(&doing_sys_schedule, 0);
+ erts_smp_atomic32_init_nob(&doing_sys_schedule, 0);
init_misc_aux_work();
@@ -2808,7 +2811,7 @@ erts_init_scheduling(int mrq, int no_schedulers, int no_schedulers_online)
erts_no_schedulers = 1;
#endif
- erts_smp_atomic32_init(&function_calls, 0);
+ erts_smp_atomic32_init_nob(&function_calls, 0);
/* init port tasks */
erts_port_task_init();
@@ -2935,10 +2938,10 @@ int
erts_get_max_no_executing_schedulers(void)
{
#ifdef ERTS_SMP
- if (erts_smp_atomic32_read(&schdlr_sspnd.changing))
+ if (erts_smp_atomic32_read_nob(&schdlr_sspnd.changing))
return (int) erts_no_schedulers;
ERTS_THR_MEMORY_BARRIER;
- return (int) erts_smp_atomic32_read(&schdlr_sspnd.active);
+ return (int) erts_smp_atomic32_read_nob(&schdlr_sspnd.active);
#else
return 1;
#endif
@@ -2968,7 +2971,7 @@ scheduler_ix_resume_wake(Uint ix)
| ERTS_SSI_FLG_SUSPENDED);
erts_aint32_t oflgs;
do {
- oflgs = erts_smp_atomic32_cmpxchg(&ssi->flags, 0, xflgs);
+ oflgs = erts_smp_atomic32_cmpxchg_relb(&ssi->flags, 0, xflgs);
if (oflgs == xflgs) {
erts_sched_finish_poke(ssi, oflgs);
break;
@@ -2987,7 +2990,7 @@ sched_prep_spin_suspended(ErtsSchedulerSleepInfo *ssi, erts_aint32_t xpct)
erts_aint32_t xflgs = xpct;
do {
- oflgs = erts_smp_atomic32_cmpxchg(&ssi->flags, nflgs, xflgs);
+ oflgs = erts_smp_atomic32_cmpxchg_acqb(&ssi->flags, nflgs, xflgs);
if (oflgs == xflgs)
return nflgs;
xflgs = oflgs;
@@ -3037,7 +3040,7 @@ sched_set_suspended_sleeptype(ErtsSchedulerSleepInfo *ssi)
erts_tse_reset(ssi->event);
while (1) {
- oflgs = erts_smp_atomic32_cmpxchg(&ssi->flags, nflgs, xflgs);
+ oflgs = erts_smp_atomic32_cmpxchg_acqb(&ssi->flags, nflgs, xflgs);
if (oflgs == xflgs)
return nflgs;
if ((oflgs & (ERTS_SSI_FLG_SLEEPING
@@ -3092,15 +3095,15 @@ suspend_scheduler(ErtsSchedulerData *esdp)
flgs = sched_prep_spin_suspended(ssi, ERTS_SSI_FLG_SUSPENDED);
if (flgs & ERTS_SSI_FLG_SUSPENDED) {
- active_schedulers = erts_smp_atomic32_dectest(&schdlr_sspnd.active);
+ active_schedulers = erts_smp_atomic32_dec_read_nob(&schdlr_sspnd.active);
ASSERT(active_schedulers >= 1);
- changing = erts_smp_atomic32_read(&schdlr_sspnd.changing);
+ changing = erts_smp_atomic32_read_nob(&schdlr_sspnd.changing);
if (changing & ERTS_SCHDLR_SSPND_CHNG_MSB) {
if (active_schedulers == schdlr_sspnd.msb.wait_active)
wake = 1;
if (active_schedulers == 1) {
- changing = erts_smp_atomic32_band(&schdlr_sspnd.changing,
- ~ERTS_SCHDLR_SSPND_CHNG_MSB);
+ changing = erts_smp_atomic32_read_band_nob(&schdlr_sspnd.changing,
+ ~ERTS_SCHDLR_SSPND_CHNG_MSB);
changing &= ~ERTS_SCHDLR_SSPND_CHNG_MSB;
}
}
@@ -3122,8 +3125,8 @@ suspend_scheduler(ErtsSchedulerData *esdp)
&& schdlr_sspnd.curr_online == schdlr_sspnd.wait_curr_online)
wake = 1;
if (schdlr_sspnd.online == schdlr_sspnd.curr_online) {
- changing = erts_smp_atomic32_band(&schdlr_sspnd.changing,
- ~ERTS_SCHDLR_SSPND_CHNG_ONLN);
+ changing = erts_smp_atomic32_read_band_nob(&schdlr_sspnd.changing,
+ ~ERTS_SCHDLR_SSPND_CHNG_ONLN);
changing &= ~ERTS_SCHDLR_SSPND_CHNG_ONLN;
}
}
@@ -3140,7 +3143,7 @@ suspend_scheduler(ErtsSchedulerData *esdp)
#ifdef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
- aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
blockable_aux_work:
blockable_aux_work(esdp, ssi, aux_work);
#endif
@@ -3176,13 +3179,13 @@ suspend_scheduler(ErtsSchedulerData *esdp)
| ERTS_SSI_FLG_SUSPENDED));
if (!(flgs & ERTS_SSI_FLG_SUSPENDED))
break;
- changing = erts_smp_atomic32_read(&schdlr_sspnd.changing);
+ changing = erts_smp_atomic32_read_nob(&schdlr_sspnd.changing);
if (changing & ~ERTS_SCHDLR_SSPND_CHNG_WAITER)
break;
#ifdef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
- aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
if (aux_work & ERTS_SSI_BLOCKABLE_AUX_WORK_MASK) {
erts_smp_activity_end(ERTS_ACTIVITY_WAIT, NULL, NULL, NULL);
goto blockable_aux_work;
@@ -3194,19 +3197,19 @@ suspend_scheduler(ErtsSchedulerData *esdp)
erts_smp_activity_end(ERTS_ACTIVITY_WAIT, NULL, NULL, NULL);
erts_smp_mtx_lock(&schdlr_sspnd.mtx);
- changing = erts_smp_atomic32_read(&schdlr_sspnd.changing);
+ changing = erts_smp_atomic32_read_nob(&schdlr_sspnd.changing);
}
- active_schedulers = erts_smp_atomic32_inctest(&schdlr_sspnd.active);
- changing = erts_smp_atomic32_read(&schdlr_sspnd.changing);
+ active_schedulers = erts_smp_atomic32_inc_read_nob(&schdlr_sspnd.active);
+ changing = erts_smp_atomic32_read_nob(&schdlr_sspnd.changing);
if ((changing & ERTS_SCHDLR_SSPND_CHNG_MSB)
&& schdlr_sspnd.online == active_schedulers) {
- erts_smp_atomic32_band(&schdlr_sspnd.changing,
- ~ERTS_SCHDLR_SSPND_CHNG_MSB);
+ erts_smp_atomic32_read_band_nob(&schdlr_sspnd.changing,
+ ~ERTS_SCHDLR_SSPND_CHNG_MSB);
}
ASSERT(no <= schdlr_sspnd.online);
- ASSERT(!erts_smp_atomic32_read(&schdlr_sspnd.msb.ongoing));
+ ASSERT(!ongoing_multi_scheduling_block());
}
@@ -3235,7 +3238,7 @@ do { \
(RQ)->flags |= (ERTS_RUNQ_FLG_OUT_OF_WORK \
| ERTS_RUNQ_FLG_HALFTIME_OUT_OF_WORK); \
(RQ)->check_balance_reds = ERTS_RUNQ_CALL_CHECK_BALANCE_REDS; \
- erts_smp_atomic32_band(&(RQ)->info_flags, ~ERTS_RUNQ_IFLG_SUSPENDED);\
+ erts_smp_atomic32_read_band_nob(&(RQ)->info_flags, ~ERTS_RUNQ_IFLG_SUSPENDED);\
for (pix__ = 0; pix__ < ERTS_NO_PROC_PRIO_LEVELS; pix__++) { \
(RQ)->procs.prio_info[pix__].max_len = 0; \
(RQ)->procs.prio_info[pix__].reds = 0; \
@@ -3279,7 +3282,7 @@ erts_schedulers_state(Uint *total,
int res;
erts_aint32_t changing;
erts_smp_mtx_lock(&schdlr_sspnd.mtx);
- changing = erts_smp_atomic32_read(&schdlr_sspnd.changing);
+ changing = erts_smp_atomic32_read_nob(&schdlr_sspnd.changing);
if (yield_allowed && (changing & ~ERTS_SCHDLR_SSPND_CHNG_WAITER))
res = ERTS_SCHDLR_SSPND_YIELD_RESTART;
else {
@@ -3310,7 +3313,7 @@ erts_set_schedulers_online(Process *p,
have_unlocked_plocks = 0;
no = (int) new_no;
- changing = erts_smp_atomic32_read(&schdlr_sspnd.changing);
+ changing = erts_smp_atomic32_read_nob(&schdlr_sspnd.changing);
if (changing) {
res = ERTS_SCHDLR_SSPND_YIELD_RESTART;
}
@@ -3385,8 +3388,8 @@ erts_set_schedulers_online(Process *p,
for (ix = no; ix < online; ix++) {
ErtsSchedulerSleepInfo *ssi;
ssi = ERTS_SCHED_SLEEP_INFO_IX(ix);
- erts_smp_atomic32_bor(&ssi->flags,
- ERTS_SSI_FLG_SUSPENDED);
+ erts_smp_atomic32_read_bor_nob(&ssi->flags,
+ ERTS_SSI_FLG_SUSPENDED);
}
wake_all_schedulers();
}
@@ -3433,11 +3436,11 @@ erts_set_schedulers_online(Process *p,
NULL);
ASSERT(res != ERTS_SCHDLR_SSPND_DONE
? (ERTS_SCHDLR_SSPND_CHNG_WAITER
- & erts_smp_atomic32_read(&schdlr_sspnd.changing))
+ & erts_smp_atomic32_read_nob(&schdlr_sspnd.changing))
: (ERTS_SCHDLR_SSPND_CHNG_WAITER
- == erts_smp_atomic32_read(&schdlr_sspnd.changing)));
- erts_smp_atomic32_band(&schdlr_sspnd.changing,
- ~ERTS_SCHDLR_SSPND_CHNG_WAITER);
+ == erts_smp_atomic32_read_nob(&schdlr_sspnd.changing)));
+ erts_smp_atomic32_read_band_nob(&schdlr_sspnd.changing,
+ ~ERTS_SCHDLR_SSPND_CHNG_WAITER);
}
}
@@ -3456,7 +3459,7 @@ erts_block_multi_scheduling(Process *p, ErtsProcLocks plocks, int on, int all)
ErtsProcList *plp;
erts_smp_mtx_lock(&schdlr_sspnd.mtx);
- changing = erts_smp_atomic32_read(&schdlr_sspnd.changing);
+ changing = erts_smp_atomic32_read_nob(&schdlr_sspnd.changing);
if (changing) {
res = ERTS_SCHDLR_SSPND_YIELD_RESTART; /* Yield */
}
@@ -3466,7 +3469,7 @@ erts_block_multi_scheduling(Process *p, ErtsProcLocks plocks, int on, int all)
plp->next = schdlr_sspnd.msb.procs;
schdlr_sspnd.msb.procs = plp;
p->flags |= F_HAVE_BLCKD_MSCHED;
- ASSERT(erts_smp_atomic32_read(&schdlr_sspnd.active) == 1);
+ ASSERT(erts_smp_atomic32_read_nob(&schdlr_sspnd.active) == 1);
ASSERT(p->scheduler_data->no == 1);
res = ERTS_SCHDLR_SSPND_DONE_MSCHED_BLOCKED;
}
@@ -3477,11 +3480,11 @@ erts_block_multi_scheduling(Process *p, ErtsProcLocks plocks, int on, int all)
have_unlocked_plocks = 1;
erts_smp_proc_unlock(p, plocks);
}
- ASSERT(0 == erts_smp_atomic32_read(&schdlr_sspnd.msb.ongoing));
- erts_smp_atomic32_set(&schdlr_sspnd.msb.ongoing, 1);
+ ASSERT(!ongoing_multi_scheduling_block());
+ schdlr_sspnd.msb.ongoing = 1;
if (online == 1) {
res = ERTS_SCHDLR_SSPND_DONE_MSCHED_BLOCKED;
- ASSERT(erts_smp_atomic32_read(&schdlr_sspnd.active) == 1);
+ ASSERT(erts_smp_atomic32_read_nob(&schdlr_sspnd.active) == 1);
ASSERT(p->scheduler_data->no == 1);
}
else {
@@ -3501,8 +3504,8 @@ erts_block_multi_scheduling(Process *p, ErtsProcLocks plocks, int on, int all)
}
if (erts_common_run_queue) {
for (ix = 1; ix < online; ix++)
- erts_smp_atomic32_bor(&ERTS_SCHED_SLEEP_INFO_IX(ix)->flags,
- ERTS_SSI_FLG_SUSPENDED);
+ erts_smp_atomic32_read_bor_nob(&ERTS_SCHED_SLEEP_INFO_IX(ix)->flags,
+ ERTS_SSI_FLG_SUSPENDED);
wake_all_schedulers();
}
else {
@@ -3530,7 +3533,7 @@ erts_block_multi_scheduling(Process *p, ErtsProcLocks plocks, int on, int all)
susp_sched_prep_block,
susp_sched_resume_block,
NULL);
- while (erts_smp_atomic32_read(&schdlr_sspnd.active)
+ while (erts_smp_atomic32_read_nob(&schdlr_sspnd.active)
!= schdlr_sspnd.msb.wait_active)
erts_smp_cnd_wait(&schdlr_sspnd.cnd, &schdlr_sspnd.mtx);
erts_smp_activity_end(ERTS_ACTIVITY_WAIT,
@@ -3539,11 +3542,11 @@ erts_block_multi_scheduling(Process *p, ErtsProcLocks plocks, int on, int all)
NULL);
ASSERT(res != ERTS_SCHDLR_SSPND_DONE_MSCHED_BLOCKED
? (ERTS_SCHDLR_SSPND_CHNG_WAITER
- & erts_smp_atomic32_read(&schdlr_sspnd.changing))
+ & erts_smp_atomic32_read_nob(&schdlr_sspnd.changing))
: (ERTS_SCHDLR_SSPND_CHNG_WAITER
- == erts_smp_atomic32_read(&schdlr_sspnd.changing)));
- erts_smp_atomic32_band(&schdlr_sspnd.changing,
- ~ERTS_SCHDLR_SSPND_CHNG_WAITER);
+ == erts_smp_atomic32_read_nob(&schdlr_sspnd.changing)));
+ erts_smp_atomic32_read_band_nob(&schdlr_sspnd.changing,
+ ~ERTS_SCHDLR_SSPND_CHNG_WAITER);
}
plp = proclist_create(p);
plp->next = schdlr_sspnd.msb.procs;
@@ -3610,16 +3613,16 @@ erts_block_multi_scheduling(Process *p, ErtsProcLocks plocks, int on, int all)
});
#endif
p->flags &= ~F_HAVE_BLCKD_MSCHED;
- erts_smp_atomic32_set(&schdlr_sspnd.msb.ongoing, 0);
+ schdlr_sspnd.msb.ongoing = 0;
if (schdlr_sspnd.online == 1) {
/* No schedulers to resume */
- ASSERT(erts_smp_atomic32_read(&schdlr_sspnd.active) == 1);
+ ASSERT(erts_smp_atomic32_read_nob(&schdlr_sspnd.active) == 1);
ERTS_SCHDLR_SSPND_CHNG_SET(0, ERTS_SCHDLR_SSPND_CHNG_MSB);
}
else if (erts_common_run_queue) {
for (ix = 1; ix < schdlr_sspnd.online; ix++)
- erts_smp_atomic32_band(&ERTS_SCHED_SLEEP_INFO_IX(ix)->flags,
- ~ERTS_SSI_FLG_SUSPENDED);
+ erts_smp_atomic32_read_band_nob(&ERTS_SCHED_SLEEP_INFO_IX(ix)->flags,
+ ~ERTS_SSI_FLG_SUSPENDED);
wake_all_schedulers();
}
else {
@@ -3669,7 +3672,7 @@ void
erts_dbg_multi_scheduling_return_trap(Process *p, Eterm return_value)
{
if (return_value == am_blocked) {
- erts_aint32_t active = erts_smp_atomic32_read(&schdlr_sspnd.active);
+ erts_aint32_t active = erts_smp_atomic32_read_nob(&schdlr_sspnd.active);
ASSERT(1 <= active && active <= 2);
ASSERT(ERTS_PROC_GET_SCHDATA(p)->no == 1);
}
@@ -3752,12 +3755,12 @@ sched_thread_func(void *vesdp)
erts_thread_init_float();
erts_smp_mtx_lock(&schdlr_sspnd.mtx);
- ASSERT(erts_smp_atomic32_read(&schdlr_sspnd.changing)
+ ASSERT(erts_smp_atomic32_read_nob(&schdlr_sspnd.changing)
& ERTS_SCHDLR_SSPND_CHNG_ONLN);
if (--schdlr_sspnd.curr_online == schdlr_sspnd.wait_curr_online) {
- erts_smp_atomic32_band(&schdlr_sspnd.changing,
- ~ERTS_SCHDLR_SSPND_CHNG_ONLN);
+ erts_smp_atomic32_read_band_nob(&schdlr_sspnd.changing,
+ ~ERTS_SCHDLR_SSPND_CHNG_ONLN);
if (((ErtsSchedulerData *) vesdp)->no != 1)
erts_smp_cnd_signal(&schdlr_sspnd.cnd);
}
@@ -5214,7 +5217,7 @@ Process *schedule(Process *p, int calls)
reds = ERTS_PROC_MIN_CONTEXT_SWITCH_REDS_COST;
esdp->virtual_reds = 0;
- fcalls = (int) erts_smp_atomic32_addtest(&function_calls, reds);
+ fcalls = (int) erts_smp_atomic32_add_read_acqb(&function_calls, reds);
ASSERT(esdp && esdp == erts_get_scheduler_data());
rq = erts_get_runq_current(esdp);
@@ -5349,7 +5352,7 @@ Process *schedule(Process *p, int calls)
if ((rq->flags & ERTS_RUNQ_FLG_SUSPENDED)
|| (erts_smp_atomic32_read_acqb(&esdp->ssi->flags)
& ERTS_SSI_FLG_SUSPENDED)) {
- ASSERT(erts_smp_atomic32_read(&esdp->ssi->flags)
+ ASSERT(erts_smp_atomic32_read_nob(&esdp->ssi->flags)
& ERTS_SSI_FLG_SUSPENDED);
suspend_scheduler(esdp);
}
@@ -5363,7 +5366,7 @@ Process *schedule(Process *p, int calls)
|| defined(ERTS_SCHED_NEED_NONBLOCKABLE_AUX_WORK)
{
ErtsSchedulerSleepInfo *ssi = esdp->ssi;
- erts_aint32_t aux_work = erts_smp_atomic32_read(&ssi->aux_work);
+ erts_aint32_t aux_work = erts_smp_atomic32_read_nob(&ssi->aux_work);
if (aux_work) {
erts_smp_runq_unlock(rq);
#ifdef ERTS_SCHED_NEED_BLOCKABLE_AUX_WORK
@@ -5407,7 +5410,7 @@ Process *schedule(Process *p, int calls)
if ((rq->flags & ERTS_RUNQ_FLG_SUSPENDED)
|| (erts_smp_atomic32_read_acqb(&esdp->ssi->flags)
& ERTS_SSI_FLG_SUSPENDED)) {
- ASSERT(erts_smp_atomic32_read(&esdp->ssi->flags)
+ ASSERT(erts_smp_atomic32_read_nob(&esdp->ssi->flags)
& ERTS_SSI_FLG_SUSPENDED);
non_empty_runq(rq);
goto continue_check_activities_to_run;
@@ -5948,7 +5951,7 @@ erts_test_next_pid(int set, Uint next)
Uint erts_process_count(void)
{
- erts_aint32_t res = erts_smp_atomic32_read(&process_count);
+ erts_aint32_t res = erts_smp_atomic32_read_nob(&process_count);
ASSERT(res >= 0);
return (Uint) res;
}
@@ -5997,7 +6000,7 @@ alloc_process(void)
ASSERT(!process_tab[p_next]);
process_tab[p_next] = p;
- erts_smp_atomic32_inc(&process_count);
+ erts_smp_atomic32_inc_nob(&process_count);
p->id = make_internal_pid(p_serial << p_serial_shift | p_next);
if (p->id == ERTS_INVALID_PID) {
/* Do not use the invalid pid; change serial */
@@ -6123,7 +6126,7 @@ erl_create_process(Process* parent, /* Parent of process (default group leader).
p->min_heap_size = H_MIN_SIZE;
p->min_vheap_size = BIN_VH_MIN_SIZE;
p->prio = PRIORITY_NORMAL;
- p->max_gen_gcs = (Uint16) erts_smp_atomic32_read(&erts_max_gen_gcs);
+ p->max_gen_gcs = (Uint16) erts_smp_atomic32_read_nob(&erts_max_gen_gcs);
}
p->skipped = 0;
ASSERT(p->min_heap_size == erts_next_heap_size(p->min_heap_size, 0));
@@ -7580,8 +7583,8 @@ continue_exit_process(Process *p
p->status_flags = 0;
#endif
process_tab[pix] = NULL; /* Time of death! */
- ASSERT(erts_smp_atomic32_read(&process_count) > 0);
- erts_smp_atomic32_dec(&process_count);
+ ASSERT(erts_smp_atomic32_read_nob(&process_count) > 0);
+ erts_smp_atomic32_dec_nob(&process_count);
#ifdef ERTS_SMP
erts_pix_unlock(pix_lock);
diff --git a/erts/emulator/beam/erl_process.h b/erts/emulator/beam/erl_process.h
index 296acc7367..739aef3130 100644
--- a/erts/emulator/beam/erl_process.h
+++ b/erts/emulator/beam/erl_process.h
@@ -1599,11 +1599,11 @@ erts_sched_poke(ErtsSchedulerSleepInfo *ssi)
{
erts_aint32_t flags;
ERTS_THR_MEMORY_BARRIER;
- flags = erts_smp_atomic32_read(&ssi->flags);
+ flags = erts_smp_atomic32_read_nob(&ssi->flags);
ASSERT(!(flags & ERTS_SSI_FLG_SLEEPING)
|| (flags & ERTS_SSI_FLG_WAITING));
if (flags & ERTS_SSI_FLG_SLEEPING) {
- flags = erts_smp_atomic32_band(&ssi->flags, ~ERTS_SSI_FLGS_SLEEP);
+ flags = erts_smp_atomic32_read_band_nob(&ssi->flags, ~ERTS_SSI_FLGS_SLEEP);
erts_sched_finish_poke(ssi, flags);
}
}
diff --git a/erts/emulator/beam/erl_process_dump.c b/erts/emulator/beam/erl_process_dump.c
index 5410bcd495..3550f1396c 100644
--- a/erts/emulator/beam/erl_process_dump.c
+++ b/erts/emulator/beam/erl_process_dump.c
@@ -350,7 +350,7 @@ heap_dump(int to, void *to_arg, Eterm x)
ProcBin* pb = (ProcBin *) binary_val(x);
Binary* val = pb->val;
- if (erts_smp_atomic_xchg(&val->refc, 0) != 0) {
+ if (erts_smp_atomic_xchg_nob(&val->refc, 0) != 0) {
val->flags = (UWord) all_binaries;
all_binaries = val;
}
diff --git a/erts/emulator/beam/erl_process_lock.c b/erts/emulator/beam/erl_process_lock.c
index 72560aa124..83379d7352 100644
--- a/erts/emulator/beam/erl_process_lock.c
+++ b/erts/emulator/beam/erl_process_lock.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2007-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2007-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -316,7 +316,7 @@ try_aquire(erts_proc_lock_t *lck, erts_tse_t *wtr)
break;
}
wflg = lock << ERTS_PROC_LOCK_WAITER_SHIFT;
- old_lflgs = ERTS_PROC_LOCK_FLGS_BOR_(lck, wflg | lock);
+ old_lflgs = ERTS_PROC_LOCK_FLGS_BOR_ACQB_(lck, wflg | lock);
if (old_lflgs & lock) {
/* Didn't get the lock */
goto enqueue;
@@ -413,7 +413,7 @@ transfer_locks(Process *p,
do {
erts_tse_t *tmp = wake;
wake = wake->next;
- erts_atomic32_set(&tmp->uaflgs, 0);
+ erts_atomic32_set_nob(&tmp->uaflgs, 0);
erts_tse_set(tmp);
} while (wake);
@@ -509,14 +509,14 @@ wait_for_locks(Process *p,
ASSERT((wtr->uflgs & ~ERTS_PROC_LOCKS_ALL) == 0);
- erts_atomic32_set(&wtr->uaflgs, 1);
+ erts_atomic32_set_nob(&wtr->uaflgs, 1);
erts_pix_unlock(pix_lock);
while (1) {
int res;
erts_tse_reset(wtr);
- if (erts_atomic32_read(&wtr->uaflgs) == 0)
+ if (erts_atomic32_read_nob(&wtr->uaflgs) == 0)
break;
/*
@@ -955,7 +955,8 @@ erts_proc_lock_init(Process *p)
{
/* We always start with all locks locked */
#if ERTS_PROC_LOCK_ATOMIC_IMPL
- erts_smp_atomic32_init(&p->lock.flags, (erts_aint32_t) ERTS_PROC_LOCKS_ALL);
+ erts_smp_atomic32_init_nob(&p->lock.flags,
+ (erts_aint32_t) ERTS_PROC_LOCKS_ALL);
#else
p->lock.flags = ERTS_PROC_LOCKS_ALL;
#endif
@@ -974,7 +975,7 @@ erts_proc_lock_init(Process *p)
{
int i;
for (i = 0; i <= ERTS_PROC_LOCK_MAX_BIT; i++)
- erts_smp_atomic32_init(&p->lock.locked[i], (erts_aint32_t) 1);
+ erts_smp_atomic32_init_nob(&p->lock.locked[i], (erts_aint32_t) 1);
}
#endif
}
diff --git a/erts/emulator/beam/erl_process_lock.h b/erts/emulator/beam/erl_process_lock.h
index 355179f084..cd3b2182fd 100644
--- a/erts/emulator/beam/erl_process_lock.h
+++ b/erts/emulator/beam/erl_process_lock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2007-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2007-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -41,10 +41,10 @@
#define ERTS_PROC_LOCK_SPINLOCK_IMPL 0
#define ERTS_PROC_LOCK_MUTEX_IMPL 0
-#if defined(ETHR_HAVE_OPTIMIZED_ATOMIC_OPS)
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
# undef ERTS_PROC_LOCK_ATOMIC_IMPL
# define ERTS_PROC_LOCK_ATOMIC_IMPL 1
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCK)
+#elif defined(ETHR_HAVE_NATIVE_SPINLOCKS)
# undef ERTS_PROC_LOCK_SPINLOCK_IMPL
# define ERTS_PROC_LOCK_SPINLOCK_IMPL 1
#else
@@ -270,9 +270,11 @@ typedef struct {
#if ERTS_PROC_LOCK_ATOMIC_IMPL
#define ERTS_PROC_LOCK_FLGS_BAND_(L, MSK) \
- ((ErtsProcLocks) erts_smp_atomic32_band(&(L)->flags, (erts_aint32_t) (MSK)))
-#define ERTS_PROC_LOCK_FLGS_BOR_(L, MSK) \
- ((ErtsProcLocks) erts_smp_atomic32_bor(&(L)->flags, (erts_aint32_t) (MSK)))
+ ((ErtsProcLocks) erts_smp_atomic32_read_band_nob(&(L)->flags, \
+ (erts_aint32_t) (MSK)))
+#define ERTS_PROC_LOCK_FLGS_BOR_ACQB_(L, MSK) \
+ ((ErtsProcLocks) erts_smp_atomic32_read_bor_acqb(&(L)->flags, \
+ (erts_aint32_t) (MSK)))
#define ERTS_PROC_LOCK_FLGS_CMPXCHG_ACQB_(L, NEW, EXPECTED) \
((ErtsProcLocks) erts_smp_atomic32_cmpxchg_acqb(&(L)->flags, \
(erts_aint32_t) (NEW), \
@@ -282,7 +284,7 @@ typedef struct {
(erts_aint32_t) (NEW), \
(erts_aint32_t) (EXPECTED)))
#define ERTS_PROC_LOCK_FLGS_READ_(L) \
- ((ErtsProcLocks) erts_smp_atomic32_read(&(L)->flags))
+ ((ErtsProcLocks) erts_smp_atomic32_read_nob(&(L)->flags))
#else /* no opt atomic ops */
@@ -325,7 +327,7 @@ erts_proc_lock_flags_cmpxchg(erts_proc_lock_t *lck, ErtsProcLocks new,
#endif
#define ERTS_PROC_LOCK_FLGS_BAND_(L, MSK) erts_proc_lock_flags_band((L), (MSK))
-#define ERTS_PROC_LOCK_FLGS_BOR_(L, MSK) erts_proc_lock_flags_bor((L), (MSK))
+#define ERTS_PROC_LOCK_FLGS_BOR_ACQB_(L, MSK) erts_proc_lock_flags_bor((L), (MSK))
#define ERTS_PROC_LOCK_FLGS_CMPXCHG_ACQB_(L, NEW, EXPECTED) \
erts_proc_lock_flags_cmpxchg((L), (NEW), (EXPECTED))
#define ERTS_PROC_LOCK_FLGS_CMPXCHG_RELB_(L, NEW, EXPECTED) \
@@ -623,11 +625,11 @@ erts_proc_lock_op_debug(Process *p, ErtsProcLocks locks, int locked)
if (locks & lock) {
erts_aint32_t lock_count;
if (locked) {
- lock_count = erts_smp_atomic32_inctest(&p->lock.locked[i]);
+ lock_count = erts_smp_atomic32_inc_read_nob(&p->lock.locked[i]);
ERTS_LC_ASSERT(lock_count == 1);
}
else {
- lock_count = erts_smp_atomic32_dectest(&p->lock.locked[i]);
+ lock_count = erts_smp_atomic32_dec_read_nob(&p->lock.locked[i]);
ERTS_LC_ASSERT(lock_count == 0);
}
}
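
The renamed ERTS_PROC_LOCK_FLGS_BOR_ACQB_ above makes the acquire barrier on the lock-acquisition path explicit. A minimal sketch of the pattern it supports, consistent with the try_aquire() hunk in erl_process_lock.c above (lck and lock stand for the lock struct and the requested lock bit):

    ErtsProcLocks old_lflgs;

    /* Atomically set the lock/waiter bits; the _acqb variant gives
     * acquire semantics, so reads of the protected state cannot be
     * reordered before a successful acquisition. */
    old_lflgs = ERTS_PROC_LOCK_FLGS_BOR_ACQB_(lck, lock);
    if (old_lflgs & lock) {
        /* Bit was already set: another thread holds the lock. */
    }
    else {
        /* Lock acquired; protected state may now be accessed. */
    }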
diff --git a/erts/emulator/beam/erl_smp.h b/erts/emulator/beam/erl_smp.h
index 287327bfe1..a89ddfbcc1 100644
--- a/erts/emulator/beam/erl_smp.h
+++ b/erts/emulator/beam/erl_smp.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -54,8 +54,9 @@ typedef erts_cnd_t erts_smp_cnd_t;
typedef erts_rwmtx_opt_t erts_smp_rwmtx_opt_t;
typedef erts_rwmtx_t erts_smp_rwmtx_t;
typedef erts_tsd_key_t erts_smp_tsd_key_t;
-typedef erts_atomic_t erts_smp_atomic_t;
-typedef erts_atomic32_t erts_smp_atomic32_t;
+#define erts_smp_dw_atomic_t erts_dw_atomic_t
+#define erts_smp_atomic_t erts_atomic_t
+#define erts_smp_atomic32_t erts_atomic32_t
typedef erts_spinlock_t erts_smp_spinlock_t;
typedef erts_rwlock_t erts_smp_rwlock_t;
void erts_thr_fatal_error(int, char *); /* implemented in erl_init.c */
@@ -83,8 +84,9 @@ typedef struct {
} erts_smp_rwmtx_opt_t;
typedef int erts_smp_rwmtx_t;
typedef int erts_smp_tsd_key_t;
-typedef SWord erts_smp_atomic_t;
-typedef Uint32 erts_smp_atomic32_t;
+#define erts_smp_dw_atomic_t erts_no_dw_atomic_t
+#define erts_smp_atomic_t erts_no_atomic_t
+#define erts_smp_atomic32_t erts_no_atomic32_t
#if __GNUC__ > 2
typedef struct { } erts_smp_spinlock_t;
typedef struct { } erts_smp_rwlock_t;
@@ -160,82 +162,6 @@ ERTS_GLB_INLINE int erts_smp_rwmtx_tryrwlock(erts_smp_rwmtx_t *rwmtx);
ERTS_GLB_INLINE void erts_smp_rwmtx_rwunlock(erts_smp_rwmtx_t *rwmtx);
ERTS_GLB_INLINE int erts_smp_lc_rwmtx_is_rlocked(erts_smp_rwmtx_t *mtx);
ERTS_GLB_INLINE int erts_smp_lc_rwmtx_is_rwlocked(erts_smp_rwmtx_t *mtx);
-ERTS_GLB_INLINE void erts_smp_atomic_init(erts_smp_atomic_t *var,
- erts_aint_t i);
-ERTS_GLB_INLINE void erts_smp_atomic_set(erts_smp_atomic_t *var, erts_aint_t i);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_read(erts_smp_atomic_t *var);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_inctest(erts_smp_atomic_t *incp);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_dectest(erts_smp_atomic_t *decp);
-ERTS_GLB_INLINE void erts_smp_atomic_inc(erts_smp_atomic_t *incp);
-ERTS_GLB_INLINE void erts_smp_atomic_dec(erts_smp_atomic_t *decp);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_addtest(erts_smp_atomic_t *addp,
- erts_aint_t i);
-ERTS_GLB_INLINE void erts_smp_atomic_add(erts_smp_atomic_t *addp,
- erts_aint_t i);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_xchg(erts_smp_atomic_t *xchgp,
- erts_aint_t new);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_cmpxchg(erts_smp_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t expected);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_bor(erts_smp_atomic_t *var,
- erts_aint_t mask);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_band(erts_smp_atomic_t *var,
- erts_aint_t mask);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_read_acqb(erts_smp_atomic_t *var);
-ERTS_GLB_INLINE void erts_smp_atomic_set_relb(erts_smp_atomic_t *var,
- erts_aint_t i);
-ERTS_GLB_INLINE void erts_smp_atomic_dec_relb(erts_smp_atomic_t *decp);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_dectest_relb(erts_smp_atomic_t *decp);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_cmpxchg_acqb(erts_smp_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp);
-ERTS_GLB_INLINE erts_aint_t erts_smp_atomic_cmpxchg_relb(erts_smp_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp);
-ERTS_GLB_INLINE void
-erts_smp_atomic32_init(erts_smp_atomic32_t *var, erts_aint32_t i);
-ERTS_GLB_INLINE void
-erts_smp_atomic32_set(erts_smp_atomic32_t *var, erts_aint32_t i);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_read(erts_smp_atomic32_t *var);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_inctest(erts_smp_atomic32_t *incp);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_dectest(erts_smp_atomic32_t *decp);
-ERTS_GLB_INLINE void
-erts_smp_atomic32_inc(erts_smp_atomic32_t *incp);
-ERTS_GLB_INLINE void
-erts_smp_atomic32_dec(erts_smp_atomic32_t *decp);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_addtest(erts_smp_atomic32_t *addp, erts_aint32_t i);
-ERTS_GLB_INLINE void
-erts_smp_atomic32_add(erts_smp_atomic32_t *addp, erts_aint32_t i);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_xchg(erts_smp_atomic32_t *xchgp, erts_aint32_t new);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_cmpxchg(erts_smp_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t expected);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_bor(erts_smp_atomic32_t *var, erts_aint32_t mask);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_band(erts_smp_atomic32_t *var, erts_aint32_t mask);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_read_acqb(erts_smp_atomic32_t *var);
-ERTS_GLB_INLINE void
-erts_smp_atomic32_set_relb(erts_smp_atomic32_t *var, erts_aint32_t i);
-ERTS_GLB_INLINE void
-erts_smp_atomic32_dec_relb(erts_smp_atomic32_t *decp);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_dectest_relb(erts_smp_atomic32_t *decp);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_cmpxchg_acqb(erts_smp_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp);
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_cmpxchg_relb(erts_smp_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp);
ERTS_GLB_INLINE void erts_smp_spinlock_init_x(erts_smp_spinlock_t *lock,
char *name,
Eterm extra);
@@ -279,6 +205,429 @@ ERTS_GLB_INLINE void erts_smp_thr_sigmask(int how,
ERTS_GLB_INLINE void erts_smp_thr_sigwait(const sigset_t *set, int *sig);
#endif /* #ifdef ERTS_THR_HAVE_SIG_FUNCS */
+/*
+ * Functions implementing atomic operations with no (nob),
+ * full (mb), acquire (acqb), release (relb), read (rb), and
+ * write (wb) memory barriers.
+ *
+ * If SMP support has been disabled, they are mapped to functions
+ * that perform the same operation, but aren't atomic and don't
+ * imply memory barriers.
+ */
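
As an illustration of the suffix scheme, a minimal sketch (not part of the patch; refc is a hypothetical reference counter, and only operations mapped below are used):

    erts_smp_atomic_t refc;

    erts_smp_atomic_init_nob(&refc, 1);     /* no ordering needed yet */
    erts_smp_atomic_inc_nob(&refc);         /* take another reference */
    if (erts_smp_atomic_dec_read_relb(&refc) == 0) {
        /* Release barrier: writes to the object are ordered before
         * the count can be observed as zero by other threads. */
    }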
+
+#ifdef ERTS_SMP
+
+/* Double word size atomics */
+
+#define erts_smp_dw_atomic_init_nob erts_dw_atomic_init_nob
+#define erts_smp_dw_atomic_set_nob erts_dw_atomic_set_nob
+#define erts_smp_dw_atomic_read_nob erts_dw_atomic_read_nob
+#define erts_smp_dw_atomic_cmpxchg_nob erts_dw_atomic_cmpxchg_nob
+
+#define erts_smp_dw_atomic_init_mb erts_dw_atomic_init_mb
+#define erts_smp_dw_atomic_set_mb erts_dw_atomic_set_mb
+#define erts_smp_dw_atomic_read_mb erts_dw_atomic_read_mb
+#define erts_smp_dw_atomic_cmpxchg_mb erts_dw_atomic_cmpxchg_mb
+
+#define erts_smp_dw_atomic_init_acqb erts_dw_atomic_init_acqb
+#define erts_smp_dw_atomic_set_acqb erts_dw_atomic_set_acqb
+#define erts_smp_dw_atomic_read_acqb erts_dw_atomic_read_acqb
+#define erts_smp_dw_atomic_cmpxchg_acqb erts_dw_atomic_cmpxchg_acqb
+
+#define erts_smp_dw_atomic_init_relb erts_dw_atomic_init_relb
+#define erts_smp_dw_atomic_set_relb erts_dw_atomic_set_relb
+#define erts_smp_dw_atomic_read_relb erts_dw_atomic_read_relb
+#define erts_smp_dw_atomic_cmpxchg_relb erts_dw_atomic_cmpxchg_relb
+
+#define erts_smp_dw_atomic_init_rb erts_dw_atomic_init_rb
+#define erts_smp_dw_atomic_set_rb erts_dw_atomic_set_rb
+#define erts_smp_dw_atomic_read_rb erts_dw_atomic_read_rb
+#define erts_smp_dw_atomic_cmpxchg_rb erts_dw_atomic_cmpxchg_rb
+
+#define erts_smp_dw_atomic_init_wb erts_dw_atomic_init_wb
+#define erts_smp_dw_atomic_set_wb erts_dw_atomic_set_wb
+#define erts_smp_dw_atomic_read_wb erts_dw_atomic_read_wb
+#define erts_smp_dw_atomic_cmpxchg_wb erts_dw_atomic_cmpxchg_wb
+
+/* Word size atomics */
+
+#define erts_smp_atomic_init_nob erts_atomic_init_nob
+#define erts_smp_atomic_set_nob erts_atomic_set_nob
+#define erts_smp_atomic_read_nob erts_atomic_read_nob
+#define erts_smp_atomic_inc_read_nob erts_atomic_inc_read_nob
+#define erts_smp_atomic_dec_read_nob erts_atomic_dec_read_nob
+#define erts_smp_atomic_inc_nob erts_atomic_inc_nob
+#define erts_smp_atomic_dec_nob erts_atomic_dec_nob
+#define erts_smp_atomic_add_read_nob erts_atomic_add_read_nob
+#define erts_smp_atomic_add_nob erts_atomic_add_nob
+#define erts_smp_atomic_read_bor_nob erts_atomic_read_bor_nob
+#define erts_smp_atomic_read_band_nob erts_atomic_read_band_nob
+#define erts_smp_atomic_xchg_nob erts_atomic_xchg_nob
+#define erts_smp_atomic_cmpxchg_nob erts_atomic_cmpxchg_nob
+
+#define erts_smp_atomic_init_mb erts_atomic_init_mb
+#define erts_smp_atomic_set_mb erts_atomic_set_mb
+#define erts_smp_atomic_read_mb erts_atomic_read_mb
+#define erts_smp_atomic_inc_read_mb erts_atomic_inc_read_mb
+#define erts_smp_atomic_dec_read_mb erts_atomic_dec_read_mb
+#define erts_smp_atomic_inc_mb erts_atomic_inc_mb
+#define erts_smp_atomic_dec_mb erts_atomic_dec_mb
+#define erts_smp_atomic_add_read_mb erts_atomic_add_read_mb
+#define erts_smp_atomic_add_mb erts_atomic_add_mb
+#define erts_smp_atomic_read_bor_mb erts_atomic_read_bor_mb
+#define erts_smp_atomic_read_band_mb erts_atomic_read_band_mb
+#define erts_smp_atomic_xchg_mb erts_atomic_xchg_mb
+#define erts_smp_atomic_cmpxchg_mb erts_atomic_cmpxchg_mb
+
+#define erts_smp_atomic_init_acqb erts_atomic_init_acqb
+#define erts_smp_atomic_set_acqb erts_atomic_set_acqb
+#define erts_smp_atomic_read_acqb erts_atomic_read_acqb
+#define erts_smp_atomic_inc_read_acqb erts_atomic_inc_read_acqb
+#define erts_smp_atomic_dec_read_acqb erts_atomic_dec_read_acqb
+#define erts_smp_atomic_inc_acqb erts_atomic_inc_acqb
+#define erts_smp_atomic_dec_acqb erts_atomic_dec_acqb
+#define erts_smp_atomic_add_read_acqb erts_atomic_add_read_acqb
+#define erts_smp_atomic_add_acqb erts_atomic_add_acqb
+#define erts_smp_atomic_read_bor_acqb erts_atomic_read_bor_acqb
+#define erts_smp_atomic_read_band_acqb erts_atomic_read_band_acqb
+#define erts_smp_atomic_xchg_acqb erts_atomic_xchg_acqb
+#define erts_smp_atomic_cmpxchg_acqb erts_atomic_cmpxchg_acqb
+
+#define erts_smp_atomic_init_relb erts_atomic_init_relb
+#define erts_smp_atomic_set_relb erts_atomic_set_relb
+#define erts_smp_atomic_read_relb erts_atomic_read_relb
+#define erts_smp_atomic_inc_read_relb erts_atomic_inc_read_relb
+#define erts_smp_atomic_dec_read_relb erts_atomic_dec_read_relb
+#define erts_smp_atomic_inc_relb erts_atomic_inc_relb
+#define erts_smp_atomic_dec_relb erts_atomic_dec_relb
+#define erts_smp_atomic_add_read_relb erts_atomic_add_read_relb
+#define erts_smp_atomic_add_relb erts_atomic_add_relb
+#define erts_smp_atomic_read_bor_relb erts_atomic_read_bor_relb
+#define erts_smp_atomic_read_band_relb erts_atomic_read_band_relb
+#define erts_smp_atomic_xchg_relb erts_atomic_xchg_relb
+#define erts_smp_atomic_cmpxchg_relb erts_atomic_cmpxchg_relb
+
+#define erts_smp_atomic_init_rb erts_atomic_init_rb
+#define erts_smp_atomic_set_rb erts_atomic_set_rb
+#define erts_smp_atomic_read_rb erts_atomic_read_rb
+#define erts_smp_atomic_inc_read_rb erts_atomic_inc_read_rb
+#define erts_smp_atomic_dec_read_rb erts_atomic_dec_read_rb
+#define erts_smp_atomic_inc_rb erts_atomic_inc_rb
+#define erts_smp_atomic_dec_rb erts_atomic_dec_rb
+#define erts_smp_atomic_add_read_rb erts_atomic_add_read_rb
+#define erts_smp_atomic_add_rb erts_atomic_add_rb
+#define erts_smp_atomic_read_bor_rb erts_atomic_read_bor_rb
+#define erts_smp_atomic_read_band_rb erts_atomic_read_band_rb
+#define erts_smp_atomic_xchg_rb erts_atomic_xchg_rb
+#define erts_smp_atomic_cmpxchg_rb erts_atomic_cmpxchg_rb
+
+#define erts_smp_atomic_init_wb erts_atomic_init_wb
+#define erts_smp_atomic_set_wb erts_atomic_set_wb
+#define erts_smp_atomic_read_wb erts_atomic_read_wb
+#define erts_smp_atomic_inc_read_wb erts_atomic_inc_read_wb
+#define erts_smp_atomic_dec_read_wb erts_atomic_dec_read_wb
+#define erts_smp_atomic_inc_wb erts_atomic_inc_wb
+#define erts_smp_atomic_dec_wb erts_atomic_dec_wb
+#define erts_smp_atomic_add_read_wb erts_atomic_add_read_wb
+#define erts_smp_atomic_add_wb erts_atomic_add_wb
+#define erts_smp_atomic_read_bor_wb erts_atomic_read_bor_wb
+#define erts_smp_atomic_read_band_wb erts_atomic_read_band_wb
+#define erts_smp_atomic_xchg_wb erts_atomic_xchg_wb
+#define erts_smp_atomic_cmpxchg_wb erts_atomic_cmpxchg_wb
+
+/* 32-bit atomics */
+
+#define erts_smp_atomic32_init_nob erts_atomic32_init_nob
+#define erts_smp_atomic32_set_nob erts_atomic32_set_nob
+#define erts_smp_atomic32_read_nob erts_atomic32_read_nob
+#define erts_smp_atomic32_inc_read_nob erts_atomic32_inc_read_nob
+#define erts_smp_atomic32_dec_read_nob erts_atomic32_dec_read_nob
+#define erts_smp_atomic32_inc_nob erts_atomic32_inc_nob
+#define erts_smp_atomic32_dec_nob erts_atomic32_dec_nob
+#define erts_smp_atomic32_add_read_nob erts_atomic32_add_read_nob
+#define erts_smp_atomic32_add_nob erts_atomic32_add_nob
+#define erts_smp_atomic32_read_bor_nob erts_atomic32_read_bor_nob
+#define erts_smp_atomic32_read_band_nob erts_atomic32_read_band_nob
+#define erts_smp_atomic32_xchg_nob erts_atomic32_xchg_nob
+#define erts_smp_atomic32_cmpxchg_nob erts_atomic32_cmpxchg_nob
+
+#define erts_smp_atomic32_init_mb erts_atomic32_init_mb
+#define erts_smp_atomic32_set_mb erts_atomic32_set_mb
+#define erts_smp_atomic32_read_mb erts_atomic32_read_mb
+#define erts_smp_atomic32_inc_read_mb erts_atomic32_inc_read_mb
+#define erts_smp_atomic32_dec_read_mb erts_atomic32_dec_read_mb
+#define erts_smp_atomic32_inc_mb erts_atomic32_inc_mb
+#define erts_smp_atomic32_dec_mb erts_atomic32_dec_mb
+#define erts_smp_atomic32_add_read_mb erts_atomic32_add_read_mb
+#define erts_smp_atomic32_add_mb erts_atomic32_add_mb
+#define erts_smp_atomic32_read_bor_mb erts_atomic32_read_bor_mb
+#define erts_smp_atomic32_read_band_mb erts_atomic32_read_band_mb
+#define erts_smp_atomic32_xchg_mb erts_atomic32_xchg_mb
+#define erts_smp_atomic32_cmpxchg_mb erts_atomic32_cmpxchg_mb
+
+#define erts_smp_atomic32_init_acqb erts_atomic32_init_acqb
+#define erts_smp_atomic32_set_acqb erts_atomic32_set_acqb
+#define erts_smp_atomic32_read_acqb erts_atomic32_read_acqb
+#define erts_smp_atomic32_inc_read_acqb erts_atomic32_inc_read_acqb
+#define erts_smp_atomic32_dec_read_acqb erts_atomic32_dec_read_acqb
+#define erts_smp_atomic32_inc_acqb erts_atomic32_inc_acqb
+#define erts_smp_atomic32_dec_acqb erts_atomic32_dec_acqb
+#define erts_smp_atomic32_add_read_acqb erts_atomic32_add_read_acqb
+#define erts_smp_atomic32_add_acqb erts_atomic32_add_acqb
+#define erts_smp_atomic32_read_bor_acqb erts_atomic32_read_bor_acqb
+#define erts_smp_atomic32_read_band_acqb erts_atomic32_read_band_acqb
+#define erts_smp_atomic32_xchg_acqb erts_atomic32_xchg_acqb
+#define erts_smp_atomic32_cmpxchg_acqb erts_atomic32_cmpxchg_acqb
+
+#define erts_smp_atomic32_init_relb erts_atomic32_init_relb
+#define erts_smp_atomic32_set_relb erts_atomic32_set_relb
+#define erts_smp_atomic32_read_relb erts_atomic32_read_relb
+#define erts_smp_atomic32_inc_read_relb erts_atomic32_inc_read_relb
+#define erts_smp_atomic32_dec_read_relb erts_atomic32_dec_read_relb
+#define erts_smp_atomic32_inc_relb erts_atomic32_inc_relb
+#define erts_smp_atomic32_dec_relb erts_atomic32_dec_relb
+#define erts_smp_atomic32_add_read_relb erts_atomic32_add_read_relb
+#define erts_smp_atomic32_add_relb erts_atomic32_add_relb
+#define erts_smp_atomic32_read_bor_relb erts_atomic32_read_bor_relb
+#define erts_smp_atomic32_read_band_relb erts_atomic32_read_band_relb
+#define erts_smp_atomic32_xchg_relb erts_atomic32_xchg_relb
+#define erts_smp_atomic32_cmpxchg_relb erts_atomic32_cmpxchg_relb
+
+#define erts_smp_atomic32_init_rb erts_atomic32_init_rb
+#define erts_smp_atomic32_set_rb erts_atomic32_set_rb
+#define erts_smp_atomic32_read_rb erts_atomic32_read_rb
+#define erts_smp_atomic32_inc_read_rb erts_atomic32_inc_read_rb
+#define erts_smp_atomic32_dec_read_rb erts_atomic32_dec_read_rb
+#define erts_smp_atomic32_inc_rb erts_atomic32_inc_rb
+#define erts_smp_atomic32_dec_rb erts_atomic32_dec_rb
+#define erts_smp_atomic32_add_read_rb erts_atomic32_add_read_rb
+#define erts_smp_atomic32_add_rb erts_atomic32_add_rb
+#define erts_smp_atomic32_read_bor_rb erts_atomic32_read_bor_rb
+#define erts_smp_atomic32_read_band_rb erts_atomic32_read_band_rb
+#define erts_smp_atomic32_xchg_rb erts_atomic32_xchg_rb
+#define erts_smp_atomic32_cmpxchg_rb erts_atomic32_cmpxchg_rb
+
+#define erts_smp_atomic32_init_wb erts_atomic32_init_wb
+#define erts_smp_atomic32_set_wb erts_atomic32_set_wb
+#define erts_smp_atomic32_read_wb erts_atomic32_read_wb
+#define erts_smp_atomic32_inc_read_wb erts_atomic32_inc_read_wb
+#define erts_smp_atomic32_dec_read_wb erts_atomic32_dec_read_wb
+#define erts_smp_atomic32_inc_wb erts_atomic32_inc_wb
+#define erts_smp_atomic32_dec_wb erts_atomic32_dec_wb
+#define erts_smp_atomic32_add_read_wb erts_atomic32_add_read_wb
+#define erts_smp_atomic32_add_wb erts_atomic32_add_wb
+#define erts_smp_atomic32_read_bor_wb erts_atomic32_read_bor_wb
+#define erts_smp_atomic32_read_band_wb erts_atomic32_read_band_wb
+#define erts_smp_atomic32_xchg_wb erts_atomic32_xchg_wb
+#define erts_smp_atomic32_cmpxchg_wb erts_atomic32_cmpxchg_wb
+
+#else /* !ERTS_SMP */
+
+/* Double word size atomics */
+
+#define erts_smp_dw_atomic_init_nob erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_set_nob erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_read_nob erts_no_dw_atomic_read
+#define erts_smp_dw_atomic_cmpxchg_nob erts_no_dw_atomic_cmpxchg
+
+#define erts_smp_dw_atomic_init_mb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_set_mb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_read_mb erts_no_dw_atomic_read
+#define erts_smp_dw_atomic_cmpxchg_mb erts_no_dw_atomic_cmpxchg
+
+#define erts_smp_dw_atomic_init_acqb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_set_acqb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_read_acqb erts_no_dw_atomic_read
+#define erts_smp_dw_atomic_cmpxchg_acqb erts_no_dw_atomic_cmpxchg
+
+#define erts_smp_dw_atomic_init_relb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_set_relb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_read_relb erts_no_dw_atomic_read
+#define erts_smp_dw_atomic_cmpxchg_relb erts_no_dw_atomic_cmpxchg
+
+#define erts_smp_dw_atomic_init_rb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_set_rb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_read_rb erts_no_dw_atomic_read
+#define erts_smp_dw_atomic_cmpxchg_rb erts_no_dw_atomic_cmpxchg
+
+#define erts_smp_dw_atomic_init_wb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_set_wb erts_no_dw_atomic_set
+#define erts_smp_dw_atomic_read_wb erts_no_dw_atomic_read
+#define erts_smp_dw_atomic_cmpxchg_wb erts_no_dw_atomic_cmpxchg
+
+/* Word size atomics */
+
+#define erts_smp_atomic_init_nob erts_no_atomic_set
+#define erts_smp_atomic_set_nob erts_no_atomic_set
+#define erts_smp_atomic_read_nob erts_no_atomic_read
+#define erts_smp_atomic_inc_read_nob erts_no_atomic_inc_read
+#define erts_smp_atomic_dec_read_nob erts_no_atomic_dec_read
+#define erts_smp_atomic_inc_nob erts_no_atomic_inc
+#define erts_smp_atomic_dec_nob erts_no_atomic_dec
+#define erts_smp_atomic_add_read_nob erts_no_atomic_add_read
+#define erts_smp_atomic_add_nob erts_no_atomic_add
+#define erts_smp_atomic_read_bor_nob erts_no_atomic_read_bor
+#define erts_smp_atomic_read_band_nob erts_no_atomic_read_band
+#define erts_smp_atomic_xchg_nob erts_no_atomic_xchg
+#define erts_smp_atomic_cmpxchg_nob erts_no_atomic_cmpxchg
+
+#define erts_smp_atomic_init_mb erts_no_atomic_set
+#define erts_smp_atomic_set_mb erts_no_atomic_set
+#define erts_smp_atomic_read_mb erts_no_atomic_read
+#define erts_smp_atomic_inc_read_mb erts_no_atomic_inc_read
+#define erts_smp_atomic_dec_read_mb erts_no_atomic_dec_read
+#define erts_smp_atomic_inc_mb erts_no_atomic_inc
+#define erts_smp_atomic_dec_mb erts_no_atomic_dec
+#define erts_smp_atomic_add_read_mb erts_no_atomic_add_read
+#define erts_smp_atomic_add_mb erts_no_atomic_add
+#define erts_smp_atomic_read_bor_mb erts_no_atomic_read_bor
+#define erts_smp_atomic_read_band_mb erts_no_atomic_read_band
+#define erts_smp_atomic_xchg_mb erts_no_atomic_xchg
+#define erts_smp_atomic_cmpxchg_mb erts_no_atomic_cmpxchg
+
+#define erts_smp_atomic_init_acqb erts_no_atomic_set
+#define erts_smp_atomic_set_acqb erts_no_atomic_set
+#define erts_smp_atomic_read_acqb erts_no_atomic_read
+#define erts_smp_atomic_inc_read_acqb erts_no_atomic_inc_read
+#define erts_smp_atomic_dec_read_acqb erts_no_atomic_dec_read
+#define erts_smp_atomic_inc_acqb erts_no_atomic_inc
+#define erts_smp_atomic_dec_acqb erts_no_atomic_dec
+#define erts_smp_atomic_add_read_acqb erts_no_atomic_add_read
+#define erts_smp_atomic_add_acqb erts_no_atomic_add
+#define erts_smp_atomic_read_bor_acqb erts_no_atomic_read_bor
+#define erts_smp_atomic_read_band_acqb erts_no_atomic_read_band
+#define erts_smp_atomic_xchg_acqb erts_no_atomic_xchg
+#define erts_smp_atomic_cmpxchg_acqb erts_no_atomic_cmpxchg
+
+#define erts_smp_atomic_init_relb erts_no_atomic_set
+#define erts_smp_atomic_set_relb erts_no_atomic_set
+#define erts_smp_atomic_read_relb erts_no_atomic_read
+#define erts_smp_atomic_inc_read_relb erts_no_atomic_inc_read
+#define erts_smp_atomic_dec_read_relb erts_no_atomic_dec_read
+#define erts_smp_atomic_inc_relb erts_no_atomic_inc
+#define erts_smp_atomic_dec_relb erts_no_atomic_dec
+#define erts_smp_atomic_add_read_relb erts_no_atomic_add_read
+#define erts_smp_atomic_add_relb erts_no_atomic_add
+#define erts_smp_atomic_read_bor_relb erts_no_atomic_read_bor
+#define erts_smp_atomic_read_band_relb erts_no_atomic_read_band
+#define erts_smp_atomic_xchg_relb erts_no_atomic_xchg
+#define erts_smp_atomic_cmpxchg_relb erts_no_atomic_cmpxchg
+
+#define erts_smp_atomic_init_rb erts_no_atomic_set
+#define erts_smp_atomic_set_rb erts_no_atomic_set
+#define erts_smp_atomic_read_rb erts_no_atomic_read
+#define erts_smp_atomic_inc_read_rb erts_no_atomic_inc_read
+#define erts_smp_atomic_dec_read_rb erts_no_atomic_dec_read
+#define erts_smp_atomic_inc_rb erts_no_atomic_inc
+#define erts_smp_atomic_dec_rb erts_no_atomic_dec
+#define erts_smp_atomic_add_read_rb erts_no_atomic_add_read
+#define erts_smp_atomic_add_rb erts_no_atomic_add
+#define erts_smp_atomic_read_bor_rb erts_no_atomic_read_bor
+#define erts_smp_atomic_read_band_rb erts_no_atomic_read_band
+#define erts_smp_atomic_xchg_rb erts_no_atomic_xchg
+#define erts_smp_atomic_cmpxchg_rb erts_no_atomic_cmpxchg
+
+#define erts_smp_atomic_init_wb erts_no_atomic_set
+#define erts_smp_atomic_set_wb erts_no_atomic_set
+#define erts_smp_atomic_read_wb erts_no_atomic_read
+#define erts_smp_atomic_inc_read_wb erts_no_atomic_inc_read
+#define erts_smp_atomic_dec_read_wb erts_no_atomic_dec_read
+#define erts_smp_atomic_inc_wb erts_no_atomic_inc
+#define erts_smp_atomic_dec_wb erts_no_atomic_dec
+#define erts_smp_atomic_add_read_wb erts_no_atomic_add_read
+#define erts_smp_atomic_add_wb erts_no_atomic_add
+#define erts_smp_atomic_read_bor_wb erts_no_atomic_read_bor
+#define erts_smp_atomic_read_band_wb erts_no_atomic_read_band
+#define erts_smp_atomic_xchg_wb erts_no_atomic_xchg
+#define erts_smp_atomic_cmpxchg_wb erts_no_atomic_cmpxchg
+
+/* 32-bit atomics */
+
+#define erts_smp_atomic32_init_nob erts_no_atomic32_set
+#define erts_smp_atomic32_set_nob erts_no_atomic32_set
+#define erts_smp_atomic32_read_nob erts_no_atomic32_read
+#define erts_smp_atomic32_inc_read_nob erts_no_atomic32_inc_read
+#define erts_smp_atomic32_dec_read_nob erts_no_atomic32_dec_read
+#define erts_smp_atomic32_inc_nob erts_no_atomic32_inc
+#define erts_smp_atomic32_dec_nob erts_no_atomic32_dec
+#define erts_smp_atomic32_add_read_nob erts_no_atomic32_add_read
+#define erts_smp_atomic32_add_nob erts_no_atomic32_add
+#define erts_smp_atomic32_read_bor_nob erts_no_atomic32_read_bor
+#define erts_smp_atomic32_read_band_nob erts_no_atomic32_read_band
+#define erts_smp_atomic32_xchg_nob erts_no_atomic32_xchg
+#define erts_smp_atomic32_cmpxchg_nob erts_no_atomic32_cmpxchg
+
+#define erts_smp_atomic32_init_mb erts_no_atomic32_set
+#define erts_smp_atomic32_set_mb erts_no_atomic32_set
+#define erts_smp_atomic32_read_mb erts_no_atomic32_read
+#define erts_smp_atomic32_inc_read_mb erts_no_atomic32_inc_read
+#define erts_smp_atomic32_dec_read_mb erts_no_atomic32_dec_read
+#define erts_smp_atomic32_inc_mb erts_no_atomic32_inc
+#define erts_smp_atomic32_dec_mb erts_no_atomic32_dec
+#define erts_smp_atomic32_add_read_mb erts_no_atomic32_add_read
+#define erts_smp_atomic32_add_mb erts_no_atomic32_add
+#define erts_smp_atomic32_read_bor_mb erts_no_atomic32_read_bor
+#define erts_smp_atomic32_read_band_mb erts_no_atomic32_read_band
+#define erts_smp_atomic32_xchg_mb erts_no_atomic32_xchg
+#define erts_smp_atomic32_cmpxchg_mb erts_no_atomic32_cmpxchg
+
+#define erts_smp_atomic32_init_acqb erts_no_atomic32_set
+#define erts_smp_atomic32_set_acqb erts_no_atomic32_set
+#define erts_smp_atomic32_read_acqb erts_no_atomic32_read
+#define erts_smp_atomic32_inc_read_acqb erts_no_atomic32_inc_read
+#define erts_smp_atomic32_dec_read_acqb erts_no_atomic32_dec_read
+#define erts_smp_atomic32_inc_acqb erts_no_atomic32_inc
+#define erts_smp_atomic32_dec_acqb erts_no_atomic32_dec
+#define erts_smp_atomic32_add_read_acqb erts_no_atomic32_add_read
+#define erts_smp_atomic32_add_acqb erts_no_atomic32_add
+#define erts_smp_atomic32_read_bor_acqb erts_no_atomic32_read_bor
+#define erts_smp_atomic32_read_band_acqb erts_no_atomic32_read_band
+#define erts_smp_atomic32_xchg_acqb erts_no_atomic32_xchg
+#define erts_smp_atomic32_cmpxchg_acqb erts_no_atomic32_cmpxchg
+
+#define erts_smp_atomic32_init_relb erts_no_atomic32_set
+#define erts_smp_atomic32_set_relb erts_no_atomic32_set
+#define erts_smp_atomic32_read_relb erts_no_atomic32_read
+#define erts_smp_atomic32_inc_read_relb erts_no_atomic32_inc_read
+#define erts_smp_atomic32_dec_read_relb erts_no_atomic32_dec_read
+#define erts_smp_atomic32_inc_relb erts_no_atomic32_inc
+#define erts_smp_atomic32_dec_relb erts_no_atomic32_dec
+#define erts_smp_atomic32_add_read_relb erts_no_atomic32_add_read
+#define erts_smp_atomic32_add_relb erts_no_atomic32_add
+#define erts_smp_atomic32_read_bor_relb erts_no_atomic32_read_bor
+#define erts_smp_atomic32_read_band_relb erts_no_atomic32_read_band
+#define erts_smp_atomic32_xchg_relb erts_no_atomic32_xchg
+#define erts_smp_atomic32_cmpxchg_relb erts_no_atomic32_cmpxchg
+
+#define erts_smp_atomic32_init_rb erts_no_atomic32_set
+#define erts_smp_atomic32_set_rb erts_no_atomic32_set
+#define erts_smp_atomic32_read_rb erts_no_atomic32_read
+#define erts_smp_atomic32_inc_read_rb erts_no_atomic32_inc_read
+#define erts_smp_atomic32_dec_read_rb erts_no_atomic32_dec_read
+#define erts_smp_atomic32_inc_rb erts_no_atomic32_inc
+#define erts_smp_atomic32_dec_rb erts_no_atomic32_dec
+#define erts_smp_atomic32_add_read_rb erts_no_atomic32_add_read
+#define erts_smp_atomic32_add_rb erts_no_atomic32_add
+#define erts_smp_atomic32_read_bor_rb erts_no_atomic32_read_bor
+#define erts_smp_atomic32_read_band_rb erts_no_atomic32_read_band
+#define erts_smp_atomic32_xchg_rb erts_no_atomic32_xchg
+#define erts_smp_atomic32_cmpxchg_rb erts_no_atomic32_cmpxchg
+
+#define erts_smp_atomic32_init_wb erts_no_atomic32_set
+#define erts_smp_atomic32_set_wb erts_no_atomic32_set
+#define erts_smp_atomic32_read_wb erts_no_atomic32_read
+#define erts_smp_atomic32_inc_read_wb erts_no_atomic32_inc_read
+#define erts_smp_atomic32_dec_read_wb erts_no_atomic32_dec_read
+#define erts_smp_atomic32_inc_wb erts_no_atomic32_inc
+#define erts_smp_atomic32_dec_wb erts_no_atomic32_dec
+#define erts_smp_atomic32_add_read_wb erts_no_atomic32_add_read
+#define erts_smp_atomic32_add_wb erts_no_atomic32_add
+#define erts_smp_atomic32_read_bor_wb erts_no_atomic32_read_bor
+#define erts_smp_atomic32_read_band_wb erts_no_atomic32_read_band
+#define erts_smp_atomic32_xchg_wb erts_no_atomic32_xchg
+#define erts_smp_atomic32_cmpxchg_wb erts_no_atomic32_cmpxchg
+
+#endif /* !ERTS_SMP */
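
With ERTS_SMP undefined, all barrier variants of an operation collapse to the same plain, non-atomic fallback; for example (v is a hypothetical erts_smp_atomic32_t):

    erts_smp_atomic32_inc_read_mb(&v);  /* expands to...               */
    erts_no_atomic32_inc_read(&v);      /* ...which is a plain ++v,
                                         * no atomicity, no barrier    */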
#if ERTS_GLB_INLINE_INCL_FUNC_DEF
@@ -655,434 +1004,6 @@ erts_smp_lc_rwmtx_is_rwlocked(erts_smp_rwmtx_t *mtx)
}
ERTS_GLB_INLINE void
-erts_smp_atomic_init(erts_smp_atomic_t *var, erts_aint_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic_init(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic_set(erts_smp_atomic_t *var, erts_aint_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic_set(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_read(erts_smp_atomic_t *var)
-{
-#ifdef ERTS_SMP
- return erts_atomic_read(var);
-#else
- return *var;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_inctest(erts_smp_atomic_t *incp)
-{
-#ifdef ERTS_SMP
- return erts_atomic_inctest(incp);
-#else
- return ++(*incp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_dectest(erts_smp_atomic_t *decp)
-{
-#ifdef ERTS_SMP
- return erts_atomic_dectest(decp);
-#else
- return --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic_inc(erts_smp_atomic_t *incp)
-{
-#ifdef ERTS_SMP
- erts_atomic_inc(incp);
-#else
- ++(*incp);
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic_dec(erts_smp_atomic_t *decp)
-{
-#ifdef ERTS_SMP
- erts_atomic_dec(decp);
-#else
- --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_addtest(erts_smp_atomic_t *addp, erts_aint_t i)
-{
-#ifdef ERTS_SMP
- return erts_atomic_addtest(addp, i);
-#else
- return *addp += i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic_add(erts_smp_atomic_t *addp, erts_aint_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic_add(addp, i);
-#else
- *addp += i;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_xchg(erts_smp_atomic_t *xchgp, erts_aint_t new)
-{
-#ifdef ERTS_SMP
- return erts_atomic_xchg(xchgp, new);
-#else
- erts_aint_t old;
- old = *xchgp;
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_cmpxchg(erts_smp_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t expected)
-{
-#ifdef ERTS_SMP
- return erts_atomic_cmpxchg(xchgp, new, expected);
-#else
- erts_aint_t old = *xchgp;
- if (old == expected)
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_bor(erts_smp_atomic_t *var, erts_aint_t mask)
-{
-#ifdef ERTS_SMP
- return erts_atomic_bor(var, mask);
-#else
- erts_aint_t old;
- old = *var;
- *var |= mask;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_band(erts_smp_atomic_t *var, erts_aint_t mask)
-{
-#ifdef ERTS_SMP
- return erts_atomic_band(var, mask);
-#else
- erts_aint_t old;
- old = *var;
- *var &= mask;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_read_acqb(erts_smp_atomic_t *var)
-{
-#ifdef ERTS_SMP
- return erts_atomic_read_acqb(var);
-#else
- return *var;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic_set_relb(erts_smp_atomic_t *var, erts_aint_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic_set_relb(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic_dec_relb(erts_smp_atomic_t *decp)
-{
-#ifdef ERTS_SMP
- erts_atomic_dec_relb(decp);
-#else
- --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_dectest_relb(erts_smp_atomic_t *decp)
-{
-#ifdef ERTS_SMP
- return erts_atomic_dectest_relb(decp);
-#else
- return --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_cmpxchg_acqb(erts_smp_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp)
-{
-#ifdef ERTS_SMP
- return erts_atomic_cmpxchg_acqb(xchgp, new, exp);
-#else
- erts_aint_t old = *xchgp;
- if (old == exp)
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_smp_atomic_cmpxchg_relb(erts_smp_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp)
-{
-#ifdef ERTS_SMP
- return erts_atomic_cmpxchg_relb(xchgp, new, exp);
-#else
- erts_aint_t old = *xchgp;
- if (old == exp)
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic32_init(erts_smp_atomic32_t *var, erts_aint32_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic32_init(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic32_set(erts_smp_atomic32_t *var, erts_aint32_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic32_set(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_read(erts_smp_atomic32_t *var)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_read(var);
-#else
- return *var;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_inctest(erts_smp_atomic32_t *incp)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_inctest(incp);
-#else
- return ++(*incp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_dectest(erts_smp_atomic32_t *decp)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_dectest(decp);
-#else
- return --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic32_inc(erts_smp_atomic32_t *incp)
-{
-#ifdef ERTS_SMP
- erts_atomic32_inc(incp);
-#else
- ++(*incp);
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic32_dec(erts_smp_atomic32_t *decp)
-{
-#ifdef ERTS_SMP
- erts_atomic32_dec(decp);
-#else
- --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_addtest(erts_smp_atomic32_t *addp, erts_aint32_t i)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_addtest(addp, i);
-#else
- return *addp += i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic32_add(erts_smp_atomic32_t *addp, erts_aint32_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic32_add(addp, i);
-#else
- *addp += i;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_xchg(erts_smp_atomic32_t *xchgp, erts_aint32_t new)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_xchg(xchgp, new);
-#else
- erts_aint32_t old;
- old = *xchgp;
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_cmpxchg(erts_smp_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t expected)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_cmpxchg(xchgp, new, expected);
-#else
- erts_aint32_t old = *xchgp;
- if (old == expected)
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_bor(erts_smp_atomic32_t *var, erts_aint32_t mask)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_bor(var, mask);
-#else
- erts_aint32_t old;
- old = *var;
- *var |= mask;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_band(erts_smp_atomic32_t *var, erts_aint32_t mask)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_band(var, mask);
-#else
- erts_aint32_t old;
- old = *var;
- *var &= mask;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_read_acqb(erts_smp_atomic32_t *var)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_read_acqb(var);
-#else
- return *var;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic32_set_relb(erts_smp_atomic32_t *var, erts_aint32_t i)
-{
-#ifdef ERTS_SMP
- erts_atomic32_set_relb(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_smp_atomic32_dec_relb(erts_smp_atomic32_t *decp)
-{
-#ifdef ERTS_SMP
- erts_atomic32_dec_relb(decp);
-#else
- --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_dectest_relb(erts_smp_atomic32_t *decp)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_dectest_relb(decp);
-#else
- return --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_cmpxchg_acqb(erts_smp_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_cmpxchg_acqb(xchgp, new, exp);
-#else
- erts_aint32_t old = *xchgp;
- if (old == exp)
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_smp_atomic32_cmpxchg_relb(erts_smp_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp)
-{
-#ifdef ERTS_SMP
- return erts_atomic32_cmpxchg_relb(xchgp, new, exp);
-#else
- erts_aint32_t old = *xchgp;
- if (old == exp)
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE void
erts_smp_spinlock_init_x(erts_smp_spinlock_t *lock, char *name, Eterm extra)
{
#ifdef ERTS_SMP
@@ -1308,3 +1229,37 @@ erts_smp_thr_sigwait(const sigset_t *set, int *sig)
#endif /* #if ERTS_GLB_INLINE_INCL_FUNC_DEF */
#endif /* ERL_SMP_H */
+
+#ifdef ERTS_UNDEF_DEPRECATED_ATOMICS
+
+/* Deprecated functions to replace */
+
+#undef erts_smp_atomic_init
+#undef erts_smp_atomic_set
+#undef erts_smp_atomic_read
+#undef erts_smp_atomic_inctest
+#undef erts_smp_atomic_dectest
+#undef erts_smp_atomic_inc
+#undef erts_smp_atomic_dec
+#undef erts_smp_atomic_addtest
+#undef erts_smp_atomic_add
+#undef erts_smp_atomic_xchg
+#undef erts_smp_atomic_cmpxchg
+#undef erts_smp_atomic_bor
+#undef erts_smp_atomic_band
+
+#undef erts_smp_atomic32_init
+#undef erts_smp_atomic32_set
+#undef erts_smp_atomic32_read
+#undef erts_smp_atomic32_inctest
+#undef erts_smp_atomic32_dectest
+#undef erts_smp_atomic32_inc
+#undef erts_smp_atomic32_dec
+#undef erts_smp_atomic32_addtest
+#undef erts_smp_atomic32_add
+#undef erts_smp_atomic32_xchg
+#undef erts_smp_atomic32_cmpxchg
+#undef erts_smp_atomic32_bor
+#undef erts_smp_atomic32_band
+
+#endif
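
A source file that has been converted to the new API can define ERTS_UNDEF_DEPRECATED_ATOMICS before its includes to have the deprecated names undefined behind it; a minimal sketch (file contents hypothetical):

    /* some_converted_file.c (hypothetical) */
    #define ERTS_UNDEF_DEPRECATED_ATOMICS 1
    #include "erl_smp.h"

    /* From here on, erts_smp_atomic32_read(...) no longer exists;
     * an explicit variant such as erts_smp_atomic32_read_nob(...)
     * must be used instead. */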
diff --git a/erts/emulator/beam/erl_threads.h b/erts/emulator/beam/erl_threads.h
index 8c9cace0c5..9b897ffd24 100644
--- a/erts/emulator/beam/erl_threads.h
+++ b/erts/emulator/beam/erl_threads.h
@@ -28,6 +28,11 @@
#define ERTS_SPIN_BODY ETHR_SPIN_BODY
#include "sys.h"
+
+typedef struct { SWord sint[2]; } erts_no_dw_atomic_t;
+typedef SWord erts_no_atomic_t;
+typedef Sint32 erts_no_atomic32_t;
+
#ifdef USE_THREADS
#define ETHR_TRY_INLINE_FUNCS
@@ -99,10 +104,12 @@ typedef ethr_rwmutex_opt erts_rwmtx_opt_t;
typedef ethr_tsd_key erts_tsd_key_t;
typedef ethr_ts_event erts_tse_t;
-typedef ethr_sint_t erts_aint_t;
-typedef ethr_atomic_t erts_atomic_t;
-typedef ethr_sint32_t erts_aint32_t;
-typedef ethr_atomic32_t erts_atomic32_t;
+#define erts_dw_aint_t ethr_dw_sint_t
+#define erts_dw_atomic_t ethr_dw_atomic_t
+#define erts_aint_t ethr_sint_t
+#define erts_atomic_t ethr_atomic_t
+#define erts_aint32_t ethr_sint32_t
+#define erts_atomic32_t ethr_atomic32_t
/* spinlock */
typedef struct {
@@ -164,10 +171,12 @@ typedef struct {
typedef int erts_rwmtx_t;
typedef int erts_tsd_key_t;
typedef int erts_tse_t;
-typedef SWord erts_aint_t;
-typedef SWord erts_atomic_t;
-typedef SWord erts_aint32_t;
-typedef SWord erts_atomic32_t;
+#define erts_dw_aint_t erts_no_dw_atomic_t
+#define erts_dw_atomic_t erts_no_dw_atomic_t
+#define erts_aint_t SWord
+#define erts_atomic_t erts_no_atomic_t
+#define erts_aint32_t Sint32
+#define erts_atomic32_t erts_no_atomic32_t
#if __GNUC__ > 2
typedef struct { } erts_spinlock_t;
typedef struct { } erts_rwlock_t;
@@ -247,65 +256,51 @@ ERTS_GLB_INLINE int erts_rwmtx_tryrwlock(erts_rwmtx_t *rwmtx);
ERTS_GLB_INLINE void erts_rwmtx_rwunlock(erts_rwmtx_t *rwmtx);
ERTS_GLB_INLINE int erts_lc_rwmtx_is_rlocked(erts_rwmtx_t *mtx);
ERTS_GLB_INLINE int erts_lc_rwmtx_is_rwlocked(erts_rwmtx_t *mtx);
-ERTS_GLB_INLINE void erts_atomic_init(erts_atomic_t *var, erts_aint_t i);
-ERTS_GLB_INLINE void erts_atomic_set(erts_atomic_t *var, erts_aint_t i);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_read(erts_atomic_t *var);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_inctest(erts_atomic_t *incp);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_dectest(erts_atomic_t *decp);
-ERTS_GLB_INLINE void erts_atomic_inc(erts_atomic_t *incp);
-ERTS_GLB_INLINE void erts_atomic_dec(erts_atomic_t *decp);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_addtest(erts_atomic_t *addp,
- erts_aint_t i);
-ERTS_GLB_INLINE void erts_atomic_add(erts_atomic_t *addp, erts_aint_t i);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_xchg(erts_atomic_t *xchgp,
- erts_aint_t new);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg(erts_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t expected);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_bor(erts_atomic_t *var,
- erts_aint_t mask);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_band(erts_atomic_t *var,
- erts_aint_t mask);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_read_acqb(erts_atomic_t *var);
-ERTS_GLB_INLINE void erts_atomic_set_relb(erts_atomic_t *var, erts_aint_t i);
-ERTS_GLB_INLINE void erts_atomic_dec_relb(erts_atomic_t *decp);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_dectest_relb(erts_atomic_t *decp);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_acqb(erts_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp);
-ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_relb(erts_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp);
-ERTS_GLB_INLINE void erts_atomic32_init(erts_atomic32_t *var, erts_aint32_t i);
-ERTS_GLB_INLINE void erts_atomic32_set(erts_atomic32_t *var, erts_aint32_t i);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_read(erts_atomic32_t *var);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_inctest(erts_atomic32_t *incp);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_dectest(erts_atomic32_t *decp);
-ERTS_GLB_INLINE void erts_atomic32_inc(erts_atomic32_t *incp);
-ERTS_GLB_INLINE void erts_atomic32_dec(erts_atomic32_t *decp);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_addtest(erts_atomic32_t *addp,
- erts_aint32_t i);
-ERTS_GLB_INLINE void erts_atomic32_add(erts_atomic32_t *addp, erts_aint32_t i);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_xchg(erts_atomic32_t *xchgp,
- erts_aint32_t new);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_cmpxchg(erts_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t expected);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_bor(erts_atomic32_t *var,
- erts_aint32_t mask);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_band(erts_atomic32_t *var,
- erts_aint32_t mask);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_read_acqb(erts_atomic32_t *var);
-ERTS_GLB_INLINE void erts_atomic32_set_relb(erts_atomic32_t *var,
- erts_aint32_t i);
-ERTS_GLB_INLINE void erts_atomic32_dec_relb(erts_atomic32_t *decp);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_dectest_relb(erts_atomic32_t *decp);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_cmpxchg_acqb(erts_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp);
-ERTS_GLB_INLINE erts_aint32_t erts_atomic32_cmpxchg_relb(erts_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp);
+
+ERTS_GLB_INLINE void erts_no_dw_atomic_set(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val);
+ERTS_GLB_INLINE void erts_no_dw_atomic_read(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val);
+ERTS_GLB_INLINE int erts_no_dw_atomic_cmpxchg(erts_no_dw_atomic_t *var,
+ erts_no_dw_atomic_t *val,
+ erts_no_dw_atomic_t *old_val);
+ERTS_GLB_INLINE void erts_no_atomic_set(erts_no_atomic_t *var, erts_aint_t i);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_read(erts_no_atomic_t *var);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_inc_read(erts_no_atomic_t *incp);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_dec_read(erts_no_atomic_t *decp);
+ERTS_GLB_INLINE void erts_no_atomic_inc(erts_no_atomic_t *incp);
+ERTS_GLB_INLINE void erts_no_atomic_dec(erts_no_atomic_t *decp);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_add_read(erts_no_atomic_t *addp,
+ erts_aint_t i);
+ERTS_GLB_INLINE void erts_no_atomic_add(erts_no_atomic_t *addp, erts_aint_t i);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_read_bor(erts_no_atomic_t *var,
+ erts_aint_t mask);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_read_band(erts_no_atomic_t *var,
+ erts_aint_t mask);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_xchg(erts_no_atomic_t *xchgp,
+ erts_aint_t new);
+ERTS_GLB_INLINE erts_aint_t erts_no_atomic_cmpxchg(erts_no_atomic_t *xchgp,
+ erts_aint_t new,
+ erts_aint_t expected);
+ERTS_GLB_INLINE void erts_no_atomic32_set(erts_no_atomic32_t *var,
+ erts_aint32_t i);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_read(erts_no_atomic32_t *var);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_inc_read(erts_no_atomic32_t *incp);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_dec_read(erts_no_atomic32_t *decp);
+ERTS_GLB_INLINE void erts_no_atomic32_inc(erts_no_atomic32_t *incp);
+ERTS_GLB_INLINE void erts_no_atomic32_dec(erts_no_atomic32_t *decp);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_add_read(erts_no_atomic32_t *addp,
+ erts_aint32_t i);
+ERTS_GLB_INLINE void erts_no_atomic32_add(erts_no_atomic32_t *addp,
+ erts_aint32_t i);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_read_bor(erts_no_atomic32_t *var,
+ erts_aint32_t mask);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_read_band(erts_no_atomic32_t *var,
+ erts_aint32_t mask);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_xchg(erts_no_atomic32_t *xchgp,
+ erts_aint32_t new);
+ERTS_GLB_INLINE erts_aint32_t erts_no_atomic32_cmpxchg(erts_no_atomic32_t *xchgp,
+ erts_aint32_t new,
+ erts_aint32_t expected);
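
The erts_no_* fallbacks perform the operation without atomicity or memory barriers; sketches of two of them, consistent with the non-SMP branches removed from erl_smp.h above:

    ERTS_GLB_INLINE erts_aint_t
    erts_no_atomic_inc_read(erts_no_atomic_t *incp)
    {
        return ++(*incp);           /* plain increment, then read */
    }

    ERTS_GLB_INLINE erts_aint_t
    erts_no_atomic_read_bor(erts_no_atomic_t *var, erts_aint_t mask)
    {
        erts_aint_t old = *var;     /* value before the operation */
        *var |= mask;
        return old;
    }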
+
ERTS_GLB_INLINE void erts_spinlock_init_x_opt(erts_spinlock_t *lock,
char *name,
Eterm extra,
@@ -362,6 +357,430 @@ ERTS_GLB_INLINE void erts_thr_sigmask(int how, const sigset_t *set,
ERTS_GLB_INLINE void erts_thr_sigwait(const sigset_t *set, int *sig);
#endif /* #ifdef HAVE_ETHR_SIG_FUNCS */
+/*
+ * Functions implementing atomic operations with no (nob),
+ * full (mb), acquire (acqb), release (relb), read (rb), and
+ * write (wb) memory barriers.
+ *
+ * If thread support has been disabled, they are mapped to
+ * functions that perform the same operation, but aren't atomic
+ * and don't imply memory barriers.
+ */
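
A sketch of double word atomic usage through the mappings below (var and the increment are hypothetical; this assumes cmpxchg returns nonzero on success, matching the int return of erts_no_dw_atomic_cmpxchg declared above):

    erts_dw_atomic_t var;
    erts_dw_aint_t old_val, new_val;

    do {
        erts_dw_atomic_read_nob(&var, &old_val);   /* value via out-param */
        new_val.sint[0] = old_val.sint[0] + 1;     /* bump low word       */
        new_val.sint[1] = old_val.sint[1];         /* keep high word      */
    } while (!erts_dw_atomic_cmpxchg_nob(&var, &new_val, &old_val));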
+
+#ifdef USE_THREADS
+
+/* Double word size atomics */
+
+#define erts_dw_atomic_init_nob ethr_dw_atomic_init
+#define erts_dw_atomic_set_nob ethr_dw_atomic_set
+#define erts_dw_atomic_read_nob ethr_dw_atomic_read
+#define erts_dw_atomic_cmpxchg_nob ethr_dw_atomic_cmpxchg
+
+#define erts_dw_atomic_init_mb ethr_dw_atomic_init_mb
+#define erts_dw_atomic_set_mb ethr_dw_atomic_set_mb
+#define erts_dw_atomic_read_mb ethr_dw_atomic_read_mb
+#define erts_dw_atomic_cmpxchg_mb ethr_dw_atomic_cmpxchg_mb
+
+#define erts_dw_atomic_init_acqb ethr_dw_atomic_init_acqb
+#define erts_dw_atomic_set_acqb ethr_dw_atomic_set_acqb
+#define erts_dw_atomic_read_acqb ethr_dw_atomic_read_acqb
+#define erts_dw_atomic_cmpxchg_acqb ethr_dw_atomic_cmpxchg_acqb
+
+#define erts_dw_atomic_init_relb ethr_dw_atomic_init_relb
+#define erts_dw_atomic_set_relb ethr_dw_atomic_set_relb
+#define erts_dw_atomic_read_relb ethr_dw_atomic_read_relb
+#define erts_dw_atomic_cmpxchg_relb ethr_dw_atomic_cmpxchg_relb
+
+#define erts_dw_atomic_init_rb ethr_dw_atomic_init_rb
+#define erts_dw_atomic_set_rb ethr_dw_atomic_set_rb
+#define erts_dw_atomic_read_rb ethr_dw_atomic_read_rb
+#define erts_dw_atomic_cmpxchg_rb ethr_dw_atomic_cmpxchg_rb
+
+#define erts_dw_atomic_init_wb ethr_dw_atomic_init_wb
+#define erts_dw_atomic_set_wb ethr_dw_atomic_set_wb
+#define erts_dw_atomic_read_wb ethr_dw_atomic_read_wb
+#define erts_dw_atomic_cmpxchg_wb ethr_dw_atomic_cmpxchg_wb
+
+/* Word size atomics */
+
+#define erts_atomic_init_nob ethr_atomic_init
+#define erts_atomic_set_nob ethr_atomic_set
+#define erts_atomic_read_nob ethr_atomic_read
+#define erts_atomic_inc_read_nob ethr_atomic_inc_read
+#define erts_atomic_dec_read_nob ethr_atomic_dec_read
+#define erts_atomic_inc_nob ethr_atomic_inc
+#define erts_atomic_dec_nob ethr_atomic_dec
+#define erts_atomic_add_read_nob ethr_atomic_add_read
+#define erts_atomic_add_nob ethr_atomic_add
+#define erts_atomic_read_bor_nob ethr_atomic_read_bor
+#define erts_atomic_read_band_nob ethr_atomic_read_band
+#define erts_atomic_xchg_nob ethr_atomic_xchg
+#define erts_atomic_cmpxchg_nob ethr_atomic_cmpxchg
+
+#define erts_atomic_init_mb ethr_atomic_init_mb
+#define erts_atomic_set_mb ethr_atomic_set_mb
+#define erts_atomic_read_mb ethr_atomic_read_mb
+#define erts_atomic_inc_read_mb ethr_atomic_inc_read_mb
+#define erts_atomic_dec_read_mb ethr_atomic_dec_read_mb
+#define erts_atomic_inc_mb ethr_atomic_inc_mb
+#define erts_atomic_dec_mb ethr_atomic_dec_mb
+#define erts_atomic_add_read_mb ethr_atomic_add_read_mb
+#define erts_atomic_add_mb ethr_atomic_add_mb
+#define erts_atomic_read_bor_mb ethr_atomic_read_bor_mb
+#define erts_atomic_read_band_mb ethr_atomic_read_band_mb
+#define erts_atomic_xchg_mb ethr_atomic_xchg_mb
+#define erts_atomic_cmpxchg_mb ethr_atomic_cmpxchg_mb
+
+#define erts_atomic_init_acqb ethr_atomic_init_acqb
+#define erts_atomic_set_acqb ethr_atomic_set_acqb
+#define erts_atomic_read_acqb ethr_atomic_read_acqb
+#define erts_atomic_inc_read_acqb ethr_atomic_inc_read_acqb
+#define erts_atomic_dec_read_acqb ethr_atomic_dec_read_acqb
+#define erts_atomic_inc_acqb ethr_atomic_inc_acqb
+#define erts_atomic_dec_acqb ethr_atomic_dec_acqb
+#define erts_atomic_add_read_acqb ethr_atomic_add_read_acqb
+#define erts_atomic_add_acqb ethr_atomic_add_acqb
+#define erts_atomic_read_bor_acqb ethr_atomic_read_bor_acqb
+#define erts_atomic_read_band_acqb ethr_atomic_read_band_acqb
+#define erts_atomic_xchg_acqb ethr_atomic_xchg_acqb
+#define erts_atomic_cmpxchg_acqb ethr_atomic_cmpxchg_acqb
+
+#define erts_atomic_init_relb ethr_atomic_init_relb
+#define erts_atomic_set_relb ethr_atomic_set_relb
+#define erts_atomic_read_relb ethr_atomic_read_relb
+#define erts_atomic_inc_read_relb ethr_atomic_inc_read_relb
+#define erts_atomic_dec_read_relb ethr_atomic_dec_read_relb
+#define erts_atomic_inc_relb ethr_atomic_inc_relb
+#define erts_atomic_dec_relb ethr_atomic_dec_relb
+#define erts_atomic_add_read_relb ethr_atomic_add_read_relb
+#define erts_atomic_add_relb ethr_atomic_add_relb
+#define erts_atomic_read_bor_relb ethr_atomic_read_bor_relb
+#define erts_atomic_read_band_relb ethr_atomic_read_band_relb
+#define erts_atomic_xchg_relb ethr_atomic_xchg_relb
+#define erts_atomic_cmpxchg_relb ethr_atomic_cmpxchg_relb
+
+#define erts_atomic_init_rb ethr_atomic_init_rb
+#define erts_atomic_set_rb ethr_atomic_set_rb
+#define erts_atomic_read_rb ethr_atomic_read_rb
+#define erts_atomic_inc_read_rb ethr_atomic_inc_read_rb
+#define erts_atomic_dec_read_rb ethr_atomic_dec_read_rb
+#define erts_atomic_inc_rb ethr_atomic_inc_rb
+#define erts_atomic_dec_rb ethr_atomic_dec_rb
+#define erts_atomic_add_read_rb ethr_atomic_add_read_rb
+#define erts_atomic_add_rb ethr_atomic_add_rb
+#define erts_atomic_read_bor_rb ethr_atomic_read_bor_rb
+#define erts_atomic_read_band_rb ethr_atomic_read_band_rb
+#define erts_atomic_xchg_rb ethr_atomic_xchg_rb
+#define erts_atomic_cmpxchg_rb ethr_atomic_cmpxchg_rb
+
+#define erts_atomic_init_wb ethr_atomic_init_wb
+#define erts_atomic_set_wb ethr_atomic_set_wb
+#define erts_atomic_read_wb ethr_atomic_read_wb
+#define erts_atomic_inc_read_wb ethr_atomic_inc_read_wb
+#define erts_atomic_dec_read_wb ethr_atomic_dec_read_wb
+#define erts_atomic_inc_wb ethr_atomic_inc_wb
+#define erts_atomic_dec_wb ethr_atomic_dec_wb
+#define erts_atomic_add_read_wb ethr_atomic_add_read_wb
+#define erts_atomic_add_wb ethr_atomic_add_wb
+#define erts_atomic_read_bor_wb ethr_atomic_read_bor_wb
+#define erts_atomic_read_band_wb ethr_atomic_read_band_wb
+#define erts_atomic_xchg_wb ethr_atomic_xchg_wb
+#define erts_atomic_cmpxchg_wb ethr_atomic_cmpxchg_wb
+
+/* 32-bit atomics */
+
+#define erts_atomic32_init_nob ethr_atomic32_init
+#define erts_atomic32_set_nob ethr_atomic32_set
+#define erts_atomic32_read_nob ethr_atomic32_read
+#define erts_atomic32_inc_read_nob ethr_atomic32_inc_read
+#define erts_atomic32_dec_read_nob ethr_atomic32_dec_read
+#define erts_atomic32_inc_nob ethr_atomic32_inc
+#define erts_atomic32_dec_nob ethr_atomic32_dec
+#define erts_atomic32_add_read_nob ethr_atomic32_add_read
+#define erts_atomic32_add_nob ethr_atomic32_add
+#define erts_atomic32_read_bor_nob ethr_atomic32_read_bor
+#define erts_atomic32_read_band_nob ethr_atomic32_read_band
+#define erts_atomic32_xchg_nob ethr_atomic32_xchg
+#define erts_atomic32_cmpxchg_nob ethr_atomic32_cmpxchg
+
+#define erts_atomic32_init_mb ethr_atomic32_init_mb
+#define erts_atomic32_set_mb ethr_atomic32_set_mb
+#define erts_atomic32_read_mb ethr_atomic32_read_mb
+#define erts_atomic32_inc_read_mb ethr_atomic32_inc_read_mb
+#define erts_atomic32_dec_read_mb ethr_atomic32_dec_read_mb
+#define erts_atomic32_inc_mb ethr_atomic32_inc_mb
+#define erts_atomic32_dec_mb ethr_atomic32_dec_mb
+#define erts_atomic32_add_read_mb ethr_atomic32_add_read_mb
+#define erts_atomic32_add_mb ethr_atomic32_add_mb
+#define erts_atomic32_read_bor_mb ethr_atomic32_read_bor_mb
+#define erts_atomic32_read_band_mb ethr_atomic32_read_band_mb
+#define erts_atomic32_xchg_mb ethr_atomic32_xchg_mb
+#define erts_atomic32_cmpxchg_mb ethr_atomic32_cmpxchg_mb
+
+#define erts_atomic32_init_acqb ethr_atomic32_init_acqb
+#define erts_atomic32_set_acqb ethr_atomic32_set_acqb
+#define erts_atomic32_read_acqb ethr_atomic32_read_acqb
+#define erts_atomic32_inc_read_acqb ethr_atomic32_inc_read_acqb
+#define erts_atomic32_dec_read_acqb ethr_atomic32_dec_read_acqb
+#define erts_atomic32_inc_acqb ethr_atomic32_inc_acqb
+#define erts_atomic32_dec_acqb ethr_atomic32_dec_acqb
+#define erts_atomic32_add_read_acqb ethr_atomic32_add_read_acqb
+#define erts_atomic32_add_acqb ethr_atomic32_add_acqb
+#define erts_atomic32_read_bor_acqb ethr_atomic32_read_bor_acqb
+#define erts_atomic32_read_band_acqb ethr_atomic32_read_band_acqb
+#define erts_atomic32_xchg_acqb ethr_atomic32_xchg_acqb
+#define erts_atomic32_cmpxchg_acqb ethr_atomic32_cmpxchg_acqb
+
+#define erts_atomic32_init_relb ethr_atomic32_init_relb
+#define erts_atomic32_set_relb ethr_atomic32_set_relb
+#define erts_atomic32_read_relb ethr_atomic32_read_relb
+#define erts_atomic32_inc_read_relb ethr_atomic32_inc_read_relb
+#define erts_atomic32_dec_read_relb ethr_atomic32_dec_read_relb
+#define erts_atomic32_inc_relb ethr_atomic32_inc_relb
+#define erts_atomic32_dec_relb ethr_atomic32_dec_relb
+#define erts_atomic32_add_read_relb ethr_atomic32_add_read_relb
+#define erts_atomic32_add_relb ethr_atomic32_add_relb
+#define erts_atomic32_read_bor_relb ethr_atomic32_read_bor_relb
+#define erts_atomic32_read_band_relb ethr_atomic32_read_band_relb
+#define erts_atomic32_xchg_relb ethr_atomic32_xchg_relb
+#define erts_atomic32_cmpxchg_relb ethr_atomic32_cmpxchg_relb
+
+#define erts_atomic32_init_rb ethr_atomic32_init_rb
+#define erts_atomic32_set_rb ethr_atomic32_set_rb
+#define erts_atomic32_read_rb ethr_atomic32_read_rb
+#define erts_atomic32_inc_read_rb ethr_atomic32_inc_read_rb
+#define erts_atomic32_dec_read_rb ethr_atomic32_dec_read_rb
+#define erts_atomic32_inc_rb ethr_atomic32_inc_rb
+#define erts_atomic32_dec_rb ethr_atomic32_dec_rb
+#define erts_atomic32_add_read_rb ethr_atomic32_add_read_rb
+#define erts_atomic32_add_rb ethr_atomic32_add_rb
+#define erts_atomic32_read_bor_rb ethr_atomic32_read_bor_rb
+#define erts_atomic32_read_band_rb ethr_atomic32_read_band_rb
+#define erts_atomic32_xchg_rb ethr_atomic32_xchg_rb
+#define erts_atomic32_cmpxchg_rb ethr_atomic32_cmpxchg_rb
+
+#define erts_atomic32_init_wb ethr_atomic32_init_wb
+#define erts_atomic32_set_wb ethr_atomic32_set_wb
+#define erts_atomic32_read_wb ethr_atomic32_read_wb
+#define erts_atomic32_inc_read_wb ethr_atomic32_inc_read_wb
+#define erts_atomic32_dec_read_wb ethr_atomic32_dec_read_wb
+#define erts_atomic32_inc_wb ethr_atomic32_inc_wb
+#define erts_atomic32_dec_wb ethr_atomic32_dec_wb
+#define erts_atomic32_add_read_wb ethr_atomic32_add_read_wb
+#define erts_atomic32_add_wb ethr_atomic32_add_wb
+#define erts_atomic32_read_bor_wb ethr_atomic32_read_bor_wb
+#define erts_atomic32_read_band_wb ethr_atomic32_read_band_wb
+#define erts_atomic32_xchg_wb ethr_atomic32_xchg_wb
+#define erts_atomic32_cmpxchg_wb ethr_atomic32_cmpxchg_wb
+
+#else /* !USE_THREADS */
+
+/* Double word size atomics */
+
+#define erts_dw_atomic_init_nob erts_no_dw_atomic_set
+#define erts_dw_atomic_set_nob erts_no_dw_atomic_set
+#define erts_dw_atomic_read_nob erts_no_dw_atomic_read
+#define erts_dw_atomic_cmpxchg_nob erts_no_dw_atomic_cmpxchg
+
+#define erts_dw_atomic_init_mb erts_no_dw_atomic_set
+#define erts_dw_atomic_set_mb erts_no_dw_atomic_set
+#define erts_dw_atomic_read_mb erts_no_dw_atomic_read
+#define erts_dw_atomic_cmpxchg_mb erts_no_dw_atomic_cmpxchg
+
+#define erts_dw_atomic_init_acqb erts_no_dw_atomic_set
+#define erts_dw_atomic_set_acqb erts_no_dw_atomic_set
+#define erts_dw_atomic_read_acqb erts_no_dw_atomic_read
+#define erts_dw_atomic_cmpxchg_acqb erts_no_dw_atomic_cmpxchg
+
+#define erts_dw_atomic_init_relb erts_no_dw_atomic_set
+#define erts_dw_atomic_set_relb erts_no_dw_atomic_set
+#define erts_dw_atomic_read_relb erts_no_dw_atomic_read
+#define erts_dw_atomic_cmpxchg_relb erts_no_dw_atomic_cmpxchg
+
+#define erts_dw_atomic_init_rb erts_no_dw_atomic_set
+#define erts_dw_atomic_set_rb erts_no_dw_atomic_set
+#define erts_dw_atomic_read_rb erts_no_dw_atomic_read
+#define erts_dw_atomic_cmpxchg_rb erts_no_dw_atomic_cmpxchg
+
+#define erts_dw_atomic_init_wb erts_no_dw_atomic_set
+#define erts_dw_atomic_set_wb erts_no_dw_atomic_set
+#define erts_dw_atomic_read_wb erts_no_dw_atomic_read
+#define erts_dw_atomic_cmpxchg_wb erts_no_dw_atomic_cmpxchg
+
+/* Word size atomics */
+
+#define erts_atomic_init_nob erts_no_atomic_set
+#define erts_atomic_set_nob erts_no_atomic_set
+#define erts_atomic_read_nob erts_no_atomic_read
+#define erts_atomic_inc_read_nob erts_no_atomic_inc_read
+#define erts_atomic_dec_read_nob erts_no_atomic_dec_read
+#define erts_atomic_inc_nob erts_no_atomic_inc
+#define erts_atomic_dec_nob erts_no_atomic_dec
+#define erts_atomic_add_read_nob erts_no_atomic_add_read
+#define erts_atomic_add_nob erts_no_atomic_add
+#define erts_atomic_read_bor_nob erts_no_atomic_read_bor
+#define erts_atomic_read_band_nob erts_no_atomic_read_band
+#define erts_atomic_xchg_nob erts_no_atomic_xchg
+#define erts_atomic_cmpxchg_nob erts_no_atomic_cmpxchg
+
+#define erts_atomic_init_mb erts_no_atomic_set
+#define erts_atomic_set_mb erts_no_atomic_set
+#define erts_atomic_read_mb erts_no_atomic_read
+#define erts_atomic_inc_read_mb erts_no_atomic_inc_read
+#define erts_atomic_dec_read_mb erts_no_atomic_dec_read
+#define erts_atomic_inc_mb erts_no_atomic_inc
+#define erts_atomic_dec_mb erts_no_atomic_dec
+#define erts_atomic_add_read_mb erts_no_atomic_add_read
+#define erts_atomic_add_mb erts_no_atomic_add
+#define erts_atomic_read_bor_mb erts_no_atomic_read_bor
+#define erts_atomic_read_band_mb erts_no_atomic_read_band
+#define erts_atomic_xchg_mb erts_no_atomic_xchg
+#define erts_atomic_cmpxchg_mb erts_no_atomic_cmpxchg
+
+#define erts_atomic_init_acqb erts_no_atomic_set
+#define erts_atomic_set_acqb erts_no_atomic_set
+#define erts_atomic_read_acqb erts_no_atomic_read
+#define erts_atomic_inc_read_acqb erts_no_atomic_inc_read
+#define erts_atomic_dec_read_acqb erts_no_atomic_dec_read
+#define erts_atomic_inc_acqb erts_no_atomic_inc
+#define erts_atomic_dec_acqb erts_no_atomic_dec
+#define erts_atomic_add_read_acqb erts_no_atomic_add_read
+#define erts_atomic_add_acqb erts_no_atomic_add
+#define erts_atomic_read_bor_acqb erts_no_atomic_read_bor
+#define erts_atomic_read_band_acqb erts_no_atomic_read_band
+#define erts_atomic_xchg_acqb erts_no_atomic_xchg
+#define erts_atomic_cmpxchg_acqb erts_no_atomic_cmpxchg
+
+#define erts_atomic_init_relb erts_no_atomic_set
+#define erts_atomic_set_relb erts_no_atomic_set
+#define erts_atomic_read_relb erts_no_atomic_read
+#define erts_atomic_inc_read_relb erts_no_atomic_inc_read
+#define erts_atomic_dec_read_relb erts_no_atomic_dec_read
+#define erts_atomic_inc_relb erts_no_atomic_inc
+#define erts_atomic_dec_relb erts_no_atomic_dec
+#define erts_atomic_add_read_relb erts_no_atomic_add_read
+#define erts_atomic_add_relb erts_no_atomic_add
+#define erts_atomic_read_bor_relb erts_no_atomic_read_bor
+#define erts_atomic_read_band_relb erts_no_atomic_read_band
+#define erts_atomic_xchg_relb erts_no_atomic_xchg
+#define erts_atomic_cmpxchg_relb erts_no_atomic_cmpxchg
+
+#define erts_atomic_init_rb erts_no_atomic_set
+#define erts_atomic_set_rb erts_no_atomic_set
+#define erts_atomic_read_rb erts_no_atomic_read
+#define erts_atomic_inc_read_rb erts_no_atomic_inc_read
+#define erts_atomic_dec_read_rb erts_no_atomic_dec_read
+#define erts_atomic_inc_rb erts_no_atomic_inc
+#define erts_atomic_dec_rb erts_no_atomic_dec
+#define erts_atomic_add_read_rb erts_no_atomic_add_read
+#define erts_atomic_add_rb erts_no_atomic_add
+#define erts_atomic_read_bor_rb erts_no_atomic_read_bor
+#define erts_atomic_read_band_rb erts_no_atomic_read_band
+#define erts_atomic_xchg_rb erts_no_atomic_xchg
+#define erts_atomic_cmpxchg_rb erts_no_atomic_cmpxchg
+
+#define erts_atomic_init_wb erts_no_atomic_set
+#define erts_atomic_set_wb erts_no_atomic_set
+#define erts_atomic_read_wb erts_no_atomic_read
+#define erts_atomic_inc_read_wb erts_no_atomic_inc_read
+#define erts_atomic_dec_read_wb erts_no_atomic_dec_read
+#define erts_atomic_inc_wb erts_no_atomic_inc
+#define erts_atomic_dec_wb erts_no_atomic_dec
+#define erts_atomic_add_read_wb erts_no_atomic_add_read
+#define erts_atomic_add_wb erts_no_atomic_add
+#define erts_atomic_read_bor_wb erts_no_atomic_read_bor
+#define erts_atomic_read_band_wb erts_no_atomic_read_band
+#define erts_atomic_xchg_wb erts_no_atomic_xchg
+#define erts_atomic_cmpxchg_wb erts_no_atomic_cmpxchg
+
+/* 32-bit atomics */
+
+#define erts_atomic32_init_nob erts_no_atomic32_set
+#define erts_atomic32_set_nob erts_no_atomic32_set
+#define erts_atomic32_read_nob erts_no_atomic32_read
+#define erts_atomic32_inc_read_nob erts_no_atomic32_inc_read
+#define erts_atomic32_dec_read_nob erts_no_atomic32_dec_read
+#define erts_atomic32_inc_nob erts_no_atomic32_inc
+#define erts_atomic32_dec_nob erts_no_atomic32_dec
+#define erts_atomic32_add_read_nob erts_no_atomic32_add_read
+#define erts_atomic32_add_nob erts_no_atomic32_add
+#define erts_atomic32_read_bor_nob erts_no_atomic32_read_bor
+#define erts_atomic32_read_band_nob erts_no_atomic32_read_band
+#define erts_atomic32_xchg_nob erts_no_atomic32_xchg
+#define erts_atomic32_cmpxchg_nob erts_no_atomic32_cmpxchg
+
+#define erts_atomic32_init_mb erts_no_atomic32_set
+#define erts_atomic32_set_mb erts_no_atomic32_set
+#define erts_atomic32_read_mb erts_no_atomic32_read
+#define erts_atomic32_inc_read_mb erts_no_atomic32_inc_read
+#define erts_atomic32_dec_read_mb erts_no_atomic32_dec_read
+#define erts_atomic32_inc_mb erts_no_atomic32_inc
+#define erts_atomic32_dec_mb erts_no_atomic32_dec
+#define erts_atomic32_add_read_mb erts_no_atomic32_add_read
+#define erts_atomic32_add_mb erts_no_atomic32_add
+#define erts_atomic32_read_bor_mb erts_no_atomic32_read_bor
+#define erts_atomic32_read_band_mb erts_no_atomic32_read_band
+#define erts_atomic32_xchg_mb erts_no_atomic32_xchg
+#define erts_atomic32_cmpxchg_mb erts_no_atomic32_cmpxchg
+
+#define erts_atomic32_init_acqb erts_no_atomic32_set
+#define erts_atomic32_set_acqb erts_no_atomic32_set
+#define erts_atomic32_read_acqb erts_no_atomic32_read
+#define erts_atomic32_inc_read_acqb erts_no_atomic32_inc_read
+#define erts_atomic32_dec_read_acqb erts_no_atomic32_dec_read
+#define erts_atomic32_inc_acqb erts_no_atomic32_inc
+#define erts_atomic32_dec_acqb erts_no_atomic32_dec
+#define erts_atomic32_add_read_acqb erts_no_atomic32_add_read
+#define erts_atomic32_add_acqb erts_no_atomic32_add
+#define erts_atomic32_read_bor_acqb erts_no_atomic32_read_bor
+#define erts_atomic32_read_band_acqb erts_no_atomic32_read_band
+#define erts_atomic32_xchg_acqb erts_no_atomic32_xchg
+#define erts_atomic32_cmpxchg_acqb erts_no_atomic32_cmpxchg
+
+#define erts_atomic32_init_relb erts_no_atomic32_set
+#define erts_atomic32_set_relb erts_no_atomic32_set
+#define erts_atomic32_read_relb erts_no_atomic32_read
+#define erts_atomic32_inc_read_relb erts_no_atomic32_inc_read
+#define erts_atomic32_dec_read_relb erts_no_atomic32_dec_read
+#define erts_atomic32_inc_relb erts_no_atomic32_inc
+#define erts_atomic32_dec_relb erts_no_atomic32_dec
+#define erts_atomic32_add_read_relb erts_no_atomic32_add_read
+#define erts_atomic32_add_relb erts_no_atomic32_add
+#define erts_atomic32_read_bor_relb erts_no_atomic32_read_bor
+#define erts_atomic32_read_band_relb erts_no_atomic32_read_band
+#define erts_atomic32_xchg_relb erts_no_atomic32_xchg
+#define erts_atomic32_cmpxchg_relb erts_no_atomic32_cmpxchg
+
+#define erts_atomic32_init_rb erts_no_atomic32_set
+#define erts_atomic32_set_rb erts_no_atomic32_set
+#define erts_atomic32_read_rb erts_no_atomic32_read
+#define erts_atomic32_inc_read_rb erts_no_atomic32_inc_read
+#define erts_atomic32_dec_read_rb erts_no_atomic32_dec_read
+#define erts_atomic32_inc_rb erts_no_atomic32_inc
+#define erts_atomic32_dec_rb erts_no_atomic32_dec
+#define erts_atomic32_add_read_rb erts_no_atomic32_add_read
+#define erts_atomic32_add_rb erts_no_atomic32_add
+#define erts_atomic32_read_bor_rb erts_no_atomic32_read_bor
+#define erts_atomic32_read_band_rb erts_no_atomic32_read_band
+#define erts_atomic32_xchg_rb erts_no_atomic32_xchg
+#define erts_atomic32_cmpxchg_rb erts_no_atomic32_cmpxchg
+
+#define erts_atomic32_init_wb erts_no_atomic32_set
+#define erts_atomic32_set_wb erts_no_atomic32_set
+#define erts_atomic32_read_wb erts_no_atomic32_read
+#define erts_atomic32_inc_read_wb erts_no_atomic32_inc_read
+#define erts_atomic32_dec_read_wb erts_no_atomic32_dec_read
+#define erts_atomic32_inc_wb erts_no_atomic32_inc
+#define erts_atomic32_dec_wb erts_no_atomic32_dec
+#define erts_atomic32_add_read_wb erts_no_atomic32_add_read
+#define erts_atomic32_add_wb erts_no_atomic32_add
+#define erts_atomic32_read_bor_wb erts_no_atomic32_read_bor
+#define erts_atomic32_read_band_wb erts_no_atomic32_read_band
+#define erts_atomic32_xchg_wb erts_no_atomic32_xchg
+#define erts_atomic32_cmpxchg_wb erts_no_atomic32_cmpxchg
+
+#endif /* !USE_THREADS */
+
#if ERTS_GLB_INLINE_INCL_FUNC_DEF
ERTS_GLB_INLINE void
@@ -995,428 +1414,206 @@ erts_lc_rwmtx_is_rwlocked(erts_rwmtx_t *mtx)
#endif
}
+/* Fallbacks used when no real atomic ops are needed (non-threaded build) */
+
ERTS_GLB_INLINE void
-erts_atomic_init(erts_atomic_t *var, erts_aint_t i)
+erts_no_dw_atomic_set(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val)
{
-#ifdef USE_THREADS
- ethr_atomic_init(var, i);
-#else
- *var = i;
-#endif
+ var->sint[0] = val->sint[0];
+ var->sint[1] = val->sint[1];
}
ERTS_GLB_INLINE void
-erts_atomic_set(erts_atomic_t *var, erts_aint_t i)
+erts_no_dw_atomic_read(erts_no_dw_atomic_t *var, erts_no_dw_atomic_t *val)
+{
+ val->sint[0] = var->sint[0];
+ val->sint[1] = var->sint[1];
+}
+
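+/* Fallback double-word cmpxchg for the non-threaded case. On success the
+ * new value is stored and a non-zero result is returned; on failure the
+ * current contents of *var are copied back into *old_val, so the caller
+ * can retry without issuing a separate read. */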
+ERTS_GLB_INLINE int erts_no_dw_atomic_cmpxchg(erts_no_dw_atomic_t *var,
+ erts_no_dw_atomic_t *new_val,
+ erts_no_dw_atomic_t *old_val)
+{
+ if (var->sint[0] != old_val->sint[0] || var->sint[1] != old_val->sint[1]) {
+ erts_no_dw_atomic_read(var, old_val);
+ return 0;
+ }
+ else {
+ erts_no_dw_atomic_set(var, new_val);
+ return !0;
+ }
+}
+
+ERTS_GLB_INLINE void
+erts_no_atomic_set(erts_no_atomic_t *var, erts_aint_t i)
{
-#ifdef USE_THREADS
- ethr_atomic_set(var, i);
-#else
*var = i;
-#endif
}
ERTS_GLB_INLINE erts_aint_t
-erts_atomic_read(erts_atomic_t *var)
+erts_no_atomic_read(erts_no_atomic_t *var)
{
-#ifdef USE_THREADS
- return ethr_atomic_read(var);
-#else
return *var;
-#endif
}
ERTS_GLB_INLINE erts_aint_t
-erts_atomic_inctest(erts_atomic_t *incp)
+erts_no_atomic_inc_read(erts_no_atomic_t *incp)
{
-#ifdef USE_THREADS
- return ethr_atomic_inc_read(incp);
-#else
return ++(*incp);
-#endif
}
ERTS_GLB_INLINE erts_aint_t
-erts_atomic_dectest(erts_atomic_t *decp)
+erts_no_atomic_dec_read(erts_no_atomic_t *decp)
{
-#ifdef USE_THREADS
- return ethr_atomic_dec_read(decp);
-#else
return --(*decp);
-#endif
}
ERTS_GLB_INLINE void
-erts_atomic_inc(erts_atomic_t *incp)
+erts_no_atomic_inc(erts_no_atomic_t *incp)
{
-#ifdef USE_THREADS
- ethr_atomic_inc(incp);
-#else
++(*incp);
-#endif
}
ERTS_GLB_INLINE void
-erts_atomic_dec(erts_atomic_t *decp)
+erts_no_atomic_dec(erts_no_atomic_t *decp)
{
-#ifdef USE_THREADS
- ethr_atomic_dec(decp);
-#else
--(*decp);
-#endif
}
ERTS_GLB_INLINE erts_aint_t
-erts_atomic_addtest(erts_atomic_t *addp, erts_aint_t i)
+erts_no_atomic_add_read(erts_no_atomic_t *addp, erts_aint_t i)
{
-#ifdef USE_THREADS
- return ethr_atomic_add_read(addp, i);
-#else
return *addp += i;
-#endif
}
ERTS_GLB_INLINE void
-erts_atomic_add(erts_atomic_t *addp, erts_aint_t i)
+erts_no_atomic_add(erts_no_atomic_t *addp, erts_aint_t i)
{
-#ifdef USE_THREADS
- ethr_atomic_add(addp, i);
-#else
*addp += i;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_atomic_xchg(erts_atomic_t *xchgp, erts_aint_t new)
-{
-#ifdef USE_THREADS
- return ethr_atomic_xchg(xchgp, new);
-#else
- erts_aint_t old = *xchgp;
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_atomic_cmpxchg(erts_atomic_t *xchgp, erts_aint_t new, erts_aint_t expected)
-{
-#ifdef USE_THREADS
- return ethr_atomic_cmpxchg(xchgp, new, expected);
-#else
- erts_aint_t old = *xchgp;
- if (old == expected)
- *xchgp = new;
- return old;
-#endif
}
ERTS_GLB_INLINE erts_aint_t
-erts_atomic_bor(erts_atomic_t *var, erts_aint_t mask)
+erts_no_atomic_read_bor(erts_no_atomic_t *var, erts_aint_t mask)
{
-#ifdef USE_THREADS
- return ethr_atomic_read_bor(var, mask);
-#else
erts_aint_t old;
old = *var;
*var |= mask;
return old;
-#endif
}
ERTS_GLB_INLINE erts_aint_t
-erts_atomic_band(erts_atomic_t *var, erts_aint_t mask)
+erts_no_atomic_read_band(erts_no_atomic_t *var, erts_aint_t mask)
{
-#ifdef USE_THREADS
- return ethr_atomic_read_band(var, mask);
-#else
erts_aint_t old;
old = *var;
*var &= mask;
return old;
-#endif
}
ERTS_GLB_INLINE erts_aint_t
-erts_atomic_read_acqb(erts_atomic_t *var)
+erts_no_atomic_xchg(erts_no_atomic_t *xchgp, erts_aint_t new)
{
-#ifdef USE_THREADS
- return ethr_atomic_read_acqb(var);
-#else
- return *var;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_atomic_set_relb(erts_atomic_t *var, erts_aint_t i)
-{
-#ifdef USE_THREADS
- ethr_atomic_set_relb(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_atomic_dec_relb(erts_atomic_t *decp)
-{
-#ifdef USE_THREADS
- ethr_atomic_dec_relb(decp);
-#else
- --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t
-erts_atomic_dectest_relb(erts_atomic_t *decp)
-{
-#ifdef USE_THREADS
- return ethr_atomic_dec_read_relb(decp);
-#else
- return --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_acqb(erts_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp)
-{
-#ifdef USE_THREADS
- return ethr_atomic_cmpxchg_acqb(xchgp, new, exp);
-#else
erts_aint_t old = *xchgp;
- if (old == exp)
- *xchgp = new;
+ *xchgp = new;
return old;
-#endif
}
-ERTS_GLB_INLINE erts_aint_t erts_atomic_cmpxchg_relb(erts_atomic_t *xchgp,
- erts_aint_t new,
- erts_aint_t exp)
+ERTS_GLB_INLINE erts_aint_t
+erts_no_atomic_cmpxchg(erts_no_atomic_t *xchgp,
+ erts_aint_t new,
+ erts_aint_t expected)
{
-#ifdef USE_THREADS
- return ethr_atomic_cmpxchg_relb(xchgp, new, exp);
-#else
erts_aint_t old = *xchgp;
- if (old == exp)
+ if (old == expected)
*xchgp = new;
return old;
-#endif
}
/* atomic32 */
ERTS_GLB_INLINE void
-erts_atomic32_init(erts_atomic32_t *var, erts_aint32_t i)
-{
-#ifdef USE_THREADS
- ethr_atomic32_init(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_atomic32_set(erts_atomic32_t *var, erts_aint32_t i)
+erts_no_atomic32_set(erts_no_atomic32_t *var, erts_aint32_t i)
{
-#ifdef USE_THREADS
- ethr_atomic32_set(var, i);
-#else
*var = i;
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_read(erts_atomic32_t *var)
+erts_no_atomic32_read(erts_no_atomic32_t *var)
{
-#ifdef USE_THREADS
- return ethr_atomic32_read(var);
-#else
return *var;
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_inctest(erts_atomic32_t *incp)
+erts_no_atomic32_inc_read(erts_no_atomic32_t *incp)
{
-#ifdef USE_THREADS
- return ethr_atomic32_inc_read(incp);
-#else
return ++(*incp);
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_dectest(erts_atomic32_t *decp)
+erts_no_atomic32_dec_read(erts_no_atomic32_t *decp)
{
-#ifdef USE_THREADS
- return ethr_atomic32_dec_read(decp);
-#else
return --(*decp);
-#endif
}
ERTS_GLB_INLINE void
-erts_atomic32_inc(erts_atomic32_t *incp)
+erts_no_atomic32_inc(erts_no_atomic32_t *incp)
{
-#ifdef USE_THREADS
- ethr_atomic32_inc(incp);
-#else
++(*incp);
-#endif
}
ERTS_GLB_INLINE void
-erts_atomic32_dec(erts_atomic32_t *decp)
+erts_no_atomic32_dec(erts_no_atomic32_t *decp)
{
-#ifdef USE_THREADS
- ethr_atomic32_dec(decp);
-#else
--(*decp);
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_addtest(erts_atomic32_t *addp, erts_aint32_t i)
+erts_no_atomic32_add_read(erts_no_atomic32_t *addp, erts_aint32_t i)
{
-#ifdef USE_THREADS
- return ethr_atomic32_add_read(addp, i);
-#else
return *addp += i;
-#endif
}
ERTS_GLB_INLINE void
-erts_atomic32_add(erts_atomic32_t *addp, erts_aint32_t i)
+erts_no_atomic32_add(erts_no_atomic32_t *addp, erts_aint32_t i)
{
-#ifdef USE_THREADS
- ethr_atomic32_add(addp, i);
-#else
*addp += i;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_xchg(erts_atomic32_t *xchgp, erts_aint32_t new)
-{
-#ifdef USE_THREADS
- return ethr_atomic32_xchg(xchgp, new);
-#else
- erts_aint32_t old = *xchgp;
- *xchgp = new;
- return old;
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_cmpxchg(erts_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t expected)
-{
-#ifdef USE_THREADS
- return ethr_atomic32_cmpxchg(xchgp, new, expected);
-#else
- erts_aint32_t old = *xchgp;
- if (old == expected)
- *xchgp = new;
- return old;
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_bor(erts_atomic32_t *var, erts_aint32_t mask)
+erts_no_atomic32_read_bor(erts_no_atomic32_t *var, erts_aint32_t mask)
{
-#ifdef USE_THREADS
- return ethr_atomic32_read_bor(var, mask);
-#else
erts_aint32_t old;
old = *var;
*var |= mask;
return old;
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_band(erts_atomic32_t *var, erts_aint32_t mask)
+erts_no_atomic32_read_band(erts_no_atomic32_t *var, erts_aint32_t mask)
{
-#ifdef USE_THREADS
- return ethr_atomic32_read_band(var, mask);
-#else
erts_aint32_t old;
old = *var;
*var &= mask;
return old;
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_read_acqb(erts_atomic32_t *var)
-{
-#ifdef USE_THREADS
- return ethr_atomic32_read_acqb(var);
-#else
- return *var;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_atomic32_set_relb(erts_atomic32_t *var, erts_aint32_t i)
-{
-#ifdef USE_THREADS
- ethr_atomic32_set_relb(var, i);
-#else
- *var = i;
-#endif
-}
-
-ERTS_GLB_INLINE void
-erts_atomic32_dec_relb(erts_atomic32_t *decp)
+erts_no_atomic32_xchg(erts_no_atomic32_t *xchgp, erts_aint32_t new)
{
-#ifdef USE_THREADS
- ethr_atomic32_dec_relb(decp);
-#else
- --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_dectest_relb(erts_atomic32_t *decp)
-{
-#ifdef USE_THREADS
- return ethr_atomic32_dec_read_relb(decp);
-#else
- return --(*decp);
-#endif
-}
-
-ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_cmpxchg_acqb(erts_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp)
-{
-#ifdef USE_THREADS
- return ethr_atomic32_cmpxchg_acqb(xchgp, new, exp);
-#else
erts_aint32_t old = *xchgp;
- if (old == exp)
- *xchgp = new;
+ *xchgp = new;
return old;
-#endif
}
ERTS_GLB_INLINE erts_aint32_t
-erts_atomic32_cmpxchg_relb(erts_atomic32_t *xchgp,
- erts_aint32_t new,
- erts_aint32_t exp)
+erts_no_atomic32_cmpxchg(erts_no_atomic32_t *xchgp,
+ erts_aint32_t new,
+ erts_aint32_t expected)
{
-#ifdef USE_THREADS
- return ethr_atomic32_cmpxchg_relb(xchgp, new, exp);
-#else
erts_aint32_t old = *xchgp;
- if (old == exp)
+ if (old == expected)
*xchgp = new;
return old;
-#endif
}
/* spinlock */
@@ -1887,3 +2084,37 @@ erts_thr_sigwait(const sigset_t *set, int *sig)
#endif /* #if ERTS_GLB_INLINE_INCL_FUNC_DEF */
#endif /* #ifndef ERL_THREAD_H__ */
+
+#ifdef ERTS_UNDEF_DEPRECATED_ATOMICS
+
+/* Deprecated atomic names, still to be replaced in code that uses them */
+
+#undef erts_atomic_init
+#undef erts_atomic_set
+#undef erts_atomic_read
+#undef erts_atomic_inctest
+#undef erts_atomic_dectest
+#undef erts_atomic_inc
+#undef erts_atomic_dec
+#undef erts_atomic_addtest
+#undef erts_atomic_add
+#undef erts_atomic_xchg
+#undef erts_atomic_cmpxchg
+#undef erts_atomic_bor
+#undef erts_atomic_band
+
+#undef erts_atomic32_init
+#undef erts_atomic32_set
+#undef erts_atomic32_read
+#undef erts_atomic32_inctest
+#undef erts_atomic32_dectest
+#undef erts_atomic32_inc
+#undef erts_atomic32_dec
+#undef erts_atomic32_addtest
+#undef erts_atomic32_add
+#undef erts_atomic32_xchg
+#undef erts_atomic32_cmpxchg
+#undef erts_atomic32_bor
+#undef erts_atomic32_band
+
+#endif
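
The suffix scheme introduced above replaces the old implicitly-barriered calls: _nob is no barrier, _rb and _wb are read and write barriers, _acqb and _relb are acquire and release, and _mb is a full memory barrier. A minimal sketch of the canonical retry loop against this API, assuming the erl_threads.h definitions from this hunk; the flag-setting helper itself is hypothetical and only builds inside the emulator source tree:

#include "erl_threads.h" /* the suffixed erts_atomic_* API patched in above */

/* Hypothetical helper: set a flag bit with acquire semantics, retrying
 * until the cmpxchg succeeds. cmpxchg returns the value it actually
 * observed, so a failed attempt refreshes "act" for the next round. */
static void
set_flag_acqb(erts_atomic_t *state, erts_aint_t flag)
{
    erts_aint_t exp, act = erts_atomic_read_nob(state);
    do {
        exp = act;
        act = erts_atomic_cmpxchg_acqb(state, exp | flag, exp);
    } while (act != exp);
}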
diff --git a/erts/emulator/beam/erl_time.h b/erts/emulator/beam/erl_time.h
index d0ad73cd81..7a09d30ff6 100644
--- a/erts/emulator/beam/erl_time.h
+++ b/erts/emulator/beam/erl_time.h
@@ -85,8 +85,8 @@ ERTS_GLB_INLINE void erts_do_time_add(long);
#if ERTS_GLB_INLINE_INCL_FUNC_DEF
-ERTS_GLB_INLINE erts_aint_t erts_do_time_read_and_reset(void) { return erts_smp_atomic_xchg(&do_time, 0L); }
-ERTS_GLB_INLINE void erts_do_time_add(long elapsed) { erts_smp_atomic_add(&do_time, elapsed); }
+ERTS_GLB_INLINE erts_aint_t erts_do_time_read_and_reset(void) { return erts_smp_atomic_xchg_acqb(&do_time, 0L); }
+ERTS_GLB_INLINE void erts_do_time_add(long elapsed) { erts_smp_atomic_add_relb(&do_time, elapsed); }
#endif /* #if ERTS_GLB_INLINE_INCL_FUNC_DEF */
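
The do_time change pairs a release-barrier add on the producer side with an acquire-barrier exchange on the consumer side, so anything written before erts_do_time_add() is visible to the thread that drains the counter. The same pairing in portable C11, as an analogy only (the erts suffixes map onto these memory orders):

#include <stdatomic.h>

static atomic_long do_time_c11; /* stand-in for the erts_smp_atomic_t */

/* Producer side, like erts_smp_atomic_add_relb(&do_time, elapsed). */
static void time_add(long elapsed)
{
    atomic_fetch_add_explicit(&do_time_c11, elapsed, memory_order_release);
}

/* Consumer side, like erts_smp_atomic_xchg_acqb(&do_time, 0L). */
static long time_read_and_reset(void)
{
    return atomic_exchange_explicit(&do_time_c11, 0, memory_order_acquire);
}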
diff --git a/erts/emulator/beam/global.h b/erts/emulator/beam/global.h
index 499bdd77ba..249df54015 100644
--- a/erts/emulator/beam/global.h
+++ b/erts/emulator/beam/global.h
@@ -200,10 +200,10 @@ erts_port_runq(Port *prt)
{
#ifdef ERTS_SMP
ErtsRunQueue *rq1, *rq2;
- rq1 = (ErtsRunQueue *) erts_smp_atomic_read(&prt->run_queue);
+ rq1 = (ErtsRunQueue *) erts_smp_atomic_read_nob(&prt->run_queue);
while (1) {
erts_smp_runq_lock(rq1);
- rq2 = (ErtsRunQueue *) erts_smp_atomic_read(&prt->run_queue);
+ rq2 = (ErtsRunQueue *) erts_smp_atomic_read_nob(&prt->run_queue);
if (rq1 == rq2)
return rq1;
erts_smp_runq_unlock(rq1);
@@ -542,10 +542,11 @@ ERTS_GLB_INLINE void erts_may_save_closed_port(Port *prt)
ERTS_SMP_LC_ASSERT(erts_smp_lc_spinlock_is_locked(&prt->state_lck));
if (prt->snapshot != erts_smp_atomic32_read_acqb(&erts_ports_snapshot)) {
/* Dead ports are added from the end of the snapshot buffer */
- Eterm* tombstone = (Eterm*) erts_smp_atomic_addtest(&erts_dead_ports_ptr,
- -(erts_aint_t)sizeof(Eterm));
+ Eterm* tombstone;
+ tombstone = (Eterm*) erts_smp_atomic_add_read_nob(&erts_dead_ports_ptr,
+ -(erts_aint_t)sizeof(Eterm));
ASSERT(tombstone+1 != NULL);
- ASSERT(prt->snapshot == erts_smp_atomic32_read(&erts_ports_snapshot) - 1);
+ ASSERT(prt->snapshot == erts_smp_atomic32_read_nob(&erts_ports_snapshot) - 1);
*tombstone = prt->id;
}
/*else no ongoing snapshot or port was already included or created after snapshot */
@@ -1200,11 +1201,11 @@ erts_smp_port_trylock(Port *prt)
#ifdef ERTS_SMP
int res;
- ASSERT(erts_smp_atomic_read(&prt->refc) > 0);
- erts_smp_atomic_inc(&prt->refc);
+ ASSERT(erts_smp_atomic_read_nob(&prt->refc) > 0);
+ erts_smp_atomic_inc_nob(&prt->refc);
res = erts_smp_mtx_trylock(prt->lock);
if (res == EBUSY) {
- erts_smp_atomic_dec(&prt->refc);
+ erts_smp_atomic_dec_nob(&prt->refc);
}
return res;
@@ -1217,8 +1218,8 @@ ERTS_GLB_INLINE void
erts_smp_port_lock(Port *prt)
{
#ifdef ERTS_SMP
- ASSERT(erts_smp_atomic_read(&prt->refc) > 0);
- erts_smp_atomic_inc(&prt->refc);
+ ASSERT(erts_smp_atomic_read_nob(&prt->refc) > 0);
+ erts_smp_atomic_inc_nob(&prt->refc);
erts_smp_mtx_lock(prt->lock);
#endif
}
@@ -1229,7 +1230,7 @@ erts_smp_port_unlock(Port *prt)
#ifdef ERTS_SMP
erts_aint_t refc;
erts_smp_mtx_unlock(prt->lock);
- refc = erts_smp_atomic_dectest(&prt->refc);
+ refc = erts_smp_atomic_dec_read_nob(&prt->refc);
ASSERT(refc >= 0);
if (refc == 0)
erts_port_cleanup(prt);
@@ -1298,7 +1299,7 @@ erts_id2port_sflgs(Eterm id, Process *c_p, ErtsProcLocks c_p_locks, Uint32 sflgs
}
#ifdef ERTS_SMP
else {
- erts_smp_atomic_inc(&prt->refc);
+ erts_smp_atomic_inc_nob(&prt->refc);
erts_smp_port_state_unlock(prt);
if (no_proc_locks)
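
The port reference count drops to plain _nob operations because the port mutex, not the counter, orders access to the port itself; the count only needs to be atomic. A freestanding sketch of the same take-a-reference-then-trylock shape, using pthreads and C11 atomics as stand-ins (type and helper names are hypothetical):

#include <errno.h>
#include <pthread.h>
#include <stdatomic.h>

typedef struct {
    atomic_long refc;     /* like prt->refc; relaxed is enough */
    pthread_mutex_t lock; /* like prt->lock; provides the ordering */
} port_like_t;

/* Mirrors erts_smp_port_trylock(): take a reference first, drop it
 * again if the lock is busy. Relaxed ops suffice here because the
 * mutex orders every access to the object the count protects. */
static int port_trylock(port_like_t *p)
{
    int res;
    atomic_fetch_add_explicit(&p->refc, 1, memory_order_relaxed);
    res = pthread_mutex_trylock(&p->lock);
    if (res == EBUSY)
        atomic_fetch_sub_explicit(&p->refc, 1, memory_order_relaxed);
    return res;
}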
diff --git a/erts/emulator/beam/io.c b/erts/emulator/beam/io.c
index df5f8b22a3..151c776a3d 100644
--- a/erts/emulator/beam/io.c
+++ b/erts/emulator/beam/io.c
@@ -244,8 +244,8 @@ get_free_port(void)
}
port->status = ERTS_PORT_SFLG_INITIALIZING;
#ifdef ERTS_SMP
- ERTS_SMP_LC_ASSERT(erts_smp_atomic_read(&port->refc) == 0);
- erts_smp_atomic_set(&port->refc, 2); /* Port alive + lock */
+ ERTS_SMP_LC_ASSERT(erts_smp_atomic_read_nob(&port->refc) == 0);
+ erts_smp_atomic_set_nob(&port->refc, 2); /* Port alive + lock */
#endif
erts_smp_port_state_unlock(port);
return num & port_num_mask;
@@ -327,7 +327,7 @@ port_cleanup(Port *prt)
#ifdef ERTS_SMP
ASSERT(prt->status & ERTS_PORT_SFLG_FREE_SCHEDULED);
- ERTS_SMP_LC_ASSERT(erts_smp_atomic_read(&prt->refc) == 0);
+ ERTS_SMP_LC_ASSERT(erts_smp_atomic_read_nob(&prt->refc) == 0);
port_specific = (prt->status & ERTS_PORT_SFLG_PORT_SPECIFIC_LOCK);
@@ -425,11 +425,11 @@ setup_port(Port* prt, Eterm pid, erts_driver_t *driver,
erts_smp_runq_lock(runq);
erts_smp_port_state_lock(prt);
prt->status = ERTS_PORT_SFLG_CONNECTED | xstatus;
- prt->snapshot = erts_smp_atomic32_read(&erts_ports_snapshot);
+ prt->snapshot = erts_smp_atomic32_read_nob(&erts_ports_snapshot);
old_name = prt->name;
prt->name = new_name;
#ifdef ERTS_SMP
- erts_smp_atomic_set(&prt->run_queue, (erts_aint_t) runq);
+ erts_smp_atomic_set_nob(&prt->run_queue, (erts_aint_t) runq);
#endif
ASSERT(!prt->drv_ptr);
prt->drv_ptr = driver;
@@ -590,8 +590,8 @@ erts_open_driver(erts_driver_t* driver, /* Pointer to driver. */
erts_smp_port_state_lock(port);
port->status = ERTS_PORT_SFLG_FREE;
#ifdef ERTS_SMP
- ERTS_SMP_LC_ASSERT(erts_smp_atomic_read(&port->refc) == 2);
- erts_smp_atomic_set(&port->refc, 0);
+ ERTS_SMP_LC_ASSERT(erts_smp_atomic_read_nob(&port->refc) == 2);
+ erts_smp_atomic_set_nob(&port->refc, 0);
#endif
erts_smp_port_state_unlock(port);
return -3;
@@ -1206,7 +1206,7 @@ int erts_write_to_port(Eterm caller_id, Port *p, Eterm list)
}
}
p->bytes_out += size;
- erts_smp_atomic_add(&erts_bytes_out, size);
+ erts_smp_atomic_add_nob(&erts_bytes_out, size);
#ifdef ERTS_SMP
if (p->xports)
@@ -1277,13 +1277,13 @@ void init_io(void)
erts_port = (Port *) erts_alloc(ERTS_ALC_T_PORT_TABLE,
erts_max_ports * sizeof(Port));
- erts_smp_atomic_init(&erts_bytes_out, 0);
- erts_smp_atomic_init(&erts_bytes_in, 0);
+ erts_smp_atomic_init_nob(&erts_bytes_out, 0);
+ erts_smp_atomic_init_nob(&erts_bytes_in, 0);
for (i = 0; i < erts_max_ports; i++) {
erts_port_task_init_sched(&erts_port[i].sched);
#ifdef ERTS_SMP
- erts_smp_atomic_init(&erts_port[i].refc, 0);
+ erts_smp_atomic_init_nob(&erts_port[i].refc, 0);
erts_port[i].lock = NULL;
erts_port[i].xports = NULL;
erts_smp_spinlock_init_x(&erts_port[i].state_lck, "port_state", make_small(i));
@@ -1300,7 +1300,7 @@ void init_io(void)
erts_port[i].port_data_lock = NULL;
}
- erts_smp_atomic32_init(&erts_ports_snapshot, (erts_aint32_t) 0);
+ erts_smp_atomic32_init_nob(&erts_ports_snapshot, (erts_aint32_t) 0);
last_port_num = 0;
erts_smp_spinlock_init(&get_free_port_lck, "get_free_port");
@@ -3253,7 +3253,7 @@ int driver_output_binary(ErlDrvPort ix, char* hbuf, int hlen,
return 0;
prt->bytes_in += (hlen + len);
- erts_smp_atomic_add(&erts_bytes_in, (erts_aint_t) (hlen + len));
+ erts_smp_atomic_add_nob(&erts_bytes_in, (erts_aint_t) (hlen + len));
if (prt->status & ERTS_PORT_SFLG_DISTRIBUTION) {
return erts_net_message(prt,
prt->dist_entry,
@@ -3288,7 +3288,7 @@ int driver_output2(ErlDrvPort ix, char* hbuf, int hlen, char* buf, int len)
return 0;
prt->bytes_in += (hlen + len);
- erts_smp_atomic_add(&erts_bytes_in, (erts_aint_t) (hlen + len));
+ erts_smp_atomic_add_nob(&erts_bytes_in, (erts_aint_t) (hlen + len));
if (prt->status & ERTS_PORT_SFLG_DISTRIBUTION) {
if (len == 0)
return erts_net_message(prt,
@@ -3365,7 +3365,7 @@ int driver_outputv(ErlDrvPort ix, char* hbuf, int hlen, ErlIOVec* vec, int skip)
/* XXX handle distribution !!! */
prt->bytes_in += (hlen + size);
- erts_smp_atomic_add(&erts_bytes_in, (erts_aint_t) (hlen + size));
+ erts_smp_atomic_add_nob(&erts_bytes_in, (erts_aint_t) (hlen + size));
deliver_vec_message(prt, prt->connected, hbuf, hlen, binv, iov, n, size);
return 0;
}
@@ -3539,13 +3539,13 @@ pdl_init(void)
static ERTS_INLINE void
pdl_init_refc(ErlDrvPDL pdl)
{
- erts_atomic_init(&pdl->refc, 1);
+ erts_atomic_init_nob(&pdl->refc, 1);
}
static ERTS_INLINE ErlDrvSInt
pdl_read_refc(ErlDrvPDL pdl)
{
- erts_aint_t refc = erts_atomic_read(&pdl->refc);
+ erts_aint_t refc = erts_atomic_read_nob(&pdl->refc);
ERTS_LC_ASSERT(refc >= 0);
return (ErlDrvSInt) refc;
}
@@ -3553,14 +3553,14 @@ pdl_read_refc(ErlDrvPDL pdl)
static ERTS_INLINE void
pdl_inc_refc(ErlDrvPDL pdl)
{
- erts_atomic_inc(&pdl->refc);
+ erts_atomic_inc_nob(&pdl->refc);
ERTS_LC_ASSERT(driver_pdl_get_refc(pdl) > 1);
}
static ERTS_INLINE ErlDrvSInt
pdl_inctest_refc(ErlDrvPDL pdl)
{
- erts_aint_t refc = erts_atomic_inctest(&pdl->refc);
+ erts_aint_t refc = erts_atomic_inc_read_nob(&pdl->refc);
ERTS_LC_ASSERT(refc > 1);
return (ErlDrvSInt) refc;
}
@@ -3569,7 +3569,7 @@ pdl_inctest_refc(ErlDrvPDL pdl)
static ERTS_INLINE void
pdl_dec_refc(ErlDrvPDL pdl)
{
- erts_atomic_dec(&pdl->refc);
+ erts_atomic_dec_nob(&pdl->refc);
ERTS_LC_ASSERT(driver_pdl_get_refc(pdl) > 0);
}
#endif
@@ -3577,7 +3577,7 @@ pdl_dec_refc(ErlDrvPDL pdl)
static ERTS_INLINE ErlDrvSInt
pdl_dectest_refc(ErlDrvPDL pdl)
{
- erts_aint_t refc = erts_atomic_dectest(&pdl->refc);
+ erts_aint_t refc = erts_atomic_dec_read_nob(&pdl->refc);
ERTS_LC_ASSERT(refc >= 0);
return (ErlDrvSInt) refc;
}
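
pdl_dectest_refc() lets the thread that sees the count reach zero perform the cleanup. Inside the emulator the _nob variant is enough because the port data lock already serializes the surrounding accesses; a freestanding version needs the classic release-decrement plus acquire-fence-before-free, sketched here with hypothetical names:

#include <stdatomic.h>
#include <stdlib.h>

typedef struct {
    atomic_long refc; /* like pdl->refc */
    /* ... payload ... */
} pdl_like_t;

/* Drop one reference; the thread that takes the count to zero frees.
 * fetch_sub returns the old value, so old == 1 means we hit zero. */
static void pdl_release(pdl_like_t *pdl)
{
    if (atomic_fetch_sub_explicit(&pdl->refc, 1, memory_order_release) == 1) {
        atomic_thread_fence(memory_order_acquire);
        free(pdl);
    }
}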
diff --git a/erts/emulator/beam/safe_hash.c b/erts/emulator/beam/safe_hash.c
index 4c54e19cdb..3326e5cc2a 100644
--- a/erts/emulator/beam/safe_hash.c
+++ b/erts/emulator/beam/safe_hash.c
@@ -61,7 +61,7 @@ static ERTS_INLINE int align_up_pow2(int val)
*/
static void rehash(SafeHash* h, int grow_limit)
{
- if (erts_smp_atomic_xchg(&h->is_rehashing, 1) != 0) {
+ if (erts_smp_atomic_xchg_acqb(&h->is_rehashing, 1) != 0) {
return; /* already in progress */
}
if (h->grow_limit == grow_limit) {
@@ -166,8 +166,8 @@ SafeHash* safe_hash_init(ErtsAlcType_t type, SafeHash* h, char* name, int size,
h->name = name;
h->fun = fun;
set_size(h,size);
- erts_smp_atomic_init(&h->is_rehashing, 0);
- erts_smp_atomic_init(&h->nitems, 0);
+ erts_smp_atomic_init_nob(&h->is_rehashing, 0);
+ erts_smp_atomic_init_nob(&h->nitems, 0);
for (i=0; i<SAFE_HASH_LOCK_CNT; i++) {
erts_smp_mtx_init(&h->lock_vec[i].mtx,"safe_hash");
}
@@ -222,7 +222,7 @@ void* safe_hash_put(SafeHash* h, void* tmpl)
*head = b;
grow_limit = h->grow_limit;
erts_smp_mtx_unlock(lock);
- if (erts_smp_atomic_inctest(&h->nitems) > grow_limit) {
+ if (erts_smp_atomic_inc_read_nob(&h->nitems) > grow_limit) {
rehash(h, grow_limit);
}
return (void*) b;
@@ -245,7 +245,7 @@ void* safe_hash_erase(SafeHash* h, void* tmpl)
if ((b->hvalue == hval) && (h->fun.cmp(tmpl, (void*)b) == 0)) {
*prevp = b->next;
erts_smp_mtx_unlock(lock);
- erts_smp_atomic_dec(&h->nitems);
+ erts_smp_atomic_dec_nob(&h->nitems);
h->fun.free((void*)b);
return tmpl;
}
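
In safe_hash.c the rehash flag now uses an acquire exchange, which doubles as a try-lock: exactly one thread wins the exchange and performs the rehash while any concurrent caller simply returns. A minimal C11 rendering of that guard (helper names are hypothetical):

#include <stdatomic.h>

static atomic_int is_rehashing; /* like h->is_rehashing */

/* At most one thread rehashes at a time; losers skip, as in rehash(). */
static void maybe_rehash(void (*do_rehash)(void))
{
    if (atomic_exchange_explicit(&is_rehashing, 1, memory_order_acquire) != 0)
        return; /* already in progress */
    do_rehash();
    atomic_store_explicit(&is_rehashing, 0, memory_order_release);
}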
diff --git a/erts/emulator/beam/sys.h b/erts/emulator/beam/sys.h
index e64c43de6e..669a601b35 100644
--- a/erts/emulator/beam/sys.h
+++ b/erts/emulator/beam/sys.h
@@ -340,7 +340,8 @@ int erts_send_warning_to_logger_str_nogl(char *);
#ifdef ERTS_WANT_BREAK_HANDLING
# ifdef ERTS_SMP
extern erts_smp_atomic32_t erts_break_requested;
-# define ERTS_BREAK_REQUESTED ((int) erts_smp_atomic32_read(&erts_break_requested))
+# define ERTS_BREAK_REQUESTED \
+ ((int) erts_smp_atomic32_read_nob(&erts_break_requested))
# else
extern volatile int erts_break_requested;
# define ERTS_BREAK_REQUESTED erts_break_requested
@@ -354,7 +355,7 @@ void erts_do_break_handling(void);
# else
# ifdef ERTS_SMP
extern erts_smp_atomic32_t erts_got_sigusr1;
-# define ERTS_GOT_SIGUSR1 ((int) erts_smp_atomic32_read(&erts_got_sigusr1))
+# define ERTS_GOT_SIGUSR1 ((int) erts_smp_atomic32_read_mb(&erts_got_sigusr1))
# else
extern volatile int erts_got_sigusr1;
# define ERTS_GOT_SIGUSR1 erts_got_sigusr1
@@ -363,11 +364,15 @@ extern volatile int erts_got_sigusr1;
#endif
#ifdef ERTS_SMP
-extern erts_smp_atomic_t erts_writing_erl_crash_dump;
+extern erts_smp_atomic32_t erts_writing_erl_crash_dump;
+extern erts_tsd_key_t erts_is_crash_dumping_key;
+#define ERTS_SOMEONE_IS_CRASH_DUMPING \
+ ((int) erts_smp_atomic32_read_mb(&erts_writing_erl_crash_dump))
#define ERTS_IS_CRASH_DUMPING \
- ((int) erts_smp_atomic_read(&erts_writing_erl_crash_dump))
+ ((int) (SWord) erts_tsd_get(erts_is_crash_dumping_key))
#else
extern volatile int erts_writing_erl_crash_dump;
+#define ERTS_SOMEONE_IS_CRASH_DUMPING erts_writing_erl_crash_dump
#define ERTS_IS_CRASH_DUMPING erts_writing_erl_crash_dump
#endif
@@ -877,7 +882,7 @@ ERTS_GLB_INLINE int
erts_smp_pending_system_block(void)
{
#ifdef ERTS_SMP
- return (int) erts_smp_atomic32_read(&erts_system_block_state.do_block);
+ return (int) erts_smp_atomic32_read_nob(&erts_system_block_state.do_block);
#else
return 0;
#endif
@@ -913,7 +918,7 @@ erts_smp_set_activity(erts_activity_t old_activity,
case ERTS_ACTIVITY_UNDEFINED:
break;
case ERTS_ACTIVITY_WAIT:
- erts_smp_atomic32_dec(&erts_system_block_state.in_activity.wait);
+ erts_smp_atomic32_dec_acqb(&erts_system_block_state.in_activity.wait);
if (locked) {
/* You are not allowed to leave activity waiting
* without supplying the possibility to block
@@ -924,10 +929,10 @@ erts_smp_set_activity(erts_activity_t old_activity,
}
break;
case ERTS_ACTIVITY_GC:
- erts_smp_atomic32_dec(&erts_system_block_state.in_activity.gc);
+ erts_smp_atomic32_dec_acqb(&erts_system_block_state.in_activity.gc);
break;
case ERTS_ACTIVITY_IO:
- erts_smp_atomic32_dec(&erts_system_block_state.in_activity.io);
+ erts_smp_atomic32_dec_acqb(&erts_system_block_state.in_activity.io);
break;
default:
erts_set_activity_error(ERTS_ACT_ERR_LEAVE_UNKNOWN_ACTIVITY,
@@ -943,13 +948,13 @@ erts_smp_set_activity(erts_activity_t old_activity,
case ERTS_ACTIVITY_UNDEFINED:
break;
case ERTS_ACTIVITY_WAIT:
- erts_smp_atomic32_inc(&erts_system_block_state.in_activity.wait);
+ erts_smp_atomic32_inc_mb(&erts_system_block_state.in_activity.wait);
break;
case ERTS_ACTIVITY_GC:
- erts_smp_atomic32_inc(&erts_system_block_state.in_activity.gc);
+ erts_smp_atomic32_inc_mb(&erts_system_block_state.in_activity.gc);
break;
case ERTS_ACTIVITY_IO:
- erts_smp_atomic32_inc(&erts_system_block_state.in_activity.io);
+ erts_smp_atomic32_inc_mb(&erts_system_block_state.in_activity.io);
break;
default:
erts_set_activity_error(ERTS_ACT_ERR_ENTER_UNKNOWN_ACTIVITY,
@@ -1001,27 +1006,27 @@ ERTS_GLB_INLINE erts_aint_t erts_refc_read(erts_refc_t *refcp,
ERTS_GLB_INLINE void
erts_refc_init(erts_refc_t *refcp, erts_aint_t val)
{
- erts_smp_atomic_init((erts_smp_atomic_t *) refcp, val);
+ erts_smp_atomic_init_nob((erts_smp_atomic_t *) refcp, val);
}
ERTS_GLB_INLINE void
erts_refc_inc(erts_refc_t *refcp, erts_aint_t min_val)
{
#ifdef ERTS_REFC_DEBUG
- erts_aint_t val = erts_smp_atomic_inctest((erts_smp_atomic_t *) refcp);
+ erts_aint_t val = erts_smp_atomic_inc_read_nob((erts_smp_atomic_t *) refcp);
if (val < min_val)
erl_exit(ERTS_ABORT_EXIT,
"erts_refc_inc(): Bad refc found (refc=%ld < %ld)!\n",
val, min_val);
#else
- erts_smp_atomic_inc((erts_smp_atomic_t *) refcp);
+ erts_smp_atomic_inc_nob((erts_smp_atomic_t *) refcp);
#endif
}
ERTS_GLB_INLINE erts_aint_t
erts_refc_inctest(erts_refc_t *refcp, erts_aint_t min_val)
{
- erts_aint_t val = erts_smp_atomic_inctest((erts_smp_atomic_t *) refcp);
+ erts_aint_t val = erts_smp_atomic_inc_read_nob((erts_smp_atomic_t *) refcp);
#ifdef ERTS_REFC_DEBUG
if (val < min_val)
erl_exit(ERTS_ABORT_EXIT,
@@ -1035,20 +1040,20 @@ ERTS_GLB_INLINE void
erts_refc_dec(erts_refc_t *refcp, erts_aint_t min_val)
{
#ifdef ERTS_REFC_DEBUG
- erts_aint_t val = erts_smp_atomic_dectest((erts_smp_atomic_t *) refcp);
+ erts_aint_t val = erts_smp_atomic_dec_read_nob((erts_smp_atomic_t *) refcp);
if (val < min_val)
erl_exit(ERTS_ABORT_EXIT,
"erts_refc_dec(): Bad refc found (refc=%ld < %ld)!\n",
val, min_val);
#else
- erts_smp_atomic_dec((erts_smp_atomic_t *) refcp);
+ erts_smp_atomic_dec_nob((erts_smp_atomic_t *) refcp);
#endif
}
ERTS_GLB_INLINE erts_aint_t
erts_refc_dectest(erts_refc_t *refcp, erts_aint_t min_val)
{
- erts_aint_t val = erts_smp_atomic_dectest((erts_smp_atomic_t *) refcp);
+ erts_aint_t val = erts_smp_atomic_dec_read_nob((erts_smp_atomic_t *) refcp);
#ifdef ERTS_REFC_DEBUG
if (val < min_val)
erl_exit(ERTS_ABORT_EXIT,
@@ -1062,20 +1067,20 @@ ERTS_GLB_INLINE void
erts_refc_add(erts_refc_t *refcp, erts_aint_t diff, erts_aint_t min_val)
{
#ifdef ERTS_REFC_DEBUG
- erts_aint_t val = erts_smp_atomic_addtest((erts_smp_atomic_t *) refcp, diff);
+ erts_aint_t val = erts_smp_atomic_add_read_nob((erts_smp_atomic_t *) refcp, diff);
if (val < min_val)
erl_exit(ERTS_ABORT_EXIT,
"erts_refc_add(%ld): Bad refc found (refc=%ld < %ld)!\n",
diff, val, min_val);
#else
- erts_smp_atomic_add((erts_smp_atomic_t *) refcp, diff);
+ erts_smp_atomic_add_nob((erts_smp_atomic_t *) refcp, diff);
#endif
}
ERTS_GLB_INLINE erts_aint_t
erts_refc_read(erts_refc_t *refcp, erts_aint_t min_val)
{
- erts_aint_t val = erts_smp_atomic_read((erts_smp_atomic_t *) refcp);
+ erts_aint_t val = erts_smp_atomic_read_nob((erts_smp_atomic_t *) refcp);
#ifdef ERTS_REFC_DEBUG
if (val < min_val)
erl_exit(ERTS_ABORT_EXIT,
diff --git a/erts/emulator/beam/time.c b/erts/emulator/beam/time.c
index a00faff912..8fa8c1cfe0 100644
--- a/erts/emulator/beam/time.c
+++ b/erts/emulator/beam/time.c
@@ -108,9 +108,9 @@ static ErlTimer *tiw_min_ptr;
static int itime; /* Constant after init */
erts_smp_atomic_t do_time; /* set at clock interrupt */
-static ERTS_INLINE erts_aint_t do_time_read(void) { return erts_smp_atomic_read(&do_time); }
+static ERTS_INLINE erts_aint_t do_time_read(void) { return erts_smp_atomic_read_acqb(&do_time); }
static ERTS_INLINE erts_aint_t do_time_update(void) { return do_time_read(); }
-static ERTS_INLINE void do_time_init(void) { erts_smp_atomic_init(&do_time, 0L); }
+static ERTS_INLINE void do_time_init(void) { erts_smp_atomic_init_nob(&do_time, 0L); }
/* get the time (in units of itime) to the next timeout,
or -1 if there are no timeouts */
diff --git a/erts/emulator/beam/utils.c b/erts/emulator/beam/utils.c
index a17de717bc..3f6accba2d 100644
--- a/erts/emulator/beam/utils.c
+++ b/erts/emulator/beam/utils.c
@@ -3689,14 +3689,16 @@ threads_not_under_control(void)
{
erts_aint32_t res = system_block_state.threads_to_block;
+ ERTS_THR_MEMORY_BARRIER;
+
/* Waiting is always an allowed activity... */
- res -= erts_smp_atomic32_read(&erts_system_block_state.in_activity.wait);
+ res -= erts_smp_atomic32_read_nob(&erts_system_block_state.in_activity.wait);
if (system_block_state.allowed_activities & ERTS_BS_FLG_ALLOW_GC)
- res -= erts_smp_atomic32_read(&erts_system_block_state.in_activity.gc);
+ res -= erts_smp_atomic32_read_nob(&erts_system_block_state.in_activity.gc);
if (system_block_state.allowed_activities & ERTS_BS_FLG_ALLOW_IO)
- res -= erts_smp_atomic32_read(&erts_system_block_state.in_activity.io);
+ res -= erts_smp_atomic32_read_nob(&erts_system_block_state.in_activity.io);
if (res < 0) {
ASSERT(0);
@@ -3756,7 +3758,7 @@ erts_block_system(Uint32 allowed_activities)
}
else {
- erts_smp_atomic32_inc(&erts_system_block_state.do_block);
+ erts_smp_atomic32_inc_nob(&erts_system_block_state.do_block);
/* Someone else might be waiting for us to block... */
if (do_block) {
@@ -3808,11 +3810,11 @@ erts_emergency_block_system(long timeout, Uint32 allowed_activities)
another_blocker = erts_smp_pending_system_block();
system_block_state.emergency = 1;
- erts_smp_atomic32_inc(&erts_system_block_state.do_block);
+ erts_smp_atomic32_inc_nob(&erts_system_block_state.do_block);
if (another_blocker) {
if (is_blocker()) {
- erts_smp_atomic32_dec(&erts_system_block_state.do_block);
+ erts_smp_atomic32_dec_nob(&erts_system_block_state.do_block);
res = 0;
goto done;
}
@@ -3869,7 +3871,7 @@ erts_release_system(void)
if (system_block_state.recursive_block)
system_block_state.recursive_block--;
else {
- do_block = erts_smp_atomic32_dectest(&erts_system_block_state.do_block);
+ do_block = erts_smp_atomic32_dec_read_nob(&erts_system_block_state.do_block);
system_block_state.have_blocker = 0;
if (is_blockable_thread())
system_block_state.threads_to_block++;
@@ -4004,10 +4006,10 @@ erts_system_block_init(void)
/* Global state... */
- erts_smp_atomic32_init(&erts_system_block_state.do_block, 0);
- erts_smp_atomic32_init(&erts_system_block_state.in_activity.wait, 0);
- erts_smp_atomic32_init(&erts_system_block_state.in_activity.gc, 0);
- erts_smp_atomic32_init(&erts_system_block_state.in_activity.io, 0);
+ erts_smp_atomic32_init_nob(&erts_system_block_state.do_block, 0);
+ erts_smp_atomic32_init_nob(&erts_system_block_state.in_activity.wait, 0);
+ erts_smp_atomic32_init_nob(&erts_system_block_state.in_activity.gc, 0);
+ erts_smp_atomic32_init_nob(&erts_system_block_state.in_activity.io, 0);
/* Make sure blockable threads unregister when exiting... */
erts_smp_install_exit_handler(erts_unregister_blockable_thread);
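
threads_not_under_control() compensates for the relaxed reads by issuing one explicit full barrier up front; the per-activity counters themselves are bumped with full-barrier increments on entry. The structure in C11, again as an analogy for the erts primitives:

#include <stdatomic.h>

static atomic_int in_wait, in_gc, in_io; /* like in_activity.* */
static int threads_to_block;

static int not_under_control(void)
{
    int res = threads_to_block;
    atomic_thread_fence(memory_order_seq_cst); /* ERTS_THR_MEMORY_BARRIER */
    /* Relaxed loads are enough after the fence, like the _nob reads. */
    res -= atomic_load_explicit(&in_wait, memory_order_relaxed);
    res -= atomic_load_explicit(&in_gc,   memory_order_relaxed);
    res -= atomic_load_explicit(&in_io,   memory_order_relaxed);
    return res;
}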
diff --git a/erts/emulator/hipe/hipe_native_bif.c b/erts/emulator/hipe/hipe_native_bif.c
index 8d31348496..dfb4ca794a 100644
--- a/erts/emulator/hipe/hipe_native_bif.c
+++ b/erts/emulator/hipe/hipe_native_bif.c
@@ -334,7 +334,7 @@ char *hipe_bs_allocate(int len)
bptr = erts_bin_nrml_alloc(len);
bptr->flags = 0;
bptr->orig_size = len;
- erts_smp_atomic_init(&bptr->refc, 1);
+ erts_smp_atomic_init_nob(&bptr->refc, 1);
return bptr->orig_bytes;
}
@@ -584,7 +584,7 @@ void hipe_clear_timeout(Process *c_p)
void hipe_atomic_inc(int *counter)
{
- erts_smp_atomic_inc((erts_smp_atomic_t*)counter);
+ erts_smp_atomic_inc_nob((erts_smp_atomic_t*)counter);
}
#endif
diff --git a/erts/emulator/sys/common/erl_check_io.c b/erts/emulator/sys/common/erl_check_io.c
index cd4de21d65..57321259f9 100644
--- a/erts/emulator/sys/common/erl_check_io.c
+++ b/erts/emulator/sys/common/erl_check_io.c
@@ -218,7 +218,7 @@ remember_removed(ErtsDrvEventState *state, struct pollset_info* psi)
#ifdef ERTS_SMP
struct removed_fd *fdlp;
ERTS_SMP_LC_ASSERT(erts_smp_lc_mtx_is_locked(fd_mtx(state->fd)));
- if (erts_smp_atomic_read(&psi->in_poll_wait)) {
+ if (erts_smp_atomic_read_nob(&psi->in_poll_wait)) {
state->remove_cnt++;
ASSERT(state->remove_cnt > 0);
fdlp = removed_fd_alloc();
@@ -333,7 +333,7 @@ grow_drv_ev_state(int min_ix)
new_len = max_fds;
erts_smp_mtx_lock(&drv_ev_state_grow_lock);
- if (erts_smp_atomic_read(&drv_ev_state_len) <= min_ix) {
+ if (erts_smp_atomic_read_nob(&drv_ev_state_len) <= min_ix) {
for (i=0; i<DRV_EV_STATE_LOCK_CNT; i++) { /* lock all fd's */
erts_smp_mtx_lock(&drv_ev_state_locks[i].lck);
}
@@ -343,7 +343,7 @@ grow_drv_ev_state(int min_ix)
sizeof(ErtsDrvEventState)*new_len)
: erts_alloc(ERTS_ALC_T_DRV_EV_STATE,
sizeof(ErtsDrvEventState)*new_len));
- for (i = erts_smp_atomic_read(&drv_ev_state_len); i < new_len; i++) {
+ for (i = erts_smp_atomic_read_nob(&drv_ev_state_len); i < new_len; i++) {
drv_ev_state[i].fd = (ErtsSysFdType) i;
drv_ev_state[i].driver.select = NULL;
drv_ev_state[i].events = 0;
@@ -351,7 +351,7 @@ grow_drv_ev_state(int min_ix)
drv_ev_state[i].type = ERTS_EV_TYPE_NONE;
drv_ev_state[i].flags = 0;
}
- erts_smp_atomic_set(&drv_ev_state_len, new_len);
+ erts_smp_atomic_set_nob(&drv_ev_state_len, new_len);
for (i=0; i<DRV_EV_STATE_LOCK_CNT; i++) {
erts_smp_mtx_unlock(&drv_ev_state_locks[i].lck);
}
@@ -497,7 +497,7 @@ ERTS_CIO_EXPORT(driver_select)(ErlDrvPort ix,
&& erts_lc_is_port_locked(erts_drvport2port(ix)));
#ifdef ERTS_SYS_CONTINOUS_FD_NUMBERS
- if ((unsigned)fd >= (unsigned)erts_smp_atomic_read(&drv_ev_state_len)) {
+ if ((unsigned)fd >= (unsigned)erts_smp_atomic_read_nob(&drv_ev_state_len)) {
if (fd < 0) {
return -1;
}
@@ -709,7 +709,7 @@ ERTS_CIO_EXPORT(driver_event)(ErlDrvPort ix,
&& erts_lc_is_port_locked(erts_drvport2port(ix)));
#ifdef ERTS_SYS_CONTINOUS_FD_NUMBERS
- if ((unsigned)fd >= (unsigned)erts_smp_atomic_read(&drv_ev_state_len)) {
+ if ((unsigned)fd >= (unsigned)erts_smp_atomic_read_nob(&drv_ev_state_len)) {
if (fd < 0)
return -1;
if (fd >= max_fds) {
@@ -1156,7 +1156,7 @@ ERTS_CIO_EXPORT(erts_check_io)(int do_wait)
erts_smp_activity_begin(ERTS_ACTIVITY_WAIT, NULL, NULL, NULL);
pollres_len = sizeof(pollres)/sizeof(ErtsPollResFd);
- erts_smp_atomic_set(&pollset.in_poll_wait, 1);
+ erts_smp_atomic_set_nob(&pollset.in_poll_wait, 1);
poll_ret = ERTS_CIO_POLL_WAIT(pollset.ps, pollres, &pollres_len, &wait_time);
@@ -1173,7 +1173,7 @@ ERTS_CIO_EXPORT(erts_check_io)(int do_wait)
#endif
if (poll_ret != 0) {
- erts_smp_atomic_set(&pollset.in_poll_wait, 0);
+ erts_smp_atomic_set_nob(&pollset.in_poll_wait, 0);
forget_removed(&pollset);
if (poll_ret == EAGAIN) {
goto restart;
@@ -1304,7 +1304,7 @@ ERTS_CIO_EXPORT(erts_check_io)(int do_wait)
#endif
}
- erts_smp_atomic_set(&pollset.in_poll_wait, 0);
+ erts_smp_atomic_set_nob(&pollset.in_poll_wait, 0);
forget_removed(&pollset);
}
@@ -1419,7 +1419,7 @@ static void drv_ev_state_free(void *des)
void
ERTS_CIO_EXPORT(erts_init_check_io)(void)
{
- erts_smp_atomic_init(&pollset.in_poll_wait, 0);
+ erts_smp_atomic_init_nob(&pollset.in_poll_wait, 0);
ERTS_CIO_POLL_INIT();
pollset.ps = ERTS_CIO_NEW_POLLSET();
@@ -1441,7 +1441,7 @@ ERTS_CIO_EXPORT(erts_init_check_io)(void)
#endif
#ifdef ERTS_SYS_CONTINOUS_FD_NUMBERS
max_fds = ERTS_CIO_POLL_MAX_FDS();
- erts_smp_atomic_init(&drv_ev_state_len, 0);
+ erts_smp_atomic_init_nob(&drv_ev_state_len, 0);
drv_ev_state = NULL;
erts_smp_mtx_init(&drv_ev_state_grow_lock, "drv_ev_state_grow");
#else
@@ -1479,7 +1479,7 @@ ERTS_CIO_EXPORT(erts_check_io_size)(void)
ERTS_CIO_POLL_INFO(pollset.ps, &pi);
res = pi.memory_size;
#ifdef ERTS_SYS_CONTINOUS_FD_NUMBERS
- res += sizeof(ErtsDrvEventState) * erts_smp_atomic_read(&drv_ev_state_len);
+ res += sizeof(ErtsDrvEventState) * erts_smp_atomic_read_nob(&drv_ev_state_len);
#else
res += safe_hash_table_sz(&drv_ev_state_tab);
{
@@ -1506,7 +1506,7 @@ ERTS_CIO_EXPORT(erts_check_io_info)(void *proc)
ERTS_CIO_POLL_INFO(pollset.ps, &pi);
memory_size = pi.memory_size;
#ifdef ERTS_SYS_CONTINOUS_FD_NUMBERS
- memory_size += sizeof(ErtsDrvEventState) * erts_smp_atomic_read(&drv_ev_state_len);
+ memory_size += sizeof(ErtsDrvEventState) * erts_smp_atomic_read_nob(&drv_ev_state_len);
#else
memory_size += safe_hash_table_sz(&drv_ev_state_tab);
{
@@ -1886,7 +1886,7 @@ ERTS_CIO_EXPORT(erts_check_io_debug)(void)
counters.num_errors = 0;
#ifdef ERTS_SYS_CONTINOUS_FD_NUMBERS
- len = erts_smp_atomic_read(&drv_ev_state_len);
+ len = erts_smp_atomic_read_nob(&drv_ev_state_len);
for (fd = 0; fd < len; fd++) {
doit_erts_check_io_debug((void *) &drv_ev_state[fd], (void *) &counters);
}
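
The in_poll_wait flag in erl_check_io.c can stay at _nob strength because every reader in remember_removed() already holds the per-fd mutex; the flag only tells writers whether a closed fd must be kept alive until the poller returns. A sketch of that deferral decision, with hypothetical callbacks standing in for the removed-fd queue:

#include <stdatomic.h>

static atomic_int in_poll_wait; /* like pollset.in_poll_wait */

/* Called with the fd's mutex held, as remember_removed() asserts.
 * While a thread may be blocked in poll(), the fd is queued for a
 * later forget_removed() pass instead of being closed immediately. */
static int remove_fd(int fd, void (*defer)(int), void (*close_now)(int))
{
    if (atomic_load_explicit(&in_poll_wait, memory_order_relaxed)) {
        defer(fd);
        return 0;
    }
    close_now(fd);
    return 1;
}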
diff --git a/erts/emulator/sys/common/erl_poll.c b/erts/emulator/sys/common/erl_poll.c
index f5c785d683..81f1c95020 100644
--- a/erts/emulator/sys/common/erl_poll.c
+++ b/erts/emulator/sys/common/erl_poll.c
@@ -124,11 +124,11 @@
erts_smp_mtx_unlock(&(PS)->mtx)
#define ERTS_POLLSET_SET_POLLED_CHK(PS) \
- ((int) erts_atomic32_xchg(&(PS)->polled, (erts_aint32_t) 1))
+ ((int) erts_atomic32_xchg_nob(&(PS)->polled, (erts_aint32_t) 1))
#define ERTS_POLLSET_UNSET_POLLED(PS) \
- erts_atomic32_set(&(PS)->polled, (erts_aint32_t) 0)
+ erts_atomic32_set_nob(&(PS)->polled, (erts_aint32_t) 0)
#define ERTS_POLLSET_IS_POLLED(PS) \
- ((int) erts_atomic32_read(&(PS)->polled))
+ ((int) erts_atomic32_read_nob(&(PS)->polled))
#else
@@ -142,11 +142,11 @@
#if ERTS_POLL_USE_UPDATE_REQUESTS_QUEUE
#define ERTS_POLLSET_SET_HAVE_UPDATE_REQUESTS(PS) \
- erts_smp_atomic32_set(&(PS)->have_update_requests, (erts_aint32_t) 1)
+ erts_smp_atomic32_set_nob(&(PS)->have_update_requests, (erts_aint32_t) 1)
#define ERTS_POLLSET_UNSET_HAVE_UPDATE_REQUESTS(PS) \
- erts_smp_atomic32_set(&(PS)->have_update_requests, (erts_aint32_t) 0)
+ erts_smp_atomic32_set_nob(&(PS)->have_update_requests, (erts_aint32_t) 0)
#define ERTS_POLLSET_HAVE_UPDATE_REQUESTS(PS) \
- ((int) erts_smp_atomic32_read(&(PS)->have_update_requests))
+ ((int) erts_smp_atomic32_read_nob(&(PS)->have_update_requests))
#else
#define ERTS_POLLSET_SET_HAVE_UPDATE_REQUESTS(PS)
#define ERTS_POLLSET_UNSET_HAVE_UPDATE_REQUESTS(PS)
@@ -346,7 +346,7 @@ static ERTS_INLINE void
reset_wakeup_state(ErtsPollSet ps)
{
#ifdef ERTS_SMP
- erts_atomic32_set(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN);
+ erts_atomic32_set_nob(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN);
ERTS_THR_MEMORY_BARRIER;
#elif ERTS_POLL_ASYNC_INTERRUPT_SUPPORT
ps->wakeup_state = 0;
@@ -369,7 +369,7 @@ static ERTS_INLINE int
is_interrupted_reset(ErtsPollSet ps)
{
#ifdef ERTS_SMP
- return (erts_atomic32_xchg(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN)
+ return (erts_atomic32_xchg_nob(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN)
== ERTS_POLL_WOKEN_INTR);
#elif ERTS_POLL_ASYNC_INTERRUPT_SUPPORT
int res = ps->wakeup_state == ERTS_POLL_WOKEN_INTR;
@@ -384,12 +384,12 @@ static ERTS_INLINE void
woke_up(ErtsPollSet ps)
{
#ifdef ERTS_SMP
- erts_aint32_t wakeup_state = erts_atomic32_read(&ps->wakeup_state);
+ erts_aint32_t wakeup_state = erts_atomic32_read_nob(&ps->wakeup_state);
if (wakeup_state == ERTS_POLL_NOT_WOKEN)
- (void) erts_atomic32_cmpxchg(&ps->wakeup_state,
- ERTS_POLL_WOKEN,
- ERTS_POLL_NOT_WOKEN);
- ASSERT(erts_atomic32_read(&ps->wakeup_state) != ERTS_POLL_NOT_WOKEN);
+ (void) erts_atomic32_cmpxchg_nob(&ps->wakeup_state,
+ ERTS_POLL_WOKEN,
+ ERTS_POLL_NOT_WOKEN);
+ ASSERT(erts_atomic32_read_nob(&ps->wakeup_state) != ERTS_POLL_NOT_WOKEN);
#elif ERTS_POLL_ASYNC_INTERRUPT_SUPPORT
if (ps->wakeup_state == ERTS_POLL_NOT_WOKEN)
ps->wakeup_state = ERTS_POLL_WOKEN;
@@ -417,7 +417,7 @@ wake_poller(ErtsPollSet ps, int interrupted)
* We might unnecessarily write to the pipe, however,
* that isn't problematic.
*/
- wakeup_state = erts_atomic32_read(&ps->wakeup_state);
+ wakeup_state = erts_atomic32_read_nob(&ps->wakeup_state);
erts_atomic32_set_relb(&ps->wakeup_state, ERTS_POLL_WOKEN_INTR);
}
wake = wakeup_state == ERTS_POLL_NOT_WOKEN;
@@ -839,7 +839,7 @@ write_batch_buf(ErtsPollSet ps, ErtsPollBatchBuf *bbp)
ps->fds_status[fd].flags |= ERTS_POLL_FD_FLG_USEFLBCK;
ASSERT(ps->fds_status[fd].used_events);
ps->fds_status[fd].used_events = 0;
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
update_fallback_pollset(ps, fd);
ASSERT(ps->fds_status[fd].flags & ERTS_POLL_FD_FLG_INFLBCK);
break;
@@ -889,11 +889,11 @@ batch_update_pollset(ErtsPollSet ps, int fd, ErtsPollBatchBuf *bbp)
events = ERTS_POLL_EV_E2N(ps->fds_status[fd].events);
if (!events) {
buf[buf_len].events = POLLREMOVE;
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
}
else if (!ps->fds_status[fd].used_events) {
buf[buf_len].events = events;
- erts_smp_atomic_inc(&ps->no_of_user_fds);
+ erts_smp_atomic_inc_nob(&ps->no_of_user_fds);
}
else {
if ((ps->fds_status[fd].flags & ERTS_POLL_FD_FLG_RST)
@@ -983,12 +983,12 @@ batch_update_pollset(ErtsPollSet ps, int fd, ErtsPollBatchBuf *bbp)
}
if (used_events) {
if (!events) {
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
}
}
else {
if (events)
- erts_smp_atomic_inc(&ps->no_of_user_fds);
+ erts_smp_atomic_inc_nob(&ps->no_of_user_fds);
}
ASSERT((events & ~(ERTS_POLL_EV_IN|ERTS_POLL_EV_OUT)) == 0);
ASSERT((used_events & ~(ERTS_POLL_EV_IN|ERTS_POLL_EV_OUT)) == 0);
@@ -1062,7 +1062,7 @@ update_pollset(ErtsPollSet ps, int fd)
epe.data.fd = epe_templ.data.fd;
res = epoll_ctl(ps->kp_fd, EPOLL_CTL_DEL, fd, &epe);
} while (res != 0 && errno == EINTR);
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
ps->fds_status[fd].used_events = 0;
}
@@ -1070,11 +1070,11 @@ update_pollset(ErtsPollSet ps, int fd)
/* A note on EPOLL_CTL_DEL: linux kernel versions before 2.6.9
need a non-NULL event pointer even though it is ignored... */
op = EPOLL_CTL_DEL;
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
}
else if (!ps->fds_status[fd].used_events) {
op = EPOLL_CTL_ADD;
- erts_smp_atomic_inc(&ps->no_of_user_fds);
+ erts_smp_atomic_inc_nob(&ps->no_of_user_fds);
}
else {
op = EPOLL_CTL_MOD;
@@ -1124,7 +1124,7 @@ update_pollset(ErtsPollSet ps, int fd)
/* Fall through ... */
case EPOLL_CTL_ADD: {
ps->fds_status[fd].flags |= ERTS_POLL_FD_FLG_USEFLBCK;
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
#if ERTS_POLL_USE_CONCURRENT_UPDATE
if (!*update_fallback) {
*update_fallback = 1;
@@ -1212,7 +1212,7 @@ static int update_pollset(ErtsPollSet ps, int fd)
#if ERTS_POLL_USE_FALLBACK
ASSERT(ps->fds_status[fd].flags & ERTS_POLL_FD_FLG_INFLBCK);
#endif
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
last_pix = --ps->no_poll_fds;
if (pix != last_pix) {
/* Move last pix to this pix */
@@ -1239,7 +1239,7 @@ static int update_pollset(ErtsPollSet ps, int fd)
ASSERT(!(ps->fds_status[fd].flags & ERTS_POLL_FD_FLG_INFLBCK)
|| fd == ps->kp_fd);
#endif
- erts_smp_atomic_inc(&ps->no_of_user_fds);
+ erts_smp_atomic_inc_nob(&ps->no_of_user_fds);
ps->fds_status[fd].pix = pix = ps->no_poll_fds++;
if (pix >= ps->poll_fds_len)
grow_poll_fds(ps, pix);
@@ -1290,7 +1290,7 @@ static int update_pollset(ErtsPollSet ps, int fd)
if (!ps->fds_status[fd].used_events) {
ASSERT(events);
- erts_smp_atomic_inc(&ps->no_of_user_fds);
+ erts_smp_atomic_inc_nob(&ps->no_of_user_fds);
#if ERTS_POLL_USE_FALLBACK
ps->no_select_fds++;
ps->fds_status[fd].flags |= ERTS_POLL_FD_FLG_INFLBCK;
@@ -1298,7 +1298,7 @@ static int update_pollset(ErtsPollSet ps, int fd)
}
else if (!events) {
ASSERT(ps->fds_status[fd].used_events);
- erts_smp_atomic_dec(&ps->no_of_user_fds);
+ erts_smp_atomic_dec_nob(&ps->no_of_user_fds);
ps->fds_status[fd].events = events;
#if ERTS_POLL_USE_FALLBACK
ps->no_select_fds--;
@@ -1896,7 +1896,7 @@ static ERTS_INLINE int
check_fd_events(ErtsPollSet ps, SysTimeval *tv, int max_res, int *ps_locked)
{
ASSERT(!*ps_locked);
- if (erts_smp_atomic_read(&ps->no_of_user_fds) == 0
+ if (erts_smp_atomic_read_nob(&ps->no_of_user_fds) == 0
&& tv->tv_usec == 0 && tv->tv_sec == 0) {
/* Nothing to poll and zero timeout; done... */
return 0;
@@ -1937,7 +1937,7 @@ check_fd_events(ErtsPollSet ps, SysTimeval *tv, int max_res, int *ps_locked)
* the maximum number of file descriptors in the poll set.
*/
struct dvpoll poll_res;
- int nfds = (int) erts_smp_atomic_read(&ps->no_of_user_fds);
+ int nfds = (int) erts_smp_atomic_read_nob(&ps->no_of_user_fds);
#ifdef ERTS_SMP
nfds++; /* Wakeup pipe */
#endif
@@ -2143,10 +2143,10 @@ ERTS_POLL_EXPORT(erts_poll_interrupt_timed)(ErtsPollSet ps,
#ifdef ERTS_POLL_COUNT_AVOIDED_WAKEUPS
else {
if (ERTS_POLLSET_IS_POLLED(ps))
- erts_smp_atomic_inc(&ps->no_avoided_wakeups);
- erts_smp_atomic_inc(&ps->no_avoided_interrupts);
+ erts_smp_atomic_inc_nob(&ps->no_avoided_wakeups);
+ erts_smp_atomic_inc_nob(&ps->no_avoided_interrupts);
}
- erts_smp_atomic_inc(&ps->no_interrupt_timed);
+ erts_smp_atomic_inc_nob(&ps->no_interrupt_timed);
#endif
}
#endif
@@ -2208,7 +2208,7 @@ ERTS_POLL_EXPORT(erts_poll_create_pollset)(void)
ps->internal_fd_limit = 0;
ps->fds_status = NULL;
ps->fds_status_len = 0;
- erts_smp_atomic_init(&ps->no_of_user_fds, 0);
+ erts_smp_atomic_init_nob(&ps->no_of_user_fds, 0);
#if ERTS_POLL_USE_KERNEL_POLL
ps->kp_fd = -1;
#if ERTS_POLL_USE_EPOLL
@@ -2260,14 +2260,14 @@ ERTS_POLL_EXPORT(erts_poll_create_pollset)(void)
ps->update_requests.next = NULL;
ps->update_requests.len = 0;
ps->curr_upd_req_block = &ps->update_requests;
- erts_smp_atomic32_init(&ps->have_update_requests, 0);
+ erts_smp_atomic32_init_nob(&ps->have_update_requests, 0);
#endif
#ifdef ERTS_SMP
- erts_atomic32_init(&ps->polled, 0);
+ erts_atomic32_init_nob(&ps->polled, 0);
erts_smp_mtx_init(&ps->mtx, "pollset");
#endif
#ifdef ERTS_SMP
- erts_atomic32_init(&ps->wakeup_state, (erts_aint32_t) 0);
+ erts_atomic32_init_nob(&ps->wakeup_state, (erts_aint32_t) 0);
#elif ERTS_POLL_ASYNC_INTERRUPT_SUPPORT
ps->wakeup_state = 0;
#endif
@@ -2291,11 +2291,11 @@ ERTS_POLL_EXPORT(erts_poll_create_pollset)(void)
ps->internal_fd_limit = kp_fd + 1;
ps->kp_fd = kp_fd;
#endif
- erts_smp_atomic32_init(&ps->timeout, ERTS_AINT32_T_MAX);
+ erts_smp_atomic32_init_nob(&ps->timeout, ERTS_AINT32_T_MAX);
#ifdef ERTS_POLL_COUNT_AVOIDED_WAKEUPS
- erts_smp_atomic_init(&ps->no_avoided_wakeups, 0);
- erts_smp_atomic_init(&ps->no_avoided_interrupts, 0);
- erts_smp_atomic_init(&ps->no_interrupt_timed, 0);
+ erts_smp_atomic_init_nob(&ps->no_avoided_wakeups, 0);
+ erts_smp_atomic_init_nob(&ps->no_avoided_interrupts, 0);
+ erts_smp_atomic_init_nob(&ps->no_interrupt_timed, 0);
#endif
#if ERTS_POLL_USE_UPDATE_REQUESTS_QUEUE
handle_update_requests(ps);
@@ -2303,7 +2303,7 @@ ERTS_POLL_EXPORT(erts_poll_create_pollset)(void)
#if ERTS_POLL_USE_FALLBACK
ps->fallback_used = 0;
#endif
- erts_smp_atomic_set(&ps->no_of_user_fds, 0); /* Don't count wakeup pipe and fallback fd */
+ erts_smp_atomic_set_nob(&ps->no_of_user_fds, 0); /* Don't count wakeup pipe and fallback fd */
erts_smp_spin_lock(&pollsets_lock);
ps->next = pollsets;
@@ -2449,7 +2449,7 @@ ERTS_POLL_EXPORT(erts_poll_info)(ErtsPollSet ps, ErtsPollInfo *pip)
pip->memory_size = size;
- pip->poll_set_size = (int) erts_smp_atomic_read(&ps->no_of_user_fds);
+ pip->poll_set_size = (int) erts_smp_atomic_read_nob(&ps->no_of_user_fds);
#ifdef ERTS_SMP
pip->poll_set_size++; /* Wakeup pipe */
#endif
@@ -2507,9 +2507,9 @@ ERTS_POLL_EXPORT(erts_poll_info)(ErtsPollSet ps, ErtsPollInfo *pip)
pip->max_fds = max_fds;
#ifdef ERTS_POLL_COUNT_AVOIDED_WAKEUPS
- pip->no_avoided_wakeups = erts_smp_atomic_read(&ps->no_avoided_wakeups);
- pip->no_avoided_interrupts = erts_smp_atomic_read(&ps->no_avoided_interrupts);
- pip->no_interrupt_timed = erts_smp_atomic_read(&ps->no_interrupt_timed);
+ pip->no_avoided_wakeups = erts_smp_atomic_read_nob(&ps->no_avoided_wakeups);
+ pip->no_avoided_interrupts = erts_smp_atomic_read_nob(&ps->no_avoided_interrupts);
+ pip->no_interrupt_timed = erts_smp_atomic_read_nob(&ps->no_interrupt_timed);
#endif
ERTS_POLLSET_UNLOCK(ps);
@@ -2529,7 +2529,7 @@ fatal_error(char *format, ...)
{
va_list ap;
- if (ERTS_IS_CRASH_DUMPING || ERTS_GOT_SIGUSR1) {
+ if (ERTS_SOMEONE_IS_CRASH_DUMPING || ERTS_GOT_SIGUSR1) {
/*
* Crash dump writing and reception of sigusr1 (which will
* result in a crash dump) closes all file descriptors. This
@@ -2549,7 +2549,7 @@ fatal_error(char *format, ...)
static void
fatal_error_async_signal_safe(char *error_str)
{
- if (ERTS_IS_CRASH_DUMPING || ERTS_GOT_SIGUSR1) {
+ if (ERTS_SOMEONE_IS_CRASH_DUMPING || ERTS_GOT_SIGUSR1) {
/* See comment above in fatal_error() */
return;
}
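
The hunks above follow the pattern applied throughout this patch: every
call to the old barrier-unspecified atomic API is rewritten to name its
barrier semantics explicitly, almost always as the relaxed "_nob" (no
barrier) variant, with "_relb"/"_mb" used at the few sites that need
ordering. A minimal sketch of the new call-site style, assuming
hypothetical variables counter and flag:

    erts_smp_atomic_t counter;        /* hypothetical variables, */
    erts_smp_atomic32_t flag;         /* for illustration only   */
    erts_aint_t val;

    erts_smp_atomic_init_nob(&counter, 0);
    erts_smp_atomic32_init_nob(&flag, 0);

    erts_smp_atomic_inc_nob(&counter);        /* no ordering required */
    val = erts_smp_atomic_read_nob(&counter);
    erts_smp_atomic32_set_relb(&flag, 1);     /* release: prior stores
                                                 are ordered before the
                                                 flag store */
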
diff --git a/erts/emulator/sys/unix/sys.c b/erts/emulator/sys/unix/sys.c
index bafbbb0f6c..fd15635168 100644
--- a/erts/emulator/sys/unix/sys.c
+++ b/erts/emulator/sys/unix/sys.c
@@ -167,12 +167,12 @@ static int debug_log = 0;
#ifdef ERTS_SMP
erts_smp_atomic32_t erts_got_sigusr1;
#define ERTS_SET_GOT_SIGUSR1 \
- erts_smp_atomic32_set(&erts_got_sigusr1, 1)
+ erts_smp_atomic32_set_mb(&erts_got_sigusr1, 1)
#define ERTS_UNSET_GOT_SIGUSR1 \
- erts_smp_atomic32_set(&erts_got_sigusr1, 0)
+ erts_smp_atomic32_set_mb(&erts_got_sigusr1, 0)
static erts_smp_atomic32_t have_prepared_crash_dump;
#define ERTS_PREPARED_CRASH_DUMP \
- ((int) erts_smp_atomic32_xchg(&have_prepared_crash_dump, 1))
+ ((int) erts_smp_atomic32_xchg_nob(&have_prepared_crash_dump, 1))
#else
volatile int erts_got_sigusr1;
#define ERTS_SET_GOT_SIGUSR1 (erts_got_sigusr1 = 1)
@@ -242,9 +242,9 @@ static int max_files = -1;
#ifdef ERTS_SMP
erts_smp_atomic32_t erts_break_requested;
#define ERTS_SET_BREAK_REQUESTED \
- erts_smp_atomic32_set(&erts_break_requested, (erts_aint32_t) 1)
+ erts_smp_atomic32_set_nob(&erts_break_requested, (erts_aint32_t) 1)
#define ERTS_UNSET_BREAK_REQUESTED \
- erts_smp_atomic32_set(&erts_break_requested, (erts_aint32_t) 0)
+ erts_smp_atomic32_set_nob(&erts_break_requested, (erts_aint32_t) 0)
#else
volatile int erts_break_requested = 0;
#define ERTS_SET_BREAK_REQUESTED (erts_break_requested = 1)
@@ -364,7 +364,7 @@ Uint
erts_sys_misc_mem_sz(void)
{
Uint res = ERTS_CHK_IO_SZ();
- res += erts_smp_atomic_read(&sys_misc_mem_sz);
+ res += erts_smp_atomic_read_mb(&sys_misc_mem_sz);
return res;
}
@@ -509,9 +509,9 @@ erts_sys_pre_init(void)
#endif
}
#ifdef ERTS_SMP
- erts_smp_atomic32_init(&erts_break_requested, 0);
- erts_smp_atomic32_init(&erts_got_sigusr1, 0);
- erts_smp_atomic32_init(&have_prepared_crash_dump, 0);
+ erts_smp_atomic32_init_nob(&erts_break_requested, 0);
+ erts_smp_atomic32_init_nob(&erts_got_sigusr1, 0);
+ erts_smp_atomic32_init_nob(&have_prepared_crash_dump, 0);
#else
erts_break_requested = 0;
erts_got_sigusr1 = 0;
@@ -521,7 +521,7 @@ erts_sys_pre_init(void)
children_died = 0;
#endif
#endif /* USE_THREADS */
- erts_smp_atomic_init(&sys_misc_mem_sz, 0);
+ erts_smp_atomic_init_nob(&sys_misc_mem_sz, 0);
}
void
@@ -553,7 +553,7 @@ erl_sys_init(void)
+ sizeof(CHILD_SETUP_PROG_NAME)
+ 1);
child_setup_prog = erts_alloc(ERTS_ALC_T_CS_PROG_PATH, csp_path_sz);
- erts_smp_atomic_add(&sys_misc_mem_sz, csp_path_sz);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, csp_path_sz);
sprintf(child_setup_prog,
"%s%c%s",
bindir,
@@ -1216,8 +1216,8 @@ static int spawn_init()
sys_sigset(SIGPIPE, SIG_IGN); /* Ignore - we'll handle the write failure */
driver_data = (struct driver_data *)
erts_alloc(ERTS_ALC_T_DRV_TAB, max_files * sizeof(struct driver_data));
- erts_smp_atomic_add(&sys_misc_mem_sz,
- max_files * sizeof(struct driver_data));
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz,
+ max_files * sizeof(struct driver_data));
for (i = 0; i < max_files; i++)
driver_data[i].pid = -1;
@@ -1925,8 +1925,8 @@ static void clear_fd_data(int fd)
{
if (fd_data[fd].sz > 0) {
erts_free(ERTS_ALC_T_FD_ENTRY_BUF, (void *) fd_data[fd].buf);
- ASSERT(erts_smp_atomic_read(&sys_misc_mem_sz) >= fd_data[fd].sz);
- erts_smp_atomic_add(&sys_misc_mem_sz, -1*fd_data[fd].sz);
+ ASSERT(erts_smp_atomic_read_nob(&sys_misc_mem_sz) >= fd_data[fd].sz);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, -1*fd_data[fd].sz);
}
fd_data[fd].buf = NULL;
fd_data[fd].sz = 0;
@@ -2261,7 +2261,7 @@ static void ready_input(ErlDrvData e, ErlDrvEvent ready_fd)
port_inp_failure(port_num, ready_fd, -1);
}
else {
- erts_smp_atomic_add(&sys_misc_mem_sz, h);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, h);
sys_memcpy(buf, cpos, bytes_left);
fd_data[ready_fd].buf = buf;
fd_data[ready_fd].sz = h;
@@ -2465,7 +2465,7 @@ erts_sys_putenv(char *buffer, int sep_ix)
#else
Uint sz = strlen(buffer)+1;
env = erts_alloc(ERTS_ALC_T_PUTENV_STR, sz);
- erts_smp_atomic_add(&sys_misc_mem_sz, sz);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, sz);
strcpy(env,buffer);
#endif
erts_smp_rwmtx_rwlock(&environ_rwmtx);
@@ -2504,8 +2504,8 @@ sys_init_io(void)
{
fd_data = (struct fd_data *)
erts_alloc(ERTS_ALC_T_FD_TAB, max_files * sizeof(struct fd_data));
- erts_smp_atomic_add(&sys_misc_mem_sz,
- max_files * sizeof(struct fd_data));
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz,
+ max_files * sizeof(struct fd_data));
#ifdef USE_THREADS
#ifdef ERTS_SMP
diff --git a/erts/emulator/sys/win32/erl_poll.c b/erts/emulator/sys/win32/erl_poll.c
index 074e2e247f..735c420d8e 100644
--- a/erts/emulator/sys/win32/erl_poll.c
+++ b/erts/emulator/sys/win32/erl_poll.c
@@ -309,9 +309,9 @@ struct ErtsPollSet_ {
#ifdef ERTS_SMP
extern erts_smp_atomic32_t erts_break_requested;
#define ERTS_SET_BREAK_REQUESTED \
- erts_smp_atomic32_set(&erts_break_requested, (erts_aint32_t) 1)
+ erts_smp_atomic32_set_nob(&erts_break_requested, (erts_aint32_t) 1)
#define ERTS_UNSET_BREAK_REQUESTED \
- erts_smp_atomic32_set(&erts_break_requested, (erts_aint32_t) 0)
+ erts_smp_atomic32_set_nob(&erts_break_requested, (erts_aint32_t) 0)
#else
extern volatile int erts_break_requested;
#define ERTS_SET_BREAK_REQUESTED (erts_break_requested = 1)
@@ -371,19 +371,19 @@ do { \
static ERTS_INLINE int
is_io_ready(ErtsPollSet ps)
{
- return erts_atomic32_read(&ps->wakeup_state) == ERTS_POLL_WOKEN_IO_READY;
+ return erts_atomic32_read_nob(&ps->wakeup_state) == ERTS_POLL_WOKEN_IO_READY;
}
static ERTS_INLINE void
woke_up(ErtsPollSet ps)
{
- if (erts_atomic32_read(&ps->wakeup_state) == ERTS_POLL_NOT_WOKEN)
- erts_atomic32_cmpxchg(&ps->wakeup_state,
- ERTS_POLL_WOKEN_TIMEDOUT,
- ERTS_POLL_NOT_WOKEN);
+ if (erts_atomic32_read_nob(&ps->wakeup_state) == ERTS_POLL_NOT_WOKEN)
+ erts_atomic32_cmpxchg_nob(&ps->wakeup_state,
+ ERTS_POLL_WOKEN_TIMEDOUT,
+ ERTS_POLL_NOT_WOKEN);
#ifdef DEBUG
{
- erts_aint32_t wakeup_state = erts_atomic32_read(&ps->wakeup_state);
+ erts_aint32_t wakeup_state = erts_atomic32_read_nob(&ps->wakeup_state);
switch (wakeup_state) {
case ERTS_POLL_WOKEN_IO_READY:
case ERTS_POLL_WOKEN_INTR:
@@ -401,7 +401,7 @@ static ERTS_INLINE int
wakeup_cause(ErtsPollSet ps)
{
int res;
- erts_aint32_t wakeup_state = erts_atomic32_read(&ps->wakeup_state);
+ erts_aint32_t wakeup_state = erts_atomic32_read_nob(&ps->wakeup_state);
switch (wakeup_state) {
case ERTS_POLL_WOKEN_IO_READY:
res = 0;
@@ -439,7 +439,7 @@ poll_wait_timeout(ErtsPollSet ps, SysTimeval *tvp)
* by ResetEvent().
*/
ERTS_THR_MEMORY_BARRIER;
- if (erts_atomic32_read(&ps->wakeup_state) != ERTS_POLL_NOT_WOKEN)
+ if (erts_atomic32_read_nob(&ps->wakeup_state) != ERTS_POLL_NOT_WOKEN)
return (DWORD) 0;
if (timeout > ERTS_AINT32_T_MAX) /* Also prevents DWORD overflow */
@@ -455,17 +455,17 @@ wake_poller(ErtsPollSet ps, int io_ready)
erts_aint32_t wakeup_state;
if (io_ready) {
/* We may set the event multiple times. This is, however, harmless. */
- wakeup_state = erts_atomic32_read(&ps->wakeup_state);
+ wakeup_state = erts_atomic32_read_nob(&ps->wakeup_state);
erts_atomic32_set_relb(&ps->wakeup_state, ERTS_POLL_WOKEN_IO_READY);
}
else {
ERTS_THR_MEMORY_BARRIER;
- wakeup_state = erts_atomic32_read(&ps->wakeup_state);
+ wakeup_state = erts_atomic32_read_nob(&ps->wakeup_state);
while (wakeup_state != ERTS_POLL_WOKEN_IO_READY
&& wakeup_state != ERTS_POLL_WOKEN_INTR) {
- erts_aint32_t act = erts_atomic32_cmpxchg(&ps->wakeup_state,
- ERTS_POLL_WOKEN_INTR,
- wakeup_state);
+ erts_aint32_t act = erts_atomic32_cmpxchg_nob(&ps->wakeup_state,
+ ERTS_POLL_WOKEN_INTR,
+ wakeup_state);
if (act == wakeup_state) {
wakeup_state = act;
break;
@@ -488,13 +488,13 @@ wake_poller(ErtsPollSet ps, int io_ready)
static ERTS_INLINE void
reset_io_ready(ErtsPollSet ps)
{
- erts_atomic32_set(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN);
+ erts_atomic32_set_nob(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN);
}
static ERTS_INLINE void
restore_io_ready(ErtsPollSet ps)
{
- erts_atomic32_set(&ps->wakeup_state, ERTS_POLL_WOKEN_IO_READY);
+ erts_atomic32_set_nob(&ps->wakeup_state, ERTS_POLL_WOKEN_IO_READY);
}
/*
@@ -511,12 +511,12 @@ static ERTS_INLINE void
reset_interrupt(ErtsPollSet ps)
{
/* We need to keep io-ready if set */
- erts_aint32_t wakeup_state = erts_atomic32_read(&ps->wakeup_state);
+ erts_aint32_t wakeup_state = erts_atomic32_read_nob(&ps->wakeup_state);
while (wakeup_state != ERTS_POLL_WOKEN_IO_READY
&& wakeup_state != ERTS_POLL_NOT_WOKEN) {
- erts_aint32_t act = erts_atomic32_cmpxchg(&ps->wakeup_state,
- ERTS_POLL_NOT_WOKEN,
- wakeup_state);
+ erts_aint32_t act = erts_atomic32_cmpxchg_nob(&ps->wakeup_state,
+ ERTS_POLL_NOT_WOKEN,
+ wakeup_state);
if (wakeup_state == act)
break;
wakeup_state = act;
@@ -692,7 +692,7 @@ static void *break_waiter(void *param)
case WAIT_OBJECT_0:
ResetEvent(harr[0]);
erts_mtx_lock(&break_waiter_lock);
- erts_atomic32_set(&break_waiter_state,BREAK_WAITER_GOT_BREAK);
+ erts_atomic32_set_nob(&break_waiter_state,BREAK_WAITER_GOT_BREAK);
ERTS_THR_MEMORY_BARRIER;
SetEvent(break_happened_event);
erts_mtx_unlock(&break_waiter_lock);
@@ -700,7 +700,7 @@ static void *break_waiter(void *param)
case (WAIT_OBJECT_0+1):
ResetEvent(harr[1]);
erts_mtx_lock(&break_waiter_lock);
- erts_atomic32_set(&break_waiter_state,BREAK_WAITER_GOT_HALT);
+ erts_atomic32_set_nob(&break_waiter_state,BREAK_WAITER_GOT_HALT);
ERTS_THR_MEMORY_BARRIER;
SetEvent(break_happened_event);
erts_mtx_unlock(&break_waiter_lock);
@@ -1153,7 +1153,7 @@ int erts_poll_wait(ErtsPollSet ps,
/*HARDDEBUGF(("timeout = %ld",(long) timeout));*/
- if (timeout > 0 && !erts_atomic32_read(&break_waiter_state)) {
+ if (timeout > 0 && !erts_atomic32_read_nob(&break_waiter_state)) {
HANDLE harr[2] = {ps->event_io_ready, break_happened_event};
int num_h = 2;
@@ -1166,10 +1166,10 @@ int erts_poll_wait(ErtsPollSet ps,
}
ERTS_UNSET_BREAK_REQUESTED;
- if(erts_atomic32_read(&break_waiter_state)) {
+ if(erts_atomic32_read_nob(&break_waiter_state)) {
erts_mtx_lock(&break_waiter_lock);
- break_state = erts_atomic32_read(&break_waiter_state);
- erts_atomic32_set(&break_waiter_state,0);
+ break_state = erts_atomic32_read_nob(&break_waiter_state);
+ erts_atomic32_set_nob(&break_waiter_state,0);
ResetEvent(break_happened_event);
erts_mtx_unlock(&break_waiter_lock);
switch (break_state) {
@@ -1236,7 +1236,7 @@ int erts_poll_wait(ErtsPollSet ps,
erts_mtx_unlock(&w->mtx);
}
done:
- erts_smp_atomic32_set(&ps->timeout, ERTS_AINT32_T_MAX);
+ erts_smp_atomic32_set_nob(&ps->timeout, ERTS_AINT32_T_MAX);
*len = num;
ERTS_POLLSET_UNLOCK(ps);
HARDTRACEF(("Out erts_poll_wait"));
@@ -1316,11 +1316,11 @@ ErtsPollSet erts_poll_create_pollset(void)
ps->standby_wait_event = CreateManualEvent(FALSE);
ps->restore_events = 0;
- erts_atomic32_init(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN);
+ erts_atomic32_init_nob(&ps->wakeup_state, ERTS_POLL_NOT_WOKEN);
#ifdef ERTS_SMP
erts_smp_mtx_init(&ps->mtx, "pollset");
#endif
- erts_smp_atomic32_init(&ps->timeout, ERTS_AINT32_T_MAX);
+ erts_smp_atomic32_init_nob(&ps->timeout, ERTS_AINT32_T_MAX);
HARDTRACEF(("Out erts_poll_create_pollset"));
return ps;
@@ -1372,7 +1372,7 @@ void erts_poll_init(void)
erts_mtx_init(&break_waiter_lock,"break_waiter_lock");
break_happened_event = CreateManualEvent(FALSE);
- erts_atomic32_init(&break_waiter_state, 0);
+ erts_atomic32_init_nob(&break_waiter_state, 0);
erts_thr_create(&thread, &break_waiter, NULL, NULL);
ERTS_UNSET_BREAK_REQUESTED;
diff --git a/erts/emulator/sys/win32/sys.c b/erts/emulator/sys/win32/sys.c
index a2159d063c..ce1d376a54 100644
--- a/erts/emulator/sys/win32/sys.c
+++ b/erts/emulator/sys/win32/sys.c
@@ -198,7 +198,7 @@ Uint
erts_sys_misc_mem_sz(void)
{
Uint res = (Uint) erts_check_io_size();
- res += (Uint) erts_smp_atomic_read(&sys_misc_mem_sz);
+ res += (Uint) erts_smp_atomic_read_mb(&sys_misc_mem_sz);
return res;
}
@@ -648,7 +648,7 @@ new_driver_data(int port_num, int packet_bytes, int wait_objs_required, int use_
erts_smp_mtx_unlock(&sys_driver_data_lock);
return NULL;
}
- erts_smp_atomic_add(&sys_misc_mem_sz, dp->inBufSize);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, dp->inBufSize);
dp->outBufSize = 0;
dp->outbuf = NULL;
dp->port_num = port_num;
@@ -733,8 +733,8 @@ release_driver_data(DriverData* dp)
#endif
if (dp->inbuf != NULL) {
- ASSERT(erts_smp_atomic_read(&sys_misc_mem_sz) >= dp->inBufSize);
- erts_smp_atomic_add(&sys_misc_mem_sz, -1*dp->inBufSize);
+ ASSERT(erts_smp_atomic_read_nob(&sys_misc_mem_sz) >= dp->inBufSize);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, -1*dp->inBufSize);
DRV_BUF_FREE(dp->inbuf);
dp->inBufSize = 0;
dp->inbuf = NULL;
@@ -742,8 +742,8 @@ release_driver_data(DriverData* dp)
ASSERT(dp->inBufSize == 0);
if (dp->outbuf != NULL) {
- ASSERT(erts_smp_atomic_read(&sys_misc_mem_sz) >= dp->outBufSize);
- erts_smp_atomic_add(&sys_misc_mem_sz, -1*dp->outBufSize);
+ ASSERT(erts_smp_atomic_read_nob(&sys_misc_mem_sz) >= dp->outBufSize);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, -1*dp->outBufSize);
DRV_BUF_FREE(dp->outbuf);
dp->outBufSize = 0;
dp->outbuf = NULL;
@@ -1162,7 +1162,8 @@ spawn_init(void)
#endif
driver_data = (struct driver_data *)
erts_alloc(ERTS_ALC_T_DRV_TAB, max_files * sizeof(struct driver_data));
- erts_smp_atomic_add(&sys_misc_mem_sz, max_files*sizeof(struct driver_data));
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz,
+ max_files*sizeof(struct driver_data));
for (i = 0; i < max_files; i++)
driver_data[i].port_num = PORT_FREE;
@@ -1698,7 +1699,7 @@ create_child_process
static int create_pipe(HANDLE *phRead, HANDLE *phWrite, BOOL inheritRead, BOOL overlapped_io)
{
SECURITY_ATTRIBUTES sa = {sizeof(SECURITY_ATTRIBUTES), NULL, TRUE};
- char pipe_name[128]; /* Name of pipe. */
+ char pipe_name[256]; /* Name of pipe. */
Uint calls;
/*
@@ -1735,9 +1736,9 @@ static int create_pipe(HANDLE *phRead, HANDLE *phWrite, BOOL inheritRead, BOOL o
* Otherwise, create named pipes.
*/
- calls = (Uint) erts_smp_atomic_inctest(&pipe_creation_counter);
- sprintf(pipe_name, "\\\\.\\pipe\\erlang44_%d_%d",
- getpid(), calls);
+ calls = (UWord) erts_smp_atomic_inc_read_nob(&pipe_creation_counter);
+ erts_snprintf(pipe_name, sizeof(pipe_name),
+ "\\\\.\\pipe\\erlang44_%d_%bpu", getpid(), calls);
DEBUGF(("Creating pipe %s\n", pipe_name));
sa.bInheritHandle = inheritRead;
@@ -2529,7 +2530,7 @@ output(ErlDrvData drv_data, char* buf, int len)
}
dp->outBufSize = pb+len;
- erts_smp_atomic_add(&sys_misc_mem_sz, dp->outBufSize);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, dp->outBufSize);
/*
* Store header bytes (if any).
@@ -2558,8 +2559,8 @@ output(ErlDrvData drv_data, char* buf, int len)
} else {
dp->out.ov.Offset += pb+len; /* For vanilla driver. */
/* XXX OffsetHigh should be changed too. */
- ASSERT(erts_smp_atomic_read(&sys_misc_mem_sz) >= dp->outBufSize);
- erts_smp_atomic_add(&sys_misc_mem_sz, -1*dp->outBufSize);
+ ASSERT(erts_smp_atomic_read_nob(&sys_misc_mem_sz) >= dp->outBufSize);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, -1*dp->outBufSize);
DRV_BUF_FREE(dp->outbuf);
dp->outBufSize = 0;
dp->outbuf = NULL;
@@ -2673,9 +2674,9 @@ ready_input(ErlDrvData drv_data, ErlDrvEvent ready_event)
error = ERROR_NOT_ENOUGH_MEMORY;
break; /* Break out of loop into error handler. */
}
- ASSERT(erts_smp_atomic_read(&sys_misc_mem_sz) >= dp->inBufSize);
- erts_smp_atomic_add(&sys_misc_mem_sz,
- dp->totalNeeded - dp->inBufSize);
+ ASSERT(erts_smp_atomic_read_nob(&sys_misc_mem_sz) >= dp->inBufSize);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz,
+ dp->totalNeeded - dp->inBufSize);
dp->inBufSize = dp->totalNeeded;
dp->inbuf = new_buf;
}
@@ -2775,8 +2776,8 @@ ready_output(ErlDrvData drv_data, ErlDrvEvent ready_event)
write... */
return;
}
- ASSERT(erts_smp_atomic_read(&sys_misc_mem_sz) >= dp->outBufSize);
- erts_smp_atomic_add(&sys_misc_mem_sz, -1*dp->outBufSize);
+ ASSERT(erts_smp_atomic_read_nob(&sys_misc_mem_sz) >= dp->outBufSize);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, -1*dp->outBufSize);
DRV_BUF_FREE(dp->outbuf);
dp->outBufSize = 0;
dp->outbuf = NULL;
@@ -2926,8 +2927,8 @@ Preload* sys_preloaded(void)
(num_preloaded+1)*sizeof(Preload));
res_name = erts_alloc(ERTS_ALC_T_PRELOADED,
(num_preloaded+1)*sizeof(unsigned));
- erts_smp_atomic_add(&sys_misc_mem_sz,
- (num_preloaded+1)*sizeof(Preload)
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz,
+ (num_preloaded+1)*sizeof(Preload)
+ (num_preloaded+1)*sizeof(unsigned));
for (i = 0; i < num_preloaded; i++) {
int n;
@@ -2939,7 +2940,7 @@ Preload* sys_preloaded(void)
n = GETWORD(data);
data += 2;
preloaded[i].name = erts_alloc(ERTS_ALC_T_PRELOADED, n+1);
- erts_smp_atomic_add(&sys_misc_mem_sz, n+1);
+ erts_smp_atomic_add_nob(&sys_misc_mem_sz, n+1);
sys_memcpy(preloaded[i].name, data, n);
preloaded[i].name[n] = '\0';
data += n;
@@ -3281,7 +3282,7 @@ erts_sys_pre_init(void)
#endif
}
#endif
- erts_smp_atomic_init(&sys_misc_mem_sz, 0);
+ erts_smp_atomic_init_nob(&sys_misc_mem_sz, 0);
}
void noinherit_std_handle(DWORD type)
@@ -3310,7 +3311,7 @@ void erl_sys_init(void)
erts_smp_tsd_key_create(&win32_errstr_key);
InitializeCriticalSection(&htbc_lock);
#endif
- erts_smp_atomic_init(&pipe_creation_counter,0);
+ erts_smp_atomic_init_nob(&pipe_creation_counter,0);
/*
* Test if we have named pipes or not.
*/
diff --git a/erts/emulator/sys/win32/sys_interrupt.c b/erts/emulator/sys/win32/sys_interrupt.c
index 943c338794..1d73edd30b 100644
--- a/erts/emulator/sys/win32/sys_interrupt.c
+++ b/erts/emulator/sys/win32/sys_interrupt.c
@@ -33,9 +33,9 @@
#ifdef ERTS_SMP
erts_smp_atomic32_t erts_break_requested;
#define ERTS_SET_BREAK_REQUESTED \
- erts_smp_atomic32_set(&erts_break_requested, (erts_aint32_t) 1)
+ erts_smp_atomic32_set_nob(&erts_break_requested, (erts_aint32_t) 1)
#define ERTS_UNSET_BREAK_REQUESTED \
- erts_smp_atomic32_set(&erts_break_requested, (erts_aint32_t) 0)
+ erts_smp_atomic32_set_nob(&erts_break_requested, (erts_aint32_t) 0)
#else
volatile int erts_break_requested = 0;
#define ERTS_SET_BREAK_REQUESTED (erts_break_requested = 1)
diff --git a/erts/include/internal/ethr_atomics.h b/erts/include/internal/ethr_atomics.h
index 1caf4d0567..0f3c26f1df 100644
--- a/erts/include/internal/ethr_atomics.h
+++ b/erts/include/internal/ethr_atomics.h
@@ -1,7 +1,16 @@
/*
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ * This file was automatically generated by the
+ * $ERL_TOP/erts/lib_src/utils/make_atomics_api script.
+ * If you need to make changes, edit the script and
+ * regenerate this file.
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ */
+
+/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -18,709 +27,8818 @@
*/
/*
- * Description: The ethread atomic API
+ * Description: The ethread atomics API
* Author: Rickard Green
*/
-#ifndef ETHR_ATOMIC_H__
-#define ETHR_ATOMIC_H__
+/*
+ * This file maps native atomic implementations to ethread
+ * API atomics. If no native atomic implementation
+ * is available, a less efficient fallback is used instead.
+ * The API consists of 32-bit size, word size (pointer size),
+ * and double word size atomics.
+ *
+ * The following atomic operations are implemented for
+ * 32-bit size and word size atomics:
+ * - cmpxchg
+ * - xchg
+ * - set
+ * - init
+ * - add_read
+ * - read
+ * - inc_read
+ * - dec_read
+ * - add
+ * - inc
+ * - dec
+ * - read_band
+ * - read_bor
+ *
+ * The following atomic operations are implemented for
+ * double word size atomics:
+ * - cmpxchg
+ * - set
+ * - read
+ * - init
+ *
+ * Apart from a function implementing the atomic operation
+ * with unspecified memory barrier semantics, there are
+ * functions implementing each operation with the following
+ * memory barrier semantics:
+ * - rb (read barrier)
+ * - wb (write barrier)
+ * - acqb (acquire barrier)
+ * - relb (release barrier)
+ * - mb (full memory barrier)
+ *
+ * We implement all of these operation/barrier
+ * combinations, regardless of whether they are useful
+ * or not (some of them are useless).
+ *
+ * Double word size atomic functions have the following
+ * form:
+ * ethr_dw_atomic_<OP>[_<BARRIER>]
+ *
+ * Word size atomic functions have the following
+ * form:
+ * ethr_atomic_<OP>[_<BARRIER>]
+ *
+ * 32-bit size atomic functions have the following
+ * form:
+ * ethr_atomic32_<OP>[_<BARRIER>]
+ *
+ * Apart from the operation/barrier functions
+ * described above, 'addr' functions are also
+ * implemented; these return the actual memory address
+ * used by the atomic variable. The 'addr' functions
+ * have no barrier versions.
+ *
+ * The native atomic implementation does not need to
+ * implement all operation/barrier combinations.
+ * Functions that have no native implementation will be
+ * constructed from existing native functionality. These
+ * functions will perform the wanted operation and will
+ * produce sufficient memory barriers, but may
+ * in some cases be less efficient than pure native
+ * versions.
+ *
+ * When we create ethread API operation/barrier functions by
+ * adding barriers before and after native operations it is
+ * assumed that:
+ * - A native read operation begins and ends with a load.
+ * - A native set operation begins and ends with a store.
+ * - An init operation begins with either a load or a store,
+ * and ends with either a load or a store.
+ * - All other operations begin with a load, and end with
+ * either a load or a store.
+ *
+ * This is the minimum functionality that a native
+ * implementation needs to provide:
+ *
+ * - Functions that need to be implemented:
+ *
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_addr
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>]
+ *     (at least one cmpxchg, with any barrier variant)
+ *
+ *   - Macros that need to be defined:
+ *
+ * A macro informing about the presence of the native
+ * implementation:
+ *
+ * - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]
+ *
+ * A macro naming (a string constant) the implementation:
+ *
+ * - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL
+ *
+ * Each implemented native atomic function has to
+ * be accompanied by a defined macro on the following
+ * form informing about its presence:
+ *
+ * - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>]
+ *
+ * A (sparc-v9 style) membar macro:
+ *
+ * - ETHR_MEMBAR(B)
+ *
+ * Which takes a combination of the following macros
+ * or:ed (using |) together:
+ *
+ * - ETHR_LoadLoad
+ * - ETHR_LoadStore
+ * - ETHR_StoreLoad
+ * - ETHR_StoreStore
+ *
+ */
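
As a concrete illustration of the naming scheme described above, a
hedged sketch of a one-way handoff built from these operation/barrier
combinations ('ready' and 'payload' are hypothetical):

    static ethr_atomic32_t ready;  /* assume ethr_atomic32_init(&ready, 0) */
    static int payload;

    /* writer */
    payload = 42;
    ethr_atomic32_set_relb(&ready, 1);   /* release: the payload store
                                            is ordered before the flag
                                            store */

    /* reader */
    while (!ethr_atomic32_read_acqb(&ready)) /* acquire: the flag load
                                                is ordered before the
                                                payload load */
        ;
    /* payload is now guaranteed to read as 42 */
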
-#if !defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-# define ETHR_NEED_ATOMIC_PROTOTYPES__
-#endif
+#ifndef ETHR_ATOMICS_H__
+#define ETHR_ATOMICS_H__
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
+#undef ETHR_AMC_FALLBACK__
+#undef ETHR_AMC_NO_ATMCS__
+#undef ETHR_AMC_ATMC_T__
+#undef ETHR_AMC_ATMC_FUNC__
+
+/* -- 32-bit atomics -- */
+
+#undef ETHR_NAINT32_T__
+#undef ETHR_NATMC32_FUNC__
+#undef ETHR_NATMC32_ADDR_FUNC__
+#undef ETHR_NATMC32_BITS__
+#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# define ETHR_NEED_NATMC32_ADDR
+# define ETHR_NATMC32_ADDR_FUNC__ ethr_native_atomic32_addr
+typedef ethr_native_atomic32_t ethr_atomic32_t;
+# define ETHR_NAINT32_T__ ethr_sint32_t
+# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_NATMC32_BITS__ 32
+#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_NEED_NATMC64_ADDR
+#ifdef ETHR_BIGENDIAN
+# define ETHR_NATMC32_ADDR_FUNC__(VAR) \
+ (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1)
+#else
+# define ETHR_NATMC32_ADDR_FUNC__(VAR) \
+ ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR)))
+#endif
+typedef ethr_native_atomic64_t ethr_atomic32_t;
+# define ETHR_NAINT32_T__ ethr_sint64_t
+# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC32_BITS__ 64
+#else
/*
- * No native atomic implementation available. :(
+ * No native atomics usable for 32-bit atomics :(
* Use fallback...
*/
typedef ethr_sint32_t ethr_atomic32_t;
-typedef ethr_sint_t ethr_atomic_t;
-#else
-/*
- * Map ethread native atomics to ethread API atomics.
- *
- * We do at least have a native atomic implementation that
- * can handle integers of a size larger than or equal to
- * the size of pointers.
- */
+#endif
-/* -- Pointer size atomics -- */
+#undef ETHR_ATMC32_INLINE__
+#ifdef ETHR_NATMC32_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_ATMC32_INLINE__
+# endif
+# define ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_ATMC32_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_ATMC32_FUNC_NAME_
+# define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_ATMC32_FUNC__
+#define ETHR_ATMC32_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+
+
+/* -- Word size atomics -- */
+
+#undef ETHR_NEED_NATMC32_ADDR
+#undef ETHR_NEED_NATMC64_ADDR
#undef ETHR_NAINT_T__
#undef ETHR_NATMC_FUNC__
#undef ETHR_NATMC_ADDR_FUNC__
-#if ETHR_SIZEOF_PTR == 8
-# if defined(ETHR_HAVE_NATIVE_ATOMIC64)
-# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr
+#undef ETHR_NATMC_BITS__
+#if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr
typedef ethr_native_atomic64_t ethr_atomic_t;
-# define ETHR_NAINT_T__ ethr_sint64_t
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-# else
-# error "Missing native atomic implementation"
+# define ETHR_NAINT_T__ ethr_sint64_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_BITS__ 64
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# ifndef ETHR_NEED_NATMC32_ADDR
+# define ETHR_NEED_NATMC32_ADDR
# endif
-#elif ETHR_SIZEOF_PTR == 4
# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic32_addr
-# ifdef ETHR_HAVE_NATIVE_ATOMIC32
typedef ethr_native_atomic32_t ethr_atomic_t;
-# define ETHR_NAINT_T__ ethr_sint32_t
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-# elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_NAINT_T__ ethr_sint32_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_NATMC_BITS__ 32
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+#ifdef ETHR_BIGENDIAN
+# define ETHR_NATMC_ADDR_FUNC__(VAR) \
+ (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1)
+#else
+# define ETHR_NATMC_ADDR_FUNC__(VAR) \
+ ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR)))
+#endif
typedef ethr_native_atomic64_t ethr_atomic_t;
-# define ETHR_NATMC_T__ ethr_native_atomic64_t
-# define ETHR_NAINT_T__ ethr_sint64_t
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_T__ ethr_native_atomic64_t
+# define ETHR_NAINT_T__ ethr_sint64_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_BITS__ 64
+#else
+/*
+ * No native atomics usable for pointer size atomics :(
+ * Use fallback...
+ */
+
+# if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 2
+# define ETHR_AMC_SINT_T__ ethr_sint32_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic32_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+typedef struct {
+ ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__];
+} ethr_amc_t;
+typedef struct {
+ ethr_amc_t amc;
+ ethr_sint_t sint;
+} ethr_atomic_t;
+# else /* locked fallback */
+typedef ethr_sint_t ethr_atomic_t;
+# endif
+#endif
+
+#undef ETHR_ATMC_INLINE__
+#ifdef ETHR_NATMC_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_ATMC_INLINE__
+# endif
+# define ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_ATMC_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_ATMC_FUNC_NAME_
+# define ETHR_INLINE_ATMC_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_ATMC_FUNC__
+#define ETHR_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X)
+
+/* -- Double word atomics -- */
+
+#undef ETHR_SU_DW_NAINT_T__
+#undef ETHR_SU_DW_NATMC_FUNC__
+#undef ETHR_SU_DW_NATMC_ADDR_FUNC__
+#undef ETHR_DW_NATMC_FUNC__
+#undef ETHR_DW_NATMC_ADDR_FUNC__
+#undef ETHR_DW_NATMC_BITS__
+#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)
+# define ETHR_NEED_DW_NATMC_ADDR
+# define ETHR_DW_NATMC_ADDR_FUNC__ ethr_native_dw_atomic_addr
+# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_dw_atomic_t
+# define ETHR_DW_NATMC_FUNC__(X) ethr_native_dw_atomic_ ## X
+# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_su_dw_atomic_ ## X
+# if ETHR_SIZEOF_PTR == 8
+# define ETHR_DW_NATMC_BITS__ 128
+# elif ETHR_SIZEOF_PTR == 4
+# define ETHR_DW_NATMC_BITS__ 64
# else
-# error "Missing native atomic implementation"
+# error "Word size not supported"
+# endif
+# ifdef ETHR_NATIVE_SU_DW_SINT_T
+# define ETHR_SU_DW_NAINT_T__ ETHR_NATIVE_SU_DW_SINT_T
# endif
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+# define ETHR_DW_NATMC_ADDR_FUNC__(VAR) \
+ ((ethr_dw_sint_t *) ethr_native_atomic64_addr((VAR)))
+# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_atomic64_t
+# define ETHR_SU_DW_NAINT_T__ ethr_sint64_t
+# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_DW_NATMC_BITS__ 64
#endif
-/* -- 32-bit atomics -- */
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+#define ETHR_DW_ATOMIC_FUNC__(X) ethr_dw_atomic_ ## X ## _fallback__
+#else
+#define ETHR_DW_ATOMIC_FUNC__(X) ethr_dw_atomic_ ## X
+#endif
-#undef ETHR_NAINT32_T__
-#undef ETHR_NATMC32_FUNC__
-#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
-typedef ethr_native_atomic32_t ethr_atomic32_t;
-# define ETHR_NAINT32_T__ ethr_sint32_t
-# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X
-#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
-typedef ethr_native_atomic64_t ethr_atomic32_t;
-# define ETHR_NAINT32_T__ ethr_sint64_t
-# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X
+#if !defined(ETHR_DW_NATMC_BITS__) || defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+# define ETHR_NEED_DW_FALLBACK__
+#endif
+
+#if defined(ETHR_NEED_DW_FALLBACK__)
+/*
+ * No native atomics usable for double word atomics :(
+ * Use fallback...
+ */
+
+# ifndef ETHR_AMC_FALLBACK__
+# if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 1
+# define ETHR_AMC_SINT_T__ ethr_sint_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X)
+# elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 2
+# define ETHR_AMC_SINT_T__ ethr_sint32_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic32_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+# endif
+# ifdef ETHR_AMC_FALLBACK__
+typedef struct {
+ ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__];
+} ethr_amc_t;
+# endif
+# endif
+
+typedef struct {
+#ifdef ETHR_AMC_FALLBACK__
+ ethr_amc_t amc;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_fallback_t;
+
+#endif
+
+typedef union {
+#ifdef ETHR_NATIVE_DW_ATOMIC_T__
+ ETHR_NATIVE_DW_ATOMIC_T__ native;
+#endif
+#ifdef ETHR_NEED_DW_FALLBACK__
+ ethr_dw_atomic_fallback_t fallback;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_t;
+
+typedef union {
+#ifdef ETHR_SU_DW_NAINT_T__
+ ETHR_SU_DW_NAINT_T__ dw_sint;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_sint_t;
+
+#ifdef ETHR_BIGENDIAN
+# define ETHR_DW_SINT_LOW_WORD 1
+# define ETHR_DW_SINT_HIGH_WORD 0
#else
-# error "Missing native atomic implementation"
+# define ETHR_DW_SINT_LOW_WORD 0
+# define ETHR_DW_SINT_HIGH_WORD 1
#endif
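
A hedged sketch of how a double word value moves through this
representation ('dwa' and 'val' are hypothetical); the endian-correct
indices above select the two halves:

    ethr_dw_atomic_t dwa;
    ethr_dw_sint_t val;

    val.sint[ETHR_DW_SINT_LOW_WORD]  = (ethr_sint_t) 17;
    val.sint[ETHR_DW_SINT_HIGH_WORD] = (ethr_sint_t) 4711;
    ethr_dw_atomic_init(&dwa, &val);

    ethr_dw_atomic_read(&dwa, &val);  /* both words read atomically */
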
+#undef ETHR_DW_ATMC_INLINE__
+#ifdef ETHR_DW_NATMC_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_DW_ATMC_INLINE__
+# endif
+# define ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS
#endif
-#ifdef ETHR_NEED_ATOMIC_PROTOTYPES__
-ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *);
-void ethr_atomic_init(ethr_atomic_t *, ethr_sint_t);
-void ethr_atomic_set(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_read(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *);
-void ethr_atomic_inc(ethr_atomic_t *);
-void ethr_atomic_dec(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *, ethr_sint_t);
-void ethr_atomic_add(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *, ethr_sint_t, ethr_sint_t);
-ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *);
-void ethr_atomic_set_relb(ethr_atomic_t *, ethr_sint_t);
-void ethr_atomic_dec_relb(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *, ethr_sint_t, ethr_sint_t);
-ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *, ethr_sint_t, ethr_sint_t);
-
-ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *);
-void ethr_atomic32_init(ethr_atomic32_t *, ethr_sint32_t);
-void ethr_atomic32_set(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *);
-void ethr_atomic32_inc(ethr_atomic32_t *);
-void ethr_atomic32_dec(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *, ethr_sint32_t);
-void ethr_atomic32_add(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *,
- ethr_sint32_t,
- ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *);
-void ethr_atomic32_set_relb(ethr_atomic32_t *, ethr_sint32_t);
-void ethr_atomic32_dec_relb(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *,
- ethr_sint32_t,
- ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *,
- ethr_sint32_t,
- ethr_sint32_t);
+#if !defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_DW_ATMC_PROTOTYPES__
#endif
-int ethr_init_atomics(void);
+#ifndef ETHR_INLINE_DW_ATMC_FUNC_NAME_
+# define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_DW_ATMC_FUNC__
+#define ETHR_DW_ATMC_FUNC__(X) ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_dw_atomic_ ## X)
+
+#if defined(ETHR_NEED_DW_ATMC_PROTOTYPES__)
+int ethr_have_native_dw_atomic(void);
+#endif
+#if defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+static ETHR_INLINE int
+ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_have_native_dw_atomic)(void)
+{
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ return ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__;
+#elif defined(ETHR_DW_NATMC_BITS__)
+ return 1;
+#else
+ return 0;
+#endif
+}
+#endif
+
+/* -- Misc -- */
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+/*
+ * Unusual values are used by read() fallbacks implemented via cmpxchg().
+ * We want to use an unusual value in the hope that it is more
+ * efficient not to match the value in memory.
+ *
+ * - Negative integer values are probably more unusual.
+ * - Very large absolute integer values are probably more unusual.
+ * - Odd pointers are probably more unusual (only char pointers can be odd).
+ */
+# define ETHR_UNUSUAL_SINT32_VAL__ ((ethr_sint32_t) 0x81818181)
+# if ETHR_SIZEOF_PTR == 4
+# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) ETHR_UNUSUAL_SINT32_VAL__)
+# elif ETHR_SIZEOF_PTR == 8
+# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) 0x8181818181818181L)
+# else
+# error "Word size not supported"
+# endif
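
A hedged sketch of such a read() fallback (the function name is
hypothetical): comparing against the unusual value makes the compare
fail, and the store be skipped, in the common case, while the returned
old value is always the current contents of the variable:

    static ETHR_INLINE ethr_sint32_t
    read32_via_cmpxchg_sketch(ethr_atomic32_t *var)
    {
        /* cmpxchg(var, new, expected) returns the actual old value;
           when *var != ETHR_UNUSUAL_SINT32_VAL__ nothing is written */
        return ethr_atomic32_cmpxchg(var,
                                     ETHR_UNUSUAL_SINT32_VAL__,
                                     ETHR_UNUSUAL_SINT32_VAL__);
    }
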
+# if defined(ETHR_NEED_DW_NATMC_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR)
+# error "No ethr_native_dw_atomic_addr() available"
+# endif
+# if defined(ETHR_NEED_NATMC32_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR)
+# error "No ethr_native_atomic32_addr() available"
+# endif
+# if defined(ETHR_NEED_NATMC64_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR)
+# error "No ethr_native_atomic64_addr() available"
+# endif
+#endif
+
+#if defined(__GNUC__)
+# ifndef ETHR_COMPILER_BARRIER
+# define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
+# endif
+#elif defined(ETHR_WIN32_THREADS)
+# ifndef ETHR_COMPILER_BARRIER
+# include <intrin.h>
+# pragma intrinsic(_ReadWriteBarrier)
+# define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
+# endif
+#endif
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
+void ethr_compiler_barrier_fallback(void);
+#ifndef ETHR_COMPILER_BARRIER
+# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback()
+#endif
+
+int ethr_init_atomics(void);
+
+/* info */
+char **ethr_native_atomic32_ops(void);
+char **ethr_native_atomic64_ops(void);
+char **ethr_native_dw_atomic_ops(void);
+char **ethr_native_su_dw_atomic_ops(void);
+
+#if !defined(ETHR_DW_NATMC_BITS__) && !defined(ETHR_NATMC_BITS__) && !defined(ETHR_NATMC32_BITS__)
/*
- * Fallbacks for atomics used in absence of a native implementation.
+ * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only,
+ * i.e. when no native atomic implementation exists and only our
+ * lock-based atomic fallback is used, a noop is sufficient.
*/
+# undef ETHR_MEMORY_BARRIER
+# undef ETHR_WRITE_MEMORY_BARRIER
+# undef ETHR_READ_MEMORY_BARRIER
+# undef ETHR_READ_DEPEND_MEMORY_BARRIER
+# undef ETHR_MEMBAR
+# define ETHR_MEMBAR(B) do { } while (0)
+#endif
+
+#ifndef ETHR_MEMBAR
+# error "No ETHR_MEMBAR defined"
+#endif
-#define ETHR_ATOMIC_ADDR_BITS 10
-#define ETHR_ATOMIC_ADDR_SHIFT 6
+#define ETHR_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore)
+#define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMBAR(ETHR_StoreStore)
+#define ETHR_READ_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad)
+#ifdef ETHR_READ_DEPEND_MEMORY_BARRIER
+# undef ETHR_ORDERED_READ_DEPEND
+#else
+# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER
+# define ETHR_ORDERED_READ_DEPEND
+#endif
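
For example, acquire and release barriers can be composed from the
ETHR_MEMBAR constituents defined above (the MY_ names are
hypothetical):

    #define MY_ACQUIRE_MEMBAR ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore)
    #define MY_RELEASE_MEMBAR ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore)
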
-typedef struct {
- union {
- ethr_spinlock_t lck;
- char buf[ETHR_CACHE_LINE_SIZE];
- } u;
-} ethr_atomic_protection_t;
-extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATOMIC_ADDR_BITS];
+/* ---------- Double word size atomic implementation ---------- */
+
+
+#ifdef ETHR_NEED_DW_ATMC_PROTOTYPES__
+ethr_sint_t *ethr_dw_atomic_addr(ethr_dw_atomic_t *var);
+int ethr_dw_atomic_cmpxchg(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+void ethr_dw_atomic_set(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ethr_sint_t *ETHR_DW_ATOMIC_FUNC__(addr)(ethr_dw_atomic_t *var);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+void ETHR_DW_ATOMIC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+#endif
+#endif /* ETHR_NEED_DW_ATMC_PROTOTYPES__ */
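
A hedged sketch of a typical update loop over these prototypes (the
helper name is hypothetical), assuming that cmpxchg() takes the new
value in 'val', the expected value in 'old_val', and returns non-zero
on success:

    static void
    dw_inc_low_word_sketch(ethr_dw_atomic_t *var)
    {
        ethr_dw_sint_t old_val, new_val;
        int done;
        do {
            ethr_dw_atomic_read(var, &old_val);
            new_val = old_val;
            new_val.sint[ETHR_DW_SINT_LOW_WORD] += 1;
            done = ethr_dw_atomic_cmpxchg(var, &new_val, &old_val);
        } while (!done);
    }
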
+
+#if (defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) \
+ && (defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)))
+
+#if !defined(ETHR_DW_NATMC_BITS__)
+# error "Missing native atomic implementation"
+#elif defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG
+# define ETHR_HAVE_DW_NATMC_CMPXCHG 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_WB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_WB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_ACQB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RELB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RELB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET
+# undef ETHR_HAVE_DW_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET
+# define ETHR_HAVE_DW_NATMC_SET 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET
+# define ETHR_HAVE_SU_DW_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RB
+# undef ETHR_HAVE_DW_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RB
+# define ETHR_HAVE_DW_NATMC_SET_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_WB
+# undef ETHR_HAVE_DW_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_WB
+# define ETHR_HAVE_DW_NATMC_SET_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_WB
+# define ETHR_HAVE_SU_DW_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_ACQB
+# undef ETHR_HAVE_DW_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_ACQB
+# define ETHR_HAVE_DW_NATMC_SET_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RELB
+# undef ETHR_HAVE_DW_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RELB
+# define ETHR_HAVE_DW_NATMC_SET_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RELB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_MB
+# undef ETHR_HAVE_DW_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_MB
+# define ETHR_HAVE_DW_NATMC_SET_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_MB
+# define ETHR_HAVE_SU_DW_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ
+# undef ETHR_HAVE_DW_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ
+# define ETHR_HAVE_DW_NATMC_READ 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ
+# define ETHR_HAVE_SU_DW_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RB
+# undef ETHR_HAVE_DW_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RB
+# define ETHR_HAVE_DW_NATMC_READ_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_WB
+# undef ETHR_HAVE_DW_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_WB
+# define ETHR_HAVE_DW_NATMC_READ_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_WB
+# define ETHR_HAVE_SU_DW_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_ACQB
+# undef ETHR_HAVE_DW_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_ACQB
+# define ETHR_HAVE_DW_NATMC_READ_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RELB
+# undef ETHR_HAVE_DW_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RELB
+# define ETHR_HAVE_DW_NATMC_READ_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RELB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_MB
+# undef ETHR_HAVE_DW_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_MB
+# define ETHR_HAVE_DW_NATMC_READ_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_MB
+# define ETHR_HAVE_SU_DW_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT
+# undef ETHR_HAVE_DW_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT
+# define ETHR_HAVE_DW_NATMC_INIT 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT
+# define ETHR_HAVE_SU_DW_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RB
+# undef ETHR_HAVE_DW_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RB
+# define ETHR_HAVE_DW_NATMC_INIT_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_WB
+# undef ETHR_HAVE_DW_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_WB
+# define ETHR_HAVE_DW_NATMC_INIT_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_WB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_ACQB
+# undef ETHR_HAVE_DW_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_ACQB
+# define ETHR_HAVE_DW_NATMC_INIT_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RELB
+# undef ETHR_HAVE_DW_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RELB
+# define ETHR_HAVE_DW_NATMC_INIT_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RELB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_MB
+# undef ETHR_HAVE_DW_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_MB
+# define ETHR_HAVE_DW_NATMC_INIT_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_MB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_MB 1
+# endif
+#elif ETHR_DW_NATMC_BITS__ == 64
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_WB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_MB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET
+# undef ETHR_HAVE_SU_DW_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
+# define ETHR_HAVE_SU_DW_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_RB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_WB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
+# define ETHR_HAVE_SU_DW_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_MB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+# define ETHR_HAVE_SU_DW_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ
+# undef ETHR_HAVE_SU_DW_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+# define ETHR_HAVE_SU_DW_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_RB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_WB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+# define ETHR_HAVE_SU_DW_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_MB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+# define ETHR_HAVE_SU_DW_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT
+# undef ETHR_HAVE_SU_DW_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+# define ETHR_HAVE_SU_DW_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_RB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_WB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_MB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_MB 1
+# endif
+#else
+# error "Invalid native atomic size"
+#endif
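
The block that ends here only renames capability probes: whatever the native layer advertises (ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_* for a true double word implementation, ETHR_HAVE_ETHR_NATIVE_ATOMIC64_* when one 64-bit integer covers both words) is first cleared and then re-exported under the generic ETHR_HAVE_DW_NATMC_* / ETHR_HAVE_SU_DW_NATMC_* names that the rest of the header tests. A minimal standalone sketch of the same undef/re-probe idiom, with hypothetical macro names:

/* Capability renaming: a port-specific probe macro is re-exported under
 * a generic name, so later code only ever tests the generic one.
 * HAVE_PORT_CMPXCHG_MB / HAVE_GENERIC_CMPXCHG_MB are made-up names. */
#include <stdio.h>

#define HAVE_PORT_CMPXCHG_MB 1     /* what a port header might define  */

#undef  HAVE_GENERIC_CMPXCHG_MB    /* always start from a clean slate  */
#ifdef  HAVE_PORT_CMPXCHG_MB
#define HAVE_GENERIC_CMPXCHG_MB 1  /* re-export under the generic name */
#endif

int main(void)
{
#ifdef HAVE_GENERIC_CMPXCHG_MB
    puts("generic cmpxchg_mb capability available");
#else
    puts("generic cmpxchg_mb capability missing");
#endif
    return 0;
}
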
+
-#define ETHR_ATOMIC_PTR2LCK__(PTR) \
-(&ethr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATOMIC_ADDR_SHIFT) \
- & ((1 << ETHR_ATOMIC_ADDR_BITS) - 1))].u.lck)
+#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC)
+#if (!defined(ETHR_HAVE_DW_NATMC_CMPXCHG) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
+
+
+/*
+ * Read op used together with cmpxchg() fallback when no native op present.
+ */
+#if defined(ETHR_HAVE_DW_NATMC_READ)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  ETHR_DW_NATMC_FUNC__(read)(VAR, VAL.sint)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  ETHR_DW_NATMC_FUNC__(read_rb)(VAR, VAL.sint)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  ETHR_DW_NATMC_FUNC__(read_wb)(VAR, VAL.sint)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  ETHR_DW_NATMC_FUNC__(read_acqb)(VAR, VAL.sint)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  ETHR_DW_NATMC_FUNC__(read_relb)(VAR, VAL.sint)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  ETHR_DW_NATMC_FUNC__(read_mb)(VAR, VAL.sint)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+  VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the
+ * actual value returned from cmpxchg().
+ */
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+do { \
+ VAL.sint[0] = (ethr_sint_t) 0; \
+ VAL.sint[1] = (ethr_sint_t) 0; \
+} while (0)
+#endif
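
Guessing zero is harmless here because the result only seeds the expected value of the cmpxchg() loop below: a failing cmpxchg() deposits the value it actually found, so a wrong guess costs at most one extra iteration. A sketch of that idiom using C11 atomics rather than the ethread API:

/* Seed a CAS loop with a guessed value; compare_exchange overwrites the
 * expected value with the actual one on failure, so the loop
 * self-corrects after the first attempt. */
#include <stdatomic.h>
#include <stdio.h>

static _Atomic long var = 42;

int main(void)
{
    long expected = 0;              /* the "guess zero" seed            */
    long desired;
    do {
        desired = expected + 1;     /* the OPS part: compute new value  */
    } while (!atomic_compare_exchange_strong(&var, &expected, desired));
    printf("old=%ld new=%ld\n", expected, desired);  /* old=42 new=43 */
    return 0;
}
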
-#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS) \
-do { \
- ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP)); \
- ethr_spin_lock(slp__); \
- { EXPS; } \
- ethr_spin_unlock(slp__); \
+/*
+ * Native cmpxchg() fallback used when no native op present.
+ */
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ int res__; \
+ ethr_dw_sint_t AVAL, exp_act__; \
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, exp_act__); \
+ do { \
+ AVAL.sint[0] = exp_act__.sint[0]; \
+ AVAL.sint[1] = exp_act__.sint[1]; \
+ { OPS; } \
+ res__ = CMPXCHG(VAR, AVAL.sint, exp_act__.sint); \
+    } while (res__ == 0); \
} while (0)
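
This macro is the workhorse for every double word operation that lacks a dedicated native instruction: read the current value, let OPS edit a private copy (AVAL), and retry the cmpxchg() until the variable was not changed underneath. The same retry idiom written out with plain C11 atomics and hypothetical names, as a sketch:

/* Generic modify-via-CAS loop, sketched on a single C11 atomic long. */
#include <stdatomic.h>
#include <stdio.h>

#define CAS_RETRY(VAR, AVAL, OPS)                                       \
do {                                                                    \
    long AVAL;                                                          \
    long exp_act__ = atomic_load(VAR);          /* fallback "read"   */ \
    do {                                                                \
        AVAL = exp_act__;                       /* private copy      */ \
        { OPS; }                                /* caller's edit     */ \
    } while (!atomic_compare_exchange_strong((VAR), &exp_act__, AVAL)); \
} while (0)

static _Atomic long counter = 10;

int main(void)
{
    CAS_RETRY(&counter, aval, aval *= 3);     /* atomically: counter *= 3 */
    printf("%ld\n", atomic_load(&counter));   /* prints 30 */
    return 0;
}
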
+
+#elif defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)
+
+#if (!defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
+
+
+/*
+ * Read op used together with cmpxchg() fallback when no native op present.
+ */
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the
+ * actual value returned from cmpxchg().
+ */
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ((ETHR_SU_DW_NAINT_T__) 0)
+#endif
+
+/*
+ * Native cmpxchg() fallback used when no native op present.
+ */
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ ETHR_SU_DW_NAINT_T__ AVAL; \
+ ETHR_SU_DW_NAINT_T__ new__, act__, exp__; \
+ act__ = ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR); \
+ do { \
+ exp__ = act__; \
+ AVAL = (ETHR_SU_DW_NAINT_T__) act__; \
+ { OPS; } \
+ new__ = (ETHR_SU_DW_NAINT_T__) AVAL; \
+ act__ = CMPXCHG(VAR, new__, exp__); \
+    } while (act__ != exp__); \
+} while (0)
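
The single word ("su") flavour differs from the double word one only in how success is detected: its native cmpxchg() returns the value it actually found instead of a success flag, so the loop compares that return value against the expected one. A sketch with a hypothetical value-returning CAS built on C11 atomics:

/* cmpxchg_ret_old() mimics a native CAS that returns the old value. */
#include <stdatomic.h>
#include <stdio.h>

static long cmpxchg_ret_old(_Atomic long *var, long new__, long exp__)
{
    atomic_compare_exchange_strong(var, &exp__, new__);
    return exp__;                 /* now holds the value actually found */
}

static _Atomic long var = 5;

int main(void)
{
    long act__ = atomic_load(&var);           /* fallback read          */
    long exp__, new__;
    do {
        exp__ = act__;
        new__ = exp__ + 100;                  /* the OPS step           */
        act__ = cmpxchg_ret_old(&var, new__, exp__);
    } while (act__ != exp__);                 /* retry until unchanged  */
    printf("%ld\n", atomic_load(&var));       /* prints 105 */
    return 0;
}
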
+
+
+#else
+# error "Unexpected native double word atomic configuration"
+#endif
+
+
+
+/* --- addr() --- */
+
+static ETHR_INLINE ethr_sint_t *ETHR_DW_ATMC_FUNC__(addr)(ethr_dw_atomic_t *var)
+{
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+ return (ethr_sint_t *) ETHR_DW_NATMC_ADDR_FUNC__((&var->native));
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { return ETHR_DW_ATOMIC_FUNC__(addr)(var); }
+#endif
+
+}
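
addr() simply exposes the storage behind the atomic so that it can be initialized or inspected non-atomically, which is only safe while no other thread can reach the variable. A hypothetical usage sketch, assuming the public ethr_dw_atomic_addr() name:

/* Zero the two-word storage before the variable is published to other
 * threads; names other than the ethr_* ones are made up. */
#include <string.h>
#include "ethread.h"

void dw_zero_before_publish(ethr_dw_atomic_t *var)
{
    ethr_sint_t *raw = ethr_dw_atomic_addr(var);
    memset(raw, 0, 2 * sizeof(ethr_sint_t)); /* unshared, so plain
                                                stores are fine */
}
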
+
+
+/* --- cmpxchg() --- */
+
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg)(var, val, old_val); }
+#endif
+
+ return res;
+}
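
All of the variants in this section funnel a native cmpxchg() into one generic contract: return nonzero on success and, on failure, overwrite *old_val with the value actually found. That contract is what makes the canonical retry loop work. A usage sketch, assuming the public ethr_dw_atomic_* names and an initialized ethread library:

/* Hypothetical helper: atomically increment the first word of a pair. */
#include "ethread.h"

void dw_inc_first(ethr_dw_atomic_t *var)
{
    ethr_dw_sint_t new_val, old_val;
    ethr_dw_atomic_read(var, &old_val);
    do {
        new_val.sint[0] = old_val.sint[0] + 1; /* edit a private copy */
        new_val.sint[1] = old_val.sint[1];
    } while (!ethr_dw_atomic_cmpxchg(var, &new_val, &old_val));
    /* a failing cmpxchg() refreshes old_val with the value it found,
       so each retry starts from current data */
}
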
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_rb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(var, val, old_val); }
+#endif
+
+ return res;
+}
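
cmpxchg_rb() also shows the derivation rule used by every barrier variant in this header: use an exact native match if one exists, otherwise a stronger variant as-is, otherwise a weaker variant patched with an explicit ETHR_MEMBAR(). The weakest case looks roughly like this in C11 terms (an acquire fence is the closest standard analogue of a LoadLoad barrier), as a sketch:

/* Derive a read-barrier CAS from a plain (relaxed) CAS plus a fence. */
#include <stdatomic.h>

int cas_then_read_barrier(_Atomic long *var, long *exp, long desired)
{
    /* the relaxed CAS plays the role of the plain native cmpxchg() */
    int ok = atomic_compare_exchange_strong_explicit(
        var, exp, desired, memory_order_relaxed, memory_order_relaxed);
    /* then order it against later loads */
    atomic_thread_fence(memory_order_acquire);
    return ok;
}
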
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_wb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_acqb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_relb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_mb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+
+/* --- set() --- */
+
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#else
+#error "Missing implementation of ethr_dw_atomic_set()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_rb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_rb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_wb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_wb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_acqb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_acqb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_relb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_relb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_mb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_mb)(var, val); }
+#endif
+
+}
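
set_mb() composes a fully ordered store out of weaker pieces by bracketing it with barriers: everything before the store must complete first (LoadStore|StoreStore), and the store must become visible before anything after it (StoreLoad|StoreStore). Roughly the same composition in C11, as a sketch (a seq_cst fence is the only standard way to obtain StoreLoad ordering):

/* Full-barrier store built from a relaxed store and two fences. */
#include <stdatomic.h>

void set_full_barrier(_Atomic long *var, long val)
{
    atomic_thread_fence(memory_order_release);  /* ~ LoadStore|StoreStore */
    atomic_store_explicit(var, val, memory_order_relaxed);
    atomic_thread_fence(memory_order_seq_cst);  /* ~ StoreLoad|StoreStore */
}
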
+
+
+/* --- read() --- */
+
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#else
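+    /*
+     * No native double word read available; emulate it with a cmpxchg.
+     * Both the expected and the new value are set to an unusual bit
+     * pattern: if the variable happens to contain that pattern, the
+     * cmpxchg rewrites the same value; otherwise it fails and stores
+     * the actual value of the variable in *val.
+     */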
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_rb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_rb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_wb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_wb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_acqb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_acqb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_relb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_relb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_mb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_mb)(var, val); }
+#endif
+
+}
+
+
+/* --- init() --- */
+
+
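+/*
+ * When no native init operation is available at all, init() falls back
+ * on the corresponding set() variant (the final #else branch of each
+ * function below).
+ */
+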
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#else
+ ETHR_DW_ATMC_FUNC__(set)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_DW_ATMC_FUNC__(set_rb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_rb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+#else
+ ETHR_DW_ATMC_FUNC__(set_wb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_wb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_DW_ATMC_FUNC__(set_acqb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_acqb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#else
+ ETHR_DW_ATMC_FUNC__(set_relb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_relb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_DW_ATMC_FUNC__(set_mb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_mb)(var, val); }
+#endif
+
+}
+
+#endif /* ETHR_DW_ATMC_INLINE__ */
+
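+/*
+ * Illustrative sketch only (variable and value names are made up):
+ * typical use of the double word atomic API defined above.
+ *
+ *     ethr_dw_atomic_t v;
+ *     ethr_dw_sint_t init_val, read_val;
+ *
+ *     init_val.sint[0] = 0;
+ *     init_val.sint[1] = 0;
+ *     ethr_dw_atomic_init(&v, &init_val);
+ *     ethr_dw_atomic_read(&v, &read_val);
+ */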
+
+/* ---------- Word size atomic implementation ---------- */
+
+
+#ifdef ETHR_NEED_ATMC_PROTOTYPES__
+ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_rb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_wb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_mb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_mb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_rb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_wb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_acqb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_mb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_rb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_wb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_acqb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_relb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_mb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_mb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_rb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_wb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_relb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_mb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_rb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_wb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_relb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_mb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_rb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_wb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_acqb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_mb(ethr_atomic_t *var);
+void ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_rb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_wb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_acqb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_relb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_mb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_inc(ethr_atomic_t *var);
+void ethr_atomic_inc_rb(ethr_atomic_t *var);
+void ethr_atomic_inc_wb(ethr_atomic_t *var);
+void ethr_atomic_inc_acqb(ethr_atomic_t *var);
+void ethr_atomic_inc_relb(ethr_atomic_t *var);
+void ethr_atomic_inc_mb(ethr_atomic_t *var);
+void ethr_atomic_dec(ethr_atomic_t *var);
+void ethr_atomic_dec_rb(ethr_atomic_t *var);
+void ethr_atomic_dec_wb(ethr_atomic_t *var);
+void ethr_atomic_dec_acqb(ethr_atomic_t *var);
+void ethr_atomic_dec_relb(ethr_atomic_t *var);
+void ethr_atomic_dec_mb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_mb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_mb(ethr_atomic_t *var, ethr_sint_t val);
+#endif /* ETHR_NEED_ATMC_PROTOTYPES__ */
+
+#if (defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \
+ && (defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)))
+
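+/*
+ * Map the generic ETHR_HAVE_NATMC_* feature test macros onto the
+ * feature test macros of the native atomic implementation actually
+ * used, i.e. either the 64-bit or the 32-bit native atomics.
+ */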
+#if !defined(ETHR_NATMC_BITS__)
+# error "Missing native atomic implementation"
+#elif ETHR_NATMC_BITS__ == 64
+# undef ETHR_HAVE_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
+# define ETHR_HAVE_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
+# define ETHR_HAVE_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
+# define ETHR_HAVE_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
+# define ETHR_HAVE_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG
+# define ETHR_HAVE_NATMC_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB
+# define ETHR_HAVE_NATMC_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB
+# define ETHR_HAVE_NATMC_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB
+# define ETHR_HAVE_NATMC_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB
+# define ETHR_HAVE_NATMC_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB
+# define ETHR_HAVE_NATMC_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
+# define ETHR_HAVE_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
+# define ETHR_HAVE_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
+# define ETHR_HAVE_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+# define ETHR_HAVE_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+# define ETHR_HAVE_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+# define ETHR_HAVE_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+# define ETHR_HAVE_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+# define ETHR_HAVE_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+# define ETHR_HAVE_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+# define ETHR_HAVE_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+# define ETHR_HAVE_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+# define ETHR_HAVE_NATMC_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN
+# define ETHR_HAVE_NATMC_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+# define ETHR_HAVE_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+# define ETHR_HAVE_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+# define ETHR_HAVE_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+# define ETHR_HAVE_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+# define ETHR_HAVE_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+# define ETHR_HAVE_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN
+# define ETHR_HAVE_NATMC_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB
+# define ETHR_HAVE_NATMC_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB
+# define ETHR_HAVE_NATMC_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB
+# define ETHR_HAVE_NATMC_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN
+# define ETHR_HAVE_NATMC_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD
+# define ETHR_HAVE_NATMC_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB
+# define ETHR_HAVE_NATMC_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB
+# define ETHR_HAVE_NATMC_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB
+# define ETHR_HAVE_NATMC_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB
+# define ETHR_HAVE_NATMC_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB
+# define ETHR_HAVE_NATMC_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC
+# define ETHR_HAVE_NATMC_INC 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB
+# define ETHR_HAVE_NATMC_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB
+# define ETHR_HAVE_NATMC_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB
+# define ETHR_HAVE_NATMC_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB
+# define ETHR_HAVE_NATMC_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB
+# define ETHR_HAVE_NATMC_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC
+# define ETHR_HAVE_NATMC_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB
+# define ETHR_HAVE_NATMC_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB
+# define ETHR_HAVE_NATMC_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB
+# define ETHR_HAVE_NATMC_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB
+# define ETHR_HAVE_NATMC_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB
+# define ETHR_HAVE_NATMC_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD
+# define ETHR_HAVE_NATMC_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD
+# define ETHR_HAVE_NATMC_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC_OR_RETOLD_MB 1
+# endif
+#elif ETHR_NATMC_BITS__ == 32
+# undef ETHR_HAVE_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG
+# define ETHR_HAVE_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB
+# define ETHR_HAVE_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB
+# define ETHR_HAVE_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB
+# define ETHR_HAVE_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG
+# define ETHR_HAVE_NATMC_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB
+# define ETHR_HAVE_NATMC_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB
+# define ETHR_HAVE_NATMC_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB
+# define ETHR_HAVE_NATMC_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB
+# define ETHR_HAVE_NATMC_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB
+# define ETHR_HAVE_NATMC_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET
+# define ETHR_HAVE_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB
+# define ETHR_HAVE_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB
+# define ETHR_HAVE_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB
+# define ETHR_HAVE_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB
+# define ETHR_HAVE_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB
+# define ETHR_HAVE_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT
+# define ETHR_HAVE_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB
+# define ETHR_HAVE_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB
+# define ETHR_HAVE_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB
+# define ETHR_HAVE_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB
+# define ETHR_HAVE_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB
+# define ETHR_HAVE_NATMC_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN
+# define ETHR_HAVE_NATMC_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ
+# define ETHR_HAVE_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB
+# define ETHR_HAVE_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB
+# define ETHR_HAVE_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB
+# define ETHR_HAVE_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB
+# define ETHR_HAVE_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB
+# define ETHR_HAVE_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN
+# define ETHR_HAVE_NATMC_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB
+# define ETHR_HAVE_NATMC_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB
+# define ETHR_HAVE_NATMC_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB
+# define ETHR_HAVE_NATMC_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN
+# define ETHR_HAVE_NATMC_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD
+# define ETHR_HAVE_NATMC_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB
+# define ETHR_HAVE_NATMC_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB
+# define ETHR_HAVE_NATMC_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB
+# define ETHR_HAVE_NATMC_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB
+# define ETHR_HAVE_NATMC_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB
+# define ETHR_HAVE_NATMC_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC
+# define ETHR_HAVE_NATMC_INC 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB
+# define ETHR_HAVE_NATMC_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB
+# define ETHR_HAVE_NATMC_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB
+# define ETHR_HAVE_NATMC_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB
+# define ETHR_HAVE_NATMC_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB
+# define ETHR_HAVE_NATMC_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC
+# define ETHR_HAVE_NATMC_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB
+# define ETHR_HAVE_NATMC_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB
+# define ETHR_HAVE_NATMC_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB
+# define ETHR_HAVE_NATMC_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB
+# define ETHR_HAVE_NATMC_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB
+# define ETHR_HAVE_NATMC_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD
+# define ETHR_HAVE_NATMC_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD
+# define ETHR_HAVE_NATMC_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC_OR_RETOLD_MB 1
+# endif
+#else
+# error "Invalid native atomic size"
#endif
+#if (!defined(ETHR_HAVE_NATMC_CMPXCHG) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
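+
+/*
+ * At least one native cmpxchg() variant is guaranteed by the checks
+ * above; the remaining operations can be derived from it via the
+ * fallback defined below.
+ */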
+
+
/*
- * --- Pointer size atomics ---------------------------------------------------
+ * Read op used together with the cmpxchg() fallback when no native op is present.
*/
+#if defined(ETHR_HAVE_NATMC_READ)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the actual
+ * value of the atomic returned from cmpxchg(). The guess costs at
+ * most one extra iteration of the fallback loop below.
+ */
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ((ETHR_NAINT_T__) 0)
+#endif
+
+/*
+ * Native cmpxchg() based fallback used when the op has no native implementation.
+ */
+#define ETHR_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ ethr_sint_t AVAL; \
+ ETHR_NAINT_T__ new__, act__, exp__; \
+ act__ = ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR); \
+ do { \
+ exp__ = act__; \
+ AVAL = (ethr_sint_t) act__; \
+ { OPS; } \
+ new__ = (ETHR_NAINT_T__) AVAL; \
+ act__ = CMPXCHG(VAR, new__, exp__); \
+    } while (act__ != exp__); \
+} while (0)
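+
+/*
+ * Illustrative sketch only (var, aval and incr are made up names):
+ * implementing an atomic add via the fallback. AVAL names the
+ * temporary that the OPS statement list updates on each retry:
+ *
+ *     ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg),
+ *                                   var, aval, aval += incr);
+ */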
+
+
+
+/* --- addr() --- */
-static ETHR_INLINE ethr_sint_t *
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_addr)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t *ETHR_ATMC_FUNC__(addr)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
return (ethr_sint_t *) ETHR_NATMC_ADDR_FUNC__(var);
+
+}
+
+
+/* --- cmpxchg() --- */
+
+
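+/*
+ * The barrier variants below follow the same selection scheme as the
+ * double word operations above: the best matching native cmpxchg() is
+ * picked and, when weaker than requested, combined with explicit
+ * ETHR_MEMBAR() calls.
+ */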
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
#else
- return (ethr_sint_t *) var;
+#error "Missing implementation of ethr_atomic_cmpxchg()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_init)(ethr_atomic_t *var, ethr_sint_t i)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_rb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) i);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic_cmpxchg_rb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_set)(ethr_atomic_t *var, ethr_sint_t i)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_wb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) i);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic_cmpxchg_wb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_acqb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic_cmpxchg_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_relb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#else
+#error "Missing implementation of ethr_atomic_cmpxchg_relb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_mb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_cmpxchg_mb()!"
#endif
+ return res;
}
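+
+/*
+ * Note on the _mb compositions above: when only a weaker native
+ * cmpxchg exists, a leading ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad)
+ * orders prior accesses before the operation and a trailing full
+ * membar orders it before subsequent accesses. Barriers already
+ * implied by the chosen native variant are omitted from the
+ * corresponding side, e.g. no leading barrier is issued before
+ * cmpxchg_relb and no trailing barrier after cmpxchg_acqb.
+ */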
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_add)(ethr_atomic_t *var, ethr_sint_t incr)
+
+/* --- xchg() --- */
+
+
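+/*
+ * xchg: prefers a native xchg variant; with none available it is
+ * emulated on top of native cmpxchg through the
+ * ETHR_NATMC_CMPXCHG_FALLBACK__ macro. Judging from its arguments,
+ * that macro presumably expands to a cmpxchg retry loop of roughly
+ * this shape (a sketch, not the literal expansion):
+ *
+ *     ETHR_NAINT_T__ aval = *var, exp;
+ *     do {
+ *         exp = aval;
+ *         res = aval; aval = val;          <- the OPS argument
+ *         aval = CMPXCHG(var, aval, exp);  <- returns the old value
+ *     } while (aval != exp);
+ */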
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) incr);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += incr);
+#error "Missing implementation of ethr_atomic_xchg()!"
#endif
-}
-
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_add_read)(ethr_atomic_t *var, ethr_sint_t i)
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_rb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) i);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
+#error "Missing implementation of ethr_atomic_xchg_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += i; res = *var);
+#if defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#else
+#error "Missing implementation of ethr_atomic_xchg_wb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_xchg_acqb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_relb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(inc)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#error "Missing implementation of ethr_atomic_xchg_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_mb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(dec)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_xchg_mb()!"
+#endif
+ return res;
+}
+
+
+/* --- set() --- */
+
+
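+/*
+ * set: a plain atomic store. Any native set variant providing
+ * sufficient ordering is used directly; with none available, the
+ * operation degrades to the corresponding xchg variant and the
+ * returned previous value is discarded.
+ */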
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET)
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_mb)(var, val);
+#endif
+}
+
+
+/* --- init() --- */
+
+
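+/*
+ * init: the first store to a variable. The native layer may provide
+ * an init that is cheaper than set, presumably because the variable
+ * is assumed not yet visible to other threads; without a native
+ * init, the corresponding set variant is used instead.
+ */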
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#else
+ ETHR_ATMC_FUNC__(set)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATMC_FUNC__(set_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+#else
+ ETHR_ATMC_FUNC__(set_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC_FUNC__(set_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#else
+ ETHR_ATMC_FUNC__(set_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC_FUNC__(set_mb)(var, val);
+#endif
+}
+
+
+/* --- add_read() --- */
+
+
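+/*
+ * add_read: fetch-add returning the new value. Maps onto native
+ * add_return when present; otherwise it falls back to the cmpxchg
+ * retry loop, with "aval += val; res = aval" as the OPS argument.
+ */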
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic_add_read()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic_add_read_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic_add_read_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_add_read_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#error "Missing implementation of ethr_atomic_add_read_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_mb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic_add_read_mb()!"
+#endif
+ return res;
+}
+
+
+/* --- read() --- */
+
+
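+/*
+ * read: with no native read at all, the value is fetched via
+ * cmpxchg(var, ETHR_UNUSUAL_SINT_VAL__, ETHR_UNUSUAL_SINT_VAL__).
+ * If the variable differs from the "unusual" value, the compare
+ * fails and the current value is returned; if it happens to match,
+ * the same value is stored back. Either way the variable is left
+ * unchanged and its value is returned.
+ */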
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#if defined(ETHR_HAVE_NATMC_READ)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_rb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_rb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_wb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_wb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_acqb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#if defined(ETHR_HAVE_NATMC_READ_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_acqb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_relb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_relb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_band)(ethr_atomic_t *var,
- ethr_sint_t mask)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_mb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var,
- (ETHR_NAINT_T__) mask);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_mb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
+ return res;
+}
+
+
+/* --- inc_read() --- */
+
+
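+/*
+ * inc_read: native inc_return when available; otherwise simply
+ * add_read(var, 1) with the matching barrier variant.
+ */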
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= mask);
+#if defined(ETHR_HAVE_NATMC_INC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read)(var, (ethr_sint_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_rb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) 1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_bor)(ethr_atomic_t *var,
- ethr_sint_t mask)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_wb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var,
- (ETHR_NAINT_T__) mask);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
#else
+ res = ETHR_ATMC_FUNC__(add_read_wb)(var, (ethr_sint_t) 1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_acqb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_acqb)(var, (ethr_sint_t) 1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_relb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= mask);
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_relb)(var, (ethr_sint_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_mb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_mb)(var, (ethr_sint_t) 1);
#endif
+ return res;
}
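
[Illustrative note, not part of the patch.] Every operation above is generated from the same fallback ladder: use the native operation with the exact barrier semantics when the platform provides it, otherwise take the closest native variant and compose the missing ordering with an explicit ETHR_MEMBAR(), and as a last resort delegate to the corresponding add_read variant with an operand of 1 (or -1 for the dec forms below). Note the placement asymmetry: fences that strengthen towards acquire are issued after the native operation, fences that strengthen towards release before it. For instance, on a platform that only defines ETHR_HAVE_NATMC_INC_RETURN, the preprocessor reduces inc_read_acqb() above to the selected branch, shown expanded here:

    static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_acqb)(ethr_atomic_t *var)
    {
        ethr_sint_t res;
        /* plain native fetch-and-increment... */
        res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
        /* ...upgraded to (at least) acquire by a trailing fence */
        ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
        return res;
    }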
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_xchg)(ethr_atomic_t *var, ethr_sint_t new)
+
+/* --- dec_read() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var,
- (ETHR_NAINT_T__) new);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
#else
+ res = ETHR_ATMC_FUNC__(add_read)(var, (ethr_sint_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_rb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = new);
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) -1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_wb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_wb)(var, (ethr_sint_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(ethr_atomic_t *var,
- ethr_sint_t new,
- ethr_sint_t exp)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_acqb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var,
- (ETHR_NAINT_T__) new,
- (ETHR_NAINT_T__) exp);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+ res = ETHR_ATMC_FUNC__(add_read_acqb)(var, (ethr_sint_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_relb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var,
- {
- res = *var;
- if (__builtin_expect(res == exp, 1))
- *var = new;
- });
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_relb)(var, (ethr_sint_t) -1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_mb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_mb)(var, (ethr_sint_t) -1);
#endif
+ return res;
}
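
[Illustrative note, not part of the patch.] The relb/acqb pairing on dec_read is what reference-counting callers typically want: release ordering on every drop so the dropper's prior writes are published, and acquire only on the final drop before the object is reclaimed. A minimal usage sketch, assuming a hypothetical my_obj_t that embeds its count:

    #include <stdlib.h>

    typedef struct { ethr_atomic_t refc; /* ...payload... */ } my_obj_t;

    static void my_obj_release(my_obj_t *p)
    {
        /* release: our writes to *p become visible before the count drops */
        if (ethr_atomic_dec_read_relb(&p->refc) == 0) {
            /* acquire: pair with the releases of all other droppers
               before we are allowed to touch and free the payload */
            ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
            free(p);
        }
    }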
-/*
- * Important memory barrier requirements.
- *
- * The following atomic operations *must* supply a memory barrier of
- * at least the type specified by its suffix:
- * _acqb = acquire barrier
- * _relb = release barrier
- */
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_acqb)(ethr_atomic_t *var)
+/* --- add() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#if defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_read)(var);
+ (void) ETHR_ATMC_FUNC__(add_read)(var, val);
#endif
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read_acqb)(ethr_atomic_t *var)
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_rb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#if defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read)(var);
+ (void) ETHR_ATMC_FUNC__(add_read_rb)(var, val);
#endif
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_set_relb)(ethr_atomic_t *var,
- ethr_sint_t val)
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_wb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#if defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic_set)(var, val);
+ (void) ETHR_ATMC_FUNC__(add_read_wb)(var, val);
#endif
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_relb)(ethr_atomic_t *var)
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_acqb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
+#if defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(add_read_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(add_read_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(add_read_mb)(var, val);
+#endif
+}
+
+
+/* --- inc() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC)
+ ETHR_NATMC_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_rb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_NATMC_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_wb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_wb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_acqb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_NATMC_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_acqb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_relb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_relb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_mb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_mb)(var);
+#endif
+}
+
+
+/* --- dec() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_NATMC_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_rb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_NATMC_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_wb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_wb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_acqb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_NATMC_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_acqb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_relb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_relb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_mb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
ETHR_NATMC_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_mb)(var);
+#endif
+}
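
[Illustrative note, not part of the patch.] add(), inc() and dec() are the value-discarding counterparts of the *_read() families above; when no native void form exists they fall back on the corresponding *_read() variant and cast the result away, so they are never weaker than their read-returning siblings, only possibly cheaper. The plain forms suit unordered statistics counters, e.g.:

    static ethr_atomic_t msg_count;   /* assume ethr_atomic_init(&msg_count, 0) ran */

    static void note_message(void)
    {
        ethr_atomic_inc(&msg_count);          /* no ordering needed */
    }

    static ethr_sint_t message_count(void)
    {
        return ethr_atomic_read(&msg_count);  /* relaxed read suffices */
    }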
+
+
+/* --- read_band() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic_read_band()!"
+#endif
+ return res;
+}
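
[Illustrative note, not part of the patch.] read_band() is the first family here with no arithmetic fallback: when no native and_retold variant exists, it is synthesized from compare-and-swap through ETHR_NATMC_CMPXCHG_FALLBACK__, which is defined earlier in this header. In essence (paraphrased, not the verbatim definition) that macro is the classic CAS retry loop:

    /* CMPXCHG: a native cmpxchg; VAR: the atomic; AVAL: name of the scratch
     * variable exposed to OPS; OPS: statements that read AVAL (the old value)
     * and update it in place to the desired new value. */
    #define ETHR_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS)      \
    do {                                                                \
        ethr_sint_t AVAL;                                               \
        ETHR_NAINT_T__ new__, act__, exp__;                             \
        act__ = ETHR_NATMC_FUNC__(read)(VAR);                           \
        do {                                                            \
            exp__ = act__;               /* value we expect to find */  \
            AVAL = (ethr_sint_t) act__;  /* expose old value to OPS */  \
            { OPS; }                     /* e.g. res = aval; aval &= val */ \
            new__ = (ETHR_NAINT_T__) AVAL;                              \
            act__ = CMPXCHG(VAR, new__, exp__);                         \
        } while (act__ != exp__);        /* retry if another thread raced us */ \
    } while (0)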
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic_read_band_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec)(var);
+#error "Missing implementation of ethr_atomic_read_band_wb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read_relb)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_acqb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read)(var);
+#error "Missing implementation of ethr_atomic_read_band_acqb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg_acqb)(ethr_atomic_t *var,
- ethr_sint_t new,
- ethr_sint_t exp)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_relb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var,
- (ETHR_NAINT_T__) new,
- (ETHR_NAINT_T__) exp);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(var, new, exp);
+#error "Missing implementation of ethr_atomic_read_band_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg_relb)(ethr_atomic_t *var,
- ethr_sint_t new,
- ethr_sint_t exp)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_mb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var,
- (ETHR_NAINT_T__) new,
- (ETHR_NAINT_T__) exp);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(var, new, exp);
+#error "Missing implementation of ethr_atomic_read_band_mb()!"
#endif
+ return res;
}
+
+/* --- read_bor() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic_read_bor()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_relb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_mb()!"
+#endif
+ return res;
+}
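
[Illustrative note, not part of the patch.] Since read_bor() returns the old value, it doubles as a one-shot test-and-set on a flag word: the caller sets the bit and learns, in the same atomic step, whether it was already set. A sketch with a made-up flag name:

    #define MY_FLAG_SHUTDOWN ((ethr_sint_t) 1)

    static int request_shutdown(ethr_atomic_t *flags)
    {
        /* full barrier: sidesteps reasoning about which direction matters */
        ethr_sint_t old = ethr_atomic_read_bor_mb(flags, MY_FLAG_SHUTDOWN);
        return (old & MY_FLAG_SHUTDOWN) != 0;   /* nonzero: already requested */
    }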
+
+#endif /* ETHR_ATMC_INLINE__ */
+
+
+/* ---------- 32-bit atomic implementation ---------- */
+
+
+#ifdef ETHR_NEED_ATMC32_PROTOTYPES__
+ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_rb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_wb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_mb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_rb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_wb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_relb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_mb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_rb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_wb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_relb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_mb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_rb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_wb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_acqb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_mb(ethr_atomic32_t *var);
+void ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_inc(ethr_atomic32_t *var);
+void ethr_atomic32_inc_rb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_wb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_acqb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_relb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_mb(ethr_atomic32_t *var);
+void ethr_atomic32_dec(ethr_atomic32_t *var);
+void ethr_atomic32_dec_rb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_wb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_acqb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_relb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_mb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+#endif /* ETHR_NEED_ATMC32_PROTOTYPES__ */
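
[Illustrative note, not part of the patch.] The 32-bit API mirrors the word-sized one operation for operation and barrier for barrier; only the types change to ethr_atomic32_t/ethr_sint32_t. It is used the same way whenever a fixed-width counter is wanted regardless of word size, e.g.:

    static ethr_atomic32_t seq;   /* assume ethr_atomic32_init(&seq, 0) ran */

    static ethr_sint32_t next_seq(void)
    {
        /* same naming scheme: inc_read with a full-barrier suffix */
        return ethr_atomic32_inc_read_mb(&seq);
    }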
+
+#if (defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) \
+ && (defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__)))
+
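+/*
+ * Map the ETHR_HAVE_NATMC32_* capability macros onto whichever native
+ * implementation backs the 32-bit API: the native 64-bit ops when
+ * ETHR_NATMC32_BITS__ is 64, or the native 32-bit ops when it is 32.
+ */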
+#if !defined(ETHR_NATMC32_BITS__)
+# error "Missing native atomic implementation"
+#elif ETHR_NATMC32_BITS__ == 64
+# undef ETHR_HAVE_NATMC32_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
+# define ETHR_HAVE_NATMC32_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
+# define ETHR_HAVE_NATMC32_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC32_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
+# define ETHR_HAVE_NATMC32_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG
+# define ETHR_HAVE_NATMC32_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB
+# define ETHR_HAVE_NATMC32_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB
+# define ETHR_HAVE_NATMC32_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB
+# define ETHR_HAVE_NATMC32_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB
+# define ETHR_HAVE_NATMC32_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB
+# define ETHR_HAVE_NATMC32_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
+# define ETHR_HAVE_NATMC32_SET 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
+# define ETHR_HAVE_NATMC32_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
+# define ETHR_HAVE_NATMC32_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+# define ETHR_HAVE_NATMC32_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+# define ETHR_HAVE_NATMC32_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+# define ETHR_HAVE_NATMC32_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+# define ETHR_HAVE_NATMC32_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+# define ETHR_HAVE_NATMC32_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+# define ETHR_HAVE_NATMC32_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+# define ETHR_HAVE_NATMC32_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+# define ETHR_HAVE_NATMC32_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+# define ETHR_HAVE_NATMC32_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN
+# define ETHR_HAVE_NATMC32_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+# define ETHR_HAVE_NATMC32_READ 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+# define ETHR_HAVE_NATMC32_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+# define ETHR_HAVE_NATMC32_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+# define ETHR_HAVE_NATMC32_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+# define ETHR_HAVE_NATMC32_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+# define ETHR_HAVE_NATMC32_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN
+# define ETHR_HAVE_NATMC32_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB
+# define ETHR_HAVE_NATMC32_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB
+# define ETHR_HAVE_NATMC32_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN
+# define ETHR_HAVE_NATMC32_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD
+# define ETHR_HAVE_NATMC32_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB
+# define ETHR_HAVE_NATMC32_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB
+# define ETHR_HAVE_NATMC32_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB
+# define ETHR_HAVE_NATMC32_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB
+# define ETHR_HAVE_NATMC32_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB
+# define ETHR_HAVE_NATMC32_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC
+# define ETHR_HAVE_NATMC32_INC 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB
+# define ETHR_HAVE_NATMC32_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB
+# define ETHR_HAVE_NATMC32_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB
+# define ETHR_HAVE_NATMC32_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB
+# define ETHR_HAVE_NATMC32_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB
+# define ETHR_HAVE_NATMC32_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC
+# define ETHR_HAVE_NATMC32_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB
+# define ETHR_HAVE_NATMC32_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB
+# define ETHR_HAVE_NATMC32_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB
+# define ETHR_HAVE_NATMC32_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB
+# define ETHR_HAVE_NATMC32_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB
+# define ETHR_HAVE_NATMC32_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD
+# define ETHR_HAVE_NATMC32_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD
+# define ETHR_HAVE_NATMC32_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_MB 1
+# endif
+#elif ETHR_NATMC32_BITS__ == 32
+# undef ETHR_HAVE_NATMC32_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG
+# define ETHR_HAVE_NATMC32_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB
+# define ETHR_HAVE_NATMC32_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC32_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB
+# define ETHR_HAVE_NATMC32_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG
+# define ETHR_HAVE_NATMC32_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB
+# define ETHR_HAVE_NATMC32_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB
+# define ETHR_HAVE_NATMC32_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB
+# define ETHR_HAVE_NATMC32_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB
+# define ETHR_HAVE_NATMC32_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB
+# define ETHR_HAVE_NATMC32_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET
+# define ETHR_HAVE_NATMC32_SET 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB
+# define ETHR_HAVE_NATMC32_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB
+# define ETHR_HAVE_NATMC32_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB
+# define ETHR_HAVE_NATMC32_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB
+# define ETHR_HAVE_NATMC32_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB
+# define ETHR_HAVE_NATMC32_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT
+# define ETHR_HAVE_NATMC32_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB
+# define ETHR_HAVE_NATMC32_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB
+# define ETHR_HAVE_NATMC32_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB
+# define ETHR_HAVE_NATMC32_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB
+# define ETHR_HAVE_NATMC32_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB
+# define ETHR_HAVE_NATMC32_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN
+# define ETHR_HAVE_NATMC32_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ
+# define ETHR_HAVE_NATMC32_READ 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB
+# define ETHR_HAVE_NATMC32_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB
+# define ETHR_HAVE_NATMC32_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB
+# define ETHR_HAVE_NATMC32_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB
+# define ETHR_HAVE_NATMC32_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB
+# define ETHR_HAVE_NATMC32_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN
+# define ETHR_HAVE_NATMC32_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB
+# define ETHR_HAVE_NATMC32_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB
+# define ETHR_HAVE_NATMC32_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN
+# define ETHR_HAVE_NATMC32_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD
+# define ETHR_HAVE_NATMC32_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB
+# define ETHR_HAVE_NATMC32_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB
+# define ETHR_HAVE_NATMC32_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB
+# define ETHR_HAVE_NATMC32_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB
+# define ETHR_HAVE_NATMC32_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB
+# define ETHR_HAVE_NATMC32_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC
+# define ETHR_HAVE_NATMC32_INC 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB
+# define ETHR_HAVE_NATMC32_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB
+# define ETHR_HAVE_NATMC32_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB
+# define ETHR_HAVE_NATMC32_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB
+# define ETHR_HAVE_NATMC32_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB
+# define ETHR_HAVE_NATMC32_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC
+# define ETHR_HAVE_NATMC32_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB
+# define ETHR_HAVE_NATMC32_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB
+# define ETHR_HAVE_NATMC32_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB
+# define ETHR_HAVE_NATMC32_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB
+# define ETHR_HAVE_NATMC32_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB
+# define ETHR_HAVE_NATMC32_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD
+# define ETHR_HAVE_NATMC32_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD
+# define ETHR_HAVE_NATMC32_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_MB 1
+# endif
+#else
+# error "Invalid native atomic size"
+#endif
+
+#if (!defined(ETHR_HAVE_NATMC32_CMPXCHG) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
+
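+/*
+ * At least one cmpxchg() variant must be provided natively; it is the
+ * primitive from which every remaining op can be synthesized via
+ * ETHR_NATMC32_CMPXCHG_FALLBACK__() below.
+ */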
+
/*
- * --- 32-bit atomics ---------------------------------------------------------
+ * Read op used together with the cmpxchg() fallback when no native op is present.
*/
+#if defined(ETHR_HAVE_NATMC32_READ)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the
+ * actual value returned by cmpxchg() instead.
+ */
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ((ETHR_NAINT32_T__) 0)
+#endif
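+
+/*
+ * A wrong initial guess is harmless: it only seeds the retry loop, and
+ * a failing cmpxchg() hands back the actual value, so at most one
+ * extra iteration is paid for the guess.
+ */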
+
+/*
+ * Fallback based on native cmpxchg(), used when no native version of an op is present.
+ */
+#define ETHR_NATMC32_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ ethr_sint32_t AVAL; \
+ ETHR_NAINT32_T__ new__, act__, exp__; \
+ act__ = ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR); \
+ do { \
+ exp__ = act__; \
+ AVAL = (ethr_sint32_t) act__; \
+ { OPS; } \
+ new__ = (ETHR_NAINT32_T__) AVAL; \
+ act__ = CMPXCHG(VAR, new__, exp__); \
+ } while (__builtin_expect(act__ != exp__, 0)); \
+} while (0)
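+
+/*
+ * Illustrative expansion (sketch): the xchg() fallback below passes
+ * `res = aval; aval = val' as OPS, which makes the loop behave roughly
+ * as:
+ *
+ *   act = read(var);
+ *   do {
+ *       exp = act;
+ *       res = act;                     old value out
+ *       act = cmpxchg(var, val, exp);  try to install the new value
+ *   } while (act != exp);              lost a race; retry
+ */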
-static ETHR_INLINE ethr_sint32_t *
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_addr)(ethr_atomic32_t *var)
+
+
+/* --- addr() --- */
+
+static ETHR_INLINE ethr_sint32_t *ETHR_ATMC32_FUNC__(addr)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return ethr_native_atomic32_addr(var);
+    return (ethr_sint32_t *) ETHR_NATMC32_ADDR_FUNC__(var);
+}
+
+
+/* --- cmpxchg() --- */
+
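+/*
+ * Selection pattern used for this and every op below: use the exact
+ * matching native barrier variant when present; otherwise a stronger
+ * native variant; otherwise some other variant bracketed by explicit
+ * ETHR_MEMBAR() calls that supply the missing ordering.
+ */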
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
#else
- return (ethr_sint32_t *) var;
+#error "Missing implementation of ethr_atomic32_cmpxchg()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_init)(ethr_atomic32_t *var,
- ethr_sint32_t i)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_rb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) i);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic32_cmpxchg_rb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set)(ethr_atomic32_t *var, ethr_sint32_t i)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_wb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) i);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic32_cmpxchg_wb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_acqb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic32_cmpxchg_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_relb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#else
+#error "Missing implementation of ethr_atomic32_cmpxchg_relb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_mb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_cmpxchg_mb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_add)(ethr_atomic32_t *var,
- ethr_sint32_t incr)
+
+/* --- xchg() --- */
+
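+/*
+ * When no native xchg() variant is available, the op is synthesized
+ * from the best available native cmpxchg() via
+ * ETHR_NATMC32_CMPXCHG_FALLBACK__(), again with extra membars where
+ * the cmpxchg variant is weaker than requested.
+ */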
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) incr);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += incr);
+#error "Missing implementation of ethr_atomic32_xchg()!"
#endif
-}
-
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_add_read)(ethr_atomic32_t *var,
- ethr_sint32_t i)
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) i);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
+#error "Missing implementation of ethr_atomic32_xchg_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += i; res = *var);
+#if defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#else
+#error "Missing implementation of ethr_atomic32_xchg_wb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_xchg_acqb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(inc)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#error "Missing implementation of ethr_atomic32_xchg_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(dec)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_xchg_mb()!"
+#endif
+ return res;
+}
+
+
+/* --- set() --- */
+
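+/*
+ * When no native set() variant exists at all, an atomic store is
+ * obtained by performing the matching xchg() and discarding the
+ * returned old value.
+ */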
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_relb)(var, val);
+#endif
+}
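+
+/*
+ * Note the asymmetry: the *_relb variants issue their membar before
+ * the native store, ordering earlier loads and stores ahead of it,
+ * while the *_acqb variants issue their membar after the store.
+ */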
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_mb)(var, val);
+#endif
+}
+
+
+/* --- init() --- */
+
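+/*
+ * init() mirrors set() but maps onto the native init() ops; when no
+ * native init() variant exists at all, the matching set() above is
+ * used instead.
+ */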
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_ATMC32_FUNC__(set)(var, val);
#endif
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read)(ethr_atomic32_t *var)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#if defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
+ ETHR_ATMC32_FUNC__(set_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+#else
+ ETHR_ATMC32_FUNC__(set_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC32_FUNC__(set_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#else
+ ETHR_ATMC32_FUNC__(set_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC32_FUNC__(set_mb)(var, val);
+#endif
+}
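
Each barrier-qualified operation above follows the same selection order: use a native variant with exactly the requested barrier if one exists, fall back to a stronger native variant, and otherwise combine a weaker variant with an explicit ETHR_MEMBAR() fence on the appropriate side of the access. A minimal C11 analogue of that last fallback, with illustrative names that are not part of the ethread API:

    #include <stdatomic.h>

    static inline void store_relb(atomic_int *var, int val)
    {
    #if defined(HAVE_NATIVE_STORE_RELEASE)   /* hypothetical config macro */
        atomic_store_explicit(var, val, memory_order_release);
    #else
        /* weaker primitive plus an explicit fence, as in the _relb
           fallback branches above */
        atomic_thread_fence(memory_order_release);
        atomic_store_explicit(var, val, memory_order_relaxed);
    #endif
    }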
+
+
+/* --- add_read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic32_add_read()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic32_add_read_rb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
#else
+#error "Missing implementation of ethr_atomic32_add_read_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_add_read_acqb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic32_add_read_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_band)(ethr_atomic32_t *var,
- ethr_sint32_t mask)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) mask);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic32_add_read_mb()!"
+#endif
+ return res;
+}
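
When no native add_return variant is available at all, the chain falls back to ETHR_NATMC32_CMPXCHG_FALLBACK__, which retries a native compare-and-exchange until the update "aval += val; res = aval" has been applied atomically. A sketch of that retry loop in portable C11, assuming only a compare-exchange primitive (the names are illustrative, not the actual macro expansion):

    #include <stdatomic.h>
    #include <stdint.h>

    static inline int32_t add_read_via_cas(_Atomic int32_t *var, int32_t val)
    {
        int32_t expected = atomic_load_explicit(var, memory_order_relaxed);
        int32_t desired;
        do {
            desired = expected + val;        /* "aval += val; res = aval" */
        } while (!atomic_compare_exchange_weak_explicit(
                     var, &expected, desired,
                     memory_order_seq_cst, memory_order_relaxed));
        return desired;                 /* the new value, as add_read returns */
    }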
+
+
+/* --- read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= mask);
+#if defined(ETHR_HAVE_NATMC32_READ)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_rb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_rb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_bor)(ethr_atomic32_t *var,
- ethr_sint32_t mask)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_wb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return
- (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var,
- (ETHR_NAINT32_T__) mask);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_wb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_acqb)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= mask);
+#if defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_acqb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_relb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_relb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_xchg)(ethr_atomic32_t *var,
- ethr_sint32_t new)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_mb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var,
- (ETHR_NAINT32_T__) new);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_mb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
+ return res;
+}
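
With no native read at all, read() degenerates to a compare-and-exchange with ETHR_UNUSUAL_SINT32_VAL__ as both the expected and the new value: on a miss the operation returns the current value without changing it, and on the rare hit it stores the value that was already there, which is harmless. A C11 sketch of the trick, where UNUSUAL is a hypothetical stand-in for ETHR_UNUSUAL_SINT32_VAL__:

    #include <stdatomic.h>
    #include <stdint.h>

    #define UNUSUAL ((int32_t) 0xdeadbeef)   /* assumed rarely-stored value */

    static inline int32_t read_via_cmpxchg(_Atomic int32_t *var)
    {
        int32_t exp = UNUSUAL;
        /* on failure exp is overwritten with the current value; on the
           rare success the same value is stored back */
        atomic_compare_exchange_strong_explicit(
            var, &exp, UNUSUAL,
            memory_order_seq_cst, memory_order_seq_cst);
        return exp;
    }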
+
+
+/* --- inc_read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = new);
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read)(var, (ethr_sint32_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_rb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_rb)(var, (ethr_sint32_t) 1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_wb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var,
- (ETHR_NAINT32_T__) new,
- (ETHR_NAINT32_T__) exp);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
#else
+ res = ETHR_ATMC32_FUNC__(add_read_wb)(var, (ethr_sint32_t) 1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_acqb)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var,
- {
- res = *var;
- if (__builtin_expect(res == exp, 1))
- *var = new;
- });
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_acqb)(var, (ethr_sint32_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_relb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_relb)(var, (ethr_sint32_t) 1);
#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_mb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_mb)(var, (ethr_sint32_t) 1);
+#endif
+ return res;
}
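
inc_read() and the dec_read() family below are thin wrappers: when no native inc_return/dec_return variant exists, they delegate to add_read() with +1 or -1. Like the native *_return operations, they yield the value after the update. A C11 analogue (illustrative name):

    #include <stdatomic.h>
    #include <stdint.h>

    static inline int32_t inc_read(_Atomic int32_t *var)
    {
        /* fetch_add returns the old value, so add 1 to get the new one */
        return atomic_fetch_add_explicit(var, 1, memory_order_seq_cst) + 1;
    }
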
-/*
- * Important memory barrier requirements.
- *
- * The following atomic operations *must* supply a memory barrier of
- * at least the type specified by its suffix:
- * _acqb = acquire barrier
- * _relb = release barrier
- */
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_acqb)(ethr_atomic32_t *var)
+
+
+/* --- dec_read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read)(var);
+ res = ETHR_ATMC32_FUNC__(add_read)(var, (ethr_sint32_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read_acqb)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_rb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read)(var);
+ res = ETHR_ATMC32_FUNC__(add_read_rb)(var, (ethr_sint32_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set_relb)(ethr_atomic32_t *var,
- ethr_sint32_t val)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_wb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_wb)(var, (ethr_sint32_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_acqb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set)(var, val);
+ res = ETHR_ATMC32_FUNC__(add_read_acqb)(var, (ethr_sint32_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_relb)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_relb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_relb)(var, (ethr_sint32_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_mb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_mb)(var, (ethr_sint32_t) -1);
+#endif
+ return res;
+}
+
+
+/* --- add() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_mb)(var, val);
+#endif
+}
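
The void-returning add(), inc(), and dec() families that follow use the same barrier placement as their value-returning counterparts; when no native void variant exists they simply call the corresponding *_read form and discard the result. A one-line C11 analogue (illustrative name):

    #include <stdatomic.h>
    #include <stdint.h>

    static inline void add32(_Atomic int32_t *var, int32_t val)
    {
        /* value-returning primitive with the result discarded */
        (void) atomic_fetch_add_explicit(var, val, memory_order_relaxed);
    }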
+
+
+/* --- inc() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_NATMC32_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_rb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_NATMC32_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_wb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_wb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_acqb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_NATMC32_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_acqb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_relb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_relb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_mb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_mb)(var);
+#endif
+}
+
+
+/* --- dec() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
+#if defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_NATMC32_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(dec_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_rb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_NATMC32_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(dec_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_wb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
ETHR_NATMC32_FUNC__(dec_relb)(var);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec)(var);
+ (void) ETHR_ATMC32_FUNC__(dec_read_wb)(var);
#endif
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read_relb)(ethr_atomic32_t *var)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_acqb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#if defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_NATMC32_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read)(var);
+ (void) ETHR_ATMC32_FUNC__(dec_read_acqb)(var);
#endif
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg_acqb)(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_relb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var,
- (ETHR_NAINT32_T__) new,
- (ETHR_NAINT32_T__) exp);
+#if defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(var, new, exp);
+ (void) ETHR_ATMC32_FUNC__(dec_read_relb)(var);
#endif
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg_relb)(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_mb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(cmpxchg_relb)(var,
- (ETHR_NAINT32_T__) new,
- (ETHR_NAINT32_T__) exp);
+#if defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(var, new, exp);
+ (void) ETHR_ATMC32_FUNC__(dec_read_mb)(var);
#endif
}
-#endif /* ETHR_TRY_INLINE_FUNCS */
-#undef ETHR_NAINT_T__
-#undef ETHR_NATMC_FUNC__
-#undef ETHR_NATMC_ADDR_FUNC__
-#undef ETHR_NAINT32_T__
-#undef ETHR_NATMC32_FUNC__
+
+
+/* --- read_band() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_band()!"
+#endif
+ return res;
+}
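+/*
+ * A rough sketch of what ETHR_NATMC32_CMPXCHG_FALLBACK__(CMPXCHG, VAR,
+ * AVAL, OPS) is assumed to expand to (the real macro is defined earlier
+ * in this header; names here are illustrative only):
+ *
+ *   ETHR_NAINT32_T__ act__, exp__;
+ *   ethr_sint32_t AVAL;
+ *   act__ = ETHR_NATMC32_FUNC__(read)(VAR);
+ *   do {
+ *       exp__ = act__;
+ *       AVAL = (ethr_sint32_t) act__;
+ *       { OPS; }       (e.g. res = aval; aval &= val)
+ *       act__ = CMPXCHG(VAR, (ETHR_NAINT32_T__) AVAL, exp__);
+ *   } while (act__ != exp__);
+ */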
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_relb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_mb()!"
+#endif
+ return res;
+}
+
+
+/* --- read_bor() --- */
+
+
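+/*
+ * read_bor() mirrors read_band() above with `aval |= val' substituted
+ * for `aval &= val'; the same native-op preference order and membar
+ * compensation apply to every barrier variant.
+ */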
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_relb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_mb()!"
#endif
+ return res;
+}
+
+#endif /* ETHR_ATMC32_INLINE__ */
+
+#endif /* ETHR_ATOMICS_H__ */
diff --git a/erts/include/internal/ethr_internal.h b/erts/include/internal/ethr_internal.h
index e9c3daf783..c9b1db5b46 100644
--- a/erts/include/internal/ethr_internal.h
+++ b/erts/include/internal/ethr_internal.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -63,5 +63,9 @@ int ethr_late_init_common__(ethr_late_init_data *lid);
void ethr_run_exit_handlers__(void);
void ethr_ts_event_destructor__(void *vtsep);
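+
+/* Presumably used at init time to probe x86 CPU features (such as
+ * double-word cmpxchg and SSE2) that fill in ethr_runtime__.conf. */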
+#if defined(ETHR_X86_RUNTIME_CONF__)
+int ethr_x86_have_cpuid__(void);
+void ethr_x86_cpuid__(int *eax, int *ebx, int *ecx, int *edx);
+#endif
#endif
diff --git a/erts/include/internal/ethr_mutex.h b/erts/include/internal/ethr_mutex.h
index fadaf1e2a4..a0685ea3c0 100644
--- a/erts/include/internal/ethr_mutex.h
+++ b/erts/include/internal/ethr_mutex.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -54,6 +54,10 @@
# endif
#endif
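+/* Defaults to the identity mapping; an implementation file is assumed
+ * to be able to define ETHR_INLINE_MTX_FUNC_NAME_ first in order to
+ * give the inline functions below non-inline names. */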
+#ifndef ETHR_INLINE_MTX_FUNC_NAME_
+# define ETHR_INLINE_MTX_FUNC_NAME_(X) X
+#endif
+
#if defined(ETHR_USE_OWN_RWMTX_IMPL__) || defined(ETHR_USE_OWN_MTX_IMPL__)
#ifdef ETHR_DEBUG
@@ -505,7 +509,7 @@ void ethr_mutex_lock_wait__(ethr_mutex *, ethr_sint32_t);
void ethr_mutex_unlock_wake__(ethr_mutex *, ethr_sint32_t);
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
{
ethr_sint32_t act;
int res;
@@ -529,7 +533,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
{
ethr_sint32_t act;
ETHR_MTX_HARD_DEBUG_FENCE_CHK(mtx);
@@ -549,7 +553,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
{
ethr_sint32_t act;
ETHR_COMPILER_BARRIER;
@@ -574,7 +578,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_MUTEX_IMPL__)
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
{
int res;
res = pthread_mutex_trylock(&mtx->pt_mtx);
@@ -584,7 +588,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
{
int res = pthread_mutex_lock(&mtx->pt_mtx);
if (res != 0)
@@ -592,7 +596,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
{
int res = pthread_mutex_unlock(&mtx->pt_mtx);
if (res != 0)
@@ -615,7 +619,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_MUTEX_IMPL__)
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_tryrdlock(&rwmtx->pt_rwlock);
if (res != 0 && res != EBUSY)
@@ -624,7 +628,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_rdlock(&rwmtx->pt_rwlock);
if (res != 0)
@@ -632,7 +636,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_unlock(&rwmtx->pt_rwlock);
if (res != 0)
@@ -640,7 +644,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_trywrlock(&rwmtx->pt_rwlock);
if (res != 0 && res != EBUSY)
@@ -649,7 +653,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_wrlock(&rwmtx->pt_rwlock);
if (res != 0)
@@ -657,7 +661,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwunlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rwunlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_unlock(&rwmtx->pt_rwlock);
if (res != 0)
diff --git a/erts/include/internal/ethr_optimized_fallbacks.h b/erts/include/internal/ethr_optimized_fallbacks.h
index 8e04692856..45399d18e6 100644
--- a/erts/include/internal/ethr_optimized_fallbacks.h
+++ b/erts/include/internal/ethr_optimized_fallbacks.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -18,152 +18,172 @@
*/
/*
- * Description: "Optimized" fallbacks used when native ops are missing
+ * Description: Optimized fallbacks used when native ops are missing
* Author: Rickard Green
*/
#ifndef ETHR_OPTIMIZED_FALLBACKS_H__
#define ETHR_OPTIMIZED_FALLBACKS_H__
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
-#define ETHR_HAVE_OPTIMIZED_ATOMIC_OPS 1
+#if defined(ETHR_HAVE_NATIVE_SPINLOCKS)
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+
+static ETHR_INLINE int
+ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock)
+{
+ return 0;
+}
+
#endif
-#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
-#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1
#elif defined(ETHR_HAVE_PTHREAD_SPIN_LOCK)
-/* --- Optimized spinlocks using pthread spinlocks -------------------------- */
-#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1
+/* --- Native spinlocks using pthread spinlocks -------------------------- */
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+
+#define ETHR_NATIVE_SPINLOCK_IMPL "pthread"
-typedef pthread_spinlock_t ethr_opt_spinlock_t;
+typedef pthread_spinlock_t ethr_native_spinlock_t;
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
-static ETHR_INLINE int
-ethr_opt_spinlock_init(ethr_opt_spinlock_t *lock)
+static ETHR_INLINE void
+ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
{
- return pthread_spin_init((pthread_spinlock_t *) lock, 0);
+ int err = pthread_spin_init((pthread_spinlock_t *) lock, 0);
+ if (err)
+ ETHR_FATAL_ERROR__(err);
}
static ETHR_INLINE int
-ethr_opt_spinlock_destroy(ethr_opt_spinlock_t *lock)
+ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock)
{
return pthread_spin_destroy((pthread_spinlock_t *) lock);
}
-
-static ETHR_INLINE int
-ethr_opt_spin_unlock(ethr_opt_spinlock_t *lock)
+static ETHR_INLINE void
+ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- return pthread_spin_unlock((pthread_spinlock_t *) lock);
+ int err = pthread_spin_unlock((pthread_spinlock_t *) lock);
+ if (err)
+ ETHR_FATAL_ERROR__(err);
}
-static ETHR_INLINE int
-ethr_opt_spin_lock(ethr_opt_spinlock_t *lock)
+static ETHR_INLINE void
+ethr_native_spin_lock(ethr_native_spinlock_t *lock)
{
- return pthread_spin_lock((pthread_spinlock_t *) lock);
+ int err = pthread_spin_lock((pthread_spinlock_t *) lock);
+ if (err)
+ ETHR_FATAL_ERROR__(err);
}
#endif
-#elif defined(ETHR_HAVE_NATIVE_ATOMICS)
+#elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
/* --- Native spinlocks using native atomics -------------------------------- */
#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1
-
-#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
-typedef ethr_native_atomic32_t ethr_native_spinlock_t;
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
-typedef ethr_native_atomic64_t ethr_native_spinlock_t;
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#else
-# error "Missing native atomic implementation"
-#endif
+
+#define ETHR_NATIVE_SPINLOCK_IMPL "native-atomics"
+
+typedef ethr_atomic32_t ethr_native_spinlock_t;
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+#undef ETHR_NSPN_AOP__
+#define ETHR_NSPN_AOP__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+
static ETHR_INLINE void
ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
{
- ETHR_NATMC_FUNC__(init)(lock, 0);
+ ETHR_NSPN_AOP__(init)(lock, 0);
+}
+
+static ETHR_INLINE int
+ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock)
+{
+ return ETHR_NSPN_AOP__(read)(lock) == 0 ? 0 : EBUSY;
}
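+/* unlock publishes with a release barrier (set_relb) while lock takes
+ * the lock with an acquiring cmpxchg (cmpxchg_acqb); together they
+ * fence the critical section. */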
static ETHR_INLINE void
ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- ETHR_COMPILER_BARRIER;
- ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) == 1);
- ETHR_NATMC_FUNC__(set_relb)(lock, 0);
+ ETHR_ASSERT(ETHR_NSPN_AOP__(read)(lock) == 1);
+ ETHR_NSPN_AOP__(set_relb)(lock, 0);
}
static ETHR_INLINE void
ethr_native_spin_lock(ethr_native_spinlock_t *lock)
{
- while (ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, 1, 0) != 0) {
- while (ETHR_NATMC_FUNC__(read)(lock) != 0)
+ while (ETHR_NSPN_AOP__(cmpxchg_acqb)(lock, 1, 0) != 0) {
+ while (ETHR_NSPN_AOP__(read)(lock) != 0)
ETHR_SPIN_BODY;
}
ETHR_COMPILER_BARRIER;
}
-#endif
+#undef ETHR_NSPN_AOP__
-#undef ETHR_NATMC_FUNC__
+#endif
#endif
-#ifdef ETHR_HAVE_NATIVE_RWSPINLOCKS
-#define ETHR_HAVE_OPTIMIZED_RWSPINLOCKS 1
-#elif defined(ETHR_HAVE_NATIVE_ATOMICS)
+#if defined(ETHR_HAVE_NATIVE_RWSPINLOCKS)
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+
+static ETHR_INLINE int
+ethr_native_rwlock_destroy(ethr_native_rwlock_t *lock)
+{
+ return 0;
+}
+
+#endif
+
+#elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
/* --- Native rwspinlocks using native atomics ------------------------------ */
#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-#define ETHR_HAVE_OPTIMIZED_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "native-atomics"
-#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
-typedef ethr_native_atomic32_t ethr_native_rwlock_t;
-# define ETHR_NAINT_T__ ethr_sint32_t
+typedef ethr_atomic32_t ethr_native_rwlock_t;
# define ETHR_WLOCK_FLAG__ (((ethr_sint32_t) 1) << 30)
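+/* Bit 30 marks a writer; the low bits count active readers. */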
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
-typedef ethr_native_atomic64_t ethr_native_rwlock_t;
-# define ETHR_NAINT_T__ ethr_sint64_t
-# define ETHR_WLOCK_FLAG__ (((ethr_sint64_t) 1) << 62)
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#else
-# error "Missing native atomic implementation"
-#endif
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+#undef ETHR_NRWSPN_AOP__
+#define ETHR_NRWSPN_AOP__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+
static ETHR_INLINE void
ethr_native_rwlock_init(ethr_native_rwlock_t *lock)
{
- ETHR_NATMC_FUNC__(init)(lock, 0);
+ ETHR_NRWSPN_AOP__(init)(lock, 0);
+}
+
+static ETHR_INLINE int
+ethr_native_rwlock_destroy(ethr_native_rwlock_t *lock)
+{
+ return ETHR_NRWSPN_AOP__(read)(lock) == 0 ? 0 : EBUSY;
}
static ETHR_INLINE void
ethr_native_read_unlock(ethr_native_rwlock_t *lock)
{
- ETHR_COMPILER_BARRIER;
-#ifdef DEBUG
- ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) >= 0);
-#endif
- ETHR_NATMC_FUNC__(dec_relb)(lock);
+ ETHR_ASSERT(ETHR_NRWSPN_AOP__(read)(lock) >= 0);
+ ETHR_NRWSPN_AOP__(dec_relb)(lock);
}
static ETHR_INLINE void
ethr_native_read_lock(ethr_native_rwlock_t *lock)
{
- ETHR_NAINT_T__ act, exp = 0;
+ ethr_sint32_t act, exp = 0;
while (1) {
- act = ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, exp+1, exp);
+ act = ETHR_NRWSPN_AOP__(cmpxchg_acqb)(lock, exp+1, exp);
if (act == exp)
break;
+ /* Wait for writer to leave */
while (act & ETHR_WLOCK_FLAG__) {
ETHR_SPIN_BODY;
- act = ETHR_NATMC_FUNC__(read)(lock);
+ act = ETHR_NRWSPN_AOP__(read)(lock);
}
exp = act;
}
@@ -173,36 +193,37 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
static ETHR_INLINE void
ethr_native_write_unlock(ethr_native_rwlock_t *lock)
{
- ETHR_COMPILER_BARRIER;
- ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) == ETHR_WLOCK_FLAG__);
- ETHR_NATMC_FUNC__(set_relb)(lock, 0);
+ ETHR_ASSERT(ETHR_NRWSPN_AOP__(read)(lock) == ETHR_WLOCK_FLAG__);
+ ETHR_NRWSPN_AOP__(set_relb)(lock, 0);
}
static ETHR_INLINE void
ethr_native_write_lock(ethr_native_rwlock_t *lock)
{
- ETHR_NAINT_T__ act, exp = 0;
+ ethr_sint32_t act, exp = 0;
while (1) {
- act = ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, exp|ETHR_WLOCK_FLAG__, exp);
+ act = ETHR_NRWSPN_AOP__(cmpxchg_acqb)(lock, exp|ETHR_WLOCK_FLAG__, exp);
if (act == exp)
break;
- ETHR_SPIN_BODY;
- exp = act & ~ETHR_WLOCK_FLAG__;
+ /* Wait for writer to leave */
+ while (act & ETHR_WLOCK_FLAG__) {
+ ETHR_SPIN_BODY;
+ act = ETHR_NRWSPN_AOP__(read)(lock);
+ }
+ exp = act;
}
act |= ETHR_WLOCK_FLAG__;
/* Wait for readers to leave */
while (act != ETHR_WLOCK_FLAG__) {
ETHR_SPIN_BODY;
- act = ETHR_NATMC_FUNC__(read_acqb)(lock);
+ act = ETHR_NRWSPN_AOP__(read_acqb)(lock);
}
ETHR_COMPILER_BARRIER;
}
-#endif
+#undef ETHR_NRWSPN_AOP__
-#undef ETHR_NAINT_T__
-#undef ETHR_NATMC_FUNC__
-#undef ETHR_WLOCK_FLAG__
+#endif
#endif
diff --git a/erts/include/internal/ethread.h b/erts/include/internal/ethread.h
index 4cd95faf6a..8ad0ded144 100644
--- a/erts/include/internal/ethread.h
+++ b/erts/include/internal/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2004-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2004-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -33,10 +33,6 @@
#include <stdlib.h>
#include "erl_errno.h"
-#undef ETHR_HAVE_OPTIMIZED_ATOMIC_OPS
-#undef ETHR_HAVE_OPTIMIZED_SPINLOCK
-#undef ETHR_HAVE_OPTIMIZED_RWSPINLOCK
-
#if defined(DEBUG)
# define ETHR_DEBUG
#endif
@@ -68,7 +64,7 @@
#endif
/* Assume 64-byte cache line size */
-#define ETHR_CACHE_LINE_SIZE ((ethr_uint_t) 64)
+#define ETHR_CACHE_LINE_SIZE 64
#define ETHR_CACHE_LINE_MASK (ETHR_CACHE_LINE_SIZE - 1)
#define ETHR_CACHE_LINE_ALIGN_SIZE(SZ) \
@@ -251,13 +247,90 @@ typedef ethr_sint64_t ethr_sint_t;
typedef ethr_uint64_t ethr_uint_t;
#endif
-/* __builtin_expect() is needed by both native atomics code
- * and the fallback code */
-#if !defined(__GNUC__) || (__GNUC__ < 2) || (__GNUC__ == 2 && __GNUC_MINOR__ < 96)
+#if defined(ETHR_SIZEOF___INT128_T) && ETHR_SIZEOF___INT128_T == 16
+#define ETHR_HAVE_INT128_T
+typedef __int128_t ethr_sint128_t;
+typedef __uint128_t ethr_uint128_t;
+#endif
+
+#define ETHR_FATAL_ERROR__(ERR) \
+ ethr_fatal_error__(__FILE__, __LINE__, __func__, (ERR))
+
+ETHR_PROTO_NORETURN__ ethr_fatal_error__(const char *file,
+ int line,
+ const char *func,
+ int err);
+
+#if !defined(__GNUC__)
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) 0
+#elif !defined(__GNUC_MINOR__)
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \
+ ((__GNUC__ << 24) >= (((MAJ) << 24) | ((MIN) << 12) | (PL)))
+#elif !defined(__GNUC_PATCHLEVEL__)
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \
+ (((__GNUC__ << 24) | (__GNUC_MINOR__ << 12)) >= (((MAJ) << 24) | ((MIN) << 12) | (PL)))
+#else
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \
+ (((__GNUC__ << 24) | (__GNUC_MINOR__ << 12) | __GNUC_PATCHLEVEL__) >= (((MAJ) << 24) | ((MIN) << 12) | (PL)))
+#endif
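+/* The version triple is packed into a single integer as
+ * (major << 24) | (minor << 12) | patchlevel, so e.g.
+ * ETHR_AT_LEAST_GCC_VSN__(2, 96, 0) is true for gcc 2.96 and later. */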
+
+#if !ETHR_AT_LEAST_GCC_VSN__(2, 96, 0)
#define __builtin_expect(X, Y) (X)
#endif
-/* For CPU-optimised atomics, spinlocks, and rwlocks. */
+#if ETHR_AT_LEAST_GCC_VSN__(3, 1, 1)
+# define ETHR_CHOOSE_EXPR __builtin_choose_expr
+#else
+# define ETHR_CHOOSE_EXPR(B, E1, E2) ((B) ? (E1) : (E2))
+#endif
+
+#if ((defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))) \
+ || (defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_AMD64))))
+# define ETHR_X86_RUNTIME_CONF__
+
+# define ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__ \
+ (__builtin_expect(ethr_runtime__.conf.have_dw_cmpxchg != 0, 1))
+# define ETHR_X86_RUNTIME_CONF_HAVE_NO_DW_CMPXCHG__ \
+ (__builtin_expect(ethr_runtime__.conf.have_dw_cmpxchg == 0, 0))
+# define ETHR_X86_RUNTIME_CONF_HAVE_SSE2__ \
+ (__builtin_expect(ethr_runtime__.conf.have_sse2 != 0, 1))
+# define ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__ \
+ (__builtin_expect(ethr_runtime__.conf.have_sse2 == 0, 0))
+#endif
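+/* The __builtin_expect() hints bias code generation toward modern x86
+ * parts, i.e. double-word cmpxchg and SSE2 being available. */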
+
+#if (defined(__GNUC__) \
+ && !defined(ETHR_PPC_HAVE_LWSYNC) \
+ && !defined(ETHR_PPC_HAVE_NO_LWSYNC) \
+ && (defined(__powerpc__) || defined(__ppc__) || defined(__powerpc64__)))
+# define ETHR_PPC_RUNTIME_CONF__
+
+# define ETHR_PPC_RUNTIME_CONF_HAVE_LWSYNC__ \
+ (__builtin_expect(ethr_runtime__.conf.have_lwsync != 0, 1))
+# define ETHR_PPC_RUNTIME_CONF_HAVE_NO_LWSYNC__ \
+ (__builtin_expect(ethr_runtime__.conf.have_lwsync == 0, 0))
+#endif
+
+typedef struct {
+#if defined(ETHR_X86_RUNTIME_CONF__)
+ int have_dw_cmpxchg;
+ int have_sse2;
+#endif
+#if defined(ETHR_PPC_RUNTIME_CONF__)
+ int have_lwsync;
+#endif
+ int dummy;
+} ethr_runtime_conf_t;
+
+
+typedef union {
+ ethr_runtime_conf_t conf;
+ char pad__[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_runtime_conf_t))+ETHR_CACHE_LINE_SIZE];
+} ethr_runtime_t;
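+/* The pad member rounds the union up past a cache line, presumably so
+ * that these hot read-mostly flags do not share a line with unrelated
+ * mutable data. */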
+
+
+extern ethr_runtime_t ethr_runtime__;
+
+/* For native CPU-optimised atomics, spinlocks, and rwlocks. */
#if !defined(ETHR_DISABLE_NATIVE_IMPLS)
# if defined(__GNUC__)
# if defined(ETHR_PREFER_GCC_NATIVE_IMPLS)
@@ -265,7 +338,7 @@ typedef ethr_uint64_t ethr_uint_t;
# elif defined(ETHR_PREFER_LIBATOMIC_OPS_NATIVE_IMPLS)
# include "libatomic_ops/ethread.h"
# endif
-# ifndef ETHR_HAVE_NATIVE_ATOMICS
+# if !defined(ETHR_HAVE_NATIVE_ATOMIC32) && !defined(ETHR_HAVE_NATIVE_ATOMIC64)
# if ETHR_SIZEOF_PTR == 4
# if defined(__i386__)
# include "i386/ethread.h"
@@ -283,8 +356,10 @@ typedef ethr_uint64_t ethr_uint_t;
# include "sparc64/ethread.h"
# endif
# endif
+#if 0
# include "gcc/ethread.h"
# include "libatomic_ops/ethread.h"
+#endif
# endif
# elif defined(ETHR_HAVE_LIBATOMIC_OPS)
# include "libatomic_ops/ethread.h"
@@ -293,10 +368,9 @@ typedef ethr_uint64_t ethr_uint_t;
# endif
#endif /* !ETHR_DISABLE_NATIVE_IMPLS */
+#include "ethr_atomics.h" /* The atomics API */
+
#if defined(__GNUC__)
-# ifndef ETHR_COMPILER_BARRIER
-# define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
-# endif
# ifndef ETHR_SPIN_BODY
# if defined(__i386__) || defined(__x86_64__)
# define ETHR_SPIN_BODY __asm__ __volatile__("rep;nop" : : : "memory")
@@ -309,11 +383,6 @@ typedef ethr_uint64_t ethr_uint_t;
# endif
# endif
#elif defined(ETHR_WIN32_THREADS)
-# ifndef ETHR_COMPILER_BARRIER
-# include <intrin.h>
-# pragma intrinsic(_ReadWriteBarrier)
-# define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
-# endif
# ifndef ETHR_SPIN_BODY
# define ETHR_SPIN_BODY do {YieldProcessor();ETHR_COMPILER_BARRIER;} while(0)
# endif
@@ -321,43 +390,6 @@ typedef ethr_uint64_t ethr_uint_t;
#define ETHR_YIELD_AFTER_BUSY_LOOPS 50
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
-/*
- * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only,
- * i.e. when our lock based atomic fallback is used, a noop is sufficient.
- */
-#define ETHR_MEMORY_BARRIER do { } while (0)
-#define ETHR_WRITE_MEMORY_BARRIER do { } while (0)
-#define ETHR_READ_MEMORY_BARRIER do { } while (0)
-#define ETHR_READ_DEPEND_MEMORY_BARRIER do { } while (0)
-#endif
-
-#ifndef ETHR_WRITE_MEMORY_BARRIER
-# define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-# define ETHR_WRITE_MEMORY_BARRIER_IS_FULL
-#endif
-#ifndef ETHR_READ_MEMORY_BARRIER
-# define ETHR_READ_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-# define ETHR_READ_MEMORY_BARRIER_IS_FULL
-#endif
-#ifndef ETHR_READ_DEPEND_MEMORY_BARRIER
-# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER
-# define ETHR_READ_DEPEND_MEMORY_BARRIER_IS_COMPILER_BARRIER
-#endif
-
-#define ETHR_FATAL_ERROR__(ERR) \
- ethr_fatal_error__(__FILE__, __LINE__, __func__, (ERR))
-
-ETHR_PROTO_NORETURN__ ethr_fatal_error__(const char *file,
- int line,
- const char *func,
- int err);
-
-void ethr_compiler_barrier_fallback(void);
-#ifndef ETHR_COMPILER_BARRIER
-# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback()
-#endif
-
#ifndef ETHR_SPIN_BODY
# define ETHR_SPIN_BODY ETHR_COMPILER_BARRIER
#endif
@@ -460,8 +492,6 @@ void ethr_compiler_barrier(void);
#if defined(ETHR_HAVE_NATIVE_SPINLOCKS)
typedef ethr_native_spinlock_t ethr_spinlock_t;
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
-typedef ethr_opt_spinlock_t ethr_spinlock_t;
#elif defined(__WIN32__)
typedef CRITICAL_SECTION ethr_spinlock_t;
#else
@@ -483,8 +513,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spinlock_init)(ethr_spinlock_t *lock)
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
ethr_native_spinlock_init(lock);
return 0;
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- return ethr_opt_spinlock_init((ethr_opt_spinlock_t *) lock);
#elif defined(__WIN32__)
if (!InitializeCriticalSectionAndSpinCount((CRITICAL_SECTION *) lock, INT_MAX))
return ethr_win_get_errno__();
@@ -498,9 +526,7 @@ static ETHR_INLINE int
ETHR_INLINE_FUNC_NAME_(ethr_spinlock_destroy)(ethr_spinlock_t *lock)
{
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
- return 0;
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- return ethr_opt_spinlock_destroy((ethr_opt_spinlock_t *) lock);
+ return ethr_native_spinlock_destroy(lock);
#elif defined(__WIN32__)
DeleteCriticalSection((CRITICAL_SECTION *) lock);
return 0;
@@ -514,10 +540,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_unlock)(ethr_spinlock_t *lock)
{
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
ethr_native_spin_unlock(lock);
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- int err = ethr_opt_spin_unlock((ethr_opt_spinlock_t *) lock);
- if (err)
- ETHR_FATAL_ERROR__(err);
#elif defined(__WIN32__)
LeaveCriticalSection((CRITICAL_SECTION *) lock);
#else
@@ -532,10 +554,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_lock)(ethr_spinlock_t *lock)
{
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
ethr_native_spin_lock(lock);
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- int err = ethr_opt_spin_lock((ethr_opt_spinlock_t *) lock);
- if (err)
- ETHR_FATAL_ERROR__(err);
#elif defined(__WIN32__)
EnterCriticalSection((CRITICAL_SECTION *) lock);
#else
@@ -547,8 +565,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_lock)(ethr_spinlock_t *lock)
#endif /* ETHR_TRY_INLINE_FUNCS */
-#include "ethr_atomics.h"
-
typedef struct ethr_ts_event_ ethr_ts_event; /* Needed by ethr_mutex.h */
#if defined(ETHR_WIN32_THREADS)
diff --git a/erts/include/internal/ethread_header_config.h.in b/erts/include/internal/ethread_header_config.h.in
index f394d790d2..dd3599f86d 100644
--- a/erts/include/internal/ethread_header_config.h.in
+++ b/erts/include/internal/ethread_header_config.h.in
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2004-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2004-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -32,6 +32,9 @@
/* Define to the size of __int64 */
#undef ETHR_SIZEOF___INT64
+/* Define to the size of __int128_t */
+#undef ETHR_SIZEOF___INT128_T
+
/* Define if bigendian */
#undef ETHR_BIGENDIAN
@@ -69,8 +72,44 @@
/* Define if you have a linux futex implementation. */
#undef ETHR_HAVE_LINUX_FUTEX
-/* Define if you have gcc atomic operations */
-#undef ETHR_HAVE_GCC_ATOMIC_OPS
+/* Define if x86/x86_64 out-of-order instructions should be synchronized */
+#undef ETHR_X86_OUT_OF_ORDER
+
+/* Define if only running in Sparc TSO mode */
+#undef ETHR_SPARC_TSO
+
+/* Define if only running in Sparc PSO or TSO mode */
+#undef ETHR_SPARC_PSO
+
+/* Define if running in Sparc RMO, PSO, or TSO mode */
+#undef ETHR_SPARC_RMO
+
+/* Define if you have __sync_add_and_fetch() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_ADD_AND_FETCH32
+
+/* Define if you have __sync_add_and_fetch() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_ADD_AND_FETCH64
+
+/* Define if you have __sync_fetch_and_and() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_AND32
+
+/* Define if you have __sync_fetch_and_and() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_AND64
+
+/* Define if you have __sync_fetch_and_or() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_OR32
+
+/* Define if you have __sync_fetch_and_or() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_OR64
+
+/* Define if you have __sync_val_compare_and_swap() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32
+
+/* Define if you have __sync_val_compare_and_swap() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64
+
+/* Define if you have __sync_val_compare_and_swap() for 128-bit integers */
+#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128
/* Define if you prefer gcc native ethread implementations */
#undef ETHR_PREFER_GCC_NATIVE_IMPLS
@@ -90,8 +129,14 @@
/* Define if sched_yield() returns an int. */
#undef ETHR_SCHED_YIELD_RET_INT
-/* Define if you want compatibilty with x86 processors before pentium4. */
-#undef ETHR_PRE_PENTIUM4_COMPAT
+/* Define if you use a gcc that supports -msse2 and understands SSE2-specific asm statements */
+#undef ETHR_GCC_HAVE_SSE2_ASM_SUPPORT
+
+/* Define if you use a gcc that supports the double-word cmpxchg instruction */
+#undef ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT
+
+/* Define if you get a register shortage with cmpxchg8b and position-independent code */
+#undef ETHR_CMPXCHG8B_REGISTER_SHORTAGE
/* Define if you have the pthread_rwlockattr_setkind_np() function. */
#undef ETHR_HAVE_PTHREAD_RWLOCKATTR_SETKIND_NP
@@ -115,23 +160,74 @@
/* Define to the size of AO_t if libatomic_ops is used */
#undef ETHR_SIZEOF_AO_T
+/* Define if you have _InterlockedAnd() */
+#undef ETHR_HAVE__INTERLOCKEDAND
+
+/* Define if you have _InterlockedAnd64() */
+#undef ETHR_HAVE__INTERLOCKEDAND64
+
+/* Define if you have _InterlockedCompareExchange() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE
+
/* Define if you have _InterlockedCompareExchange64() */
#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64
+/* Define if you have _InterlockedCompareExchange64_acq() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ
+
+/* Define if you have _InterlockedCompareExchange64_rel() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL
+
+/* Define if you have _InterlockedCompareExchange_acq() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ
+
+/* Define if you have _InterlockedCompareExchange_rel() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL
+
+/* Define if you have _InterlockedDecrement() */
+#undef ETHR_HAVE__INTERLOCKEDDECREMENT
+
/* Define if you have _InterlockedDecrement64() */
#undef ETHR_HAVE__INTERLOCKEDDECREMENT64
-/* Define if you have _InterlockedIncrement64() */
-#undef ETHR_HAVE__INTERLOCKEDINCREMENT64
+/* Define if you have _InterlockedDecrement64_rel() */
+#undef ETHR_HAVE__INTERLOCKEDDECREMENT64_REL
-/* Define if you have _InterlockedExchangeAdd64() */
-#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
+/* Define if you have _InterlockedDecrement_rel() */
+#undef ETHR_HAVE__INTERLOCKEDDECREMENT_REL
+
+/* Define if you have _InterlockedExchange() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGE
/* Define if you have _InterlockedExchange64() */
#undef ETHR_HAVE__INTERLOCKEDEXCHANGE64
-/* Define if you have _InterlockedAnd64() */
-#undef ETHR_HAVE__INTERLOCKEDAND64
+/* Define if you have _InterlockedExchangeAdd() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD
+
+/* Define if you have _InterlockedExchangeAdd64() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
+
+/* Define if you have _InterlockedExchangeAdd64_acq() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ
+
+/* Define if you have _InterlockedExchangeAdd_acq() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ
+
+/* Define if you have _InterlockedIncrement() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT
+
+/* Define if you have _InterlockedIncrement64() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT64
+
+/* Define if you have _InterlockedIncrement64_acq() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ
+
+/* Define if you have _InterlockedIncrement_acq() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ
+
+/* Define if you have _InterlockedOr() */
+#undef ETHR_HAVE__INTERLOCKEDOR
/* Define if you have _InterlockedOr64() */
#undef ETHR_HAVE__INTERLOCKEDOR64
diff --git a/erts/include/internal/gcc/ethr_atomic.h b/erts/include/internal/gcc/ethr_atomic.h
index 16935084b1..f598f8537b 100644
--- a/erts/include/internal/gcc/ethr_atomic.h
+++ b/erts/include/internal/gcc/ethr_atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,11 +25,15 @@
#undef ETHR_INCLUDE_ATOMIC_IMPL__
#if !defined(ETHR_GCC_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
#define ETHR_GCC_ATOMIC32_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32)
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+#endif
#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
#elif !defined(ETHR_GCC_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
#define ETHR_GCC_ATOMIC64_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64)
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+#endif
#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
@@ -45,58 +49,38 @@
# define ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ 1
#endif
-#if defined(__x86_64__) || (defined(__i386__) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 1
-#else
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 0
-#endif
-
-/*
- * According to the documentation this is what we want:
- * #define ETHR_MEMORY_BARRIER __sync_synchronize()
- * However, __sync_synchronize() is known to erroneously be
- * a noop on at least some platforms with some gcc versions.
- * This has suposedly been fixed in some gcc version, but we
- * don't know from which version. Therefore, we only use
- * it when it has been verified to work. Otherwise
- * we use a workaround.
- */
-#if defined(__mips__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 2))
-/* __sync_synchronize() has been verified to work here */
-#define ETHR_MEMORY_BARRIER __sync_synchronize()
-#define ETHR_READ_DEPEND_MEMORY_BARRIER __sync_synchronize()
-#elif defined(__x86_64__) || (defined(__i386__) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-/* Use fence instructions directly instead of workaround */
-#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory")
-#define ETHR_WRITE_MEMORY_BARRIER __asm__ __volatile__("sfence" : : : "memory")
-#define ETHR_READ_MEMORY_BARRIER __asm__ __volatile__("lfence" : : : "memory")
-#define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("" : : : "memory")
-#else
-/* Workaround */
-#define ETHR_MEMORY_BARRIER \
-do { \
- volatile ethr_sint32_t x___ = 0; \
- (void) __sync_val_compare_and_swap(&x___, (ethr_sint32_t) 0, (ethr_sint32_t) 1); \
-} while (0)
-#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-#endif
-
-#define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
-
#endif /* ETHR_GCC_ATOMIC_COMMON__ */
#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "gcc"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic32_t
#define ETHR_AINT_T__ ethr_sint32_t
+#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH32)
+# define ETHR_HAVE___SYNC_ADD_AND_FETCH
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND32)
+# define ETHR_HAVE___SYNC_FETCH_AND_AND
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR32)
+# define ETHR_HAVE___SYNC_FETCH_AND_OR
+#endif
#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATIVE_ATOMIC64_IMPL "gcc"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic64_t
#define ETHR_AINT_T__ ethr_sint64_t
+#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH64)
+# define ETHR_HAVE___SYNC_ADD_AND_FETCH
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND64)
+# define ETHR_HAVE___SYNC_FETCH_AND_AND
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR64)
+# define ETHR_HAVE___SYNC_FETCH_AND_OR
+#endif
#else
#error "Unsupported integer size"
#endif
@@ -108,183 +92,120 @@ typedef struct {
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
-{
#if ETHR_READ_AND_SET_WITHOUT_SYNC_OP__
- var->counter = value;
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
#else
- /*
- * Unfortunately no __sync_store() or similar exist in the gcc atomic
- * op interface. We therefore have to simulate it this way...
- */
- ETHR_AINT_T__ act = 0, exp;
- do {
- exp = act;
- act = __sync_val_compare_and_swap(&var->counter, exp, value);
- } while (act != exp);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
#endif
-}
static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
{
- ETHR_NATMC_FUNC__(set)(var, value);
+ var->counter = value;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
-{
+#endif /* ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ */
+
#if ETHR_READ_AND_SET_WITHOUT_SYNC_OP__
- return var->counter;
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
#else
- /*
- * Unfortunately no __sync_fetch() or similar exist in the gcc atomic
- * op interface. We therefore have to simulate it this way...
- */
- return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 0);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
#endif
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- (void) __sync_add_and_fetch(&var->counter, incr);
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
- return __sync_add_and_fetch(&var->counter, incr);
+ return var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void) __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
-}
+#endif /* ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ */
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void) __sync_sub_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
-}
+#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- return __sync_sub_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
+ return __sync_add_and_fetch(&var->counter, incr);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- return __sync_fetch_and_and(&var->counter, mask);
-}
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- return (ETHR_AINT_T__) __sync_fetch_and_or(&var->counter, mask);
-}
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
-{
- return __sync_val_compare_and_swap(&var->counter, old, new);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
+ETHR_NATMC_FUNC__(and_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
- ETHR_AINT_T__ exp, act = 0;
- do {
- exp = act;
- act = __sync_val_compare_and_swap(&var->counter, exp, new);
- } while (act != exp);
- return act;
+ return __sync_fetch_and_and(&var->counter, mask);
}
-/*
- * Atomic ops with at least specified barriers.
- */
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_AINT_T__ val = var->counter;
- ETHR_COMPILER_BARRIER;
- return val;
-#else
- return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 0);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_COMPILER_BARRIER;
- var->counter = i;
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB 1
#else
- (void) ETHR_NATMC_FUNC__(xchg)(var, i);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(inc_return)(var);
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(or_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
- ETHR_NATMC_FUNC__(dec)(var);
+ return (ETHR_AINT_T__) __sync_fetch_and_or(&var->counter, mask);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(dec_return)(var);
-}
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
-{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ return __sync_val_compare_and_swap(&var->counter, old, new);
}
-#endif
+#endif /* ETHR_TRY_INLINE_FUNCS */
#undef ETHR_NATMC_FUNC__
#undef ETHR_ATMC_T__
#undef ETHR_AINT_T__
#undef ETHR_AINT_SUFFIX__
+#undef ETHR_HAVE___SYNC_ADD_AND_FETCH
+#undef ETHR_HAVE___SYNC_FETCH_AND_AND
+#undef ETHR_HAVE___SYNC_FETCH_AND_OR
#endif
diff --git a/erts/include/internal/gcc/ethr_dw_atomic.h b/erts/include/internal/gcc/ethr_dw_atomic.h
new file mode 100644
index 0000000000..6736f9c547
--- /dev/null
+++ b/erts/include/internal/gcc/ethr_dw_atomic.h
@@ -0,0 +1,115 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Native double word atomics using gcc's builtins
+ * Author: Rickard Green
+ */
+
+#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+#ifndef ETHR_GCC_DW_ATOMIC_H__
+# define ETHR_GCC_DW_ATOMIC_H__
+# if ((ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64)) \
+ || (ETHR_SIZEOF_PTR == 8 \
+ && defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128) \
+ && defined(ETHR_HAVE_INT128_T)))
+# define ETHR_INCLUDE_DW_ATOMIC_IMPL__
+# endif
+#endif
+
+#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
+# define ETHR_NATIVE_DW_ATOMIC_IMPL "gcc"
+
+# if defined(__i386__) || defined(__x86_64__)
+/*
+ * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it will be used
+ * at runtime in order to determine whether the native or the fallback
+ * implementation should be used.
+ */
+# define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \
+ ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__
+# endif
+
+# if ETHR_SIZEOF_PTR == 4
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
+# elif ETHR_SIZEOF_PTR == 8
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint128_t
+# endif
+
+typedef volatile ETHR_NATIVE_SU_DW_SINT_T * ethr_native_dw_ptr_t;
+
+/*
+ * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
+ * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
+ * not common, and at least some glibc malloc implementations
+ * only guarantee 4 byte alignment in 32-bit mode.
+ *
+ * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
+ * aligned memory in 32-bit mode. A malloc implementation that does
+ * not adhere to these alignment requirements is seriously broken,
+ * and we won't bother trying to work around it.
+ *
+ * Since memory alignment may be off by one word we need to align at
+ * runtime. We, therefore, need an extra word allocated.
+ */
+#define ETHR_DW_NATMC_MEM__(VAR) \
+ (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
+typedef union {
+ volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
+ volatile ethr_sint_t sint[3];
+ volatile char c[ETHR_SIZEOF_PTR*3];
+} ethr_native_dw_atomic_t;
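The offset arithmetic in ETHR_DW_NATMC_MEM__ deserves a closer look: for a word-aligned base address a, a + (a & MASK) is double-word aligned, and the extra word in the union above guarantees the result still points at a full double word inside the buffer. A minimal standalone sketch of the 32-bit case (illustrative only, not part of the patch):

#include <stdint.h>
#include <stdio.h>

#define MASK 0x7 /* 32-bit mode: a double word is 8 bytes */

int main(void)
{
    /* a word-aligned address is 0 or 4 modulo 8; both cases below */
    for (uintptr_t a = 0x1000; a <= 0x1004; a += 4) {
        uintptr_t aligned = a + (a & MASK); /* the header's trick */
        printf("base 0x%lx -> 0x%lx (mod 8 = %lu)\n",
               (unsigned long) a, (unsigned long) aligned,
               (unsigned long) (aligned & MASK));
    }
    return 0;
}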
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+# ifdef ETHR_DEBUG
+# define ETHR_DW_DBG_ALIGNED__(PTR) \
+ ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
+# else
+# define ETHR_DW_DBG_ALIGNED__(PTR)
+# endif
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR
+static ETHR_INLINE ethr_sint_t *
+ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
+{
+ return (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
+}
+
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
+
+static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T
+ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ETHR_NATIVE_SU_DW_SINT_T new,
+ ETHR_NATIVE_SU_DW_SINT_T old)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return __sync_val_compare_and_swap(p, old, new);
+}
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#endif /* ETHR_GCC_DW_ATOMIC_H__ */
+
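Everything this header provides rests on a double-word-wide __sync_val_compare_and_swap; the configure-detected macros in the guard above only test whether gcc offers the builtin at that width. A hedged sketch of what a successful probe amounts to (the 128-bit variant typically needs -mcx16 on x86_64):

#include <stdint.h>

/* 32-bit mode: CAS on a 64-bit integer */
int64_t cas64(volatile int64_t *p, int64_t old, int64_t neu)
{
    return __sync_val_compare_and_swap(p, old, neu);
}

#ifdef __SIZEOF_INT128__
/* 64-bit mode: CAS on a 128-bit integer */
__int128 cas128(volatile __int128 *p, __int128 old, __int128 neu)
{
    return __sync_val_compare_and_swap(p, old, neu);
}
#endif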
diff --git a/erts/include/internal/gcc/ethr_membar.h b/erts/include/internal/gcc/ethr_membar.h
new file mode 100644
index 0000000000..7d428fc68e
--- /dev/null
+++ b/erts/include/internal/gcc/ethr_membar.h
@@ -0,0 +1,73 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers when using gcc's builtins
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_GCC_MEMBAR_H__
+#define ETHR_GCC_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+/*
+ * According to the documentation __sync_synchronize() will
+ * issue a full memory barrier. However, __sync_synchronize()
+ * is known to erroneously be a noop on at least some
+ * platforms with some gcc versions. This has supposedly been
+ * fixed in some gcc version, but we don't know from which
+ * version. Therefore, we only use it when it has been
+ * verified to work. Otherwise we use the workaround
+ * below.
+ */
+
+#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32)
+# define ETHR_MB_T__ ethr_sint32_t
+#elif defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64)
+# define ETHR_MB_T__ ethr_sint64_t
+#else
+# error "No __sync_val_compare_and_swap"
+#endif
+#define ETHR_SYNC_SYNCHRONIZE_WORKAROUND__ \
+do { \
+ volatile ETHR_MB_T__ x___ = 0; \
+ (void) __sync_val_compare_and_swap(&x___, (ETHR_MB_T__) 0, (ETHR_MB_T__) 1); \
+} while (0)
+
+#define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
+
+#if defined(__mips__) && ETHR_AT_LEAST_GCC_VSN__(4, 2, 0)
+# define ETHR_MEMBAR(B) __sync_synchronize()
+# define ETHR_READ_DEPEND_MEMORY_BARRIER __sync_synchronize()
+#elif ((defined(__powerpc__) || defined(__ppc__)) \
+ && ETHR_AT_LEAST_GCC_VSN__(4, 1, 2))
+# define ETHR_MEMBAR(B) __sync_synchronize()
+#else /* Use workaround */
+# define ETHR_MEMBAR(B) \
+ ETHR_SYNC_SYNCHRONIZE_WORKAROUND__
+# define ETHR_READ_DEPEND_MEMORY_BARRIER \
+ ETHR_SYNC_SYNCHRONIZE_WORKAROUND__
+#endif
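What the workaround buys callers: the locked compare-and-swap on a dummy variable orders all four load/store combinations, including StoreLoad, which is the one a plain store/load pair does not give you. A self-contained sketch of a caller that depends on exactly that ordering (Dekker-style handshake; the macro restatement is illustrative, not the patch's literal expansion):

#include <stdint.h>

#define FULL_BARRIER()                                                    \
do {                                                                      \
    volatile int32_t x___ = 0;                                            \
    (void) __sync_val_compare_and_swap(&x___, (int32_t) 0, (int32_t) 1);  \
} while (0)

static volatile int32_t me_in, peer_in;

/* Without a StoreLoad barrier between them, the store to me_in may be
 * reordered after the load of peer_in, and both sides can "enter". */
int try_enter(void)
{
    me_in = 1;
    FULL_BARRIER();
    return !peer_in;
}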
+
+
+#endif /* ETHR_GCC_MEMBAR_H__ */
diff --git a/erts/include/internal/gcc/ethread.h b/erts/include/internal/gcc/ethread.h
index 392a1aa2b2..fcfdc39441 100644
--- a/erts/include/internal/gcc/ethread.h
+++ b/erts/include/internal/gcc/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,16 +25,24 @@
#ifndef ETHREAD_GCC_H__
#define ETHREAD_GCC_H__
-#if !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_GCC_ATOMIC_OPS)
-#define ETHR_HAVE_NATIVE_ATOMICS 1
+#ifndef ETHR_MEMBAR
+# include "ethr_membar.h"
+#endif
+
+#if !defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# define ETHR_ATOMIC_WANT_32BIT_IMPL__
+# include "ethr_atomic.h"
+#endif
-#define ETHR_ATOMIC_WANT_32BIT_IMPL__
-#include "ethr_atomic.h"
-#if ETHR_SIZEOF_PTR == 8
+#if ETHR_SIZEOF_PTR == 8 && !defined(ETHR_HAVE_NATIVE_ATOMIC64)
# define ETHR_ATOMIC_WANT_64BIT_IMPL__
# include "ethr_atomic.h"
#endif
+#if (!defined(ETHR_HAVE_NATIVE_DW_ATOMIC) \
+ && !(ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)) \
+ && !(ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC128)))
+# include "ethr_dw_atomic.h"
#endif
#endif
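The WANT flags above steer a multiple-inclusion idiom: ethr_atomic.h is pulled in once per requested width, defines that width's implementation, and clears the flag on its way out. A minimal hedged sketch of the pattern (names invented for illustration):

/* impl.h -- include once per requested width */
#if defined(WANT_32BIT)
#  undef WANT_32BIT
#  define WIDTH 32
#elif defined(WANT_64BIT)
#  undef WANT_64BIT
#  define WIDTH 64
#endif
/* ...emit the WIDTH-specific functions here... */
#undef WIDTH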
diff --git a/erts/include/internal/i386/atomic.h b/erts/include/internal/i386/atomic.h
index 4e402f261a..fc1b619935 100644
--- a/erts/include/internal/i386/atomic.h
+++ b/erts/include/internal/i386/atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,53 +25,42 @@
*/
#undef ETHR_INCLUDE_ATOMIC_IMPL__
-#if !defined(ETHR_X86_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
-#define ETHR_X86_ATOMIC32_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
-#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
-#elif !defined(ETHR_X86_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
-#define ETHR_X86_ATOMIC64_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
-#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
+#if !defined(ETHR_X86_ATOMIC32_H__) \
+ && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
+# define ETHR_X86_ATOMIC32_H__
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+# undef ETHR_ATOMIC_WANT_32BIT_IMPL__
+#elif !defined(ETHR_X86_ATOMIC64_H__) \
+ && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
+# define ETHR_X86_ATOMIC64_H__
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+# undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
#ifdef ETHR_INCLUDE_ATOMIC_IMPL__
-#ifndef ETHR_X86_ATOMIC_COMMON__
-#define ETHR_X86_ATOMIC_COMMON__
-
-#define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1
-
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
-#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory")
-#define ETHR_WRITE_MEMORY_BARRIER __asm__ __volatile__("sfence" : : : "memory")
-#define ETHR_READ_MEMORY_BARRIER __asm__ __volatile__("lfence" : : : "memory")
-#define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("" : : : "memory")
-#else
-#define ETHR_MEMORY_BARRIER \
-do { \
- volatile ethr_sint32_t x___ = 0; \
- __asm__ __volatile__("lock; incl %0" : "=m"(x___) : "m"(x___) : "memory"); \
-} while (0)
-#endif
-
-#endif /* ETHR_X86_ATOMIC_COMMON__ */
-
-#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
-#define ETHR_HAVE_NATIVE_ATOMIC32 1
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic32_t
-#define ETHR_AINT_T__ ethr_sint32_t
-#define ETHR_AINT_SUFFIX__ "l"
-#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
-#define ETHR_HAVE_NATIVE_ATOMIC64 1
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic64_t
-#define ETHR_AINT_T__ ethr_sint64_t
-#define ETHR_AINT_SUFFIX__ "q"
-#else
-#error "Unsupported integer size"
-#endif
+# ifndef ETHR_X86_ATOMIC_COMMON__
+# define ETHR_X86_ATOMIC_COMMON__
+# define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1
+# endif /* ETHR_X86_ATOMIC_COMMON__ */
+
+# if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_NATIVE_ATOMIC32 1
+# define ETHR_NATIVE_ATOMIC32_IMPL "ethread"
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic32_t
+# define ETHR_AINT_T__ ethr_sint32_t
+# define ETHR_AINT_SUFFIX__ "l"
+# elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
+# define ETHR_HAVE_NATIVE_ATOMIC64 1
+# define ETHR_NATIVE_ATOMIC64_IMPL "ethread"
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic64_t
+# define ETHR_AINT_T__ ethr_sint64_t
+# define ETHR_AINT_SUFFIX__ "q"
+# else
+# error "Unsupported integer size"
+# endif
/* An atomic is an aligned ETHR_AINT_T__ accessed via locked operations.
*/
@@ -81,87 +70,28 @@ typedef struct {
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- var->counter = i;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- var->counter = i;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
-{
- return var->counter;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- __asm__ __volatile__(
- "lock; add" ETHR_AINT_SUFFIX__ " %1, %0"
- : "=m"(var->counter)
- : "ir"(incr), "m"(var->counter));
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- __asm__ __volatile__(
- "lock; inc" ETHR_AINT_SUFFIX__ " %0"
- : "=m"(var->counter)
- : "m"(var->counter));
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- __asm__ __volatile__(
- "lock; dec" ETHR_AINT_SUFFIX__ " %0"
- : "=m"(var->counter)
- : "m"(var->counter));
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- ETHR_AINT_T__ tmp;
-
- tmp = incr;
- __asm__ __volatile__(
- "lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */
- : "=r"(tmp)
- : "m"(var->counter), "0"(tmp));
- /* now tmp is the atomic's previous value */
- return tmp + incr;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) 1);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) -1);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
{
__asm__ __volatile__(
"lock; cmpxchg" ETHR_AINT_SUFFIX__ " %2, %3"
@@ -171,110 +101,148 @@ ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
return old;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ tmp, old;
-
- tmp = var->counter;
- do {
- old = tmp;
- tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp & mask, tmp);
- } while (__builtin_expect(tmp != old, 0));
- /* now tmp is the atomic's previous value */
- return tmp;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ tmp, old;
-
- tmp = var->counter;
- do {
- old = tmp;
- tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp | mask, tmp);
- } while (__builtin_expect(tmp != old, 0));
- /* now tmp is the atomic's previous value */
- return tmp;
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
+ETHR_NATMC_FUNC__(xchg_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
{
ETHR_AINT_T__ tmp = val;
__asm__ __volatile__(
"xchg" ETHR_AINT_SUFFIX__ " %0, %1"
: "=r"(tmp)
- : "m"(var->counter), "0"(tmp));
+ : "m"(var->counter), "0"(tmp)
+ : "memory");
/* now tmp is the atomic's previous value */
return tmp;
}
-/*
- * Atomic ops with at least specified barriers.
- */
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
- ETHR_AINT_T__ val;
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
- val = var->counter;
+ var->counter = i;
+}
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1
#else
- val = ETHR_NATMC_FUNC__(add_return)(var, 0);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1
#endif
- __asm__ __volatile__("" : : : "memory");
- return val;
-}
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
- __asm__ __volatile__("" : : : "memory");
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
- var->counter = i;
+#if !defined(__x86_64__)
+    if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+	(void) ETHR_NATMC_FUNC__(xchg_mb)(var, i);
+    else
+#endif /* !__x86_64__ */
+ {
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ var->counter = i;
+ }
+}
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB 1
#else
- (void) ETHR_NATMC_FUNC__(xchg)(var, i);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB 1
#endif
+
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ (void) ETHR_NATMC_FUNC__(xchg_mb)(var, i);
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
- __asm__ __volatile__("" : : : "memory");
- return res;
+ return var->counter;
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB 1
+#endif
+
static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(add_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- __asm__ __volatile__("" : : : "memory");
- ETHR_NATMC_FUNC__(dec)(var);
-}
+ __asm__ __volatile__(
+ "lock; add" ETHR_AINT_SUFFIX__ " %1, %0"
+ : "=m"(var->counter)
+ : "ir"(incr), "m"(var->counter)
+ : "memory");
+}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB 1
+#endif
+
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(inc_mb)(ETHR_ATMC_T__ *var)
{
- __asm__ __volatile__("" : : : "memory");
- return ETHR_NATMC_FUNC__(dec_return)(var);
+ __asm__ __volatile__(
+ "lock; inc" ETHR_AINT_SUFFIX__ " %0"
+ : "=m"(var->counter)
+ : "m"(var->counter)
+ : "memory");
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB 1
+#endif
+
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(dec_mb)(ETHR_ATMC_T__ *var)
{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ __asm__ __volatile__(
+ "lock; dec" ETHR_AINT_SUFFIX__ " %0"
+ : "=m"(var->counter)
+ : "m"(var->counter)
+ : "memory");
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ ETHR_AINT_T__ tmp;
+
+ tmp = incr;
+ __asm__ __volatile__(
+ "lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */
+ : "=r"(tmp)
+ : "m"(var->counter), "0"(tmp)
+ : "memory");
+ /* now tmp is the atomic's previous value */
+ return tmp + incr;
}
#endif /* ETHR_TRY_INLINE_FUNCS */
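The lock; xadd idiom used by add_return_mb above leaves the atomic's previous value in the source register, so the new value is simply that plus the increment. A self-contained 32-bit restatement of the idiom (illustrative, gcc inline asm, x86 only):

#include <stdint.h>

static inline int32_t xadd32(volatile int32_t *p, int32_t incr)
{
    int32_t prev = incr;
    __asm__ __volatile__("lock; xaddl %0, %1"
                         : "=r"(prev), "+m"(*p)
                         : "0"(prev)
                         : "memory");
    return prev + incr; /* xadd left the old value in the register */
}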
diff --git a/erts/include/internal/i386/ethr_dw_atomic.h b/erts/include/internal/i386/ethr_dw_atomic.h
new file mode 100644
index 0000000000..9fb89bbe43
--- /dev/null
+++ b/erts/include/internal/i386/ethr_dw_atomic.h
@@ -0,0 +1,278 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Native double word atomics for x86/x86_64
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_X86_DW_ATOMIC_H__
+#define ETHR_X86_DW_ATOMIC_H__
+
+#ifdef ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT
+
+#define ETHR_HAVE_NATIVE_DW_ATOMIC
+#define ETHR_NATIVE_DW_ATOMIC_IMPL "ethread"
+
+/*
+ * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it will be used
+ * at runtime in order to determine whether the native or the fallback
+ * implementation should be used.
+ */
+#define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \
+ ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__
+
+#if ETHR_SIZEOF_PTR == 4
+typedef volatile ethr_sint64_t * ethr_native_dw_ptr_t;
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
+# define ETHR_DW_CMPXCHG_SFX__ "8b"
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
+#else
+#ifdef ETHR_HAVE_INT128_T
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint128_t
+typedef volatile ethr_sint128_t * ethr_native_dw_ptr_t;
+#else
+typedef struct {
+ ethr_sint64_t sint64[2];
+} ethr_native_sint128_t__;
+typedef volatile ethr_native_sint128_t__ * ethr_native_dw_ptr_t;
+#endif
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
+# define ETHR_DW_CMPXCHG_SFX__ "16b"
+#endif
+
+/*
+ * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
+ * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
+ * not common, and at least some glibc malloc implementations
+ * only guarantee 4 byte alignment in 32-bit mode.
+ *
+ * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
+ * aligned memory in 32-bit mode. A malloc implementation that does
+ * not adhere to these alignment requirements is seriously broken,
+ * and we won't bother trying to work around it.
+ *
+ * Since memory alignment may be off by one word we need to align at
+ * runtime. We, therefore, need an extra word allocated.
+ */
+#define ETHR_DW_NATMC_MEM__(VAR) \
+ (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
+typedef union {
+#ifdef ETHR_NATIVE_SU_DW_SINT_T
+ volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
+#endif
+ volatile ethr_sint_t sint[3];
+ volatile char c[ETHR_SIZEOF_PTR*3];
+} ethr_native_dw_atomic_t;
+
+
+#if (defined(ETHR_TRY_INLINE_FUNCS) \
+ || defined(ETHR_ATOMIC_IMPL__) \
+ || defined(ETHR_X86_SSE2_ASM_C__)) \
+ && ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT)
+ethr_sint64_t
+ethr_sse2_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var);
+void
+ethr_sse2_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t val);
+#endif
+
+#if (defined(ETHR_TRY_INLINE_FUNCS) \
+ || defined(ETHR_ATOMIC_IMPL__) \
+ || defined(ETHR_X86_SSE2_ASM_C__))
+# ifdef ETHR_DEBUG
+# define ETHR_DW_DBG_ALIGNED__(PTR) \
+ ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
+# else
+# define ETHR_DW_DBG_ALIGNED__(PTR)
+# endif
+#endif
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR
+static ETHR_INLINE ethr_sint_t *
+ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
+{
+ return (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
+}
+
+#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__
+/*
+ * When position independent code is used in 32-bit mode, the EBX register
+ * is used to store the global offset table address, and we may not
+ * use it as input or output in an asm. We need to save and restore the
+ * EBX register explicitly (for some reason gcc doesn't provide this
+ * service to us).
+ */
+# define ETHR_NO_CLOBBER_EBX__ 1
+#else
+# define ETHR_NO_CLOBBER_EBX__ 0
+#endif
+
+#if ETHR_NO_CLOBBER_EBX__ && !defined(ETHR_CMPXCHG8B_REGISTER_SHORTAGE)
+/* When no optimization is on, we'll run into a register shortage */
+# if defined(ETHR_DEBUG) || defined(DEBUG) || defined(VALGRIND) \
+ || defined(GCOV) || defined(PURIFY) || defined(PURECOV)
+# define ETHR_CMPXCHG8B_REGISTER_SHORTAGE 1
+# else
+# define ETHR_CMPXCHG8B_REGISTER_SHORTAGE 0
+# endif
+#endif
+
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
+
+static ETHR_INLINE int
+ethr_native_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ethr_sint_t *new,
+ ethr_sint_t *xchg)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ char xchgd;
+
+ ETHR_DW_DBG_ALIGNED__(p);
+
+ __asm__ __volatile__(
+#if ETHR_NO_CLOBBER_EBX__
+ "pushl %%ebx\n\t"
+# if ETHR_CMPXCHG8B_REGISTER_SHORTAGE
+ "movl (%7), %%ebx\n\t"
+ "movl 4(%7), %%ecx\n\t"
+# else
+ "movl %8, %%ebx\n\t"
+# endif
+#endif
+ "lock; cmpxchg" ETHR_DW_CMPXCHG_SFX__ " %0\n\t"
+ "setz %3\n\t"
+#if ETHR_NO_CLOBBER_EBX__
+ "popl %%ebx\n\t"
+#endif
+ : "=m"(*p), "=d"(xchg[1]), "=a"(xchg[0]), "=c"(xchgd)
+ : "m"(*p), "1"(xchg[1]), "2"(xchg[0]),
+#if ETHR_NO_CLOBBER_EBX__
+# if ETHR_CMPXCHG8B_REGISTER_SHORTAGE
+ "3"(new)
+# else
+ "3"(new[1]),
+ "r"(new[0])
+# endif
+#else
+ "3"(new[1]),
+ "b"(new[0])
+#endif
+ : "cc", "memory");
+
+ return (int) xchgd;
+}
+
+#undef ETHR_NO_CLOBBER_EBX__
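Note the calling convention: ethr_native_dw_atomic_cmpxchg_mb() returns a success flag and, on failure, leaves the observed value in xchg, which is what the no-sse2 read/set fallbacks below rely on. A portable sketch of the same contract built on gcc's 64-bit builtin (32-bit mode; names are illustrative):

#include <stdint.h>

static inline int
dw_cmpxchg(volatile int64_t *p, int64_t *xchg, int64_t neu)
{
    int64_t old = __sync_val_compare_and_swap(p, *xchg, neu);
    if (old == *xchg)
        return 1;   /* swapped */
    *xchg = old;    /* failed: report what was observed */
    return 0;
}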
+
+#if ETHR_SIZEOF_PTR == 4 && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT)
+
+typedef union {
+ ethr_sint64_t sint64;
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_no_sse2_convert_t;
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ
+
+static ETHR_INLINE ethr_sint64_t
+ethr_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var)
+{
+ if (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__)
+ return ethr_sse2_native_su_dw_atomic_read(var);
+ else {
+ ethr_sint_t new[2];
+ ethr_dw_atomic_no_sse2_convert_t xchg;
+ new[0] = new[1] = xchg.sint[0] = xchg.sint[1] = 0x83838383;
+ (void) ethr_native_dw_atomic_cmpxchg_mb(var, new, xchg.sint);
+ return xchg.sint64;
+ }
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET
+
+static ETHR_INLINE void
+ethr_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t val)
+{
+ if (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__)
+ ethr_sse2_native_su_dw_atomic_set(var, val);
+ else {
+ ethr_sint_t xchg[2] = {0, 0};
+ ethr_dw_atomic_no_sse2_convert_t new;
+ new.sint64 = val;
+ while (!ethr_native_dw_atomic_cmpxchg_mb(var, new.sint, xchg));
+ }
+}
+
+#endif /* ETHR_SIZEOF_PTR == 4 */
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#if defined(ETHR_X86_SSE2_ASM_C__) \
+ && ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT)
+
+/*
+ * 8-byte aligned loads and stores of 64-bit values are atomic from
+ * pentium and forward. An ordinary volatile load or store in 32-bit
+ * mode generates two 32-bit operations (at least with gcc-4.1.2 using
+ * -msse2). In order to guarantee one 64-bit load/store operation
+ * from/to memory we load/store via an xmm register using movq.
+ *
+ * Load/store can be achieved using cmpxchg8b; however, using movq is
+ * much faster. Unfortunately we cannot do the same thing in 64-bit
+ * mode; instead, we have to do loads and stores via cmpxchg16b.
+ *
+ * We do not inline these; instead, we compile them into a separate
+ * object file using -msse2, since we don't want to use -msse2 for
+ * the whole system. If we detect sse2 support (pentium4 and forward)
+ * at runtime, we use them; otherwise, we fall back to using cmpxchg8b
+ * for loads and stores. This way the binary can be moved between
+ * processors with and without sse2 support.
+ */
+
+ethr_sint64_t
+ethr_sse2_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ethr_sint64_t val;
+ ETHR_DW_DBG_ALIGNED__(p);
+ __asm__ __volatile__("movq %1, %0\n\t" : "=x"(val) : "m"(*p) : "memory");
+ return val;
+}
+
+void
+ethr_sse2_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t val)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ __asm__ __volatile__("movq %1, %0\n\t" : "=m"(*p) : "x"(val) : "memory");
+}
+
+#endif /* ETHR_X86_SSE2_ASM_C__ */
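The runtime sse2 test referred to above (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__) ultimately comes down to CPUID leaf 1, EDX bit 26. A hedged sketch using gcc's cpuid.h helper rather than ethread's own probe (the helper postdates some compilers this code targets):

#include <cpuid.h>

static int probe_sse2(void)
{
    unsigned eax, ebx, ecx, edx;
    if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
        return 0;
    return (edx >> 26) & 1; /* CPUID.1:EDX[26] = SSE2 */
}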
+
+#endif /* ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT */
+
+#endif /* ETHR_X86_DW_ATOMIC_H__ */
+
diff --git a/erts/include/internal/i386/ethr_membar.h b/erts/include/internal/i386/ethr_membar.h
new file mode 100644
index 0000000000..92d9de7f3f
--- /dev/null
+++ b/erts/include/internal/i386/ethr_membar.h
@@ -0,0 +1,114 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for x86/x86-64
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_X86_MEMBAR_H__
+#define ETHR_X86_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#define ETHR_NO_SSE2_MEMORY_BARRIER__ \
+do { \
+ volatile ethr_sint32_t x__ = 0; \
+ __asm__ __volatile__ ("lock; orl $0x0, %0\n\t" \
+ : "=m"(x__) \
+ : "m"(x__) \
+ : "memory"); \
+} while (0)
+
+static __inline__ void
+ethr_cfence__(void)
+{
+ __asm__ __volatile__ ("" : : : "memory");
+}
+
+static __inline__ void
+ethr_mfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MEMORY_BARRIER__;
+ else
+#endif
+ __asm__ __volatile__ ("mfence\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_sfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MEMORY_BARRIER__;
+ else
+#endif
+ __asm__ __volatile__ ("sfence\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_lfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MEMORY_BARRIER__;
+ else
+#endif
+ __asm__ __volatile__ ("lfence\n\t" : : : "memory");
+}
+
+#define ETHR_X86_OUT_OF_ORDER_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \
+ ethr_sfence__(), \
+ ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \
+ ethr_lfence__(), \
+ ethr_mfence__()))
+
+#ifdef ETHR_X86_OUT_OF_ORDER
+
+#define ETHR_MEMBAR(B) \
+ ETHR_X86_OUT_OF_ORDER_MEMBAR((B))
+
+#else /* !ETHR_X86_OUT_OF_ORDER (the default) */
+
+/*
+ * We assume that only stores before loads may be reordered. That is,
+ * we assume that *no* instructions like these are used:
+ * - CLFLUSH,
+ * - streaming stores executed with non-temporal move,
+ * - string operations, or
+ * - other instructions which aren't LoadLoad, LoadStore, and StoreStore
+ * ordered by themselves
+ * If such instructions are used, either insert memory barriers
+ * using ETHR_X86_OUT_OF_ORDER_MEMBAR() at appropriate places, or
+ * define ETHR_X86_OUT_OF_ORDER. For more info see Intel 64 and IA-32
+ * Architectures Software Developer's Manual; Vol 3A; Chapter 8.2.2.
+ */
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_mfence__(), ethr_cfence__())
+
+#endif /* !ETHR_X86_OUT_OF_ORDER */
+
+#endif /* ETHR_X86_MEMBAR_H__ */
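The reason ETHR_MEMBAR(B) must emit mfence whenever B contains ETHR_StoreLoad, while every other combination degrades to a compiler barrier, is the one reordering x86 does perform: a store may sit in the store buffer past a later load. The classic store-buffer litmus sketch (run one function per thread with x and y initially zero; without the fences both returns can be 0):

static volatile int x, y;

int thread0(void) { x = 1; __asm__ __volatile__("mfence" ::: "memory"); return y; }
int thread1(void) { y = 1; __asm__ __volatile__("mfence" ::: "memory"); return x; }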
diff --git a/erts/include/internal/i386/ethread.h b/erts/include/internal/i386/ethread.h
index b5a17caefb..80e4dc7b99 100644
--- a/erts/include/internal/i386/ethread.h
+++ b/erts/include/internal/i386/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,17 +24,15 @@
#ifndef ETHREAD_I386_ETHREAD_H
#define ETHREAD_I386_ETHREAD_H
+#include "ethr_membar.h"
#define ETHR_ATOMIC_WANT_32BIT_IMPL__
#include "atomic.h"
#if ETHR_SIZEOF_PTR == 8
# define ETHR_ATOMIC_WANT_64BIT_IMPL__
# include "atomic.h"
#endif
+#include "ethr_dw_atomic.h"
#include "spinlock.h"
#include "rwlock.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-
#endif /* ETHREAD_I386_ETHREAD_H */
diff --git a/erts/include/internal/i386/rwlock.h b/erts/include/internal/i386/rwlock.h
index be47f459ce..1a8cd7da0c 100644
--- a/erts/include/internal/i386/rwlock.h
+++ b/erts/include/internal/i386/rwlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,9 @@
#ifndef ETHREAD_I386_RWLOCK_H
#define ETHREAD_I386_RWLOCK_H
+#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread"
+
/* XXX: describe the algorithm */
typedef struct {
volatile int lock;
diff --git a/erts/include/internal/i386/spinlock.h b/erts/include/internal/i386/spinlock.h
index 0325324895..a84fba91b1 100644
--- a/erts/include/internal/i386/spinlock.h
+++ b/erts/include/internal/i386/spinlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
#ifndef ETHREAD_I386_SPINLOCK_H
#define ETHREAD_I386_SPINLOCK_H
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+#define ETHR_NATIVE_SPINLOCK_IMPL "ethread"
+
/* A spinlock is the low byte of an aligned 32-bit integer.
* A non-zero value means that the lock is locked.
*/
@@ -46,16 +49,20 @@ ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
* On i386 this needs to be a locked operation
* to avoid Pentium Pro errata 66 and 92.
*/
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
- __asm__ __volatile__("" : : : "memory");
- *(unsigned char*)&lock->lock = 0;
-#else
- char tmp = 0;
- __asm__ __volatile__(
- "xchgb %b0, %1"
- : "=q"(tmp), "=m"(lock->lock)
- : "0"(tmp) : "memory");
+#if !defined(__x86_64__)
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) {
+ char tmp = 0;
+ __asm__ __volatile__(
+ "xchgb %b0, %1"
+ : "=q"(tmp), "=m"(lock->lock)
+ : "0"(tmp) : "memory");
+ }
+ else
#endif
+ {
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ *(unsigned char*)&lock->lock = 0;
+ }
}
static ETHR_INLINE int
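The unlock path above needs the locked xchgb only on pre-sse2 processors (the errata workaround); a matching spin-acquire for such a byte lock can be built on the same implicitly locked xchgb. A hedged standalone sketch, not the file's actual lock routine:

static inline void spin_lock_byte(volatile unsigned char *lock)
{
    unsigned char prev;
    do {
        prev = 1;
        __asm__ __volatile__("xchgb %b0, %1"
                             : "=q"(prev), "+m"(*lock)
                             : "0"(prev)
                             : "memory");
    } while (prev != 0);
}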
diff --git a/erts/include/internal/libatomic_ops/ethr_atomic.h b/erts/include/internal/libatomic_ops/ethr_atomic.h
index 93bc06036f..fb1288c330 100644
--- a/erts/include/internal/libatomic_ops/ethr_atomic.h
+++ b/erts/include/internal/libatomic_ops/ethr_atomic.h
@@ -25,16 +25,6 @@
#ifndef ETHR_LIBATOMIC_OPS_ATOMIC_H__
#define ETHR_LIBATOMIC_OPS_ATOMIC_H__
-#if !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_LIBATOMIC_OPS)
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-
-#if (defined(__i386__) && !defined(ETHR_PRE_PENTIUM4_COMPAT)) \
- || defined(__x86_64__)
-#define AO_USE_PENTIUM4_INSTRS
-#endif
-
-#include "atomic_ops.h"
-
/*
* libatomic_ops can be downloaded from:
* http://www.hpl.hp.com/research/linux/atomic_ops/
@@ -46,21 +36,18 @@
* - AO_store()
* - AO_compare_and_swap()
*
- * The `AO_t' type also have to be at least as large as the `void *' type.
*/
-#if ETHR_SIZEOF_AO_T < ETHR_SIZEOF_PTR
-#error The AO_t type is too small
-#endif
-
#if ETHR_SIZEOF_AO_T == 4
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "libatomic_ops"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic32_t
#define ETHR_AINT_T__ ethr_sint32_t
#define ETHR_AINT_SUFFIX__ "l"
#elif ETHR_SIZEOF_AO_T == 8
#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATIVE_ATOMIC64_IMPL "libatomic_ops"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic64_t
#define ETHR_AINT_T__ ethr_sint64_t
@@ -69,61 +56,29 @@
#error "Unsupported integer size"
#endif
-#if ETHR_SIZEOF_AO_T == 8
-typedef union {
- volatile AO_t counter;
- ethr_sint32_t sint32[2];
-} ETHR_ATMC_T__;
-#else
typedef struct {
volatile AO_t counter;
} ETHR_ATMC_T__;
-#endif
-#define ETHR_MEMORY_BARRIER AO_nop_full()
-#ifdef AO_HAVE_nop_write
-# define ETHR_WRITE_MEMORY_BARRIER AO_nop_write()
-#else
-# define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-#endif
-#ifdef AO_HAVE_nop_read
-# define ETHR_READ_MEMORY_BARRIER AO_nop_read()
-#else
-# define ETHR_READ_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-#endif
-#ifdef AO_NO_DD_ORDERING
-# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_READ_MEMORY_BARRIER
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
#else
-# define ETHR_READ_DEPEND_MEMORY_BARRIER AO_compiler_barrier()
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
#endif
-#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-#if ETHR_SIZEOF_AO_T == 8
-/*
- * We also need to provide an ethr_native_atomic32_addr(), since
- * this 64-bit implementation will be used implementing 32-bit
- * native atomics.
- */
-
-static ETHR_INLINE ethr_sint32_t *
-ethr_native_atomic32_addr(ETHR_ATMC_T__ *var)
-{
- ETHR_ASSERT(((void *) &var->sint32[0]) == ((void *) &var->counter));
-#ifdef ETHR_BIGENDIAN
- return &var->sint32[1];
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
#else
- return &var->sint32[0];
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
#endif
-}
-
-#endif /* ETHR_SIZEOF_AO_T == 8 */
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
@@ -131,197 +86,198 @@ ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
AO_store(&var->counter, (AO_t) value);
}
+#ifdef AO_HAVE_store_release
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1
+#endif
+
static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
+ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
{
- ETHR_NATMC_FUNC__(set)(var, value);
+ AO_store_release(&var->counter, (AO_t) value);
}
+#endif
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__) AO_load(&var->counter);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
-#ifdef AO_HAVE_fetch_and_add_full
- return ((ETHR_AINT_T__) AO_fetch_and_add_full(&var->counter, (AO_t) incr)) + incr;
+#ifdef AO_HAVE_load_acquire
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB 1
#else
- while (1) {
- AO_t exp = AO_load(&var->counter);
- AO_t new = exp + (AO_t) incr;
- if (AO_compare_and_swap_full(&var->counter, exp, new))
- return (ETHR_AINT_T__) new;
- }
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB 1
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
{
- (void) ETHR_NATMC_FUNC__(add_return)(var, incr);
+ return (ETHR_AINT_T__) AO_load_acquire(&var->counter);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
-#ifdef AO_HAVE_fetch_and_add1_full
- return ((ETHR_AINT_T__) AO_fetch_and_add1_full(&var->counter)) + 1;
-#else
- return ETHR_NATMC_FUNC__(add_return)(var, 1);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_NATMC_FUNC__(inc_return)(var);
-}
+#ifdef AO_HAVE_fetch_and_add
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
-{
-#ifdef AO_HAVE_fetch_and_sub1_full
- return ((ETHR_AINT_T__) AO_fetch_and_sub1_full(&var->counter)) - 1;
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1
#else
- return ETHR_NATMC_FUNC__(add_return)(var, -1);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN 1
#endif
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_NATMC_FUNC__(dec_return)(var);
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
+ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- while (1) {
- AO_t exp = AO_load(&var->counter);
- AO_t new = exp & ((AO_t) mask);
- if (AO_compare_and_swap_full(&var->counter, exp, new))
- return (ETHR_AINT_T__) exp;
- }
+ return ((ETHR_AINT_T__) AO_fetch_and_add(&var->counter, (AO_t) incr)) + incr;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- while (1) {
- AO_t exp = AO_load(&var->counter);
- AO_t new = exp | ((AO_t) mask);
- if (AO_compare_and_swap_full(&var->counter, exp, new))
- return (ETHR_AINT_T__) exp;
- }
-}
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ exp)
-{
- ETHR_AINT_T__ act;
- do {
- if (AO_compare_and_swap_full(&var->counter, (AO_t) exp, (AO_t) new))
- return exp;
- act = (ETHR_AINT_T__) AO_load(&var->counter);
- } while (act == exp);
- return act;
-}
+#ifdef AO_HAVE_fetch_and_add1
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
+ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
{
- while (1) {
- AO_t exp = AO_load(&var->counter);
- if (AO_compare_and_swap_full(&var->counter, exp, (AO_t) new))
- return (ETHR_AINT_T__) exp;
- }
+ return ((ETHR_AINT_T__) AO_fetch_and_add1(&var->counter)) + 1;
}
-/*
- * Atomic ops with at least specified barriers.
- */
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
-#ifdef AO_HAVE_load_acquire
- return (ETHR_AINT_T__) AO_load_acquire(&var->counter);
+#ifdef AO_HAVE_fetch_and_add1_acquire
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1
#else
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var);
- ETHR_MEMORY_BARRIER;
- return res;
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_fetch_and_add1_acquire
return ((ETHR_AINT_T__) AO_fetch_and_add1_acquire(&var->counter)) + 1;
+}
+
+#endif
+
+#ifdef AO_HAVE_fetch_and_sub1
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN 1
#else
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(add_return)(var, 1);
- return res;
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN 1
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_store_release
- AO_store_release(&var->counter, (AO_t) value);
+ return ((ETHR_AINT_T__) AO_fetch_and_sub1(&var->counter)) - 1;
+}
+
+#endif
+
+#ifdef AO_HAVE_fetch_and_sub1_release
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB 1
#else
- ETHR_MEMORY_BARRIER;
- ETHR_NATMC_FUNC__(set)(var, value);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_fetch_and_sub1_release
return ((ETHR_AINT_T__) AO_fetch_and_sub1_release(&var->counter)) - 1;
+}
+
+#endif
+
+#ifdef AO_HAVE_compare_and_swap
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1
#else
- return ETHR_NATMC_FUNC__(dec_return)(var);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG 1
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ exp)
{
- (void) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_AINT_T__ act;
+ do {
+ if (AO_compare_and_swap(&var->counter, (AO_t) exp, (AO_t) new))
+ return exp;
+ act = (ETHR_AINT_T__) AO_load(&var->counter);
+ } while (act == exp);
+ return act;
}
+#endif
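AO_compare_and_swap() only reports success or failure, so the loop above synthesizes a value-returning cmpxchg: after a failed swap it reloads, and retries only if the reload races back to the expected value. With a CAS that reports the observed value on failure, the loop vanishes; a hedged C11 sketch for comparison (C11 obviously postdates this code):

#include <stdatomic.h>

static long cmpxchg_val(_Atomic long *p, long neu, long exp)
{
    long act = exp;
    atomic_compare_exchange_strong(p, &act, neu);
    return act; /* equals exp on success; the observed value otherwise */
}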
+
+#ifdef AO_HAVE_compare_and_swap_acquire
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
ETHR_AINT_T__ new,
ETHR_AINT_T__ exp)
{
-#ifdef AO_HAVE_compare_and_swap_acquire
ETHR_AINT_T__ act;
do {
if (AO_compare_and_swap_acquire(&var->counter, (AO_t) exp, (AO_t) new))
return exp;
+#ifdef AO_HAVE_load_acquire
+ act = (ETHR_AINT_T__) AO_load_acquire(&var->counter);
+#else
act = (ETHR_AINT_T__) AO_load(&var->counter);
+#endif
} while (act == exp);
+#ifndef AO_HAVE_load_acquire
AO_nop_full();
+#endif
return act;
+}
+
+#endif
+
+#ifdef AO_HAVE_compare_and_swap_release
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB 1
#else
- ETHR_AINT_T__ act = ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
- return act;
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
ETHR_AINT_T__ new,
ETHR_AINT_T__ exp)
{
-#ifdef AO_HAVE_compare_and_swap_release
ETHR_AINT_T__ act;
do {
if (AO_compare_and_swap_release(&var->counter, (AO_t) exp, (AO_t) new))
@@ -329,11 +285,9 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
act = (ETHR_AINT_T__) AO_load(&var->counter);
} while (act == exp);
return act;
-#else
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
-#endif
}
+#endif
#endif /* ETHR_TRY_INLINE_FUNCS */
@@ -341,6 +295,4 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
#undef ETHR_ATMC_T__
#undef ETHR_AINT_T__
-#endif /* !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_LIBATOMIC_OPS) */
-
#endif /* ETHR_LIBATOMIC_OPS_ATOMIC_H__ */
diff --git a/erts/include/internal/libatomic_ops/ethr_membar.h b/erts/include/internal/libatomic_ops/ethr_membar.h
new file mode 100644
index 0000000000..b8530a0094
--- /dev/null
+++ b/erts/include/internal/libatomic_ops/ethr_membar.h
@@ -0,0 +1,75 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers when using libatomic_ops
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_LIBATOMIC_OPS_MEMBAR_H__
+#define ETHR_LIBATOMIC_OPS_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#ifndef AO_HAVE_nop_full
+# error "No AO_nop_full()"
+#endif
+
+static __inline__ void
+ethr_mb__(void)
+{
+ AO_nop_full();
+}
+
+static __inline__ void
+ethr_rb__(void)
+{
+#ifdef AO_HAVE_nop_read
+ AO_nop_read();
+#else
+ AO_nop_full();
+#endif
+}
+
+static __inline__ void
+ethr_wb__(void)
+{
+#ifdef AO_HAVE_nop_write
+ AO_nop_write();
+#else
+ AO_nop_full();
+#endif
+}
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \
+ ethr_wb__(), \
+ ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \
+ ethr_rb__(), \
+ ethr_mb__()))
+
+#define ETHR_COMPILER_BARRIER AO_compiler_barrier()
+#ifdef AO_NO_DD_ORDERING
+# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_READ_MEMORY_BARRIER
+#endif
+
+#endif /* ETHR_LIBATOMIC_OPS_MEMBAR_H__ */
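
ETHR_MEMBAR above selects the weakest libatomic_ops fence that still covers
the requested ordering: a pure StoreStore request maps to the write barrier,
a pure LoadLoad request to the read barrier, and every other combination to
the full AO_nop_full(). Assuming ETHR_CHOOSE_EXPR reduces to a compile-time
selection (for instance GCC's __builtin_choose_expr), typical requests
resolve as:

    ETHR_MEMBAR(ETHR_StoreStore);                /* -> ethr_wb__() */
    ETHR_MEMBAR(ETHR_LoadLoad);                  /* -> ethr_rb__() */
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); /* -> ethr_mb__() */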
diff --git a/erts/include/internal/libatomic_ops/ethread.h b/erts/include/internal/libatomic_ops/ethread.h
index ee73ba73bc..e1fdd588bb 100644
--- a/erts/include/internal/libatomic_ops/ethread.h
+++ b/erts/include/internal/libatomic_ops/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,6 +25,18 @@
#ifndef ETHREAD_LIBATOMIC_OPS_H__
#define ETHREAD_LIBATOMIC_OPS_H__
+#if (defined(ETHR_HAVE_LIBATOMIC_OPS) \
+ && ((ETHR_SIZEOF_AO_T == 4 && !defined(ETHR_HAVE_NATIVE_ATOMIC32)) \
+ || (ETHR_SIZEOF_AO_T == 8 && !defined(ETHR_HAVE_NATIVE_ATOMIC64))))
+
+#if defined(__x86_64__)
+#define AO_USE_PENTIUM4_INSTRS
+#endif
+
+#include "atomic_ops.h"
+#include "ethr_membar.h"
#include "ethr_atomic.h"
#endif
+
+#endif
diff --git a/erts/include/internal/ppc32/atomic.h b/erts/include/internal/ppc32/atomic.h
index 522f433649..6001620677 100644
--- a/erts/include/internal/ppc32/atomic.h
+++ b/erts/include/internal/ppc32/atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -29,27 +29,31 @@
#define ETHREAD_PPC_ATOMIC_H
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "ethread"
typedef struct {
volatile ethr_sint32_t counter;
} ethr_native_atomic32_t;
-#define ETHR_MEMORY_BARRIER __asm__ __volatile__("sync" : : : "memory")
-
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+
static ETHR_INLINE ethr_sint32_t *
ethr_native_atomic32_addr(ethr_native_atomic32_t *var)
{
return (ethr_sint32_t *) &var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
+
static ETHR_INLINE void
-ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i)
+ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i)
{
var->counter = i;
}
-#define ethr_native_atomic32_set(v, i) ethr_native_atomic32_init((v), (i))
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_read(ethr_native_atomic32_t *var)
@@ -57,57 +61,68 @@ ethr_native_atomic32_read(ethr_native_atomic32_t *var)
return var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"add %0,%2,%0\n\t"
"stwcx. %0,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter), "r"(incr)
: "cc", "memory");
return tmp;
}
-static ETHR_INLINE void
-ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_add_return_acqb(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
- /* XXX: could use weaker version here w/o eieio+isync */
- (void)ethr_native_atomic32_add_return(var, incr);
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_add_return(var, incr);
+ __asm__ __volatile__("isync\n\t" : : : "memory");
+ return res;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var)
{
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"addic %0,%0,1\n\t" /* due to addi's (rA|0) behaviour */
"stwcx. %0,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter)
: "cc", "memory");
return tmp;
}
-static ETHR_INLINE void
-ethr_native_atomic32_inc(ethr_native_atomic32_t *var)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var)
{
- /* XXX: could use weaker version here w/o eieio+isync */
- (void)ethr_native_atomic32_inc_return(var);
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_inc_return(var);
+ __asm__ __volatile__("isync\n\t" : : : "memory");
+ return res;
}
+
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
@@ -115,82 +130,120 @@ ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"addic %0,%0,-1\n\t"
"stwcx. %0,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter)
: "cc", "memory");
return tmp;
}
-static ETHR_INLINE void
-ethr_native_atomic32_dec(ethr_native_atomic32_t *var)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_dec_return_acqb(ethr_native_atomic32_t *var)
{
- /* XXX: could use weaker version here w/o eieio+isync */
- (void)ethr_native_atomic32_dec_return(var);
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_dec_return(var);
+ __asm__ __volatile__("isync\n\t" : : : "memory");
+ return res;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
ethr_sint32_t old, new;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%2\n\t"
"and %1,%0,%3\n\t"
"stwcx. %1,0,%2\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(old), "=&r"(new)
: "r"(&var->counter), "r"(mask)
: "cc", "memory");
return old;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_and_retold_acqb(ethr_native_atomic32_t *var, ethr_sint32_t mask)
+{
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_and_retold(var, mask);
+ __asm__ __volatile__("isync\n\t" : : : "memory");
+ return res;
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
ethr_sint32_t old, new;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%2\n\t"
"or %1,%0,%3\n\t"
"stwcx. %1,0,%2\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(old), "=&r"(new)
: "r"(&var->counter), "r"(mask)
: "cc", "memory");
return old;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_or_retold_acqb(ethr_native_atomic32_t *var, ethr_sint32_t mask)
+{
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_or_retold(var, mask);
+ __asm__ __volatile("isync\n\t" : : : "memory");
+ return res;
+}
+
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val)
{
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"stwcx. %2,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter), "r"(val)
: "cc", "memory");
return tmp;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_xchg_acqb(ethr_native_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_xchg(var, val);
+ __asm__ __volatile__("isync\n\t" : : : "memory");
+ return res;
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
ethr_sint32_t new,
@@ -199,14 +252,12 @@ ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
ethr_sint32_t old;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%2\n\t"
"cmpw 0,%0,%3\n\t"
"bne 2f\n\t"
"stwcx. %1,0,%2\n\t"
"bne- 1b\n\t"
- "isync\n"
"2:"
: "=&r"(old)
: "r"(new), "r"(&var->counter), "r"(expected)
@@ -215,25 +266,30 @@ ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
return old;
}
-/*
- * Atomic ops with at least specified barriers.
- */
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
-static ETHR_INLINE long
-ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var,
+ ethr_sint32_t new,
+ ethr_sint32_t expected)
{
- long res = ethr_native_atomic32_read(var);
- ETHR_MEMORY_BARRIER;
- return res;
-}
+ ethr_sint32_t old;
-#define ethr_native_atomic32_set_relb ethr_native_atomic32_xchg
-#define ethr_native_atomic32_inc_return_acqb ethr_native_atomic32_inc_return
-#define ethr_native_atomic32_dec_relb ethr_native_atomic32_dec_return
-#define ethr_native_atomic32_dec_return_relb ethr_native_atomic32_dec_return
+ __asm__ __volatile__(
+ "1:\t"
+ "lwarx %0,0,%2\n\t"
+ "cmpw 0,%0,%3\n\t"
+ "bne 2f\n\t"
+ "stwcx. %1,0,%2\n\t"
+ "bne- 1b\n\t"
+ "isync\n"
+ "2:"
+ : "=&r"(old)
+ : "r"(new), "r"(&var->counter), "r"(expected)
+ : "cc", "memory");
-#define ethr_native_atomic32_cmpxchg_acqb ethr_native_atomic32_cmpxchg
-#define ethr_native_atomic32_cmpxchg_relb ethr_native_atomic32_cmpxchg
+ return old;
+}
#endif /* ETHR_TRY_INLINE_FUNCS */
diff --git a/erts/include/internal/ppc32/ethr_membar.h b/erts/include/internal/ppc32/ethr_membar.h
new file mode 100644
index 0000000000..ff5cc86bfb
--- /dev/null
+++ b/erts/include/internal/ppc32/ethr_membar.h
@@ -0,0 +1,63 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for PowerPC
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_PPC_MEMBAR_H__
+#define ETHR_PPC_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+static __inline__ void
+ethr_lwsync__(void)
+{
+#ifdef ETHR_PPC_HAVE_NO_LWSYNC
+ __asm__ __volatile__ ("sync\n\t" : : : "memory");
+#else
+#ifndef ETHR_PPC_HAVE_LWSYNC
+ if (ETHR_PPC_RUNTIME_CONF_HAVE_NO_LWSYNC__)
+ __asm__ __volatile__ ("sync\n\t" : : : "memory");
+ else
+#endif
+ __asm__ __volatile__ ("lwsync\n\t" : : : "memory");
+#endif
+}
+
+static __inline__ void
+ethr_sync__(void)
+{
+ __asm__ __volatile__ ("sync\n\t" : : : "memory");
+}
+
+/*
+ * According to the "memory barrier instructions" section of
+ * http://www.ibm.com/developerworks/systems/articles/powerpc.html
+ * we want to use sync when a StoreLoad is needed and lwsync for
+ * everything else.
+ */
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_sync__(), ethr_lwsync__())
+
+#endif
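
In short, any mask containing ETHR_StoreLoad pays for a full sync, while
every other request is satisfied by the cheaper lwsync (falling back to
sync at runtime on cores that lack lwsync). For example:

    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreStore); /* lwsync */
    ETHR_MEMBAR(ETHR_StoreLoad);                               /* sync   */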
diff --git a/erts/include/internal/ppc32/ethread.h b/erts/include/internal/ppc32/ethread.h
index 3b619e9d01..e41c83c5da 100644
--- a/erts/include/internal/ppc32/ethread.h
+++ b/erts/include/internal/ppc32/ethread.h
@@ -24,12 +24,9 @@
#ifndef ETHREAD_PPC32_ETHREAD_H
#define ETHREAD_PPC32_ETHREAD_H
+#include "ethr_membar.h"
#include "atomic.h"
#include "spinlock.h"
#include "rwlock.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-
#endif /* ETHREAD_PPC32_ETHREAD_H */
diff --git a/erts/include/internal/ppc32/rwlock.h b/erts/include/internal/ppc32/rwlock.h
index 19ec26ab68..311f000b69 100644
--- a/erts/include/internal/ppc32/rwlock.h
+++ b/erts/include/internal/ppc32/rwlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -23,12 +23,14 @@
*
* Based on the examples in Appendix E of Motorola's
* "Programming Environments Manual For 32-Bit Implementations
- * of the PowerPC Architecture". Uses eieio instead of sync
- * in the unlock sequence, as suggested in the manual.
+ * of the PowerPC Architecture".
*/
#ifndef ETHREAD_PPC_RWLOCK_H
#define ETHREAD_PPC_RWLOCK_H
+#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread"
+
/* Unlocked if zero, read-locked if negative, write-locked if +1. */
typedef struct {
volatile int lock;
@@ -47,9 +49,10 @@ ethr_native_read_unlock(ethr_native_rwlock_t *lock)
{
int tmp;
- /* this is eieio + ethr_native_atomic_inc() - isync */
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+
+ /* this is ethr_native_atomic_inc() - isync */
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"addic %0,%0,1\n\t"
@@ -105,7 +108,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
static ETHR_INLINE void
ethr_native_write_unlock(ethr_native_rwlock_t *lock)
{
- __asm__ __volatile__("eieio" : : : "memory");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
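
The write unlock above is the classic release-store idiom: a barrier orders
everything done inside the critical section before the plain store that
publishes the lock as free. Sketched generically in terms of ETHR_MEMBAR
(names illustrative):

    static inline void
    release_store_unlock(volatile int *lock)
    {
        /* order the critical section before the releasing store */
        ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
        *lock = 0; /* the plain store may now be observed */
    }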
diff --git a/erts/include/internal/ppc32/spinlock.h b/erts/include/internal/ppc32/spinlock.h
index c8460a3e8a..4c95ec9efb 100644
--- a/erts/include/internal/ppc32/spinlock.h
+++ b/erts/include/internal/ppc32/spinlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -23,12 +23,14 @@
*
* Based on the examples in Appendix E of Motorola's
* "Programming Environments Manual For 32-Bit Implementations
- * of the PowerPC Architecture". Uses eieio instead of sync
- * in the unlock sequence, as suggested in the manual.
+ * of the PowerPC Architecture".
*/
#ifndef ETHREAD_PPC_SPINLOCK_H
#define ETHREAD_PPC_SPINLOCK_H
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+#define ETHR_NATIVE_SPINLOCK_IMPL "ethread"
+
/* Unlocked if zero, locked if non-zero. */
typedef struct {
volatile unsigned int lock;
@@ -45,7 +47,7 @@ ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
static ETHR_INLINE void
ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- __asm__ __volatile__("eieio" : : : "memory");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
diff --git a/erts/include/internal/pthread/ethr_event.h b/erts/include/internal/pthread/ethr_event.h
index 4c29b28536..d0a77990cc 100644
--- a/erts/include/internal/pthread/ethr_event.h
+++ b/erts/include/internal/pthread/ethr_event.h
@@ -21,7 +21,7 @@
* Author: Rickard Green
*/
-#if defined(ETHR_HAVE_LINUX_FUTEX) && defined(ETHR_HAVE_NATIVE_ATOMICS)
+#if defined(ETHR_HAVE_LINUX_FUTEX) && defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
/* --- Linux futex implementation of ethread events ------------------------- */
#define ETHR_LINUX_FUTEX_IMPL__
@@ -62,8 +62,7 @@ static void ETHR_INLINE
ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
{
ethr_sint32_t val;
- ETHR_MEMORY_BARRIER;
- val = ethr_atomic32_xchg(&e->futex, ETHR_EVENT_ON__);
+ val = ethr_atomic32_xchg_mb(&e->futex, ETHR_EVENT_ON__);
if (val == ETHR_EVENT_OFF_WAITER__) {
int res = ETHR_FUTEX__(&e->futex, ETHR_FUTEX_WAKE__, 1);
if (res != 0)
@@ -99,8 +98,7 @@ static void ETHR_INLINE
ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
{
ethr_sint32_t val;
- ETHR_MEMORY_BARRIER;
- val = ethr_atomic32_xchg(&e->state, ETHR_EVENT_ON__);
+ val = ethr_atomic32_xchg_mb(&e->state, ETHR_EVENT_ON__);
if (val == ETHR_EVENT_OFF_WAITER__) {
int res = pthread_mutex_lock(&e->mtx);
if (res != 0)
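
Both event-set paths above fold the separate full barrier into the swap
itself: ethr_atomic32_xchg_mb is a fully ordered exchange, which makes the
standalone ETHR_MEMORY_BARRIER redundant. A C11 analogue of the same
transformation (illustrative):

    #include <stdatomic.h>

    static inline int
    set_event(atomic_int *state, int on)
    {
        /* before: atomic_thread_fence(memory_order_seq_cst);
         *         return atomic_exchange_explicit(state, on,
         *                                         memory_order_relaxed);
         * after, the ordering rides on the exchange itself: */
        return atomic_exchange_explicit(state, on, memory_order_seq_cst);
    }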
diff --git a/erts/include/internal/sparc32/atomic.h b/erts/include/internal/sparc32/atomic.h
index c297522ab1..fe1daaa9cf 100644
--- a/erts/include/internal/sparc32/atomic.h
+++ b/erts/include/internal/sparc32/atomic.h
@@ -35,23 +35,16 @@
#ifdef ETHR_INCLUDE_ATOMIC_IMPL__
-#ifndef ETHR_SPARC_V9_ATOMIC_COMMON__
-#define ETHR_SPARC_V9_ATOMIC_COMMON__
-
-#define ETHR_MEMORY_BARRIER \
- __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreLoad|#StoreStore\n" \
- : : : "memory")
-
-#endif /* ETHR_SPARC_V9_ATOMIC_COMMON__ */
-
#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "ethread"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic32_t
#define ETHR_AINT_T__ ethr_sint32_t
#define ETHR_CAS__ "cas"
#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATIVE_ATOMIC64_IMPL "ethread"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic64_t
#define ETHR_AINT_T__ ethr_sint64_t
@@ -66,17 +59,23 @@ typedef struct {
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- var->counter = i;
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
+#endif
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
@@ -84,182 +83,49 @@ ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
var->counter = i;
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
return var->counter;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- ETHR_AINT_T__ old, tmp;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
- do {
- old = var->counter;
- tmp = old+incr;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(tmp)
- : "r"(old), "r"(&var->counter), "0"(tmp)
- : "memory");
- } while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old+incr;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- (void)ETHR_NATMC_FUNC__(add_return)(var, incr);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, 1);
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void)ETHR_NATMC_FUNC__(add_return)(var, 1);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, -1);
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void)ETHR_NATMC_FUNC__(add_return)(var, -1);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ old, tmp;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
- do {
- old = var->counter;
- tmp = old & mask;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(tmp)
- : "r"(old), "r"(&var->counter), "0"(tmp)
- : "memory");
- } while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ old, tmp;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
- do {
- old = var->counter;
- tmp = old | mask;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(tmp)
- : "r"(old), "r"(&var->counter), "0"(tmp)
- : "memory");
- } while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
-{
- ETHR_AINT_T__ old, new;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad" : : : "memory");
- do {
- old = var->counter;
- new = val;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(new)
- : "r"(old), "r"(&var->counter), "0"(new)
- : "memory");
- } while (__builtin_expect(old != new, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old;
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
{
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
__asm__ __volatile__(
ETHR_CAS__ " [%2], %1, %0"
: "=&r"(new)
: "r"(old), "r"(&var->counter), "0"(new)
: "memory");
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
return new;
}
-/*
- * Atomic ops with at least specified barriers.
- */
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var);
- __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
- return res;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
- ETHR_NATMC_FUNC__(set)(var, i);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
-{
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
- return res;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
-{
- ETHR_NATMC_FUNC__(dec)(var);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(dec_return)(var);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
{
ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
return res;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
-{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
-}
-
#endif /* ETHR_TRY_INLINE_FUNCS */
#undef ETHR_NATMC_FUNC__
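
The _acqb suffix follows the ethread convention of fencing after the
operation, so later accesses cannot move above it, while _relb variants
fence before the operation. Sketched generically (illustrative names;
GCC's __sync builtin stands in for the cas instruction, and is itself
stronger than the bare instruction would be):

    static inline long
    cas_acqb(volatile long *p, long new_val, long exp)
    {
        long old = __sync_val_compare_and_swap(p, exp, new_val);
        ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore); /* trailing fence */
        return old;
    }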
diff --git a/erts/include/internal/sparc32/ethr_membar.h b/erts/include/internal/sparc32/ethr_membar.h
new file mode 100644
index 0000000000..6eb0c5a1d6
--- /dev/null
+++ b/erts/include/internal/sparc32/ethr_membar.h
@@ -0,0 +1,115 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for sparc-v9
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_SPARC_V9_MEMBAR_H__
+#define ETHR_SPARC_V9_MEMBAR_H__
+
+#if defined(ETHR_SPARC_TSO)
+/* --- Total Store Order ------------------------------------------------ */
+
+#define ETHR_LoadLoad 0
+#define ETHR_LoadStore 0
+#define ETHR_StoreLoad 1
+#define ETHR_StoreStore 0
+
+static __inline__ void
+ethr_cb__(void)
+{
+ __asm__ __volatile__ ("" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreLoad__(void)
+{
+ __asm__ __volatile__ ("membar #StoreLoad\n\t" : : : "memory");
+}
+
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B), ethr_StoreLoad__(), ethr_cb__())
+
+#elif defined(ETHR_SPARC_PSO)
+/* --- Partial Store Order ---------------------------------------------- */
+
+#define ETHR_LoadLoad 0
+#define ETHR_LoadStore 0
+#define ETHR_StoreLoad (1 << 0)
+#define ETHR_StoreStore (1 << 1)
+
+static __inline__ void
+ethr_cb__(void)
+{
+ __asm__ __volatile__ ("" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreLoad__(void)
+{
+ __asm__ __volatile__ ("membar #StoreLoad\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreStore__(void)
+{
+ __asm__ __volatile__ ("membar #StoreStore\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreLoad_StoreStore__(void)
+{
+ __asm__ __volatile__ ("membar #StoreLoad|StoreStore\n\t" : : : "memory");
+}
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR( \
+ (B) == ETHR_StoreLoad, \
+ ethr_StoreLoad__(), \
+ ETHR_CHOOSE_EXPR( \
+ (B) == ETHR_StoreStore, \
+ ethr_StoreStore__(), \
+ ETHR_CHOOSE_EXPR( \
+ (B) == (ETHR_StoreLoad|ETHR_StoreStore), \
+ ethr_StoreLoad_StoreStore__(), \
+ ethr_cb__())))
+
+#elif defined(ETHR_SPARC_RMO)
+/* --- Relaxed Memory Order --------------------------------------------- */
+
+# define ETHR_LoadLoad #LoadLoad
+# define ETHR_LoadStore #LoadStore
+# define ETHR_StoreLoad #StoreLoad
+# define ETHR_StoreStore #StoreStore
+
+# define ETHR_MEMBAR_AUX__(B) \
+ __asm__ __volatile__("membar " #B "\n\t" : : : "memory")
+
+# define ETHR_MEMBAR(B) ETHR_MEMBAR_AUX__(B)
+
+#else
+
+# error "No memory order defined"
+
+#endif
+
+#endif
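
The three SPARC memory models collapse quite differently: under TSO only
StoreLoad can be reordered, so every other request is just a compiler
barrier; under PSO the StoreLoad and StoreStore combinations need real
membars; under RMO the request flags are the membar operand itself and are
pasted straight into the instruction. In the RMO case, for instance,
ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore) expands to roughly:

    __asm__ __volatile__("membar #LoadStore|#StoreStore\n\t" : : : "memory");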
diff --git a/erts/include/internal/sparc32/ethread.h b/erts/include/internal/sparc32/ethread.h
index aea9794390..5ad92d3da7 100644
--- a/erts/include/internal/sparc32/ethread.h
+++ b/erts/include/internal/sparc32/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,7 @@
#ifndef ETHREAD_SPARC32_ETHREAD_H
#define ETHREAD_SPARC32_ETHREAD_H
+#include "ethr_membar.h"
#define ETHR_ATOMIC_WANT_32BIT_IMPL__
#include "atomic.h"
#if ETHR_SIZEOF_PTR == 8
@@ -33,8 +34,4 @@
#include "spinlock.h"
#include "rwlock.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-
#endif /* ETHREAD_SPARC32_ETHREAD_H */
diff --git a/erts/include/internal/sparc32/rwlock.h b/erts/include/internal/sparc32/rwlock.h
index 465ec96866..8b6f2e9c57 100644
--- a/erts/include/internal/sparc32/rwlock.h
+++ b/erts/include/internal/sparc32/rwlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
#ifndef ETHREAD_SPARC32_RWLOCK_H
#define ETHREAD_SPARC32_RWLOCK_H
+#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread"
+
/* Unlocked if zero, read-locked if positive, write-locked if -1. */
typedef struct {
volatile int lock;
@@ -42,7 +45,7 @@ ethr_native_read_unlock(ethr_native_rwlock_t *lock)
{
unsigned int old, new;
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
do {
old = lock->lock;
new = old-1;
@@ -70,7 +73,7 @@ ethr_native_read_trylock(ethr_native_rwlock_t *lock)
: "r"(old), "r"(&lock->lock), "0"(new)
: "memory");
} while (__builtin_expect(old != new, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
return 1;
}
@@ -87,7 +90,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
if (__builtin_expect(ethr_native_read_trylock(lock) != 0, 1))
break;
do {
- __asm__ __volatile__("membar #LoadLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad);
} while (ethr_native_read_is_locked(lock));
}
}
@@ -95,7 +98,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
static ETHR_INLINE void
ethr_native_write_unlock(ethr_native_rwlock_t *lock)
{
- __asm__ __volatile__("membar #LoadStore|#StoreStore");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
@@ -115,7 +118,7 @@ ethr_native_write_trylock(ethr_native_rwlock_t *lock)
: "r"(old), "r"(&lock->lock), "0"(new)
: "memory");
} while (__builtin_expect(old != new, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
return 1;
}
@@ -132,7 +135,7 @@ ethr_native_write_lock(ethr_native_rwlock_t *lock)
if (__builtin_expect(ethr_native_write_trylock(lock) != 0, 1))
break;
do {
- __asm__ __volatile__("membar #LoadLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad);
} while (ethr_native_write_is_locked(lock));
}
}
diff --git a/erts/include/internal/sparc32/spinlock.h b/erts/include/internal/sparc32/spinlock.h
index 493d514210..d4e36e09cf 100644
--- a/erts/include/internal/sparc32/spinlock.h
+++ b/erts/include/internal/sparc32/spinlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
#ifndef ETHR_SPARC32_SPINLOCK_H
#define ETHR_SPARC32_SPINLOCK_H
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+#define ETHR_NATIVE_SPINLOCK_IMPL "ethread"
+
/* Locked with ldstub, so unlocked when 0 and locked when non-zero. */
typedef struct {
volatile unsigned char lock;
@@ -40,7 +43,7 @@ ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
static ETHR_INLINE void
ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- __asm__ __volatile__("membar #LoadStore|#StoreStore");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
@@ -51,10 +54,10 @@ ethr_native_spin_trylock(ethr_native_spinlock_t *lock)
__asm__ __volatile__(
"ldstub [%1], %0\n\t"
- "membar #StoreLoad|#StoreStore"
: "=r"(prev)
: "r"(&lock->lock)
: "memory");
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
return prev == 0;
}
@@ -71,7 +74,7 @@ ethr_native_spin_lock(ethr_native_spinlock_t *lock)
if (__builtin_expect(ethr_native_spin_trylock(lock) != 0, 1))
break;
do {
- __asm__ __volatile__("membar #LoadLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad);
} while (ethr_native_spin_is_locked(lock));
}
}
diff --git a/erts/include/internal/tile/atomic.h b/erts/include/internal/tile/atomic.h
index 5697afda25..1f1553c346 100644
--- a/erts/include/internal/tile/atomic.h
+++ b/erts/include/internal/tile/atomic.h
@@ -25,6 +25,7 @@
#define ETHREAD_TILE_ATOMIC_H
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "tilera"
#include <atomic.h>
@@ -34,27 +35,25 @@ typedef struct {
volatile ethr_sint32_t counter;
} ethr_native_atomic32_t;
-#define ETHR_MEMORY_BARRIER __insn_mf()
-
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+
static ETHR_INLINE ethr_sint32_t *
ethr_native_atomic32_addr(ethr_native_atomic32_t *var)
{
return (ethr_sint32_t *) &var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT 1
+
static ETHR_INLINE void
ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i)
{
var->counter = i;
}
-static ETHR_INLINE void
-ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i)
-{
- atomic_exchange_acq(&var->counter, i);
-}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_read(ethr_native_atomic32_t *var)
@@ -62,139 +61,80 @@ ethr_native_atomic32_read(ethr_native_atomic32_t *var)
return var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
+{
+ return atomic_compare_and_exchange_val_acq(&var->counter,
+ 0x81818181,
+ 0x81818181);
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD 1
+
static ETHR_INLINE void
ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
- ETHR_MEMORY_BARRIER;
atomic_add(&var->counter, incr);
- ETHR_MEMORY_BARRIER;
-}
-
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC 1
+
static ETHR_INLINE void
ethr_native_atomic32_inc(ethr_native_atomic32_t *var)
{
- ETHR_MEMORY_BARRIER;
atomic_increment(&var->counter);
- ETHR_MEMORY_BARRIER;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC 1
+
static ETHR_INLINE void
ethr_native_atomic32_dec(ethr_native_atomic32_t *var)
{
- ETHR_MEMORY_BARRIER;
atomic_decrement(&var->counter);
- ETHR_MEMORY_BARRIER;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
- ethr_sint32_t res;
- ETHR_MEMORY_BARRIER;
- res = atomic_exchange_and_add(&var->counter, incr) + incr;
- ETHR_MEMORY_BARRIER;
- return res;
+ return atomic_exchange_and_add(&var->counter, incr) + incr;
}
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_add_return(var, 1);
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_add_return(var, -1);
-}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
- ethr_sint32_t res;
- ETHR_MEMORY_BARRIER;
- res = atomic_and_val(&var->counter, mask);
- ETHR_MEMORY_BARRIER;
- return res;
+ return atomic_and_val(&var->counter, mask);
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
- ethr_sint32_t res;
- ETHR_MEMORY_BARRIER;
- res = atomic_or_val(&var->counter, mask);
- ETHR_MEMORY_BARRIER;
- return res;
+ return atomic_or_val(&var->counter, mask);
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB 1
+
static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val)
+ethr_native_atomic32_xchg_acqb(ethr_native_atomic32_t *var, ethr_sint32_t val)
{
- ETHR_MEMORY_BARRIER;
return atomic_exchange_acq(&var->counter, val);
}
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t expected)
-{
- ETHR_MEMORY_BARRIER;
- return atomic_compare_and_exchange_val_acq(&var->counter, new, expected);
-}
-
-/*
- * Atomic ops with at least specified barriers.
- */
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
-{
- ethr_sint32_t res = ethr_native_atomic32_read(var);
- ETHR_MEMORY_BARRIER;
- return res;
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_inc_return(var);
-}
-
-static ETHR_INLINE void
-ethr_native_atomic32_set_relb(ethr_native_atomic32_t *var, ethr_sint32_t val)
-{
- ETHR_MEMORY_BARRIER;
- ethr_native_atomic32_set(var, val);
-}
-
-static ETHR_INLINE void
-ethr_native_atomic32_dec_relb(ethr_native_atomic32_t *var)
-{
- ethr_native_atomic32_dec(var);
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_dec_return_relb(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_dec_return(var);
-}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var,
ethr_sint32_t new,
- ethr_sint32_t exp)
+ ethr_sint32_t expected)
{
- return ethr_native_atomic32_cmpxchg(var, new, exp);
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_cmpxchg_relb(ethr_native_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
-{
- return ethr_native_atomic32_cmpxchg(var, new, exp);
+ return atomic_compare_and_exchange_val_acq(&var->counter, new, expected);
}
#endif /* ETHR_TRY_INLINE_FUNCS */
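
The read_acqb above rests on a small trick: a compare-and-exchange whose
new value equals its expected value can never change memory, yet it still
returns the current contents with the CAS's acquire ordering (0x81818181
is just an arbitrary probe; should the counter actually hold it, the same
value is written back harmlessly). Sketched with a GCC builtin
(illustrative, and a stronger barrier than strictly required):

    static inline int
    read_acquire(volatile int *p)
    {
        /* never modifies *p, since expected == new */
        return __sync_val_compare_and_swap(p, 0x81818181, 0x81818181);
    }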
diff --git a/erts/include/internal/tile/ethr_membar.h b/erts/include/internal/tile/ethr_membar.h
new file mode 100644
index 0000000000..7cb4f3cf9a
--- /dev/null
+++ b/erts/include/internal/tile/ethr_membar.h
@@ -0,0 +1,35 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for TILE64/TILEPro
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_TILE_MEMBAR_H__
+#define ETHR_TILE_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#define ETHR_MEMBAR(B) __insn_mf()
+
+#endif
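
On TILE64/TILEPro every request degrades to the single fence instruction
the architecture provides; the distinct flag values exist only so generic
code can still state the ordering it needs:

    ETHR_MEMBAR(ETHR_LoadLoad);  /* -> __insn_mf(), a full fence */
    ETHR_MEMBAR(ETHR_StoreLoad); /* -> __insn_mf(), the same     */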
diff --git a/erts/include/internal/tile/ethread.h b/erts/include/internal/tile/ethread.h
index 2de4d42bc6..7f579b50e7 100644
--- a/erts/include/internal/tile/ethread.h
+++ b/erts/include/internal/tile/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2009. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -23,8 +23,7 @@
#ifndef ETHREAD_TILE_ETHREAD_H
#define ETHREAD_TILE_ETHREAD_H
+#include "ethr_membar.h"
#include "atomic.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-
#endif /* ETHREAD_TILE_ETHREAD_H */
diff --git a/erts/include/internal/win/ethr_atomic.h b/erts/include/internal/win/ethr_atomic.h
index 60def01a7e..e11f1abf47 100644
--- a/erts/include/internal/win/ethr_atomic.h
+++ b/erts/include/internal/win/ethr_atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,364 +24,408 @@
#undef ETHR_INCLUDE_ATOMIC_IMPL__
#if !defined(ETHR_WIN_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
-#define ETHR_WIN_ATOMIC32_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
-#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
+# define ETHR_WIN_ATOMIC32_H__
+# if (defined(ETHR_MEMBAR) \
+ && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE) \
+ && defined(ETHR_HAVE__INTERLOCKEDEXCHANGE))
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+# endif
+# undef ETHR_ATOMIC_WANT_32BIT_IMPL__
#elif !defined(ETHR_WIN_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
-#define ETHR_WIN_ATOMIC64_H__
-#ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64
-/* _InterlockedCompareExchange64() required... */
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+# define ETHR_WIN_ATOMIC64_H__
+# if (defined(ETHR_MEMBAR) \
+ && (defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64) \
+ || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ) \
+ || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL)))
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+# endif
+# undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
-#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
+
+#if !defined(_MSC_VER) || _MSC_VER < 1400
+# undef ETHR_INCLUDE_ATOMIC_IMPL__
#endif
#ifdef ETHR_INCLUDE_ATOMIC_IMPL__
-#if defined(_MSC_VER) && _MSC_VER >= 1400
+# ifndef ETHR_WIN_ATOMIC_COMMON__
+# define ETHR_WIN_ATOMIC_COMMON__
+
+# if defined(_M_IX86) || defined(_M_AMD64) || defined(_M_IA64)
+# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 1
+# else
+# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 0
+# endif
+
+# endif /* ETHR_WIN_ATOMIC_COMMON__ */
+
+# if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+
+# define ETHR_HAVE_NATIVE_ATOMIC32 1
+# define ETHR_NATIVE_ATOMIC32_IMPL "windows-interlocked"
+
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT
+# define ETHR_WIN_HAVE_DEC
+# pragma intrinsic(_InterlockedDecrement)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT_REL
+# define ETHR_WIN_HAVE_DEC_REL
+# pragma intrinsic(_InterlockedDecrement_rel)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT
+# define ETHR_WIN_HAVE_INC
+# pragma intrinsic(_InterlockedIncrement)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ
+# define ETHR_WIN_HAVE_INC_ACQ
+# pragma intrinsic(_InterlockedIncrement_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD
+# define ETHR_WIN_HAVE_XCHG_ADD
+# pragma intrinsic(_InterlockedExchangeAdd)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ
+# define ETHR_WIN_HAVE_XCHG_ADD_ACQ
+# pragma intrinsic(_InterlockedExchangeAdd_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE
+# define ETHR_WIN_HAVE_XCHG
+# pragma intrinsic(_InterlockedExchange)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDAND
+# define ETHR_WIN_HAVE_AND
+# pragma intrinsic(_InterlockedAnd)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDOR
+# define ETHR_WIN_HAVE_OR
+# pragma intrinsic(_InterlockedOr)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE
+# define ETHR_WIN_HAVE_CMPXCHG
+# pragma intrinsic(_InterlockedCompareExchange)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ
+# define ETHR_WIN_HAVE_CMPXCHG_ACQ
+# pragma intrinsic(_InterlockedCompareExchange_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL
+# define ETHR_WIN_HAVE_CMPXCHG_REL
+# pragma intrinsic(_InterlockedCompareExchange_rel)
+# endif
+
+# define ETHR_ILCKD__(X) _Interlocked ## X
+# define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## _acq
+# define ETHR_ILCKD_REL__(X) _Interlocked ## X ## _rel
+
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic32_t
+# define ETHR_AINT_T__ ethr_sint32_t
+
+# elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
+
+# define ETHR_HAVE_NATIVE_ATOMIC64 1
+# define ETHR_NATIVE_ATOMIC64_IMPL "windows-interlocked"
+
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64
+# define ETHR_WIN_HAVE_DEC
+# pragma intrinsic(_InterlockedDecrement64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64_REL
+# define ETHR_WIN_HAVE_DEC_REL
+# pragma intrinsic(_InterlockedDecrement64_rel)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ
+# define ETHR_WIN_HAVE_INC_ACQ
+# pragma intrinsic(_InterlockedIncrement64_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64
+# define ETHR_WIN_HAVE_INC
+# pragma intrinsic(_InterlockedIncrement64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
+# define ETHR_WIN_HAVE_XCHG_ADD
+# pragma intrinsic(_InterlockedExchangeAdd64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ
+# define ETHR_WIN_HAVE_XCHG_ADD_ACQ
+# pragma intrinsic(_InterlockedExchangeAdd64_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE64
+# define ETHR_WIN_HAVE_XCHG
+# pragma intrinsic(_InterlockedExchange64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDAND64
+# define ETHR_WIN_HAVE_AND
+# pragma intrinsic(_InterlockedAnd64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDOR64
+# define ETHR_WIN_HAVE_OR
+# pragma intrinsic(_InterlockedOr64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64
+# define ETHR_WIN_HAVE_CMPXCHG
+# pragma intrinsic(_InterlockedCompareExchange64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ
+# define ETHR_WIN_HAVE_CMPXCHG_ACQ
+# pragma intrinsic(_InterlockedCompareExchange64_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL
+# define ETHR_WIN_HAVE_CMPXCHG_REL
+# pragma intrinsic(_InterlockedCompareExchange64_rel)
+# endif
+
+# define ETHR_ILCKD__(X) _Interlocked ## X ## 64
+# define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64_acq
+# define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64_rel
+
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic64_t
+# define ETHR_AINT_T__ ethr_sint64_t
+
+# else
+# error "Unsupported integer size"
+# endif
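
Each ETHR_WIN_HAVE_* define above does double duty: it records that the
configure step found the corresponding _Interlocked intrinsic, and it gates
the per-operation definitions further down, so a native operation is only
emitted when its intrinsic really exists. The pattern in miniature
(illustrative):

    #ifdef ETHR_HAVE__INTERLOCKEDDECREMENT
    # define ETHR_WIN_HAVE_DEC
    # pragma intrinsic(_InterlockedDecrement)
    #endif

    #ifdef ETHR_WIN_HAVE_DEC
    /* ...dec_return_mb() defined here via _InterlockedDecrement()... */
    #endif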
-#ifndef ETHR_WIN_ATOMIC_COMMON__
-#define ETHR_WIN_ATOMIC_COMMON__
+typedef struct {
+ volatile ETHR_AINT_T__ value;
+} ETHR_ATMC_T__;
-#define ETHR_HAVE_NATIVE_ATOMICS 1
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-#if defined(_M_IX86) || defined(_M_AMD64) || defined(_M_IA64)
-# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 1
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
#else
-# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 0
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
#endif
-#if defined(_M_AMD64) || (defined(_M_IX86) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 1
-#else
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 0
-#endif
-/*
- * No configure test checking for interlocked acquire/release
- * versions have been written, yet. It should define
- * ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS if, and
- * only if, all used interlocked operations with barriers
- * exists.
- *
- * Note, that these are pure optimizations for the itanium
- * processor.
- */
+static ETHR_INLINE ETHR_AINT_T__ *
+ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
+{
+ return (ETHR_AINT_T__ *) &var->value;
+}
-#include <intrin.h>
-#undef ETHR_COMPILER_BARRIER
-#define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
-#pragma intrinsic(_ReadWriteBarrier)
-#pragma intrinsic(_InterlockedCompareExchange)
-
-#if defined(_M_AMD64) || (defined(_M_IX86) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-#include <emmintrin.h>
-#include <mmintrin.h>
-#pragma intrinsic(_mm_mfence)
-#define ETHR_MEMORY_BARRIER _mm_mfence()
-#pragma intrinsic(_mm_sfence)
-#define ETHR_WRITE_MEMORY_BARRIER _mm_sfence()
-#pragma intrinsic(_mm_lfence)
-#define ETHR_READ_MEMORY_BARRIER _mm_lfence()
-#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER
+#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
#else
-
-#define ETHR_MEMORY_BARRIER \
-do { \
- volatile long x___ = 0; \
- _InterlockedCompareExchange(&x___, (long) 1, (long) 0); \
-} while (0)
-
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
#endif
-#endif /* ETHR_WIN_ATOMIC_COMMON__ */
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ var->value = i;
+}
#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1
+#endif
-#define ETHR_HAVE_NATIVE_ATOMIC32 1
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+#if defined(_M_IX86)
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ (void) ETHR_ILCKD__(Exchange)(&var->value, i);
+ else
+#endif /* _M_IX86 */
+ {
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ var->value = i;
+ }
+}
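
On x86/amd64 neither LoadStore nor StoreStore reordering is visible, so the
release branch above costs only a compiler barrier before a plain store;
the locked Exchange branch is kept for IA-32 parts without SSE2, which
ethread treats conservatively and which also lack mfence. An MSVC-flavoured
sketch of the fast path (illustrative):

    static __forceinline void
    set_release(volatile long *p, long v)
    {
        _ReadWriteBarrier(); /* compiler barrier only; x86 keeps the order */
        *p = v;
    }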
-/*
- * All used operations available as 32-bit intrinsics
- */
+#endif /* ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ */
-#pragma intrinsic(_InterlockedDecrement)
-#pragma intrinsic(_InterlockedIncrement)
-#pragma intrinsic(_InterlockedExchangeAdd)
-#pragma intrinsic(_InterlockedExchange)
-#pragma intrinsic(_InterlockedAnd)
-#pragma intrinsic(_InterlockedOr)
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#pragma intrinsic(_InterlockedExchangeAdd_acq)
-#pragma intrinsic(_InterlockedIncrement_acq)
-#pragma intrinsic(_InterlockedDecrement_rel)
-#pragma intrinsic(_InterlockedCompareExchange_acq)
-#pragma intrinsic(_InterlockedCompareExchange_rel)
-#endif
+#if defined(ETHR_WIN_HAVE_XCHG)
-#define ETHR_ILCKD__(X) _Interlocked ## X
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## _acq
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## _rel
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB 1
#else
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB 1
#endif
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic32_t
-#define ETHR_AINT_T__ ethr_sint32_t
-
-#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ (void) ETHR_ILCKD__(Exchange)(&var->value, i);
+}
-#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#endif
-/*
- * _InterlockedCompareExchange64() is required. The other may not
- * be available, but if so, we can generate them.
- */
-#pragma intrinsic(_InterlockedCompareExchange64)
+#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
-#define ETHR_OWN_ILCKD_INIT_VAL__(PTR) *(PTR)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
#else
-#define ETHR_OWN_ILCKD_INIT_VAL__(PTR) (__int64) 0
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
#endif
-#define ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, PTR, NEW, ACT, EXP, OPS, RET) \
-{ \
- __int64 NEW, ACT, EXP; \
- ACT = ETHR_OWN_ILCKD_INIT_VAL__(PTR); \
- do { \
- EXP = ACT; \
- { OPS; } \
- ACT = _InterlockedCompareExchange64(PTR, NEW, EXP); \
- } while (ACT != EXP); \
- return RET; \
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
+{
+ return var->value;
}
-#define ETHR_OWN_ILCKD_1_IMPL__(FUNC, NEW, ACT, EXP, OPS, RET) \
-static __forceinline __int64 \
-FUNC(__int64 volatile *ptr) \
-ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, ptr, NEW, ACT, EXP, OPS, RET)
-
-#define ETHR_OWN_ILCKD_2_IMPL__(FUNC, NEW, ACT, EXP, OPS, ARG, RET) \
-static __forceinline __int64 \
-FUNC(__int64 volatile *ptr, __int64 ARG) \
-ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, ptr, NEW, ACT, EXP, OPS, RET)
+#endif /* ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ */
+#if defined(ETHR_WIN_HAVE_XCHG_ADD)
-#ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64
-#pragma intrinsic(_InterlockedDecrement64)
-#else
-ETHR_OWN_ILCKD_1_IMPL__(_InterlockedDecrement64, new, act, exp,
- new = act - 1, new)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64
-#pragma intrinsic(_InterlockedIncrement64)
-#else
-ETHR_OWN_ILCKD_1_IMPL__(_InterlockedIncrement64, new, act, exp,
- new = act + 1, new)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
-#pragma intrinsic(_InterlockedExchangeAdd64)
-#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedExchangeAdd64, new, act, exp,
- new = act + arg, arg, act)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE64
-#pragma intrinsic(_InterlockedExchange64)
-#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedExchange64, new, act, exp,
- new = arg, arg, act)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDAND64
-#pragma intrinsic(_InterlockedAnd64)
-#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedAnd64, new, act, exp,
- new = act & arg, arg, act)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDOR64
-#pragma intrinsic(_InterlockedOr64)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1
#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedOr64, new, act, exp,
- new = act | arg, arg, act)
-#endif
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#pragma intrinsic(_InterlockedExchangeAdd64_acq)
-#pragma intrinsic(_InterlockedIncrement64_acq)
-#pragma intrinsic(_InterlockedDecrement64_rel)
-#pragma intrinsic(_InterlockedCompareExchange64_acq)
-#pragma intrinsic(_InterlockedCompareExchange64_rel)
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1
#endif
-#define ETHR_ILCKD__(X) _Interlocked ## X ## 64
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64_acq
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64_rel
-#else
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ return ETHR_ILCKD__(ExchangeAdd)(&var->value, i) + i;
+}
+
#endif
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic64_t
-#define ETHR_AINT_T__ ethr_sint64_t
+#if defined(ETHR_WIN_HAVE_INC)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB 1
#else
-#error "Unsupported integer size"
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB 1
#endif
-typedef struct {
- volatile ETHR_AINT_T__ value;
-} ETHR_ATMC_T__;
-
-#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-
-static ETHR_INLINE ETHR_AINT_T__ *
-ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(inc_return_mb)(ETHR_ATMC_T__ *var)
{
- return (ETHR_AINT_T__ *) &var->value;
+ return ETHR_ILCKD__(Increment)(&var->value);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
- var->value = i;
-#else
- (void) ETHR_ILCKD__(Exchange)(&var->value, i);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
- var->value = i;
+#if defined(ETHR_WIN_HAVE_INC_ACQ)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1
#else
- (void) ETHR_ILCKD__(Exchange)(&var->value, i);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
{
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
- return var->value;
-#else
- return ETHR_ILCKD__(ExchangeAdd)(&var->value, (ETHR_AINT_T__) 0);
-#endif
+ return ETHR_ILCKD_ACQ__(Increment)(&var->value);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- (void) ETHR_ILCKD__(ExchangeAdd)(&var->value, incr);
-}
+#endif
+
+#if defined(ETHR_WIN_HAVE_DEC)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+ETHR_NATMC_FUNC__(dec_return_mb)(ETHR_ATMC_T__ *var)
{
- return ETHR_ILCKD__(ExchangeAdd)(&var->value, i) + i;
+ return ETHR_ILCKD__(Decrement)(&var->value);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_ILCKD__(Increment)(&var->value);
-}
+#endif
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_ILCKD__(Decrement)(&var->value);
-}
+#if defined(ETHR_WIN_HAVE_DEC_REL)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_ILCKD__(Increment)(&var->value);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
{
- return ETHR_ILCKD__(Decrement)(&var->value);
+ return ETHR_ILCKD_REL__(Decrement)(&var->value);
}
+#endif
+
+#if defined(ETHR_WIN_HAVE_AND)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
+ETHR_NATMC_FUNC__(and_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
return ETHR_ILCKD__(And)(&var->value, mask);
}
+#endif
+
+#if defined(ETHR_WIN_HAVE_OR)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
+ETHR_NATMC_FUNC__(or_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
return ETHR_ILCKD__(Or)(&var->value, mask);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
-{
- return ETHR_ILCKD__(CompareExchange)(&var->value, new, old);
-}
+#endif
+#if defined(ETHR_WIN_HAVE_XCHG)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
+ETHR_NATMC_FUNC__(xchg_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
{
return ETHR_ILCKD__(Exchange)(&var->value, new);
}
-/*
- * Atomic ops with at least specified barriers.
- */
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_AINT_T__ val = var->value;
- ETHR_COMPILER_BARRIER;
- return val;
+#if defined(ETHR_WIN_HAVE_CMPXCHG)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
#else
- return ETHR_ILCKD_ACQ__(ExchangeAdd)(&var->value, (ETHR_AINT_T__) 0);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
{
- return ETHR_ILCKD_ACQ__(Increment)(&var->value);
+ return ETHR_ILCKD__(CompareExchange)(&var->value, new, old);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_COMPILER_BARRIER;
- var->value = i;
-#else
- (void) ETHR_ILCKD_REL__(Exchange)(&var->value, i);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_ILCKD_REL__(Decrement)(&var->value);
-}
+#if defined(ETHR_WIN_HAVE_CMPXCHG_ACQ)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_ILCKD_REL__(Decrement)(&var->value);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
@@ -391,6 +435,16 @@ ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
return ETHR_ILCKD_ACQ__(CompareExchange)(&var->value, new, old);
}
+#endif
+
+#if defined(ETHR_WIN_HAVE_CMPXCHG_REL)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
ETHR_AINT_T__ new,
@@ -399,6 +453,8 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
return ETHR_ILCKD_REL__(CompareExchange)(&var->value, new, old);
}
+#endif
+
#endif /* ETHR_TRY_INLINE_FUNCS */
#undef ETHR_ILCKD__
@@ -408,8 +464,17 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
#undef ETHR_ATMC_T__
#undef ETHR_AINT_T__
#undef ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
-#undef ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
-
-#endif /* _MSC_VER */
+#undef ETHR_WIN_HAVE_CMPXCHG
+#undef ETHR_WIN_HAVE_DEC
+#undef ETHR_WIN_HAVE_INC
+#undef ETHR_WIN_HAVE_XCHG_ADD
+#undef ETHR_WIN_HAVE_XCHG
+#undef ETHR_WIN_HAVE_AND
+#undef ETHR_WIN_HAVE_OR
+#undef ETHR_WIN_HAVE_XCHG_ADD_ACQ
+#undef ETHR_WIN_HAVE_INC_ACQ
+#undef ETHR_WIN_HAVE_DEC_REL
+#undef ETHR_WIN_HAVE_CMPXCHG_ACQ
+#undef ETHR_WIN_HAVE_CMPXCHG_REL
#endif /* ETHR_INCLUDE_ATOMIC_IMPL__ */
diff --git a/erts/include/internal/win/ethr_dw_atomic.h b/erts/include/internal/win/ethr_dw_atomic.h
new file mode 100644
index 0000000000..a3e7ffc3aa
--- /dev/null
+++ b/erts/include/internal/win/ethr_dw_atomic.h
@@ -0,0 +1,154 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Native double word atomics for Windows
+ * Author: Rickard Green
+ */
+
+#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+#ifndef ETHR_X86_DW_ATOMIC_H__
+# define ETHR_X86_DW_ATOMIC_H__
+# if ((ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64)) \
+ || (ETHR_SIZEOF_PTR == 8 \
+ && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE128)))
+# define ETHR_INCLUDE_DW_ATOMIC_IMPL__
+# endif
+#endif
+
+#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+
+# if ETHR_SIZEOF_PTR == 4
+# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
+# else
+# define ETHR_HAVE_NATIVE_DW_ATOMIC
+# endif
+# define ETHR_NATIVE_DW_ATOMIC_IMPL "windows-interlocked"
+
+# if defined(_M_IX86) || defined(_M_AMD64)
+/*
+ * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it is evaluated
+ * at runtime in order to determine whether the native or the fallback
+ * implementation should be used.
+ */
+# define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \
+ ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__
+# endif
+
+# include <intrin.h>
+# if ETHR_SIZEOF_PTR == 4
+# pragma intrinsic(_InterlockedCompareExchange64)
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
+# else
+# pragma intrinsic(_InterlockedCompareExchange128)
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
+# endif
+
+typedef volatile __int64 * ethr_native_dw_ptr_t;
+
+/*
+ * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
+ * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
+ * not common, and at least some glibc malloc implementations
+ * only provide 4 byte alignment in 32-bit mode.
+ *
+ * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
+ * aligned memory in 32-bit mode. A malloc implementation that does
+ * not adhere to these alignment requirements is seriously broken,
+ * and we won't bother trying to work around it.
+ *
+ * Since memory alignment may be off by one word we need to align at
+ * runtime. We, therefore, need an extra word allocated.
+ */
+#define ETHR_DW_NATMC_MEM__(VAR) \
+   (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
+typedef union {
+#ifdef ETHR_NATIVE_SU_DW_SINT_T
+ volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
+#endif
+ volatile ethr_sint_t sint[3];
+ volatile char c[ETHR_SIZEOF_PTR*3];
+} ethr_native_dw_atomic_t;
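+
+/*
+ * Illustration of the runtime alignment (assuming 64-bit mode and an
+ * 8 byte aligned allocation): with &var->c[0] == 0x1008, the masked
+ * offset is 0x1008 & 0xf == 0x8, so ETHR_DW_NATMC_MEM__() yields
+ * &var->c[0x8] == 0x1010, which is 16 byte aligned. Since the
+ * misalignment is at most one word, ETHR_SIZEOF_PTR*3 bytes suffice
+ * for the two words of data.
+ */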
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+#ifdef ETHR_DEBUG
+# define ETHR_DW_DBG_ALIGNED__(PTR) \
+ ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
+#else
+# define ETHR_DW_DBG_ALIGNED__(PTR)
+#endif
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR
+
+static ETHR_INLINE ethr_sint_t *
+ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
+{
+ ethr_sint_t *p = (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return p;
+}
+
+
+#if ETHR_SIZEOF_PTR == 4
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
+
+static ETHR_INLINE ethr_sint64_t
+ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t new,
+ ethr_sint64_t exp)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return (ethr_sint64_t) _InterlockedCompareExchange64(p, new, exp);
+}
+
+#elif ETHR_SIZEOF_PTR == 8
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
+
+#ifdef ETHR_BIGENDIAN
+# define ETHR_WIN_LOW_WORD__ 1
+# define ETHR_WIN_HIGH_WORD__ 0
+#else
+# define ETHR_WIN_LOW_WORD__ 0
+# define ETHR_WIN_HIGH_WORD__ 1
+#endif
+
+static ETHR_INLINE int
+ethr_native_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ethr_sint_t *new,
+ ethr_sint_t *xchg)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return (int) _InterlockedCompareExchange128(p,
+ new[ETHR_WIN_HIGH_WORD__],
+ new[ETHR_WIN_LOW_WORD__],
+ xchg);
+}
+
+#endif
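+
+/*
+ * A minimal usage sketch (hypothetical caller, 64-bit mode). Non-zero
+ * is returned on success; otherwise zero is returned and 'expected'
+ * is updated with the value read from memory:
+ *
+ *     ethr_sint_t expected[2] = {0, 0}, desired[2] = {1, 2};
+ *     if (ethr_native_dw_atomic_cmpxchg_mb(&dw, desired, expected))
+ *         ...
+ */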
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#endif /* ETHR_INCLUDE_DW_ATOMIC_IMPL__ */
diff --git a/erts/include/internal/win/ethr_event.h b/erts/include/internal/win/ethr_event.h
index 598816b2c6..6363174a74 100644
--- a/erts/include/internal/win/ethr_event.h
+++ b/erts/include/internal/win/ethr_event.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2009-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2009-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -21,12 +21,12 @@
* Author: Rickard Green
*/
-#define ETHR_EVENT_OFF_WAITER__ ((long) -1)
-#define ETHR_EVENT_OFF__ ((long) 1)
-#define ETHR_EVENT_ON__ ((long) 0)
+#define ETHR_EVENT_OFF_WAITER__ ((ethr_sint32_t) -1)
+#define ETHR_EVENT_OFF__ ((ethr_sint32_t) 1)
+#define ETHR_EVENT_ON__ ((ethr_sint32_t) 0)
typedef struct {
- volatile long state;
+ ethr_atomic32_t state;
HANDLE handle;
} ethr_event;
@@ -38,7 +38,7 @@ static ETHR_INLINE void
ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
{
     /* _InterlockedExchange() implies a full memory barrier which is important */
- long state = _InterlockedExchange(&e->state, ETHR_EVENT_ON__);
+ ethr_sint32_t state = ethr_atomic32_xchg_wb(&e->state, ETHR_EVENT_ON__);
if (state == ETHR_EVENT_OFF_WAITER__) {
if (!SetEvent(e->handle))
ETHR_FATAL_ERROR__(ethr_win_get_errno__());
@@ -48,8 +48,8 @@ ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
static ETHR_INLINE void
ETHR_INLINE_FUNC_NAME_(ethr_event_reset)(ethr_event *e)
{
- /* _InterlockedExchange() imply a full memory barrier which is important */
- InterlockedExchange(&e->state, ETHR_EVENT_OFF__);
+ ethr_atomic32_set(&e->state, ETHR_EVENT_OFF__);
+ ETHR_MEMORY_BARRIER;
}
#endif
diff --git a/erts/include/internal/win/ethr_membar.h b/erts/include/internal/win/ethr_membar.h
new file mode 100644
index 0000000000..8237660b2c
--- /dev/null
+++ b/erts/include/internal/win/ethr_membar.h
@@ -0,0 +1,145 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for Windows
+ * Author: Rickard Green
+ */
+
+#if (!defined(ETHR_WIN_MEMBAR_H__) \
+ && (defined(_MSC_VER) && _MSC_VER >= 1400) \
+ && (defined(_M_AMD64) \
+ || defined(_M_IA64) \
+ || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE)))
+#define ETHR_WIN_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#include <intrin.h>
+#undef ETHR_COMPILER_BARRIER
+#define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
+#pragma intrinsic(_ReadWriteBarrier)
+
+#pragma intrinsic(_InterlockedCompareExchange)
+
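+/*
+ * A locked compare-exchange on a dummy stack variable. Interlocked
+ * operations imply a full memory barrier on the targets handled by
+ * this file, so this serves as a generic fallback barrier.
+ */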
+#define ETHR_MB_USING_INTERLOCKED__ \
+do { \
+ volatile long x___ = 0; \
+ (void) _InterlockedCompareExchange(&x___, 2, 1); \
+} while (0)
+
+#if defined(_M_IA64)
+
+#define ETHR_MEMBAR(B) __mf()
+
+#elif defined(_M_AMD64) || defined(_M_IX86)
+
+#include <emmintrin.h>
+#include <mmintrin.h>
+
+#if ETHR_SIZEOF_PTR == 4
+# define ETHR_NO_SSE2_MB__ ETHR_MB_USING_INTERLOCKED__
+#endif
+#pragma intrinsic(_mm_mfence)
+#pragma intrinsic(_mm_sfence)
+#pragma intrinsic(_mm_lfence)
+
+static __forceinline void
+ethr_cfence__(void)
+{
+ _ReadWriteBarrier();
+}
+
+static __forceinline void
+ethr_mfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MB__;
+ else
+#endif
+ _mm_mfence();
+}
+
+static __forceinline void
+ethr_sfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MB__;
+ else
+#endif
+ _mm_sfence();
+}
+
+static __forceinline void
+ethr_lfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MB__;
+ else
+#endif
+ _mm_lfence();
+}
+
+#define ETHR_X86_OUT_OF_ORDER_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \
+ ethr_sfence__(), \
+ ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \
+ ethr_lfence__(), \
+ ethr_mfence__()))
+
+#ifdef ETHR_X86_OUT_OF_ORDER
+
+#define ETHR_MEMBAR(B) \
+ ETHR_X86_OUT_OF_ORDER_MEMBAR((B))
+
+#else /* !ETHR_X86_OUT_OF_ORDER (the default) */
+
+/*
+ * We assume that only stores before loads may be reordered. That is,
+ * we assume that *no* instructions like these are used:
+ * - CLFLUSH,
+ * - streaming stores executed with non-temporal move,
+ * - string operations, or
+ * - other instructions which aren't LoadLoad, LoadStore, and StoreStore
+ *     ordered by themselves.
+ * If such instructions are used, either insert memory barriers
+ * using ETHR_X86_OUT_OF_ORDER_MEMBAR() at appropriate places, or
+ * define ETHR_X86_OUT_OF_ORDER. For more info see Intel 64 and IA-32
+ * Architectures Software Developer's Manual; Vol 3A; Chapter 8.2.2.
+ */
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_mfence__(), ethr_cfence__())
+
+#endif
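+
+/*
+ * Illustration of the resulting mapping in the default mode: only
+ * barrier combinations containing StoreLoad need a real fence, e.g.
+ *
+ *     ETHR_MEMBAR(ETHR_StoreLoad);                  mfence (or the
+ *                                                   non-SSE2 fallback)
+ *     ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreStore);   compiler barrier
+ */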
+
+#else /* No knowledge about platform; use interlocked fallback */
+
+#define ETHR_MEMBAR(B) ETHR_MB_USING_INTERLOCKED__
+#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_MB_USING_INTERLOCKED__
+
+#endif
+
+#endif /* ETHR_WIN_MEMBAR_H__ */
diff --git a/erts/include/internal/win/ethread.h b/erts/include/internal/win/ethread.h
index c01b17cf14..8be35e810e 100644
--- a/erts/include/internal/win/ethread.h
+++ b/erts/include/internal/win/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,11 +25,13 @@
#ifndef ETHREAD_WIN_H__
#define ETHREAD_WIN_H__
+#include "ethr_membar.h"
#define ETHR_ATOMIC_WANT_32BIT_IMPL__
#include "ethr_atomic.h"
#if ETHR_SIZEOF_PTR == 8
# define ETHR_ATOMIC_WANT_64BIT_IMPL__
# include "ethr_atomic.h"
#endif
+#include "ethr_dw_atomic.h"
#endif
diff --git a/erts/lib_src/Makefile.in b/erts/lib_src/Makefile.in
index 757b3b24e2..12b8732735 100644
--- a/erts/lib_src/Makefile.in
+++ b/erts/lib_src/Makefile.in
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2004-2010. All Rights Reserved.
+# Copyright Ericsson AB 2004-2011. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -65,7 +65,7 @@ TYPE_SUFFIX=.purecov
PRE_LD=purecov $(PURECOV_BUILD_OPTIONS)
else
ifeq ($(TYPE),gcov)
-CFLAGS=@DEBUG_CFLAGS@ -fprofile-arcs -ftest-coverage -O0
+CFLAGS=@DEBUG_CFLAGS@ -DGCOV -fprofile-arcs -ftest-coverage -O0
TYPE_SUFFIX=.gcov
PRE_LD=
else
@@ -288,6 +288,10 @@ ETHREAD_LIB_SRC=common/ethr_aux.c \
common/ethr_cbf.c \
$(ETHR_THR_LIB_BASE_DIR)/ethread.c \
$(ETHR_THR_LIB_BASE_DIR)/ethr_event.c
+ETHR_X86_SSE2_ASM=@ETHR_X86_SSE2_ASM@
+ifeq ($(ETHR_X86_SSE2_ASM),yes)
+ETHREAD_LIB_SRC += pthread/ethr_x86_sse2_asm.c
+endif
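+# ethr_x86_sse2_asm.c is built with -msse2 by a dedicated rule below;
+# presumably it is kept in a separate object so that the rest of the
+# library remains runnable on non-SSE2 CPUs, with the SSE2 path
+# selected at runtime.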
ETHREAD_LIB_NAME=ethread$(TYPE_SUFFIX)
ifeq ($(USING_VC),yes)
@@ -382,10 +386,8 @@ $(ERTS_LIB): $(ERTS_LIB_OBJS)
# Object files
#
-ifeq ($(TYPE)-@GCC@,debug-yes)
-$(r_OBJ_DIR)/ethr_aux.o: common/ethr_aux.c
- $(CC) $(THR_DEFS) $(CFLAGS) -Wno-unused-function $(INCLUDES) -c $< -o $@
-endif
+$(r_OBJ_DIR)/ethr_x86_sse2_asm.o: pthread/ethr_x86_sse2_asm.c
+ $(CC) -msse2 $(THR_DEFS) $(CFLAGS) $(INCLUDES) -c $< -o $@
$(r_OBJ_DIR)/%.o: common/%.c
$(CC) $(THR_DEFS) $(CFLAGS) $(INCLUDES) -c $< -o $@
diff --git a/erts/lib_src/common/ethr_atomics.c b/erts/lib_src/common/ethr_atomics.c
index 94557d904a..5796bdc22e 100644
--- a/erts/lib_src/common/ethr_atomics.c
+++ b/erts/lib_src/common/ethr_atomics.c
@@ -1,7 +1,16 @@
/*
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ * This file was automatically generated by the
+ * $ERL_TOP/erts/lib_src/utils/make_atomics_api script.
+ * If you need to make changes, edit the script and
+ * regenerate this file.
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ */
+
+/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -18,385 +27,4329 @@
*/
/*
- * Description: The ethread atomic API
+ * Description: The ethread atomics API
* Author: Rickard Green
*/
+/*
+ * This file maps native atomic implementations to ethread
+ * API atomics. If no native atomic implementation
+ * is available, a less efficient fallback is used instead.
+ * The API consists of 32-bit size, word size (pointer size),
+ * and double word size atomics.
+ *
+ * The following atomic operations are implemented for
+ * 32-bit size, and word size atomics:
+ * - cmpxchg
+ * - xchg
+ * - set
+ * - init
+ * - add_read
+ * - read
+ * - inc_read
+ * - dec_read
+ * - add
+ * - inc
+ * - dec
+ * - read_band
+ * - read_bor
+ *
+ * The following atomic operations are implemented for
+ * double word size atomics:
+ * - cmpxchg
+ * - set
+ * - read
+ * - init
+ *
+ * Apart from a function implementing the atomic operation
+ * with unspecified memory barrier semantics, there are
+ * functions implementing each operation with the following
+ * memory barrier semantics:
+ * - rb (read barrier)
+ * - wb (write barrier)
+ * - acqb (acquire barrier)
+ * - relb (release barrier)
+ * - mb (full memory barrier)
+ *
+ * We implement all of these operation/barrier
+ * combinations, regardless of whether they are useful
+ * or not (some of them are useless).
+ *
+ * Double word size atomic functions are on the following
+ * form:
+ *   ethr_dw_atomic_<OP>[_<BARRIER>]
+ *
+ * Word size atomic functions are on the following
+ * form:
+ *   ethr_atomic_<OP>[_<BARRIER>]
+ *
+ * 32-bit size atomic functions are on the following
+ * form:
+ *   ethr_atomic32_<OP>[_<BARRIER>]
+ *
+ * In addition to the operation/barrier functions
+ * described above, 'addr' functions are implemented
+ * which return the actual memory address used by the
+ * atomic variable. The 'addr' functions have no barrier
+ * versions.
+ *
+ * The native atomic implementation does not need to
+ * implement all operation/barrier combinations.
+ * Functions that have no native implementation will be
+ * constructed from existing native functionality. These
+ * functions will perform the wanted operation and will
+ * produce sufficient memory barriers, but may
+ * in some cases be less efficient than pure native
+ * versions.
+ *
+ * When we create ethread API operation/barrier functions by
+ * adding barriers before and after native operations it is
+ * assumed that:
+ * - A native read operation begins, and ends with a load.
+ * - A native set operation begins, and ends with a store.
+ * - An init operation begins with either a load, or a store,
+ * and ends with either a load, or a store.
+ * - All other operations begin with a load, and end with
+ *   either a load, or a store.
+ *
+ * This is the minimum functionality that a native
+ * implementation needs to provide:
+ *
+ * - Functions that need to be implemented:
+ *
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_addr
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>]
+ *     (at least one cmpxchg variant, with or without barrier)
+ *
+ *   - Macros that need to be defined:
+ *
+ * A macro informing about the presence of the native
+ * implementation:
+ *
+ * - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]
+ *
+ * A macro naming (a string constant) the implementation:
+ *
+ * - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL
+ *
+ * Each implemented native atomic function has to
+ * be accompanied by a defined macro on the following
+ * form informing about its presence:
+ *
+ * - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>]
+ *
+ * A (sparc-v9 style) membar macro:
+ *
+ * - ETHR_MEMBAR(B)
+ *
+ *     It takes a combination of the following macros
+ *     or'ed (using |) together:
+ *
+ * - ETHR_LoadLoad
+ * - ETHR_LoadStore
+ * - ETHR_StoreLoad
+ * - ETHR_StoreStore
+ *
+ */
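+
+/*
+ * A minimal usage sketch of the naming scheme above (hypothetical
+ * reference counter using word size atomics; the unsuffixed variants
+ * have unspecified barrier semantics while _mb implies a full
+ * memory barrier):
+ *
+ *     ethr_atomic_t refc;
+ *     ethr_atomic_init(&refc, 1);
+ *     ethr_atomic_inc(&refc);
+ *     if (ethr_atomic_dec_read_mb(&refc) == 0)
+ *         ...                  (last reference dropped)
+ */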
+
+
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
-#define ETHR_INLINE_FUNC_NAME_(X) X ## __
+#define ETHR_TRY_INLINE_FUNCS
+#define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X ## __
+#define ETHR_INLINE_ATMC_FUNC_NAME_(X) X ## __
+#define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X ## __
#define ETHR_ATOMIC_IMPL__
#include "ethread.h"
#include "ethr_internal.h"
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
-ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATOMIC_ADDR_BITS];
+#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \
+ || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS))
+/*
+ * Spinlock based fallback for atomics used in absence of a native
+ * implementation.
+ */
+
+#define ETHR_ATMC_FLLBK_ADDR_BITS 10
+#define ETHR_ATMC_FLLBK_ADDR_SHIFT 6
+
+typedef struct {
+ union {
+ ethr_spinlock_t lck;
+ char buf[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_spinlock_t))];
+ } u;
+} ethr_atomic_protection_t;
+
+extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS];
+
+#define ETHR_ATOMIC_PTR2LCK__(PTR) \
+(&ethr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATMC_FLLBK_ADDR_SHIFT) \
+ & ((1 << ETHR_ATMC_FLLBK_ADDR_BITS) - 1))].u.lck)
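+
+/*
+ * Example (hypothetical address): an atomic at address 0x1a40 maps to
+ * lock index (0x1a40 >> 6) & 0x3ff == 0x69, i.e. one of the 1024
+ * cache line padded spinlocks above. Atomics within the same 64 byte
+ * block share a lock.
+ */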
+
+
+#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS) \
+do { \
+ ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP)); \
+ ethr_spin_lock(slp__); \
+ { EXPS; } \
+ ethr_spin_unlock(slp__); \
+} while (0)
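+
+/*
+ * For example, a fallback add on a word size atomic can be expressed
+ * as (sketch):
+ *
+ *     ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += incr);
+ *
+ * i.e. the operation runs as plain memory accesses while holding the
+ * spinlock guarding the variable's address.
+ */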
+
+ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS];
+
+#endif
+
+
+#if defined(ETHR_AMC_FALLBACK__)
+
+/*
+ * Fallback for large sized (word and/or double word size) atomics using
+ * an "Atomic Modification Counter" based on smaller sized native atomics.
+ *
+ * We use a 63-bit modification counter and a one bit exclusive flag.
+ * If 32-bit native atomics are used, we need two 32-bit native atomics.
+ * The exclusive flag is the least significant bit, or if multiple atomics
+ * are used, the least significant bit of the least significant atomic.
+ *
+ * When using the AMC fallback the following is true:
+ * - Reads of the same atomic variable can be done in parallel.
+ * - Uncontended reads don't cause any cache line invalidations,
+ * since no modifications are done.
+ * - Assuming that the AMC atomic(s) and the integer(s) containing the
+ * value of the implemented atomic resides in the same cache line,
+ * modifications will only cause invalidations of one cache line.
+ *
+ * When using the spinlock based fallback none of the above holds;
+ * the spinlock based fallback does, however, consume less memory.
+ */
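+
+/*
+ * Sketch of the state encoding (assuming one word sized AMC atomic):
+ * bit 0 is the exclusive flag and the remaining bits form the
+ * modification counter, so
+ *
+ *     state == 0     initial; no writer, counter zero
+ *     state & 1      a writer holds exclusive access
+ *     state += 2     one completed modification
+ *
+ * A reader snapshots the state, copies the value, and accepts the
+ * copy only if the state is even and unchanged, much like a seqlock.
+ */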
+
+# if ETHR_AMC_NO_ATMCS__ != 1 && ETHR_AMC_NO_ATMCS__ != 2
+# error "Not supported"
+# endif
+# define ETHR_AMC_MAX_TRY_READ__ 10
+# ifdef ETHR_DEBUG
+# define ETHR_DBG_CHK_EXCL_STATE(ASP, S) \
+do { \
+ ETHR_AMC_SINT_T__ act = ETHR_AMC_ATMC_FUNC__(read)(&(ASP)->atomic[0]); \
+ ETHR_ASSERT(act == (S) + 1); \
+ ETHR_ASSERT(act & 1); \
+} while (0)
+# else
+# define ETHR_DBG_CHK_EXCL_STATE(ASP, S)
+# endif
+
+static ETHR_INLINE void
+amc_init(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
+{
+ avar[0] = val[0];
+ if (dw)
+ avar[1] = val[1];
+#if ETHR_AMC_NO_ATMCS__ == 2
+ ETHR_AMC_ATMC_FUNC__(init)(&amc->atomic[1], 0);
#endif
+ ETHR_AMC_ATMC_FUNC__(init_wb)(&amc->atomic[0], 0);
+}
+
+static ETHR_INLINE ETHR_AMC_SINT_T__
+amc_set_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ prev_state0)
+{
+ ETHR_AMC_SINT_T__ state0 = prev_state0;
+ /* Set exclusive flag. */
+ while (1) {
+ ETHR_AMC_SINT_T__ act_state0, new_state0;
+ while (state0 & 1) { /* Wait until exclusive bit has been cleared */
+ ETHR_SPIN_BODY;
+ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+ }
+ /* Try to set exclusive bit */
+ new_state0 = state0 + 1;
+ act_state0 = ETHR_AMC_ATMC_FUNC__(cmpxchg_acqb)(&amc->atomic[0],
+ new_state0,
+ state0);
+ if (state0 == act_state0)
+ return state0; /* old state0 */
+ state0 = act_state0;
+ }
+}
+
+static ETHR_INLINE void
+amc_inc_mc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0)
+{
+ ETHR_AMC_SINT_T__ state0 = old_state0;
+
+ /* Increment modification counter and reset exclusive flag. */
+
+ ETHR_DBG_CHK_EXCL_STATE(amc, state0);
+
+ state0 += 2;
+
+ ETHR_ASSERT((state0 & 1) == 0);
+
+#if ETHR_AMC_NO_ATMCS__ == 2
+ if (state0 == 0) {
+ /*
+ * state0 wrapped, so we need to increment state1. There is no need
+ * for atomic inc op, since this is always done while having exclusive
+ * flag.
+ */
+ ETHR_AMC_SINT_T__ state1 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]);
+ state1++;
+ ETHR_AMC_ATMC_FUNC__(set)(&amc->atomic[1], state1);
+ }
+#endif
+ ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], state0);
+}
+
+static ETHR_INLINE void
+amc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0)
+{
+ ETHR_DBG_CHK_EXCL_STATE(amc, old_state0);
+ /*
+ * Reset exclusive flag, but leave modification counter unchanged,
+ * i.e., restore state to what it was before setting exclusive
+ * flag.
+ */
+ ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], old_state0);
+}
+
+static ETHR_INLINE void
+amc_set(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
+{
+ ETHR_AMC_SINT_T__ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+
+ state0 = amc_set_excl(amc, state0);
+
+ avar[0] = val[0];
+ if (dw)
+ avar[1] = val[1];
+
+ amc_inc_mc_unset_excl(amc, state0);
+}
+
+static ETHR_INLINE int
+amc_try_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar,
+ ethr_sint_t *val, ETHR_AMC_SINT_T__ *state0p)
+{
+ /* *state0p should contain last read value if aborting */
+ ETHR_AMC_SINT_T__ old_state0;
+#if ETHR_AMC_NO_ATMCS__ == 2
+ ETHR_AMC_SINT_T__ state1;
+ int abrt;
+#endif
+
+ *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]);
+ if ((*state0p) & 1)
+ return 0; /* exclusive flag set; abort */
+#if ETHR_AMC_NO_ATMCS__ == 2
+ state1 = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[1]);
+#else
+ ETHR_COMPILER_BARRIER;
+#endif
+
+ val[0] = avar[0];
+ if (dw)
+ val[1] = avar[1];
+
+ ETHR_READ_MEMORY_BARRIER;
+
+ /*
+     * Abort if the state has changed (i.e., either the exclusive
+     * flag is set, or the modification counter has changed).
+ */
+ old_state0 = *state0p;
+#if ETHR_AMC_NO_ATMCS__ == 2
+ *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]);
+ abrt = (old_state0 != *state0p);
+ abrt |= (state1 != ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]));
+ return abrt == 0;
+#else
+ *state0p = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+ return old_state0 == *state0p;
+#endif
+}
+
+static ETHR_INLINE void
+amc_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
+{
+ ETHR_AMC_SINT_T__ state0;
+ int i;
+
+#if ETHR_AMC_MAX_TRY_READ__ == 0
+ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+#else
+ for (i = 0; i < ETHR_AMC_MAX_TRY_READ__; i++) {
+ if (amc_try_read(amc, dw, avar, val, &state0))
+ return; /* read success */
+ ETHR_SPIN_BODY;
+ }
+#endif
+
+ state0 = amc_set_excl(amc, state0);
+
+ val[0] = avar[0];
+ if (dw)
+ val[1] = avar[1];
+
+ amc_unset_excl(amc, state0);
+}
+
+static ETHR_INLINE int
+amc_cmpxchg(ethr_amc_t *amc, int dw, ethr_sint_t *avar,
+ ethr_sint_t *new, ethr_sint_t *xchg)
+{
+ ethr_sint_t val[2];
+ ETHR_AMC_SINT_T__ state0;
+
+ if (amc_try_read(amc, dw, avar, val, &state0)) {
+ if (val[0] != xchg[0] || (dw && val[1] != xchg[1])) {
+ xchg[0] = val[0];
+ if (dw)
+ xchg[1] = val[1];
+ return 0; /* failed */
+ }
+ /* Operation will succeed if not interrupted */
+ }
+
+ state0 = amc_set_excl(amc, state0);
+
+ if (xchg[0] != avar[0] || (dw && xchg[1] != avar[1])) {
+ xchg[0] = avar[0];
+ if (dw)
+ xchg[1] = avar[1];
+
+ ETHR_DBG_CHK_EXCL_STATE(amc, state0);
+
+ amc_unset_excl(amc, state0);
+ return 0; /* failed */
+ }
+
+ avar[0] = new[0];
+ if (dw)
+ avar[1] = new[1];
+
+ amc_inc_mc_unset_excl(amc, state0);
+ return 1;
+}
+
+
+#define ETHR_AMC_MODIFICATION_OPS__(AMC, OPS) \
+do { \
+ ETHR_AMC_SINT_T__ state0__; \
+ state0__ = ETHR_AMC_ATMC_FUNC__(read)(&(AMC)->atomic[0]); \
+ state0__ = amc_set_excl((AMC), state0__); \
+ { OPS; } \
+ amc_inc_mc_unset_excl((AMC), state0__); \
+} while (0)
+
+#endif /* amc fallback */
+
int
ethr_init_atomics(void)
{
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
- {
- int i;
- for (i = 0; i < (1 << ETHR_ATOMIC_ADDR_BITS); i++) {
- int res = ethr_spinlock_init(&ethr_atomic_protection__[i].u.lck);
- if (res != 0)
- return res;
- }
+#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \
+ || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS))
+ int i;
+ for (i = 0; i < (1 << ETHR_ATMC_FLLBK_ADDR_BITS); i++) {
+ int res = ethr_spinlock_init(&ethr_atomic_protection__[i].u.lck);
+ if (res != 0)
+ return res;
}
#endif
return 0;
}
+
+/* ---------- Double word size atomic implementation ---------- */
+
+
+
/*
- * --- Pointer size atomics ---------------------------------------------------
+ * Double word atomics need runtime test.
*/
-ethr_sint_t *
-ethr_atomic_addr(ethr_atomic_t *var)
+int ethr_have_native_dw_atomic(void)
+{
+ return ethr_have_native_dw_atomic__();
+}
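+
+/*
+ * Hypothetical caller sketch: code can branch on the runtime test,
+ * e.g.
+ *
+ *     if (ethr_have_native_dw_atomic())
+ *         ...                  (use the native double word path)
+ */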
+
+
+/* --- addr() --- */
+
+ethr_sint_t *ETHR_DW_ATOMIC_FUNC__(addr)(ethr_dw_atomic_t *var)
+{
+ ethr_sint_t *res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ res = ethr_dw_atomic_addr__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ res = (ethr_sint_t *) ((&var->fallback))->sint;
+#else
+ res = (ethr_sint_t *) (&var->fallback);
+#endif
+ return res;
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+ethr_sint_t *ethr_dw_atomic_addr(ethr_dw_atomic_t *var)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ return ethr_dw_atomic_addr__(var);
+}
+#endif
+
+
+/* -- cmpxchg() -- */
+
+
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ res = ethr_dw_atomic_cmpxchg__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback),
+ {
+ res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
+ if (res) {
+ (&var->fallback)->sint[0] = val->sint[0];
+ (&var->fallback)->sint[1] = val->sint[1];
+ }
+ else {
+ old_val->sint[0] = (&var->fallback)->sint[0];
+ old_val->sint[1] = (&var->fallback)->sint[1];
+ }
+ });
+#endif
+ return res;
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+int ethr_dw_atomic_cmpxchg(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ return ethr_dw_atomic_cmpxchg__(var, val, old_val);
+}
+#endif
+
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ res = ethr_dw_atomic_cmpxchg_rb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback),
+ {
+ res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
+ if (res) {
+ (&var->fallback)->sint[0] = val->sint[0];
+ (&var->fallback)->sint[1] = val->sint[1];
+ }
+ else {
+ old_val->sint[0] = (&var->fallback)->sint[0];
+ old_val->sint[1] = (&var->fallback)->sint[1];
+ }
+ });
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+int ethr_dw_atomic_cmpxchg_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ return ethr_dw_atomic_cmpxchg_rb__(var, val, old_val);
+}
+#endif
+
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ res = ethr_dw_atomic_cmpxchg_wb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback),
+ {
+ res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
+ if (res) {
+ (&var->fallback)->sint[0] = val->sint[0];
+ (&var->fallback)->sint[1] = val->sint[1];
+ }
+ else {
+ old_val->sint[0] = (&var->fallback)->sint[0];
+ old_val->sint[1] = (&var->fallback)->sint[1];
+ }
+ });
+#endif
+ return res;
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+int ethr_dw_atomic_cmpxchg_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ return ethr_dw_atomic_cmpxchg_wb__(var, val, old_val);
+}
+#endif
+
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ res = ethr_dw_atomic_cmpxchg_acqb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback),
+ {
+ res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
+ if (res) {
+ (&var->fallback)->sint[0] = val->sint[0];
+ (&var->fallback)->sint[1] = val->sint[1];
+ }
+ else {
+ old_val->sint[0] = (&var->fallback)->sint[0];
+ old_val->sint[1] = (&var->fallback)->sint[1];
+ }
+ });
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+int ethr_dw_atomic_cmpxchg_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ return ethr_dw_atomic_cmpxchg_acqb__(var, val, old_val);
+}
+#endif
+
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ res = ethr_dw_atomic_cmpxchg_relb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback),
+ {
+ res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
+ if (res) {
+ (&var->fallback)->sint[0] = val->sint[0];
+ (&var->fallback)->sint[1] = val->sint[1];
+ }
+ else {
+ old_val->sint[0] = (&var->fallback)->sint[0];
+ old_val->sint[1] = (&var->fallback)->sint[1];
+ }
+ });
+#endif
+ return res;
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+int ethr_dw_atomic_cmpxchg_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ return ethr_dw_atomic_cmpxchg_relb__(var, val, old_val);
+}
+#endif
+
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ res = ethr_dw_atomic_cmpxchg_mb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback),
+ {
+ res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
+ if (res) {
+ (&var->fallback)->sint[0] = val->sint[0];
+ (&var->fallback)->sint[1] = val->sint[1];
+ }
+ else {
+ old_val->sint[0] = (&var->fallback)->sint[0];
+ old_val->sint[1] = (&var->fallback)->sint[1];
+ }
+ });
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+int ethr_dw_atomic_cmpxchg_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ return ethr_dw_atomic_cmpxchg_mb__(var, val, old_val);
+}
+#endif
+
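+/*
+ * A minimal usage sketch (hypothetical caller): update the low word
+ * of a double word atomic with a retry loop. On failure cmpxchg
+ * updates 'old' with the value read from memory, so no re-read is
+ * needed:
+ *
+ *     ethr_dw_sint_t old, new;
+ *     ethr_dw_atomic_read(&dw, &old);
+ *     do {
+ *         new = old;
+ *         new.sint[0] = 17;
+ *     } while (!ethr_dw_atomic_cmpxchg_mb(&dw, &new, &old));
+ */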
+
+/* -- set() -- */
+
+
+void ETHR_DW_ATOMIC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_set__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_set(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_set__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_set_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_set_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_set_rb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_set_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_set_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_set_wb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_set_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_set_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_set_acqb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_set_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_set_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_set_relb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_set_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_set_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_set_mb__(var, val);
+}
+#endif
+
+
+/* -- read() -- */
+
+
+void ETHR_DW_ATOMIC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_read__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_read(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_read__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_read_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_read_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_read_rb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_read_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_read_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_read_wb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_read_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_read_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_read_acqb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_read_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_read_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_read_relb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_read_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_read_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_read_mb__(var, val);
+}
+#endif
+
+
+/* -- init() -- */
+
+
+void ETHR_DW_ATOMIC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_init__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_init(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_init__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_init_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_init_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_init_rb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_init_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_init_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_init_wb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_init_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_init_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_init_acqb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_init_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_init_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_init_relb__(var, val);
+}
+#endif
+
+void ETHR_DW_ATOMIC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ ethr_dw_atomic_init_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
+void ethr_dw_atomic_init_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+ ETHR_ASSERT(var);
+ ethr_dw_atomic_init_mb__(var, val);
+}
+#endif
+
+
+/* ---------- Word size atomic implementation ---------- */
+
+
+
+
+/* --- addr() --- */
+
+ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *var)
+{
+ ethr_sint_t *res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_addr__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+    res = (ethr_sint_t *) &var->sint;
+#else
+ res = (ethr_sint_t *) var;
+#endif
+ return res;
+}
+
+
+/* -- cmpxchg() -- */
+
+
+ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_cmpxchg__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ res = old_val;
+ (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_cmpxchg_rb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_cmpxchg_rb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ res = old_val;
+ (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_cmpxchg_wb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_cmpxchg_wb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = old_val;
+ (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_cmpxchg_acqb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ res = old_val;
+ (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_cmpxchg_relb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = old_val;
+ (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_cmpxchg_mb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_cmpxchg_mb__(var, val, old_val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = old_val;
+ (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
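
[Editor's note] The cmpxchg argument order is easy to trip over: the new value comes first, the expected old value second, and the return value is whatever was actually read, so a failed exchange hands back the fresh value for the next attempt. A minimal sketch of the usual retry loop, built only on calls defined in this patch; FLAG and the function name are hypothetical:

#include "ethread.h"

/* Hedged sketch of a compare-and-swap retry loop. On failure the
 * returned value doubles as the refreshed expectation. */
#define FLAG ((ethr_sint_t) 1)

static void set_flag(ethr_atomic_t *state)
{
    ethr_sint_t exp = ethr_atomic_read(state);
    for (;;) {
        ethr_sint_t act = ethr_atomic_cmpxchg(state, exp | FLAG, exp);
        if (act == exp)
            break;   /* exchange succeeded */
        exp = act;   /* lost the race; retry with the fresh value */
    }
}
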
+
+
+/* -- xchg() -- */
+
+
+ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_xchg__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_xchg_rb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_xchg_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_xchg_wb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_xchg_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_xchg_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_xchg_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_xchg_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_xchg_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_xchg_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_xchg_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
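
[Editor's note] The _acqb/_relb pairing above is exactly what a test-and-set spinlock needs: the acquire-barrier exchange orders the critical section after lock acquisition, and the release-barrier set orders it before the release. A minimal sketch, with "lock" hypothetical and no back-off:

#include "ethread.h"

/* Hedged sketch: test-and-set spinlock from the barrier variants above. */
static void spin_lock(ethr_atomic_t *lock)
{
    while (ethr_atomic_xchg_acqb(lock, 1) != 0)
        ; /* spin; a production lock would back off or yield here */
}

static void spin_unlock(ethr_atomic_t *lock)
{
    ethr_atomic_set_relb(lock, 0);
}
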
+
+
+/* -- set() -- */
+
+
+void ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_set__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_set(&var->amc, 0, &var->sint, &val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic_set_rb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_set_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_set(&var->amc, 0, &var->sint, &val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+void ethr_atomic_set_wb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_set_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ amc_set(&var->amc, 0, &var->sint, &val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic_set_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_set_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_set(&var->amc, 0, &var->sint, &val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+void ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_set_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ amc_set(&var->amc, 0, &var->sint, &val);
+#else
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic_set_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_set_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ amc_set(&var->amc, 0, &var->sint, &val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+
+/* -- init() -- */
+
+
+void ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_init__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_init(&var->amc, 0, &var->sint, &val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic_init_rb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_init_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_init(&var->amc, 0, &var->sint, &val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+void ethr_atomic_init_wb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_init_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ amc_init(&var->amc, 0, &var->sint, &val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic_init_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_init_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_init(&var->amc, 0, &var->sint, &val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+void ethr_atomic_init_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_init_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ amc_init(&var->amc, 0, &var->sint, &val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic_init_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_init_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ amc_init(&var->amc, 0, &var->sint, &val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
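
[Editor's note] The init() family mirrors set() except that it drops the ETHR_ASSERT(!ethr_not_inited__) check, which fits its intended use on memory that no other thread can reach yet. A minimal sketch under that assumption; the struct and field names are hypothetical:

#include <stdlib.h>
#include "ethread.h"

/* Hedged sketch: plain init() on a freshly allocated, still-private
 * object; publication to other threads would happen later. */
struct item {
    ethr_atomic_t refc;
};

static struct item *item_new(void)
{
    struct item *p = malloc(sizeof(struct item));
    if (p)
        ethr_atomic_init(&p->refc, 1); /* not yet shared */
    return p;
}
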
+
+
+/* -- add_read() -- */
+
+
+ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_add_read__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_add_read_rb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_add_read_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_add_read_wb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_add_read_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_add_read_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_add_read_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_add_read_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_add_read_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_add_read_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_add_read_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
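
[Editor's note] add_read() returns the post-addition value, so a caller can maintain and act on a running total in one atomic step. A minimal sketch; the counter name and threshold logic are hypothetical:

#include "ethread.h"

/* Hedged sketch: accumulate a byte count and test the new total
 * without a separate read. */
static ethr_atomic_t bytes_in;

static int account_bytes(ethr_sint_t n, ethr_sint_t limit)
{
    ethr_sint_t total = ethr_atomic_add_read(&bytes_in, n);
    return total > limit; /* nonzero: caller may apply backpressure */
}
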
+
+
+/* -- read() -- */
+
+
+ethr_sint_t ethr_atomic_read(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_read(&var->amc, 0, &var->sint, &res);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_rb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_rb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_read(&var->amc, 0, &var->sint, &res);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_wb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_wb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ amc_read(&var->amc, 0, &var->sint, &res);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_acqb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ amc_read(&var->amc, 0, &var->sint, &res);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_relb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_relb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ amc_read(&var->amc, 0, &var->sint, &res);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_mb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_mb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ amc_read(&var->amc, 0, &var->sint, &res);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#endif
+ return res;
+}
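
[Editor's note] read_acqb() is the consumer half of the release/acquire publication idiom: the writer stores its data and then sets a flag with set_relb(); the reader checks the flag with read_acqb() before touching the data. A minimal sketch; "ready" and "payload" are hypothetical:

#include "ethread.h"

/* Hedged sketch of release/acquire publication. */
static ethr_atomic_t ready;
static ethr_sint_t payload;

static void publish(ethr_sint_t v)
{
    payload = v;
    ethr_atomic_set_relb(&ready, 1);  /* release: payload visible first */
}

static int try_consume(ethr_sint_t *out)
{
    if (ethr_atomic_read_acqb(&ready) == 0)
        return 0;
    *out = payload;                   /* acquire: safe to read payload */
    return 1;
}
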
+
+
+/* -- inc_read() -- */
+
+
+ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_inc_read__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_inc_read_rb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_inc_read_rb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_inc_read_wb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_inc_read_wb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_inc_read_acqb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_inc_read_relb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_inc_read_relb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_inc_read_mb(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_inc_read_mb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
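
[Editor's note] Because inc_read() returns the incremented value, it is a natural fit for handing out unique ids. A minimal sketch, assuming the counter starts at zero; "next_id" is hypothetical:

#include "ethread.h"

/* Hedged sketch: unique id allocation. */
static ethr_atomic_t next_id; /* assumed initialized to 0 */

static ethr_sint_t new_id(void)
{
    return ethr_atomic_inc_read(&next_id); /* first call yields 1 */
}
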
+
+
+/* -- dec_read() -- */
+
+
+ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *var)
{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_addr__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_dec_read__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#endif
+ return res;
}
-void
-ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t i)
+ethr_sint_t ethr_atomic_dec_read_rb(ethr_atomic_t *var)
{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic_init__(var, i);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_dec_read_rb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
}
-void
-ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t i)
+ethr_sint_t ethr_atomic_dec_read_wb(ethr_atomic_t *var)
{
+ ethr_sint_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic_set__(var, i);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_dec_read_wb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#endif
+ return res;
}
-ethr_sint_t
-ethr_atomic_read(ethr_atomic_t *var)
+ethr_sint_t ethr_atomic_dec_read_acqb(ethr_atomic_t *var)
{
+ ethr_sint_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_read__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_dec_read_acqb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
}
-ethr_sint_t
-ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t incr)
+ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *var)
{
+ ethr_sint_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_add_read__(var, incr);
-}
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_dec_read_relb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#endif
+ return res;
+}

-ethr_sint_t
-ethr_atomic_inc_read(ethr_atomic_t *var)
+ethr_sint_t ethr_atomic_dec_read_mb(ethr_atomic_t *var)
{
+ ethr_sint_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_inc_read__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_dec_read_mb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
}
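
[Editor's note] dec_read() is the building block of the classic dec-and-test reference-count release. A minimal sketch using the conservative full-barrier variant; a tuned version might instead pair dec_read_relb() with an acquire barrier on the zero path. The struct is hypothetical:

#include <stdlib.h>
#include "ethread.h"

/* Hedged sketch: drop a reference, destroy on reaching zero. */
struct obj {
    ethr_atomic_t refc;
};

static void obj_release(struct obj *p)
{
    if (ethr_atomic_dec_read_mb(&p->refc) == 0)
        free(p); /* last reference gone */
}
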
-ethr_sint_t
-ethr_atomic_dec_read(ethr_atomic_t *var)
+
+/* -- add() -- */
+
+
+void ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t val)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_dec_read__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_add__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+#endif
+
+}
+
+void ethr_atomic_add_rb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_add_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
}
-void
-ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t incr)
+void ethr_atomic_add_wb(ethr_atomic_t *var, ethr_sint_t val)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic_add__(var, incr);
-}
-
-void
-ethr_atomic_inc(ethr_atomic_t *var)
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_add_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+#endif
+
+}
+
+void ethr_atomic_add_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_add_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+void ethr_atomic_add_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_add_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+#endif
+
+}
+
+void ethr_atomic_add_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_add_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+
+/* -- inc() -- */
+
+
+void ethr_atomic_inc(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
ethr_atomic_inc__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#endif
+
}
-void
-ethr_atomic_dec(ethr_atomic_t *var)
+void ethr_atomic_inc_rb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic_dec__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_inc_rb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
}
-ethr_sint_t
-ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t mask)
+void ethr_atomic_inc_wb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_read_band__(var, mask);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_inc_wb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#endif
+
}
-ethr_sint_t
-ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t mask)
+void ethr_atomic_inc_acqb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_read_bor__(var, mask);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_inc_acqb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
}
-ethr_sint_t
-ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t new)
+void ethr_atomic_inc_relb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_xchg__(var, new);
-}
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_inc_relb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#endif
+
+}

-ethr_sint_t
-ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t new, ethr_sint_t expected)
+void ethr_atomic_inc_mb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_cmpxchg__(var, new, expected);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_inc_mb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
}
-ethr_sint_t
-ethr_atomic_read_acqb(ethr_atomic_t *var)
+
+/* -- dec() -- */
+
+
+void ethr_atomic_dec(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_read_acqb__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_dec__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#endif
+
}
-ethr_sint_t
-ethr_atomic_inc_read_acqb(ethr_atomic_t *var)
+void ethr_atomic_dec_rb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_inc_read_acqb__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_dec_rb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
}
-void
-ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t i)
+void ethr_atomic_dec_wb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic_set_relb__(var, i);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_dec_wb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#endif
+
}
-void
-ethr_atomic_dec_relb(ethr_atomic_t *var)
+void ethr_atomic_dec_acqb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_dec_acqb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+void ethr_atomic_dec_relb(ethr_atomic_t *var)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
ethr_atomic_dec_relb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#endif
+
}
-ethr_sint_t
-ethr_atomic_dec_read_relb(ethr_atomic_t *var)
+void ethr_atomic_dec_mb(ethr_atomic_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_dec_read_relb__(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ ethr_atomic_dec_mb__(var);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
}
-ethr_sint_t
-ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t new, ethr_sint_t exp)
+
+/* -- read_band() -- */
+
+
+ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t val)
{
+ ethr_sint_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_cmpxchg_acqb__(var, new, exp);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_band__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+#endif
+ return res;
}
-ethr_sint_t
-ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t new, ethr_sint_t exp)
+ethr_sint_t ethr_atomic_read_band_rb(ethr_atomic_t *var, ethr_sint_t val)
{
+ ethr_sint_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic_cmpxchg_relb__(var, new, exp);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_band_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
}
+ethr_sint_t ethr_atomic_read_band_wb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_band_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_band_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_band_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_band_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_band_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_band_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_band_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+
+/* -- read_bor() -- */
+
+
+ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_bor__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_rb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_bor_rb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_wb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_bor_wb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_acqb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_bor_acqb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_relb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_bor_relb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+ return res;
+}
+
+ethr_sint_t ethr_atomic_read_bor_mb(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic_read_bor_mb__(var, val);
+#elif defined(ETHR_AMC_FALLBACK__)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
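
[Editor's note] read_bor() sets bits and read_band() clears them (by AND-ing with the complement), and both return the prior value, so the caller can tell whether it was the one to flip a bit. A minimal sketch of a one-shot claim; FLAG_BUSY is hypothetical:

#include "ethread.h"

/* Hedged sketch: claim and release a flag bit. */
#define FLAG_BUSY ((ethr_sint_t) 1)

static int try_claim(ethr_atomic_t *flags)
{
    ethr_sint_t old = ethr_atomic_read_bor(flags, FLAG_BUSY);
    return (old & FLAG_BUSY) == 0;    /* nonzero if we set it first */
}

static void release_claim(ethr_atomic_t *flags)
{
    (void) ethr_atomic_read_band(flags, ~FLAG_BUSY);
}
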
+
+
+/* ---------- 32-bit atomic implementation ---------- */
+
+
+
+
+/* --- addr() --- */
+
+ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *var)
+{
+ ethr_sint32_t *res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_addr__(var);
+
+#else
+ res = (ethr_sint32_t *) var;
+#endif
+ return res;
+}
+
+
+/* -- cmpxchg() -- */
+
+
+ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_cmpxchg__(var, val, old_val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_rb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_cmpxchg_rb__(var, val, old_val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_wb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_cmpxchg_wb__(var, val, old_val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_cmpxchg_acqb__(var, val, old_val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_cmpxchg_relb__(var, val, old_val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_cmpxchg_mb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_cmpxchg_mb__(var, val, old_val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
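
[Editor's note] The 32-bit family repeats the word-size API one-for-one, only without the AMC middle tier (as the #if chains above show, atomic32 goes straight from native ops to the lock-based fallback), so code written against the word-size calls ports directly. A minimal sketch of a cmpxchg-guarded state transition; the state values are hypothetical:

#include "ethread.h"

/* Hedged sketch: 32-bit state transition. Same argument order as the
 * word-size API: new value, then expected value. */
#define ST_IDLE    ((ethr_sint32_t) 0)
#define ST_RUNNING ((ethr_sint32_t) 1)

static int try_start(ethr_atomic32_t *state)
{
    return ethr_atomic32_cmpxchg_acqb(state, ST_RUNNING, ST_IDLE)
           == ST_IDLE; /* nonzero if we won the transition */
}
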
+
+
+/* -- xchg() -- */
+
+
+ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_xchg__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_xchg_rb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_xchg_rb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_xchg_wb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_xchg_wb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_xchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_xchg_acqb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_xchg_relb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_xchg_relb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_xchg_mb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_xchg_mb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+
+/* -- set() -- */
+
+
+void ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_set__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic32_set_rb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_set_rb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+void ethr_atomic32_set_wb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_set_wb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic32_set_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_set_acqb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+void ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_set_relb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic32_set_mb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_set_mb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+
+/* -- init() -- */
+
+
+void ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_init__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic32_init_rb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_init_rb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+void ethr_atomic32_init_wb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_init_wb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic32_init_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_init_acqb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+void ethr_atomic32_init_relb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_init_relb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+#endif
+
+}
+
+void ethr_atomic32_init_mb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_init_mb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+
+/* -- add_read() -- */
+
+
+ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_add_read__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_add_read_rb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_add_read_rb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_add_read_wb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_add_read_wb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_add_read_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_add_read_acqb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_add_read_relb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_add_read_relb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_add_read_mb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_add_read_mb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+
+/* -- read() -- */
+
+
+ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_rb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_rb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_wb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_wb__(var);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_acqb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_relb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_relb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_mb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_mb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#endif
+ return res;
+}
+
+
+/* -- inc_read() -- */
+
+
+ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_inc_read__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_inc_read_rb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_inc_read_rb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_inc_read_wb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_inc_read_wb__(var);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_inc_read_acqb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_inc_read_relb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_inc_read_relb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_inc_read_mb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_inc_read_mb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+
+/* -- dec_read() -- */

-/*
- * --- 32-bit atomics ---------------------------------------------------------
- */
-ethr_sint32_t *
-ethr_atomic32_addr(ethr_atomic32_t *var)
+ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *var)
{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_addr__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_dec_read__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#endif
+ return res;
}
-void
-ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t i)
+ethr_sint32_t ethr_atomic32_dec_read_rb(ethr_atomic32_t *var)
{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic32_init__(var, i);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_dec_read_rb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
}
-void
-ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t i)
+ethr_sint32_t ethr_atomic32_dec_read_wb(ethr_atomic32_t *var)
{
+ ethr_sint32_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic32_set__(var, i);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_dec_read_wb__(var);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#endif
+ return res;
}
-ethr_sint32_t
-ethr_atomic32_read(ethr_atomic32_t *var)
+ethr_sint32_t ethr_atomic32_dec_read_acqb(ethr_atomic32_t *var)
{
+ ethr_sint32_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_read__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_dec_read_acqb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
}
+ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_dec_read_relb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#endif
+ return res;
+}
-ethr_sint32_t
-ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t incr)
+ethr_sint32_t ethr_atomic32_dec_read_mb(ethr_atomic32_t *var)
{
+ ethr_sint32_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_add_read__(var, incr);
-}
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_dec_read_mb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+
+/* -- add() -- */
-ethr_sint32_t
-ethr_atomic32_inc_read(ethr_atomic32_t *var)
+
+void ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t val)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_inc_read__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_add__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+#endif
+
}
-ethr_sint32_t
-ethr_atomic32_dec_read(ethr_atomic32_t *var)
+void ethr_atomic32_add_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_dec_read__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_add_rb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
+}
+
+void ethr_atomic32_add_wb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_add_wb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+#endif
+
+}
+
+void ethr_atomic32_add_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_add_acqb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+void ethr_atomic32_add_relb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_add_relb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+#endif
+
}
-void
-ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t incr)
+void ethr_atomic32_add_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic32_add__(var, incr);
-}
-
-void
-ethr_atomic32_inc(ethr_atomic32_t *var)
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_add_mb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+
+/* -- inc() -- */
+
+
+void ethr_atomic32_inc(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
ethr_atomic32_inc__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#endif
+
}
-void
-ethr_atomic32_dec(ethr_atomic32_t *var)
+void ethr_atomic32_inc_rb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic32_dec__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_inc_rb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
}
-ethr_sint32_t
-ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t mask)
+void ethr_atomic32_inc_wb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_read_band__(var, mask);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_inc_wb__(var);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#endif
+
}
-ethr_sint32_t
-ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t mask)
+void ethr_atomic32_inc_acqb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_read_bor__(var, mask);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_inc_acqb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
}
-ethr_sint32_t
-ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t new)
+void ethr_atomic32_inc_relb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_xchg__(var, new);
-}
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_inc_relb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#endif
-ethr_sint32_t
-ethr_atomic32_cmpxchg(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t expected)
+}
+
+void ethr_atomic32_inc_mb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_cmpxchg__(var, new, expected);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_inc_mb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
}
-ethr_sint32_t
-ethr_atomic32_read_acqb(ethr_atomic32_t *var)
+
+/* -- dec() -- */
+
+
+void ethr_atomic32_dec(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_read_acqb__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_dec__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#endif
+
+}
+
+void ethr_atomic32_dec_rb(ethr_atomic32_t *var)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_dec_rb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+
}
-ethr_sint32_t
-ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var)
+void ethr_atomic32_dec_wb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_inc_read_acqb__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_dec_wb__(var);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#endif
+
}
-void
-ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t i)
+void ethr_atomic32_dec_acqb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- ethr_atomic32_set_relb__(var, i);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_dec_acqb__(var);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
}
-void
-ethr_atomic32_dec_relb(ethr_atomic32_t *var)
+void ethr_atomic32_dec_relb(ethr_atomic32_t *var)
{
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
ethr_atomic32_dec_relb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#endif
+
+}
+
+void ethr_atomic32_dec_mb(ethr_atomic32_t *var)
+{
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ ethr_atomic32_dec_mb__(var);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+
+}
+
+
+/* -- read_band() -- */
+
+
+ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_band__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_band_rb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_band_rb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_band_wb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_band_wb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_band_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_band_acqb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_band_relb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_band_relb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_band_mb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_band_mb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+
+/* -- read_bor() -- */
+
+
+ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_bor__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_bor_rb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_bor_rb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#endif
+ return res;
}
-ethr_sint32_t
-ethr_atomic32_dec_read_relb(ethr_atomic32_t *var)
+ethr_sint32_t ethr_atomic32_read_bor_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
+ ethr_sint32_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_dec_read_relb__(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_bor_wb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+ return res;
}
-ethr_sint32_t
-ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+ethr_sint32_t ethr_atomic32_read_bor_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
+ ethr_sint32_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_cmpxchg_acqb__(var, new, exp);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_bor_acqb__(var, val);
+#else
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
}
-ethr_sint32_t
-ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+ethr_sint32_t ethr_atomic32_read_bor_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
+ ethr_sint32_t res;
ETHR_ASSERT(!ethr_not_inited__);
ETHR_ASSERT(var);
- return ethr_atomic32_cmpxchg_relb__(var, new, exp);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_bor_relb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+#endif
+ return res;
+}
+
+ethr_sint32_t ethr_atomic32_read_bor_mb(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(var);
+#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ res = ethr_atomic32_read_bor_mb__(var, val);
+#else
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#endif
+ return res;
+}
+
+
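+/*
+ * The wrappers above all follow the same generated shape; a minimal
+ * sketch (illustrative only, using the macro names defined elsewhere
+ * in this file):
+ *
+ *     #if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+ *         <call the native op picked at configure time>
+ *     #else
+ *         ETHR_MEMBAR(...);                        // pre-barrier, if any
+ *         ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, op); // lock-protected op
+ *         ETHR_MEMBAR(...);                        // post-barrier, if any
+ *     #endif
+ */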
+
+/* --------- Info functions --------- */
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+char *zero_ops[] = {NULL};
+#endif
+
+
+static char *native_su_dw_atomic_ops[] = {
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG
+ "cmpxchg",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RB
+ "cmpxchg_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_WB
+ "cmpxchg_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_ACQB
+ "cmpxchg_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RELB
+ "cmpxchg_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
+ "cmpxchg_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET
+ "set",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RB
+ "set_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_WB
+ "set_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_ACQB
+ "set_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RELB
+ "set_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_MB
+ "set_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ
+ "read",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RB
+ "read_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_WB
+ "read_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_ACQB
+ "read_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RELB
+ "read_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_MB
+ "read_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT
+ "init",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RB
+ "init_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_WB
+ "init_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_ACQB
+ "init_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RELB
+ "init_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_MB
+ "init_mb",
+#endif
+ NULL
+};
+
+char **
+ethr_native_su_dw_atomic_ops(void)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (!ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ return &zero_ops[0];
+#endif
+ return &native_su_dw_atomic_ops[0];
+}
+
+
+static char *native_dw_atomic_ops[] = {
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG
+ "cmpxchg",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RB
+ "cmpxchg_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_WB
+ "cmpxchg_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_ACQB
+ "cmpxchg_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RELB
+ "cmpxchg_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
+ "cmpxchg_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET
+ "set",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RB
+ "set_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_WB
+ "set_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_ACQB
+ "set_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RELB
+ "set_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_MB
+ "set_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ
+ "read",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RB
+ "read_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_WB
+ "read_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_ACQB
+ "read_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RELB
+ "read_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_MB
+ "read_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT
+ "init",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RB
+ "init_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_WB
+ "init_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_ACQB
+ "init_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RELB
+ "init_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_MB
+ "init_mb",
+#endif
+ NULL
+};
+
+char **
+ethr_native_dw_atomic_ops(void)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (!ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ return &zero_ops[0];
+#endif
+ return &native_dw_atomic_ops[0];
+}
+
+
+static char *native_atomic64_ops[] = {
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
+ "cmpxchg",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
+ "cmpxchg_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
+ "cmpxchg_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
+ "cmpxchg_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
+ "cmpxchg_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
+ "cmpxchg_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG
+ "xchg",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB
+ "xchg_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB
+ "xchg_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB
+ "xchg_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB
+ "xchg_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB
+ "xchg_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
+ "set",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
+ "set_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
+ "set_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+ "set_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+ "set_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+ "set_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+ "init",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+ "init_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+ "init_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+ "init_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+ "init_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+ "init_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN
+ "add_return",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB
+ "add_return_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB
+ "add_return_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB
+ "add_return_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB
+ "add_return_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB
+ "add_return_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+ "read",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+ "read_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+ "read_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+ "read_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+ "read_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+ "read_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN
+ "inc_return",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB
+ "inc_return_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB
+ "inc_return_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB
+ "inc_return_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB
+ "inc_return_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB
+ "inc_return_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN
+ "dec_return",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB
+ "dec_return_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB
+ "dec_return_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB
+ "dec_return_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB
+ "dec_return_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB
+ "dec_return_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD
+ "add",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB
+ "add_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB
+ "add_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB
+ "add_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB
+ "add_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB
+ "add_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC
+ "inc",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB
+ "inc_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB
+ "inc_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB
+ "inc_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB
+ "inc_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB
+ "inc_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC
+ "dec",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB
+ "dec_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB
+ "dec_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB
+ "dec_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB
+ "dec_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB
+ "dec_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD
+ "and_retold",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB
+ "and_retold_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB
+ "and_retold_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB
+ "and_retold_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB
+ "and_retold_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB
+ "and_retold_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD
+ "or_retold",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB
+ "or_retold_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB
+ "or_retold_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB
+ "or_retold_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB
+ "or_retold_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB
+ "or_retold_mb",
+#endif
+ NULL
+};
+
+char **
+ethr_native_atomic64_ops(void)
+{
+
+ return &native_atomic64_ops[0];
}
+
+static char *native_atomic32_ops[] = {
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG
+ "cmpxchg",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB
+ "cmpxchg_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB
+ "cmpxchg_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB
+ "cmpxchg_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB
+ "cmpxchg_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB
+ "cmpxchg_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG
+ "xchg",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB
+ "xchg_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB
+ "xchg_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB
+ "xchg_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB
+ "xchg_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB
+ "xchg_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET
+ "set",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB
+ "set_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB
+ "set_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB
+ "set_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB
+ "set_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB
+ "set_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT
+ "init",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB
+ "init_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB
+ "init_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB
+ "init_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB
+ "init_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB
+ "init_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN
+ "add_return",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB
+ "add_return_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB
+ "add_return_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB
+ "add_return_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB
+ "add_return_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB
+ "add_return_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ
+ "read",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB
+ "read_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB
+ "read_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB
+ "read_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB
+ "read_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB
+ "read_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN
+ "inc_return",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB
+ "inc_return_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB
+ "inc_return_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB
+ "inc_return_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB
+ "inc_return_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB
+ "inc_return_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN
+ "dec_return",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB
+ "dec_return_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB
+ "dec_return_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB
+ "dec_return_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB
+ "dec_return_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB
+ "dec_return_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD
+ "add",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB
+ "add_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB
+ "add_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB
+ "add_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB
+ "add_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB
+ "add_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC
+ "inc",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB
+ "inc_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB
+ "inc_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB
+ "inc_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB
+ "inc_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB
+ "inc_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC
+ "dec",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB
+ "dec_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB
+ "dec_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB
+ "dec_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB
+ "dec_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB
+ "dec_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD
+ "and_retold",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB
+ "and_retold_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB
+ "and_retold_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB
+ "and_retold_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB
+ "and_retold_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB
+ "and_retold_mb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD
+ "or_retold",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB
+ "or_retold_rb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB
+ "or_retold_wb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB
+ "or_retold_acqb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB
+ "or_retold_relb",
+#endif
+#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB
+ "or_retold_mb",
+#endif
+ NULL
+};
+
+char **
+ethr_native_atomic32_ops(void)
+{
+
+ return &native_atomic32_ops[0];
+}
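+
+/*
+ * Each of the *_ops() info functions above returns a NULL-terminated
+ * array of op-name strings. A minimal usage sketch (illustrative only,
+ * not part of the patch):
+ *
+ *     char **ops = ethr_native_atomic32_ops();
+ *     while (*ops)
+ *         printf("native atomic32 op: %s\n", *ops++);
+ */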
diff --git a/erts/lib_src/common/ethr_aux.c b/erts/lib_src/common/ethr_aux.c
index 2c3e25a805..521640317e 100644
--- a/erts/lib_src/common/ethr_aux.c
+++ b/erts/lib_src/common/ethr_aux.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -31,10 +31,6 @@
#define ETHR_INLINE_FUNC_NAME_(X) X ## __
#define ETHR_AUX_IMPL__
-#define ETHR_ATOMIC_IMPL__ /* Needed in order to pull in
- native atomic implementations
- for optimized fallbacks of
- spinlocks and rwspinlocks */
#include "ethread.h"
#include "ethr_internal.h"
#include <string.h>
@@ -75,10 +71,87 @@ static int main_threads;
static int init_ts_event_alloc(void);
+ethr_runtime_t ethr_runtime__
+#ifdef __GNUC__
+__attribute__ ((aligned (ETHR_CACHE_LINE_SIZE)))
+#endif
+ ;
+
+#if defined(ETHR_X86_RUNTIME_CONF__)
+
+/*
+ * x86/x86_64 specifics shared between the Windows and
+ * pthread implementations.
+ */
+
+#define ETHR_IS_X86_VENDOR(V, B, C, D) \
+ (sizeof(V) == 13 && is_x86_vendor((V), (B), (C), (D)))
+
+static ETHR_INLINE int
+is_x86_vendor(char *str, int ebx, int ecx, int edx)
+{
+ return (*((int *) &str[0]) == ebx
+ && *((int *) &str[sizeof(int)]) == edx
+ && *((int *) &str[sizeof(int)*2]) == ecx);
+}
+
+static void
+x86_init(void)
+{
+ int eax, ebx, ecx, edx;
+
+ eax = ebx = ecx = edx = 0;
+
+ ethr_x86_cpuid__(&eax, &ebx, &ecx, &edx);
+
+ if (eax > 0
+ && (ETHR_IS_X86_VENDOR("GenuineIntel", ebx, ecx, edx)
+ || ETHR_IS_X86_VENDOR("AuthenticAMD", ebx, ecx, edx))) {
+ eax = 1;
+ ethr_x86_cpuid__(&eax, &ebx, &ecx, &edx);
+ }
+ else {
+ /*
+ * The meanings of the feature flags for this
+ * vendor have not been verified.
+ */
+ eax = ebx = ecx = edx = 0;
+ }
+
+ /*
+ * The feature flags tested below have only been verified
+ * for the vendors checked above, and only these feature
+ * flags have been verified to have these specific meanings.
+ * If another feature flag test is introduced, it has to be
+ * verified to have the same meaning for all of the vendors
+ * above.
+ */
+
+#if ETHR_SIZEOF_PTR == 8
+ /* bit 13 of ecx is set if we have cmpxchg16b */
+ ethr_runtime__.conf.have_dw_cmpxchg = (ecx & (1 << 13));
+#elif ETHR_SIZEOF_PTR == 4
+ /* bit 8 of edx is set if we have cmpxchg8b */
+ ethr_runtime__.conf.have_dw_cmpxchg = (edx & (1 << 8));
+#else
+# error "Not supported"
+#endif
+ /* bit 26 of edx is set if we have sse2 */
+ ethr_runtime__.conf.have_sse2 = (edx & (1 << 26));
+}
+
+#endif /* ETHR_X86_RUNTIME_CONF__ */
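+
+/*
+ * A minimal sketch of how the flags set above can later be tested
+ * (illustrative; the real call sites live elsewhere in ethread):
+ *
+ *     if (ethr_runtime__.conf.have_dw_cmpxchg) {
+ *         // cmpxchg8b/cmpxchg16b based double word atomics are usable
+ *     }
+ */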
+
+
int
ethr_init_common__(ethr_init_data *id)
{
int res;
+
+#if defined(ETHR_X86_RUNTIME_CONF__)
+ x86_init();
+#endif
+
if (id) {
ethr_thr_prepare_func__ = id->thread_create_prepare_func;
ethr_thr_parent_func__ = id->thread_create_parent_func;
diff --git a/erts/lib_src/common/ethr_mutex.c b/erts/lib_src/common/ethr_mutex.c
index 2ddef32dfc..81fd6af80a 100644
--- a/erts/lib_src/common/ethr_mutex.c
+++ b/erts/lib_src/common/ethr_mutex.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -26,8 +26,9 @@
#include "config.h"
#endif
-#define ETHR_INLINE_FUNC_NAME_(X) X ## __
+#define ETHR_INLINE_MTX_FUNC_NAME_(X) X ## __
#define ETHR_MUTEX_IMPL__
+#define ETHR_TRY_INLINE_FUNCS
#include <limits.h>
#include "ethread.h"
diff --git a/erts/lib_src/pthread/ethr_x86_sse2_asm.c b/erts/lib_src/pthread/ethr_x86_sse2_asm.c
new file mode 100644
index 0000000000..6cbe73cf16
--- /dev/null
+++ b/erts/lib_src/pthread/ethr_x86_sse2_asm.c
@@ -0,0 +1,31 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: SSE2 assembly implementations
+ * Author: Rickard Green
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+/* Defining ETHR_X86_SSE2_ASM_C__ causes the SSE2 asm implementations to be included and compiled */
+#define ETHR_X86_SSE2_ASM_C__
+#include "ethread.h"
diff --git a/erts/lib_src/pthread/ethread.c b/erts/lib_src/pthread/ethread.c
index f047104103..ad29249bac 100644
--- a/erts/lib_src/pthread/ethread.c
+++ b/erts/lib_src/pthread/ethread.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -121,6 +121,98 @@ ethr_ts_event *ethr_get_tse__(void)
return pthread_getspecific(ethr_ts_event_key__);
}
+#if defined(ETHR_PPC_RUNTIME_CONF__)
+
+static volatile int lwsync_caused_sigill;
+
+static void
+handle_lwsync_sigill(int signum)
+{
+ lwsync_caused_sigill = 1;
+}
+
+static int
+ppc_init__(void)
+{
+ struct sigaction act, oact;
+ lwsync_caused_sigill = 0;
+
+ sigemptyset(&act.sa_mask);
+ act.sa_flags = 0;
+ act.sa_handler = handle_lwsync_sigill;
+ if (sigaction(SIGILL, &act, &oact) != 0)
+ return errno;
+
+ __asm__ __volatile__ ("lwsync\n\t" : : : "memory");
+
+ act.sa_flags = 0;
+ act.sa_handler = SIG_DFL;
+ if (sigaction(SIGILL, &act, &oact) != 0)
+ return errno;
+
+ ethr_runtime__.conf.have_lwsync = (int) !lwsync_caused_sigill;
+ return 0;
+}
+
+#endif
+
+#if defined(ETHR_X86_RUNTIME_CONF__)
+
+void
+ethr_x86_cpuid__(int *eax, int *ebx, int *ecx, int *edx)
+{
+#if ETHR_SIZEOF_PTR == 4
+ int have_cpuid;
+ /*
+     * If it is possible to toggle EFLAGS bit 21 (the ID flag),
+     * we have the cpuid instruction.
+ */
+ __asm__ ("pushf\n\t"
+ "popl %%eax\n\t"
+ "movl %%eax, %%ecx\n\t"
+ "xorl $0x200000, %%eax\n\t"
+ "pushl %%eax\n\t"
+ "popf\n\t"
+ "pushf\n\t"
+ "popl %%eax\n\t"
+ "movl $0x0, %0\n\t"
+ "xorl %%ecx, %%eax\n\t"
+ "jz no_cpuid\n\t"
+ "movl $0x1, %0\n\t"
+ "no_cpuid:\n\t"
+ : "=r"(have_cpuid)
+ :
+ : "%eax", "%ecx", "cc");
+ if (!have_cpuid) {
+ *eax = *ebx = *ecx = *edx = 0;
+ return;
+ }
+#endif
+#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__
+ /*
+ * When position independent code is used in 32-bit mode, the EBX
+ * register is used to store the global offset table address, and we
+ * may not use it as input or output in an asm. We need to save and
+ * restore the EBX register explicitly (for some reason gcc doesn't
+ * provide this service to us).
+ */
+ __asm__ ("pushl %%ebx\n\t"
+ "cpuid\n\t"
+ "movl %%ebx, %1\n\t"
+ "popl %%ebx\n\t"
+ : "=a"(*eax), "=r"(*ebx), "=c"(*ecx), "=d"(*edx)
+ : "0"(*eax)
+ : "cc");
+#else
+ __asm__ ("cpuid\n\t"
+ : "=a"(*eax), "=b"(*ebx), "=c"(*ecx), "=d"(*edx)
+ : "0"(*eax)
+ : "cc");
+#endif
+}
+
+#endif /* ETHR_X86_RUNTIME_CONF__ */
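+
+/*
+ * Usage sketch (illustrative): cpuid leaf 0 returns the highest
+ * supported leaf in EAX and the vendor string split across EBX, EDX,
+ * and ECX, which is what x86_init() in ethr_aux.c relies on:
+ *
+ *     int eax, ebx, ecx, edx;
+ *     eax = ebx = ecx = edx = 0;
+ *     ethr_x86_cpuid__(&eax, &ebx, &ecx, &edx);
+ */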
+
/*
* --------------------------------------------------------------------------
* Exported functions
@@ -137,6 +229,12 @@ ethr_init(ethr_init_data *id)
ethr_not_inited__ = 0;
+#if defined(ETHR_PPC_RUNTIME_CONF__)
+ res = ppc_init__();
+ if (res != 0)
+ goto error;
+#endif
+
res = ethr_init_common__(id);
if (res != 0)
goto error;
@@ -146,6 +244,8 @@ ethr_init(ethr_init_data *id)
child_wait_spin_count = 0;
res = pthread_key_create(&ethr_ts_event_key__, ethr_ts_event_destructor__);
+ if (res != 0)
+ goto error;
return 0;
error:
diff --git a/erts/lib_src/utils/make_atomics_api b/erts/lib_src/utils/make_atomics_api
new file mode 100755
index 0000000000..f4e71c7618
--- /dev/null
+++ b/erts/lib_src/utils/make_atomics_api
@@ -0,0 +1,2186 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2011. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-mode(compile).
+
+%%%-------------------------------------------------------------------
+%%% @author Rickard Green <[email protected]>
+%%% @copyright (C) 2011, Rickard Green
+%%% @doc
+%%% Generation of the ethread atomic API
+%%% @end
+%%% Created : 17 Jan 2011 by Rickard Green <[email protected]>
+%%%-------------------------------------------------------------------
+
+-define(H_FILE, "erts/include/internal/ethr_atomics.h").
+-define(C_FILE, "erts/lib_src/common/ethr_atomics.c").
+
+%% These order constraints are important:
+%% - 'cmpxchg' needs to appear before 'read'
+%% - 'xchg' needs to appear before 'set'
+%% - 'set' needs to appear before 'init'
+%% - 'add_read' needs to appear before 'add', 'inc_read', and 'dec_read'
+%% - 'inc_read' needs to appear before 'inc'
+%% - 'dec_read' needs to appear before 'dec'
+-define(ATOMIC_OPS, [cmpxchg, xchg, set, init, add_read,
+ read, inc_read, dec_read, add, inc,
+ dec, read_band, read_bor]).
+
+-define(DW_ATOMIC_OPS, [cmpxchg, set, read, init]).
+-define(DW_FUNC_MACRO, "ETHR_DW_ATOMIC_FUNC__").
+-define(DW_RTCHK_MACRO, "ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__").
+
+%% Barrier versions we implement
+-define(BARRIERS, [none, rb, wb, acqb, relb, mb]).
+
+-define(ATOMIC_SIZES, ["dword", "word", "32"]).
+
+-define(HAVE_NATIVE_ATOMIC, "ETHR_HAVE_ETHR_NATIVE_ATOMIC").
+
+-define(SU_DW_SINT_FIELD, "dw_sint").
+-define(DW_SINT_FIELD, "sint").
+
+%% Fallback
+-define(ETHR_ATMC_FLLBK_ADDR_BITS, "10").
+-define(ETHR_ATMC_FLLBK_ADDR_SHIFT, "6").
+
+-record(atomic_context, {dw,
+ amc_fallback,
+ ret_type,
+ ret_var,
+ arg1,
+ arg2,
+ arg3,
+ have_native_atomic_ops,
+ atomic,
+ atomic_t,
+ addr_aint_t,
+ aint_t,
+ naint_t,
+ 'NATMC',
+ 'ATMC',
+ unusual_val}).
+
+atomic_context("dword") ->
+ #atomic_context{dw = true,
+ amc_fallback = true,
+ ret_type = "int",
+ ret_var = "res",
+ arg1 = "var",
+ arg2 = "val",
+ arg3 = "old_val",
+ have_native_atomic_ops = "ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS",
+ atomic = "ethr_dw_atomic",
+ atomic_t = "ethr_dw_atomic_t",
+ addr_aint_t = "ethr_sint_t",
+ aint_t = "ethr_dw_sint_t",
+ naint_t = "ETHR_SU_DW_NAINT_T__",
+ 'NATMC' = "DW_NATMC",
+ 'ATMC' = "DW_ATMC",
+ unusual_val = "ETHR_UNUSUAL_SINT_VAL__"};
+atomic_context(Size) ->
+ {SizeSuffix, HaveSize, AMC} = case Size of
+ "word" -> {"", "WORD_SZ", true};
+ _ -> {Size, Size++"BIT", false}
+ end,
+ AintT = ["ethr_sint", SizeSuffix, "_t"],
+ #atomic_context{dw = false,
+ amc_fallback = AMC,
+ ret_type = AintT,
+ ret_var = "res",
+ arg1 = "var",
+ arg2 = "val",
+ arg3 = "old_val",
+ have_native_atomic_ops = ["ETHR_HAVE_", HaveSize, "_NATIVE_ATOMIC_OPS"],
+ atomic = ["ethr_atomic", SizeSuffix],
+ atomic_t = ["ethr_atomic", SizeSuffix, "_t"],
+ addr_aint_t = AintT,
+ aint_t = AintT,
+ naint_t = ["ETHR_NAINT", SizeSuffix, "_T__"],
+ 'NATMC' = ["NATMC", SizeSuffix],
+ 'ATMC' = ["ATMC", SizeSuffix],
+ unusual_val = ["ETHR_UNUSUAL_SINT", SizeSuffix, "_VAL__"]}.
+
+-record(op_context, {ret, var, val1, val2}).
+
+-define(POTENTIAL_NBITS, ["64", "32"]).
+
+is_return_op(#atomic_context{dw = false}, add) -> false;
+is_return_op(#atomic_context{dw = false}, inc) -> false;
+is_return_op(#atomic_context{dw = false}, dec) -> false;
+is_return_op(#atomic_context{dw = true}, read) -> false;
+is_return_op(_AC, init) -> false;
+is_return_op(_AC, set) -> false;
+is_return_op(_AC, _OP) -> true.
+
+native(add_read) -> add_return;
+native(inc_read) -> inc_return;
+native(dec_read) -> dec_return;
+native(read_band) -> and_retold;
+native(read_bor) -> or_retold;
+native(Op) -> Op.
+
+op(Op, #op_context{var = Var, val1 = Val1}) when Op == init; Op == set ->
+ [Var, " = ", Val1];
+op(read, #op_context{ret = Ret, var = Var}) ->
+ [Ret, " = ", Var];
+op(add_read, OpC) ->
+ [op(add, OpC), "; ", op(read, OpC)];
+op(add, #op_context{var = Var, val1 = Val1}) ->
+ [Var, " += ", Val1];
+op(inc, #op_context{var = Var}) ->
+ ["++(", Var, ")"];
+op(dec, #op_context{var = Var}) ->
+ ["--(", Var, ")"];
+op(inc_read, #op_context{ret = Ret, var = Var}) ->
+ [Ret, " = ++(", Var, ")"];
+op(dec_read, #op_context{ret = Ret, var = Var}) ->
+ [Ret, " = --(", Var, ")"];
+op(read_band, #op_context{var = Var, val1 = Val1} = OpC) ->
+ [op(read, OpC), "; ", Var, " &= ", Val1];
+op(read_bor, #op_context{var = Var, val1 = Val1} = OpC) ->
+ [op(read, OpC), "; ", Var, " |= ", Val1];
+op(xchg, OpC) ->
+ [op(read, OpC), "; ", op(set, OpC)];
+op(cmpxchg, #op_context{ret = Ret, var = Var, val1 = Val1, val2 = Val2}) ->
+ [Ret, " = (", Var, " == ", Val2, " ? (", Var, " = ", Val1, ", ", Val2, ") : ", Var, ")"].
+
+dw_op(Op, #op_context{var = Var, val1 = Val1}) when Op == init; Op == set ->
+ [Var, "[0] = ", Val1, "[0]; ", Var, "[1] = ", Val1, "[1]"];
+dw_op(read, #op_context{var = Var, val1 = Val1}) ->
+ [Val1, "[0] = ", Var, "[0]; ", Val1, "[1] = ", Var, "[1]"];
+dw_op(cmpxchg, #op_context{ret = Ret, var = Var, val1 = Val1, val2 = Val2}) ->
+ ["
+ {
+ ", Ret, " = (", Var, "[0] == ", Val2, "[0] && ", Var, "[1] == ", Val2, "[1]);
+ if (", Ret, ") {
+ ", Var, "[0] = ", Val1, "[0];
+ ", Var, "[1] = ", Val1, "[1];
+ }
+ else {
+ ", Val2, "[0] = ", Var, "[0];
+ ", Val2, "[1] = ", Var, "[1];
+ }
+ }"].
+
+op_head_tail(init) -> {undef, undef};
+op_head_tail(set) -> {store, store};
+op_head_tail(read) -> {load, load};
+op_head_tail(_) -> {load, undef}.
+
+op_barrier_ext(none) -> "";
+op_barrier_ext(Barrier) -> [$_, a2l(Barrier)].
+
+op_call(addr, _DW, Ret, Func, Arg1, _Arg2, _Arg3, _TypeCast) ->
+ [Ret, " ", Func, "(", Arg1, ");"];
+op_call(Op, false, Ret, Func, Arg1, _Arg2, _Arg3, _TypeCast) when Op == read;
+ Op == inc_read;
+ Op == inc_return;
+ Op == dec_read;
+ Op == dec_return ->
+ [Ret, " ", Func, "(", Arg1, ");"];
+op_call(Op, false, _Ret, Func, Arg1, _Arg2, _Arg3, _TypeCast) when Op == inc;
+ Op == dec ->
+ [Func, "(", Arg1, ");"];
+op_call(Op, false, Ret, Func, Arg1, Arg2, _Arg3, TypeCast) when Op == add_return;
+ Op == add_read;
+ Op == read_band;
+ Op == and_retold;
+ Op == read_bor;
+ Op == or_retold;
+ Op == xchg ->
+ [Ret, " ", Func, "(", Arg1, ",", TypeCast, " ", Arg2, ");"];
+op_call(cmpxchg, _DW, Ret, Func, Arg1, Arg2, Arg3, TypeCast) ->
+ [Ret, " ", Func, "(", Arg1, ",", TypeCast, " ", Arg2, ",", TypeCast, " ", Arg3, ");"];
+op_call(_Op, _DW, _Ret, Func, Arg1, Arg2, _Arg3, TypeCast) ->
+ [Func, "(", Arg1, ",", TypeCast, " ", Arg2, ");"]. % set, init, add (!= dw), read (== dw)
+
+native_op_call(#atomic_context{dw = DW,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3,
+ aint_t = AintT,
+ 'NATMC' = NATMC,
+ naint_t = NAintT},
+ Op, B, TypeCasts) ->
+ op_call(Op,
+ DW,
+ [RetVar, " =",
+ case TypeCasts of
+ true -> [" (", AintT, ")"];
+ false -> ""
+ end],
+ ["ETHR_", NATMC, "_FUNC__(", opstr(native(Op)), op_barrier_ext(B), ")"],
+ Arg1,
+ Arg2,
+ Arg3,
+ case TypeCasts of
+ true -> [" (", NAintT, ")"];
+ false -> ""
+ end).
+
+simple_fallback(#atomic_context{arg1 = Arg1,
+ arg2 = Arg2,
+ 'ATMC' = ATMC},
+ init, B) -> %% Also double word
+ [" ETHR_", ATMC, "_FUNC__(set", op_barrier_ext(B),")(", Arg1, ", ", Arg2, ");\n"];
+simple_fallback(#atomic_context{dw = false,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ 'ATMC' = ATMC},
+ set, B) ->
+ [" (void) ETHR_", ATMC, "_FUNC__(xchg", op_barrier_ext(B),")(", Arg1, ", ", Arg2, ");\n"];
+simple_fallback(#atomic_context{dw = false,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ 'ATMC' = ATMC},
+ add, B) ->
+ [" (void) ETHR_", ATMC, "_FUNC__(add_read", op_barrier_ext(B), ")(", Arg1, ", ", Arg2, ");\n"];
+simple_fallback(#atomic_context{dw = false,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ aint_t = AintT,
+ 'ATMC' = ATMC},
+ inc_read, B) ->
+ [" ", RetVar, " = ETHR_", ATMC, "_FUNC__(add_read", op_barrier_ext(B), ")(", Arg1, ", (", AintT,") 1);\n"];
+simple_fallback(#atomic_context{dw = false,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ aint_t = AintT,
+ 'ATMC' = ATMC},
+ dec_read, B) ->
+ [" ", RetVar, " = ETHR_", ATMC, "_FUNC__(add_read", op_barrier_ext(B), ")(", Arg1, ", (", AintT,") -1);\n"];
+simple_fallback(#atomic_context{dw = false,
+ arg1 = Arg1,
+ 'ATMC' = ATMC},
+ inc, B) ->
+ [" (void) ETHR_", ATMC, "_FUNC__(inc_read", op_barrier_ext(B), ")(", Arg1, ");\n"];
+simple_fallback(#atomic_context{dw = false,
+ arg1 = Arg1,
+ 'ATMC' = ATMC},
+ dec, B) ->
+ [" (void) ETHR_", ATMC, "_FUNC__(dec_read", op_barrier_ext(B), ")(", Arg1, ");\n"];
+simple_fallback(#atomic_context{dw = false,
+ unusual_val = UnusualVal,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ aint_t = AintT,
+ 'ATMC' = ATMC},
+ read, B) ->
+ [" ", RetVar, " = ETHR_", ATMC, "_FUNC__(cmpxchg", op_barrier_ext(B), ")(", Arg1, ", (", AintT, ") ", UnusualVal, ", (", AintT,") ", UnusualVal, ");\n"];
+simple_fallback(#atomic_context{dw = true,
+ unusual_val = UnusualVal,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ aint_t = AintT,
+ 'ATMC' = ATMC},
+ read, B) ->
+ [" ", AintT, " tmp;
+ tmp.", ?DW_SINT_FIELD, "[0] = ", UnusualVal, ";
+ tmp.", ?DW_SINT_FIELD, "[1] = ", UnusualVal, ";
+ ", Arg2, "->", ?DW_SINT_FIELD, "[0] = ", UnusualVal, ";
+ ", Arg2, "->", ?DW_SINT_FIELD, "[1] = ", UnusualVal, ";
+ (void) ETHR_", ATMC, "_FUNC__(cmpxchg", op_barrier_ext(B), ")(", Arg1, ", &tmp, ", Arg2, ");
+"
+ ];
+simple_fallback(_AC, _Op, _B) ->
+ [].
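+
+%% Note the read fallbacks above: with no native read op, a read is
+%% synthesized as res = cmpxchg(var, UNUSUAL, UNUSUAL). The cmpxchg
+%% always hands back the actual value, and in the unlikely event that
+%% the value equals ETHR_UNUSUAL_SINT_VAL__ the "swap" just rewrites
+%% the same value, so the operation is observably a pure read.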
+
+func_header(AC, prototype, MacroName, Op, B) ->
+ [func_header(AC, implementation, MacroName, Op, B), ";"];
+func_header(#atomic_context{'ATMC' = ATMC} = AC, inline_implementation, _MacroName, Op, B) ->
+ do_func_header(AC, Op, "static ETHR_INLINE ",
+ ["ETHR_", ATMC, "_FUNC__(", opstr(Op), op_barrier_ext(B), ")"]);
+func_header(#atomic_context{atomic = Atomic} = AC, implementation, false, Op, B) ->
+ do_func_header(AC, Op, "", [Atomic, "_", opstr(Op), op_barrier_ext(B)]);
+func_header(AC, implementation, MacroName, Op, B) ->
+ do_func_header(AC, Op, "", [MacroName, "(", opstr(Op), op_barrier_ext(B), ")"]).
+
+
+do_func_header(#atomic_context{atomic_t = AtomicT,
+ addr_aint_t = AddrAintT,
+ arg1 = Arg1},
+ addr, Inline, Func) ->
+ [Inline, AddrAintT, " *", Func, "(", AtomicT, " *", Arg1, ")"];
+do_func_header(#atomic_context{dw = false,
+ atomic_t = AtomicT,
+ aint_t = AintT,
+ arg1 = Arg1,
+ arg2 = Arg2},
+ Op, Inline, Func) when Op == init;
+ Op == set;
+ Op == add ->
+ [Inline, "void ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " ", Arg2, ")"];
+do_func_header(#atomic_context{dw = false,
+ atomic_t = AtomicT,
+ arg1 = Arg1},
+ Op, Inline, Func) when Op == inc;
+ Op == dec ->
+ [Inline, "void ", Func, "(", AtomicT, " *", Arg1, ")"];
+do_func_header(#atomic_context{dw = false,
+ atomic_t = AtomicT,
+ aint_t = AintT,
+ arg1 = Arg1},
+ Op, Inline, Func) when Op == read;
+ Op == inc_read;
+ Op == dec_read ->
+ [Inline, AintT, " ", Func, "(", AtomicT, " *", Arg1, ")"];
+do_func_header(#atomic_context{dw = false,
+ atomic_t = AtomicT,
+ aint_t = AintT,
+ arg1 = Arg1,
+ arg2 = Arg2},
+ Op, Inline, Func) when Op == add_read;
+ Op == read_band;
+ Op == read_bor;
+ Op == xchg ->
+ [Inline, AintT, " ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " ", Arg2, ")"];
+do_func_header(#atomic_context{dw = false,
+ atomic_t = AtomicT,
+ aint_t = AintT,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3},
+ cmpxchg, Inline, Func) ->
+ [Inline, AintT, " ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " ", Arg2, ", ", AintT, " ", Arg3, ")"];
+do_func_header(#atomic_context{dw = true,
+ atomic_t = AtomicT,
+ aint_t = AintT,
+ arg1 = Arg1,
+ arg2 = Arg2},
+ Op, Inline, Func) when Op == init;
+ Op == set;
+ Op == read ->
+ [Inline, "void ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " *", Arg2, ")"];
+do_func_header(#atomic_context{dw = true,
+ atomic_t = AtomicT,
+ aint_t = AintT,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3},
+ cmpxchg, Inline, Func) ->
+ [Inline, "int ", Func, "(", AtomicT, " *", Arg1, ", ", AintT, " *", Arg2, ", ", AintT, " *", Arg3, ")"].
+
+
+xbarriers(_Op, none, _NB) ->
+ {"", ""};
+
+xbarriers(_Op, acqb, NB) when NB == acqb; NB == mb ->
+ {"", ""};
+xbarriers(Op, acqb, NB) ->
+ case {op_head_tail(Op), NB} of
+ {{_, load}, rb} -> {"", "ETHR_MEMBAR(ETHR_LoadStore);"};
+ {{_, load}, _} -> {"", "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);"};
+ {{_, store}, _} -> {"", "ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);"};
+ {_, rb} -> {"", "ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);"};
+ _ -> {"", "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);"}
+ end;
+
+xbarriers(_Op, relb, NB) when NB == relb; NB == mb ->
+ {"", ""};
+xbarriers(Op, relb, NB) ->
+ case {op_head_tail(Op), NB} of
+ {{store, _}, wb} -> {"ETHR_MEMBAR(ETHR_LoadStore);", ""};
+ {{store, _}, _} -> {"ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);", ""};
+ {{load, _}, _} -> {"ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);", ""};
+ {_, wb} -> {"ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);", ""};
+ _ -> {"ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);", ""}
+ end;
+
+xbarriers(_Op, wb, NB) when NB == wb; NB == mb ->
+ {"", ""};
+xbarriers(_Op, wb, _NB) ->
+ {"ETHR_MEMBAR(ETHR_StoreStore);", ""};
+
+xbarriers(_Op, rb, NB) when NB == rb; NB == mb ->
+ {"", ""};
+xbarriers(_Op, rb, _NB) ->
+ {"", "ETHR_MEMBAR(ETHR_LoadLoad);"};
+
+xbarriers(_Op, mb, mb) ->
+ {"", ""};
+xbarriers(Op, mb, NB) ->
+ MB = "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);",
+ {Head, Tail} = op_head_tail(Op),
+ PreOp = case {Head, NB} of
+ {_, relb} -> "";
+ {store, wb} -> "ETHR_MEMBAR(ETHR_LoadStore);";
+ {store, _} -> "ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);";
+ {load, _} -> "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);";
+ {_, wb} -> "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);";
+ _ -> MB
+ end,
+ PostOp = case {Tail, NB} of
+ {_, acqb} -> "";
+ {load, rb} -> "ETHR_MEMBAR(ETHR_LoadStore);";
+ {load, _} -> "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);";
+ {store, _} -> "ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);";
+ {_, rb} -> "ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);";
+ _ -> MB
+ end,
+ {PreOp, PostOp}.
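+
+%% Example: xbarriers(read, mb, none), i.e. a fully barriered read
+%% built from a plain read, yields the pair
+%%   {"ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);",
+%%    "ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);"}
+%% which is exactly the barrier pair around the read_mb() fallback in
+%% the generated ethr_atomics.c above.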
+
+try_barrier_order_first(none) ->
+ [none, rb, wb, acqb, relb];
+try_barrier_order_first(acqb) ->
+ [acqb, rb, none, mb];
+try_barrier_order_first(relb) ->
+ [relb, wb, none, mb];
+try_barrier_order_first(rb) ->
+ [rb, none, mb];
+try_barrier_order_first(wb) ->
+ [wb, none, mb];
+try_barrier_order_first(mb) ->
+ [mb, relb, acqb, wb, rb, none].
+
+try_barrier_order(B) ->
+ First = try_barrier_order_first(B),
+ First ++ (?BARRIERS -- First).
+
+native_barrier_op(#atomic_context{'NATMC' = NATMC} = AC, If, ExtraDecl, Op, B, NB, TypeCasts) ->
+ NOpStr = opstr(native(Op)),
+ CapNOpStr = to_upper(NOpStr),
+ NBExt = op_barrier_ext(NB),
+ CapNBExt = to_upper(NBExt),
+ {PreB, PostB} = xbarriers(Op, B, NB),
+ [If, " defined(ETHR_HAVE_", NATMC, "_", CapNOpStr, CapNBExt, ")\n",
+ ExtraDecl,
+ case PreB of
+ "" -> "";
+ _ -> [" ", PreB, "\n"]
+ end,
+ " ", native_op_call(AC, Op, NB, TypeCasts), "\n",
+ case PostB of
+ "" -> "";
+ _ -> [" ", PostB, "\n"]
+ end].
+
+dw_native_barrier_op(#atomic_context{arg1 = Arg1, arg2 = Arg2, arg3 = Arg3} = AC, If, ExtraDecl, Op, B, NB) ->
+ native_barrier_op(AC#atomic_context{arg1 = ["&", Arg1, "->native"],
+ arg2 = [Arg2, "->", ?DW_SINT_FIELD],
+ arg3 = [Arg3, "->", ?DW_SINT_FIELD]},
+ If, ExtraDecl, Op, B, NB, false).
+
+su_dw_native_barrier_op(#atomic_context{dw = true,
+ naint_t = NAintT,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3,
+ 'NATMC' = NATMC} = AC, If, cmpxchg, B, NB) ->
+ SU = ["->", ?SU_DW_SINT_FIELD],
+ TmpVar = "act",
+ SUArg1 = ["&", Arg1, "->native"],
+ SUArg2 = [Arg2, SU],
+ SUArg3 = [Arg3, SU],
+ ExtraDecl = [" ", NAintT, " ", TmpVar, ";\n"],
+ [native_barrier_op(AC#atomic_context{dw = false,
+ ret_var = TmpVar,
+ arg1 = SUArg1,
+ arg2 = SUArg2,
+ arg3 = SUArg3,
+ 'NATMC' = ["SU_", NATMC]},
+ If, ExtraDecl, cmpxchg, B, NB, false),
+ " ", RetVar, " = (", TmpVar, " == ", SUArg3, ");
+ ", SUArg3, " = ", TmpVar, ";
+"
+ ];
+su_dw_native_barrier_op(#atomic_context{dw = true,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ 'NATMC' = NATMC} = AC, If, Op, B, NB) ->
+ SUArg1 = ["&", Arg1, "->native"],
+ SUArg2 = [Arg2, "->", ?SU_DW_SINT_FIELD],
+ native_barrier_op(AC#atomic_context{dw = false,
+ ret_var = SUArg2,
+ arg1 = SUArg1,
+ arg2 = SUArg2,
+ arg3 = not_used,
+ 'NATMC' = ["SU_", NATMC]}, If, "", Op, B, NB, false).
+
+cmpxchg_fallback_define(#atomic_context{dw = false, aint_t = AintT} = AC) ->
+ do_cmpxchg_fallback_define(AC, true, AintT);
+cmpxchg_fallback_define(#atomic_context{dw = true,
+ 'NATMC' = NATMC,
+ naint_t = NAintT} = AC) ->
+ ["\n\n#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC)\n",
+ do_cmpxchg_fallback_define(AC, false, not_used),
+ "\n\n#elif defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)\n",
+ do_cmpxchg_fallback_define(AC#atomic_context{'NATMC' = ["SU_", NATMC],
+ naint_t = NAintT},
+ true,
+ NAintT),
+ "
+
+#else
+# error \"?!?\"
+#endif
+"].
+
+do_cmpxchg_fallback_define(#atomic_context{'NATMC' = NATMC,
+ aint_t = AintT,
+ naint_t = NAintT},
+ SU, SUType) ->
+
+ ReadFunc = fun (IF) ->
+ fun (B) ->
+ BExt = op_barrier_ext(B),
+ CapBExt = to_upper(BExt),
+ [IF, " defined(ETHR_HAVE_", NATMC, "_READ", CapBExt, ")",
+ case SU of
+ true -> ["
+#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR) \\
+ ETHR_", NATMC, "_FUNC__(read", BExt, ")(VAR)
+"
+ ];
+ false -> ["
+#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, VAL) \\
+ ETHR_", NATMC, "_FUNC__(read", BExt, ")(VAR, VAL)
+#elif defined(ETHR_HAVE_SU_", NATMC, "_READ", CapBExt, ")
+#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, VAL) \\
+ VAL.", ?SU_DW_SINT_FIELD, " = ETHR_SU_", NATMC, "_FUNC__(read", BExt, ")(VAR)
+"
+ ]
+ end]
+ end
+ end,
+ NotDefCMPXCHG = fun (B) ->
+ CapBExt = to_upper(op_barrier_ext(B)),
+ ["!defined(ETHR_HAVE_", NATMC, "_CMPXCHG", CapBExt, ")"]
+ end,
+ NoneTryBarrierOrder = try_barrier_order(none),
+ %% First a sanity check
+ ["
+#if (", NotDefCMPXCHG(hd(?BARRIERS)) ,
+ lists:map(fun (B) ->
+ [" \\
+ && ", NotDefCMPXCHG(B)]
+ end,
+ tl(?BARRIERS)), ")
+# error \"No native cmpxchg() op available\"
+#endif
+
+
+/*
+ * Read op used together with cmpxchg() fallback when no native op present.
+ */
+",
+
+ %% Read op to use with cmpxchg fallback
+ (ReadFunc("#if"))(hd(NoneTryBarrierOrder)),
+ lists:map(ReadFunc("#elif"), tl(NoneTryBarrierOrder)),
+"#else
+/*
+ * We have no native read() op; guess zero and then use
+ * the atomic's actual value returned from cmpxchg().
+ */",
+ case SU of
+ true -> ["
+#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR) \\
+ ((", NAintT, ") 0)"];
+ false -> ["
+#define ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, VAL) \\
+do { \\
+ VAL.", ?DW_SINT_FIELD, "[0] = (ethr_sint_t) 0; \\
+ VAL.", ?DW_SINT_FIELD, "[1] = (ethr_sint_t) 0; \\
+} while (0)"]
+ end, "
+#endif
+",
+
+ %% The fallback
+ "
+/*
+ * Native cmpxchg() fallback used when no native op is present.
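+ *
+ * The wanted operation (OPS) is applied to a private copy of the
+ * value, and the result is installed with CMPXCHG; the sequence is
+ * retried until no concurrent modification intervenes.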
+ */
+#define ETHR_", NATMC, "_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \\
+do { \\",
+ case SU of
+ true -> ["
+ ", SUType, " AVAL; \\
+ ", NAintT, " new__, act__, exp__; \\
+ act__ = ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR); \\
+ do { \\
+ exp__ = act__; \\
+ AVAL = (", SUType, ") act__; \\
+ { OPS; } \\
+ new__ = (", NAintT, ") AVAL; \\
+ act__ = CMPXCHG(VAR, new__, exp__); \\
+ } while (__builtin_expect(act__ != exp__, 0)); \\"];
+ false -> ["
+ int res__; \\
+ ", AintT, " AVAL, exp_act__; \\
+ ETHR_", NATMC, "_CMPXCHG_FALLBACK_READ__(VAR, exp_act__); \\
+ do { \\
+ AVAL.", ?DW_SINT_FIELD, "[0] = exp_act__.", ?DW_SINT_FIELD, "[0]; \\
+ AVAL.", ?DW_SINT_FIELD, "[1] = exp_act__.", ?DW_SINT_FIELD, "[1]; \\
+ { OPS; } \\
+ res__ = CMPXCHG(VAR, AVAL.", ?DW_SINT_FIELD, ", exp_act__.", ?DW_SINT_FIELD, "); \\
+ } while (__builtin_expect(res__ == 0, 0)); \\"]
+ end, "
+} while (0)
+"
+ ].
+
+cmpxchg_fallbacks(#atomic_context{}, _SUDW, cmpxchg, _B) ->
+ ""; %% No need for a fallback
+cmpxchg_fallbacks(#atomic_context{dw = DW,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3,
+ 'NATMC' = NATMC},
+ SUDW, Op, B) ->
+ Operation = case DW of
+ false ->
+ op(Op, #op_context{ret = RetVar,
+ var = "aval",
+ val1 = Arg2,
+ val2 = Arg3});
+ true ->
+ case SUDW of
+ true ->
+ op(Op, #op_context{ret = [Arg2, "->", ?SU_DW_SINT_FIELD],
+ var = "aval",
+ val1 = [Arg2, "->", ?SU_DW_SINT_FIELD]});
+ false ->
+ dw_op(Op, #op_context{ret = RetVar,
+ var = ["aval.", ?DW_SINT_FIELD],
+ val1 = [Arg2, "->", ?DW_SINT_FIELD]})
+ end
+ end,
+ [lists:map(fun (NB) ->
+ NativeVar = case DW of
+ true -> ["&", Arg1, "->native"];
+ false -> Arg1
+ end,
+ NBExt = op_barrier_ext(NB),
+ CapNBExt = to_upper(NBExt),
+ {PreB, PostB} = xbarriers(cmpxchg, B, NB),
+ ["#elif defined(ETHR_HAVE_", NATMC, "_CMPXCHG", CapNBExt, ")\n",
+ case PreB of
+ "" -> "";
+ _ -> [" ", PreB, "\n"]
+ end,
+ " ETHR_", NATMC, "_CMPXCHG_FALLBACK__(ETHR_", NATMC, "_FUNC__(cmpxchg", NBExt, "), ", NativeVar, ", aval, ", Operation, ");\n",
+ case PostB of
+ "" -> "";
+ _ -> [" ", PostB, "\n"]
+ end]
+ end,
+ try_barrier_order(B))].
+
+translate_have_defs(#atomic_context{dw = DW, 'NATMC' = NATMC}) ->
+ ["
+#if !defined(ETHR_", NATMC, "_BITS__)
+# error \"Missing native atomic implementation\"",
+ lists:map(fun (NBits) ->
+ {HaveInPrefix,
+ HaveOutPrefix,
+ HaveInPrefixExtra,
+ HaveOutPrefixExtra,
+ NativeTypeCheck} = case NBits of
+ "dw" ->
+ {"ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC",
+ ["ETHR_HAVE_", NATMC],
+ "ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC",
+ ["ETHR_HAVE_SU_", NATMC],
+ "\n#elif defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)"};
+ _ ->
+ {[?HAVE_NATIVE_ATOMIC, NBits],
+ case DW of
+ true -> ["ETHR_HAVE_SU_", NATMC];
+ false -> ["ETHR_HAVE_", NATMC]
+ end,
+ false,
+ ["ETHR_HAVE_", NATMC],
+ ["\n#elif ETHR_", NATMC, "_BITS__ == ", NBits]}
+ end,
+ [NativeTypeCheck,
+ lists:map(fun (Op) ->
+ NOpStr = opstr(native(Op)),
+ CapNOpStr = to_upper(NOpStr),
+ lists:map(fun (B) ->
+ NBExt = op_barrier_ext(B),
+ CapNBExt = to_upper(NBExt),
+ HaveOutDef = [HaveOutPrefix, "_", CapNOpStr, CapNBExt],
+ HaveOutDefExtra = [HaveOutPrefixExtra, "_", CapNOpStr, CapNBExt],
+ [case DW of
+ true ->
+ ["\n# undef ", HaveOutDefExtra];
+ false ->
+ ""
+ end, "
+# undef ", HaveOutDef,"
+# ifdef ", HaveInPrefix, "_", CapNOpStr, CapNBExt, "
+# define ", HaveOutDef, " 1
+# endif",
+ case HaveInPrefixExtra of
+ false -> "";
+ _ -> ["
+# ifdef ", HaveInPrefixExtra, "_", CapNOpStr, CapNBExt, "
+# define ", HaveOutDefExtra, " 1
+# endif"
+ ]
+ end]
+ end,
+ ?BARRIERS)
+ end,
+ case DW of
+ true -> ?DW_ATOMIC_OPS;
+ false -> ?ATOMIC_OPS
+ end)]
+ end,
+ case DW of
+ true -> ["dw", "64"];
+ false -> ?POTENTIAL_NBITS
+ end),
+ "
+#else
+# error \"Invalid native atomic size\"
+#endif
+"].
+
+
+
+make_prototypes(#atomic_context{dw = DW, 'ATMC' = ATMC} = AC) ->
+ MkProt = fun (MacroName) ->
+ %% addr() is special
+ [func_header(AC, prototype, MacroName, addr, none), "\n",
+ lists:map(fun (Op) ->
+ lists:map(fun (B) ->
+ [func_header(AC, prototype, MacroName, Op, B), "\n"]
+ end,
+ ?BARRIERS)
+ end,
+ case DW of
+ true -> ?DW_ATOMIC_OPS;
+ false -> ?ATOMIC_OPS
+ end)]
+ end,
+ ["
+#ifdef ETHR_NEED_", ATMC, "_PROTOTYPES__
+",
+ MkProt(false),
+ case DW of
+ true -> ["#if defined(", ?DW_RTCHK_MACRO, ")\n",
+ MkProt(?DW_FUNC_MACRO),
+ "#endif\n"];
+ false -> ""
+ end,
+ "#endif /* ETHR_NEED_", ATMC, "_PROTOTYPES__ */\n"].
+
+rtchk_fallback_call(Return, #atomic_context{dw = DW,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3},
+ Op, B) ->
+ op_call(Op, DW, case Return of
+ true -> "return";
+ false -> [RetVar, " ="]
+ end, [?DW_FUNC_MACRO, "(", opstr(Op), op_barrier_ext(B), ")"], Arg1, Arg2, Arg3, "").
+
+make_implementations(#atomic_context{dw = DW,
+ ret_type = RetType,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ addr_aint_t = AddrAintT,
+ atomic = Atomic,
+ have_native_atomic_ops = HaveNativeAtomicOps,
+ 'ATMC' = ATMC,
+ 'NATMC' = NATMC} = AC) ->
+ NativeVar = case DW of
+ true -> ["(&", Arg1, "->native)"];
+ false -> Arg1
+ end,
+ RtchkBegin = ["
+#if defined(", ?DW_RTCHK_MACRO, ")
+ if (", ?DW_RTCHK_MACRO, ") {
+#endif
+"],
+ RtchkEnd = fun (Return, Operation, Barrier) ->
+ ["
+#if defined(", ?DW_RTCHK_MACRO, ")
+ } else { ", rtchk_fallback_call(Return, AC, Operation, Barrier), " }
+#endif\n"
+ ]
+ end,
+ ["
+#if (defined(", HaveNativeAtomicOps, ") \\
+ && (defined(ETHR_", ATMC, "_INLINE__) || defined(ETHR_ATOMIC_IMPL__)))
+",
+ translate_have_defs(AC),
+ cmpxchg_fallback_define(AC),
+ %% addr() is special
+ "
+
+
+/* --- addr() --- */
+
+", func_header(AC, inline_implementation, false, addr, none), "
+{", case DW of
+ true -> RtchkBegin;
+ false -> ""
+ end, "
+ return (", AddrAintT, " *) ETHR_", NATMC, "_ADDR_FUNC__(", NativeVar, ");
+",case DW of
+ true -> RtchkEnd(true, addr, none);
+ false -> ""
+ end, "
+}
+",
+ lists:map(fun (Op) ->
+ OpStr = opstr(Op),
+ ["
+
+/* --- ", OpStr, "() --- */
+
+",
+ lists:map(fun (B) ->
+ TryBarriers = try_barrier_order(B),
+ ["
+", func_header(AC, inline_implementation, false, Op, B), "
+{
+",
+ case is_return_op(AC, Op) of
+ true ->
+ [" ", RetType, " ", RetVar, ";\n"];
+ _ -> ""
+ end,
+ case DW of
+ true ->
+ [RtchkBegin,
+ "\n",
+ su_dw_native_barrier_op(AC, "#if", Op, B, hd(TryBarriers)),
+ lists:map(fun (NB) ->
+ su_dw_native_barrier_op(AC, "#elif", Op, B, NB)
+ end,
+ tl(TryBarriers)),
+ lists:map(fun (NB) ->
+ dw_native_barrier_op(AC, "#elif", "", Op, B, NB)
+ end,
+ TryBarriers),
+ case simple_fallback(AC, Op, B) of
+ "" ->
+ %% No simple fallback available;
+ %% use cmpxchg() fallbacks...
+ [cmpxchg_fallbacks(AC#atomic_context{'NATMC' = ["SU_", NATMC]}, true, Op, B),
+ cmpxchg_fallbacks(AC, false, Op, B),
+ "#else
+#error \"Missing implementation of ", Atomic, "_", opstr(Op), op_barrier_ext(B), "()!\"
+#endif
+"
+ ];
+ SimpleFallback ->
+ ["#else\n", SimpleFallback, "#endif\n"]
+ end,
+ RtchkEnd(false, Op, B), "\n"];
+ false ->
+ [native_barrier_op(AC, "#if", "", Op, B, hd(TryBarriers), true),
+ lists:map(fun (NB) ->
+ native_barrier_op(AC, "#elif", "", Op, B, NB, true)
+ end,
+ tl(TryBarriers)),
+ case simple_fallback(AC, Op, B) of
+ "" ->
+ %% No simple fallback available;
+ %% use cmpxchg() fallbacks...
+ [cmpxchg_fallbacks(AC, false, Op, B),
+ "#else
+#error \"Missing implementation of ", Atomic, "_", opstr(Op), op_barrier_ext(B), "()!\"
+#endif
+"
+ ];
+ SimpleFallback ->
+ ["#else\n", SimpleFallback, "#endif\n"]
+ end]
+ end,
+ case is_return_op(AC, Op) of
+ true ->
+ [" return ", RetVar, ";\n"];
+ false ->
+ ""
+ end,
+ "}\n"]
+ end,
+ ?BARRIERS)]
+ end,
+ case DW of
+ true -> ?DW_ATOMIC_OPS;
+ false -> ?ATOMIC_OPS
+ end),
+ "
+#endif /* ETHR_", ATMC, "_INLINE__ */
+"
+ ].
+
+atomic_implementation_comment(AtomicSize) ->
+ CSz = case AtomicSize of
+ "dword" -> "Double word size";
+ "word" -> "Word size";
+ _ -> AtomicSize ++ "-bit"
+ end,
+ ["
+
+/* ---------- ", CSz, " atomic implementation ---------- */
+
+"
+ ].
+
+write_h_file(FileName) ->
+ {ok, FD} = file:open(FileName, [write, latin1]),
+ ok = file:write(FD, comments()),
+ ok = file:write(FD, "
+#ifndef ETHR_ATOMICS_H__
+#define ETHR_ATOMICS_H__
+"
+ ),
+ ok = file:write(FD, h_top()),
+ ok = lists:foreach(fun (AtomicSize) ->
+ AC = atomic_context(AtomicSize),
+ ok = file:write(FD,
+ [atomic_implementation_comment(AtomicSize),
+ make_prototypes(AC),
+ make_implementations(AC)])
+ end,
+ ?ATOMIC_SIZES),
+ ok = file:write(FD, "
+#endif /* ETHR_ATOMICS_H__ */
+"
+ ),
+ ok = file:close(FD).
+
+
+make_native_impl_op(#atomic_context{dw = DW,
+ atomic = Atomic,
+ have_native_atomic_ops = HaveNativeAtomicOps,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3}, Op, B) ->
+ ["#if defined(", HaveNativeAtomicOps, ")",
+ case DW of
+ true -> [" && !defined(", ?DW_RTCHK_MACRO, ")"];
+ false -> ""
+ end,
+ "\n",
+ " ", op_call(Op, DW, [RetVar, " = "], [Atomic, "_", opstr(Op), op_barrier_ext(B), "__"], Arg1, Arg2, Arg3, ""),
+ "\n"].
+
+amc_op_dw_arg(#atomic_context{dw = false}) ->
+ "0";
+amc_op_dw_arg(#atomic_context{dw = true}) ->
+ "1".
+
+amc_op_arg_prefix(#atomic_context{dw = false}) ->
+ "&";
+amc_op_arg_prefix(#atomic_context{dw = true}) ->
+ "".
+
+amc_sint_arg(#atomic_context{dw = DW, arg2 = Arg}, arg2) ->
+ amc_sint_arg(DW, Arg);
+amc_sint_arg(#atomic_context{dw = DW, arg3 = Arg}, arg3) ->
+ amc_sint_arg(DW, Arg);
+amc_sint_arg(#atomic_context{dw = DW, ret_var = Arg}, ret_var) ->
+ amc_sint_arg(DW, Arg);
+amc_sint_arg(true, Arg) ->
+ [Arg, "->" ?DW_SINT_FIELD];
+amc_sint_arg(false, Arg) ->
+ ["&", Arg].
+
+amc_op_call(#atomic_context{arg1 = Arg1} = AC, init) ->
+ [" amc_init(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ");\n"];
+amc_op_call(#atomic_context{arg1 = Arg1} = AC, set) ->
+ [" amc_set(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ");\n"];
+amc_op_call(#atomic_context{dw = false, arg1 = Arg1} = AC, read) ->
+ [" amc_read(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, ret_var), ");\n"];
+amc_op_call(#atomic_context{dw = true, arg1 = Arg1} = AC, read) ->
+ [" amc_read(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ");\n"];
+amc_op_call(#atomic_context{dw = false, arg1 = Arg1, arg3 = Arg3, ret_var = RetVar} = AC, cmpxchg) ->
+ [" ", RetVar, " = ", Arg3, ";
+ (void) amc_cmpxchg(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ", ", amc_sint_arg(AC, ret_var), ");\n"];
+amc_op_call(#atomic_context{dw = true, arg1 = Arg1, ret_var = RetVar} = AC, cmpxchg) ->
+ [" ", RetVar, " = amc_cmpxchg(&", Arg1, "->amc, ", amc_op_dw_arg(AC), ", ", amc_op_arg_prefix(AC), Arg1, "->sint, ", amc_sint_arg(AC, arg2), ", ", amc_sint_arg(AC, arg3), ");\n"];
+amc_op_call(#atomic_context{dw = DW, arg1 = Arg1, arg2 = Arg2, arg3 = Arg3, ret_var = RetVar}, Op) ->
+ OpCtxt = #op_context{ret = RetVar, var = [Arg1,"->sint"], val1 = Arg2, val2 = Arg3},
+ OpStr = case DW of
+ true -> dw_op(Op, OpCtxt);
+ false -> op(Op, OpCtxt)
+ end,
+ [" ETHR_AMC_MODIFICATION_OPS__(&", Arg1, "->amc, ", OpStr, ");\n"].
+
+make_amc_fallback_op(#atomic_context{amc_fallback = false}, _Op, _B) ->
+ "";
+make_amc_fallback_op(#atomic_context{amc_fallback = true} = AC, Op, B) ->
+ NB = case Op of
+ read -> rb;
+ _ -> none
+ end,
+ {PreB, PostB} = xbarriers(Op, B, NB),
+ ["#elif defined(ETHR_AMC_FALLBACK__)\n",
+ case PreB of
+ "" -> "";
+ _ -> [" ", PreB, "\n"]
+ end,
+ amc_op_call(AC, Op),
+ case PostB of
+ "" -> "";
+ _ -> [" ", PostB, "\n"]
+ end].
+
+make_locked_fallback_op(#atomic_context{dw = DW,
+ ret_var = RetVar,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3}, Op, B) ->
+ OpStr = case DW of
+ true ->
+ dw_op(Op, #op_context{ret = RetVar,
+ var = [Arg1, "->" ?DW_SINT_FIELD],
+ val1 = [Arg2, "->" ?DW_SINT_FIELD],
+ val2 = [Arg3, "->" ?DW_SINT_FIELD]});
+ false ->
+ op(Op, #op_context{ret = RetVar,
+ var = ["*", Arg1],
+ val1 = Arg2,
+ val2 = Arg3})
+ end,
+ {PreB, PostB} = xbarriers(Op, B, none),
+ ["#else\n",
+ case PreB of
+ "" -> "";
+ _ -> [" ", PreB, "\n"]
+ end,
+ [" ETHR_ATOMIC_OP_FALLBACK_IMPL__(", Arg1, ", ", OpStr, ");\n"],
+ case PostB of
+ "" -> "";
+ _ -> [" ", PostB, "\n"]
+ end,
+ "#endif\n"].
+
+make_symbol_to_fallback_impl(#atomic_context{dw = true,
+ atomic = Atomic,
+ arg1 = Arg1,
+ arg2 = Arg2,
+ arg3 = Arg3} = AC,
+ Op, B) ->
+ ["
+#ifdef ", ?DW_RTCHK_MACRO, "
+", func_header(AC, implementation, false, Op, B), "
+{",
+ case Op of
+ init -> "";
+ _ -> ["\n ETHR_ASSERT(!ethr_not_inited__);"]
+ end, "
+ ETHR_ASSERT(", Arg1, ");
+ ", op_call(Op, true, "return", [Atomic, "_", opstr(Op), op_barrier_ext(B), "__"], Arg1, Arg2, Arg3, ""), "
+}
+#endif
+"
+ ];
+make_symbol_to_fallback_impl(_, _, _) ->
+ "".
+
+make_symbol_implementations(#atomic_context{dw = DW,
+ amc_fallback = AMC,
+ ret_type = RetType,
+ addr_aint_t = AddrAintT,
+ ret_var = RetVar,
+ arg1 = Arg1} = AC) ->
+ FallbackVar = case DW of
+ true -> ["(&", Arg1, "->fallback)"];
+ false -> Arg1
+ end,
+ ["
+",
+ case DW of
+ true -> ["
+/*
+ * Double word atomics need a runtime test.
+ */
+
+int ethr_have_native_dw_atomic(void)
+{
+ return ethr_have_native_dw_atomic__();
+}
+ "];
+ false -> ""
+ end, "
+
+/* --- addr() --- */
+
+", func_header(AC, implementation,
+ case DW of
+ true -> ?DW_FUNC_MACRO;
+ false -> false
+ end, addr, none), "
+{
+ ", AddrAintT, " *", RetVar, ";
+ ETHR_ASSERT(!ethr_not_inited__);
+ ETHR_ASSERT(", Arg1, ");
+", make_native_impl_op(AC, addr, none),
+ case AMC of
+ true -> ["#elif defined(ETHR_AMC_FALLBACK__)
+ ", RetVar ," = (", AddrAintT, " *) (", FallbackVar, ")->sint;"];
+ false -> ""
+ end, "
+#else
+ ", RetVar, " = (", AddrAintT, " *) ", FallbackVar, ";
+#endif
+ return ", RetVar, ";
+}
+",
+ make_symbol_to_fallback_impl(AC, addr, none),
+ lists:map(fun (Op) ->
+ ["
+
+/* -- ", opstr(Op), "() -- */
+
+",
+ lists:map(fun (B) ->
+ ["\n",
+ func_header(AC, implementation,
+ case DW of
+ true -> ?DW_FUNC_MACRO;
+ false -> false
+ end, Op, B),
+ "\n{\n",
+ case is_return_op(AC, Op) of
+ true -> [" ", RetType, " ", RetVar, ";\n"];
+ false -> ""
+ end,
+ case Op of
+ init -> "";
+ _ -> [" ETHR_ASSERT(!ethr_not_inited__);\n"]
+ end,
+ [" ETHR_ASSERT(", Arg1, ");\n"],
+ make_native_impl_op(AC, Op, B),
+ make_amc_fallback_op(AC#atomic_context{arg1 = FallbackVar}, Op, B),
+ make_locked_fallback_op(AC#atomic_context{arg1 = FallbackVar}, Op, B),
+ case is_return_op(AC, Op) of
+ true -> [" return ", RetVar, ";"
+ ];
+ false ->
+ ""
+ end,
+ "\n}\n",
+ make_symbol_to_fallback_impl(AC, Op, B)]
+ end,
+ ?BARRIERS)]
+ end,
+ case DW of
+ true -> ?DW_ATOMIC_OPS;
+ false -> ?ATOMIC_OPS
+ end)].
+
+make_info_functions() ->
+ ["
+
+
+/* --------- Info functions --------- */
+
+#if defined(", ?DW_RTCHK_MACRO, ")
+char *zero_ops[] = {NULL};
+#endif
+",
+ [lists:map(fun (NBits) ->
+ {DW, Bits} = case NBits of
+ "su_dw" -> {"su_dw_", ""};
+ "dw" -> {"dw_", ""};
+ _ -> {"", NBits}
+ end,
+ ["
+
+static char *native_", DW, "atomic", Bits, "_ops[] = {",
+ lists:map(fun (Op) ->
+ NOpStr = opstr(native(Op)),
+ CapNOpStr = to_upper(NOpStr),
+ lists:map(fun (B) ->
+ HaveNative = case NBits of
+ "dw" ->
+ "ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC";
+ "su_dw" ->
+ "ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC";
+ _ ->
+ [?HAVE_NATIVE_ATOMIC, NBits]
+ end,
+ NBExt = op_barrier_ext(B),
+ CapNBExt = to_upper(NBExt),
+ ["
+#ifdef ", HaveNative, "_", CapNOpStr, CapNBExt, "
+ \"", NOpStr, NBExt, "\",
+#endif"
+ ]
+ end,
+ ?BARRIERS)
+ end,
+ case NBits of
+ "dw" -> ?DW_ATOMIC_OPS;
+ "su_dw" -> ?DW_ATOMIC_OPS;
+ _ -> ?ATOMIC_OPS
+ end), "
+ NULL
+};
+
+char **
+ethr_native_", DW, "atomic", Bits, "_ops(void)
+{
+",
+ case DW of
+ "" -> "";
+ _ -> ["
+#if defined(", ?DW_RTCHK_MACRO, ")
+ if (!", ?DW_RTCHK_MACRO, ")
+ return &zero_ops[0];
+#endif"
+ ]
+ end, "
+ return &native_", DW, "atomic", Bits, "_ops[0];
+}
+"
+ ]
+ end, ["su_dw", "dw" | ?POTENTIAL_NBITS])]].
+
+write_c_file(FileName) ->
+ {ok, FD} = file:open(FileName, [write, latin1]),
+ ok = file:write(FD, comments()),
+ ok = file:write(FD, c_top()),
+ lists:foreach(fun (AtomicSize) ->
+ ok = file:write(FD,
+ [atomic_implementation_comment(AtomicSize),
+ make_symbol_implementations(atomic_context(AtomicSize))])
+ end,
+ ?ATOMIC_SIZES),
+ ok = file:write(FD, make_info_functions()).
+
+
+main([]) ->
+ case os:getenv("ERL_TOP") of
+ false ->
+ io:format("$ERL_TOP not set!~n", []),
+ halt(1);
+ ErlTop ->
+ HFile = filename:join(ErlTop, ?H_FILE),
+ WHFile = fun () ->
+ write_h_file(HFile)
+ end,
+ CFile = filename:join(ErlTop, ?C_FILE),
+ WCFile = fun () ->
+ write_c_file(CFile)
+ end,
+ case erlang:system_info(schedulers_online) of
+ 1 ->
+ WHFile(),
+ WCFile();
+ _ ->
+ {HPid, HMon} = spawn_monitor(WHFile),
+ {CPid, CMon} = spawn_monitor(WCFile),
+ receive
+ {'DOWN', HMon, process, HPid, HReason} ->
+ normal = HReason
+ end,
+ receive
+ {'DOWN', CMon, process, CPid, CReason} ->
+ normal = CReason
+ end
+ end,
+ io:format("Wrote: ~s~n", [HFile]),
+ io:format("Wrote: ~s~n", [CFile]),
+ init:stop()
+ end.
+
+a2l(A) ->
+ atom_to_list(A).
+
+opstr(A) ->
+ a2l(A).
+
+to_upper([]) ->
+ [];
+to_upper([C|Cs]) when is_list(C) ->
+ [to_upper(C)|to_upper(Cs)];
+to_upper([C|Cs]) when is_integer(C), 97 =< C, C =< 122 ->
+ [C-32|to_upper(Cs)];
+to_upper([C|Cs]) ->
+ [C|to_upper(Cs)].
+
+
+comments() ->
+ Years = case erlang:date() of
+ {2011, _, _} -> "2011";
+ {Y, _, _} -> "2011-"++integer_to_list(Y)
+ end,
+ ["/*
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ * This file was automatically generated by the
+ * \$ERL_TOP/erts/lib_src/utils/make_atomics_api script.
+ * If you need to make changes, edit the script and
+ * regenerate this file.
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ */
+
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB ", Years, ". All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the \"License\"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an \"AS IS\"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: The ethread atomics API
+ * Author: Rickard Green
+ */
+
+/*
+ * This file maps native atomic implementations to ethread
+ * API atomics. If no native atomic implementation
+ * is available, a less efficient fallback is used instead.
+ * The API consists of 32-bit size, word size (pointer size),
+ * and double word size atomics.
+ *
+ * The following atomic operations are implemented for
+ * 32-bit size, and word size atomics:
+",
+ lists:map(fun (Op) ->
+ [" * - ", opstr(Op), "\n"]
+ end,
+ ?ATOMIC_OPS),
+ " *
+ * The following atomic operations are implemented for
+ * double word size atomics:
+",
+ lists:map(fun (Op) ->
+ [" * - ", opstr(Op), "\n"]
+ end,
+ ?DW_ATOMIC_OPS),
+ " *
+ * Apart from a function implementing the atomic operation
+ * with unspecified memory barrier semantics, there are
+ * functions implementing each operation with the following
+ * memory barrier semantics:
+",
+ lists:map(fun (none) ->
+ "";
+ (rb) ->
+ [" * - rb (read barrier)\n"];
+ (wb) ->
+ [" * - wb (write barrier)\n"];
+ (acqb) ->
+ [" * - acqb (acquire barrier)\n"];
+ (relb) ->
+ [" * - relb (release barrier)\n"];
+ (mb) ->
+ [" * - mb (full memory barrier)\n"];
+ (B) ->
+ [" * - ", a2l(B), "\n"]
+ end,
+ ?BARRIERS),
+ " *
+ * We implement all of these operation/barrier
+ * combinations, regardless of whether they are useful
+ * or not (some of them are useless).
+ *
+ * Double word size atomic functions have the following
+ * form:
+ * ethr_dw_atomic_<OP>[_<BARRIER>]
+ *
+ * Word size atomic functions have the following
+ * form:
+ * ethr_atomic_<OP>[_<BARRIER>]
+ *
+ * 32-bit size atomic functions have the following
+ * form:
+ * ethr_atomic32_<OP>[_<BARRIER>]
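+ *
+ * For example, ethr_atomic_cmpxchg_acqb() is the word size
+ * compare-and-exchange with acquire barrier, and
+ * ethr_atomic32_inc_mb() is the 32-bit increment with full
+ * memory barrier.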
+ *
+ * Apart from the operation/barrier functions
+ * described above, 'addr' functions are also implemented;
+ * these return the actual memory address used by the
+ * atomic variable. The 'addr' functions have no barrier
+ * versions.
+ *
+ * The native atomic implementation does not need to
+ * implement all operation/barrier combinations.
+ * Functions that have no native implementation will be
+ * constructed from existing native functionality. These
+ * functions will perform the wanted operation and will
+ * produce sufficient memory barriers, but may
+ * in some cases be less efficient than pure native
+ * versions.
+ *
+ * When we create ethread API operation/barrier functions by
+ * adding barriers before and after native operations, it is
+ * assumed that:
+ * - A native read operation begins and ends with a load.
+ * - A native set operation begins and ends with a store.
+ * - An init operation begins with either a load or a store,
+ * and ends with either a load or a store.
+ * - All other operations begin with a load, and end with
+ * either a load or a store.
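+ *
+ * These assumptions determine which extra barriers, if any, must
+ * be emitted before and after a native operation to implement a
+ * requested barrier variant.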
+ *
+ * This is the minimum functionality that a native
+ * implementation needs to provide:
+ *
+ * - Functions that need to be implemented:
+ *
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_addr
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>]
+ * (at least one cmpxchg, with any barrier variant)
+ *
+ * - Macros that need to be defined:
+ *
+ * A macro informing about the presence of the native
+ * implementation:
+ *
+ * - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]
+ *
+ * A macro naming (a string constant) the implementation:
+ *
+ * - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL
+ *
+ * Each implemented native atomic function has to
+ * be accompanied by a defined macro of the following
+ * form, informing about its presence:
+ *
+ * - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>]
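+ *
+ * e.g. ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB for a native
+ * 32-bit cmpxchg with full memory barrier semantics.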
+ *
+ * A (sparc-v9 style) membar macro:
+ *
+ * - ETHR_MEMBAR(B)
+ *
+ * It takes a combination of the following macros
+ * or'ed together (using |):
+ *
+ * - ETHR_LoadLoad
+ * - ETHR_LoadStore
+ * - ETHR_StoreLoad
+ * - ETHR_StoreStore
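+ *
+ * e.g. ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore) orders earlier
+ * loads before later loads and later stores.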
+ *
+ */
+"
+ ].
+
+h_top() ->
+ ["
+#undef ETHR_AMC_FALLBACK__
+#undef ETHR_AMC_NO_ATMCS__
+#undef ETHR_AMC_ATMC_T__
+#undef ETHR_AMC_ATMC_FUNC__
+
+/* -- 32-bit atomics -- */
+
+#undef ETHR_NAINT32_T__
+#undef ETHR_NATMC32_FUNC__
+#undef ETHR_NATMC32_ADDR_FUNC__
+#undef ETHR_NATMC32_BITS__
+#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# define ETHR_NEED_NATMC32_ADDR
+# define ETHR_NATMC32_ADDR_FUNC__ ethr_native_atomic32_addr
+typedef ethr_native_atomic32_t ethr_atomic32_t;
+# define ETHR_NAINT32_T__ ethr_sint32_t
+# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_NATMC32_BITS__ 32
+#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_NEED_NATMC64_ADDR
+#ifdef ETHR_BIGENDIAN
+# define ETHR_NATMC32_ADDR_FUNC__(VAR) \\
+ (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1)
+#else
+# define ETHR_NATMC32_ADDR_FUNC__(VAR) \\
+ ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR)))
+#endif
+typedef ethr_native_atomic64_t ethr_atomic32_t;
+# define ETHR_NAINT32_T__ ethr_sint64_t
+# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC32_BITS__ 64
+#else
+/*
+ * No native atomics usable for 32-bit atomics :(
+ * Use fallback...
+ */
+typedef ethr_sint32_t ethr_atomic32_t;
+#endif
+
+#undef ETHR_ATMC32_INLINE__
+#ifdef ETHR_NATMC32_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_ATMC32_INLINE__
+# endif
+# define ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_ATMC32_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_ATMC32_FUNC_NAME_
+# define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_ATMC32_FUNC__
+#define ETHR_ATMC32_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+
+
+/* -- Word size atomics -- */
+
+#undef ETHR_NEED_NATMC32_ADDR
+#undef ETHR_NEED_NATMC64_ADDR
+
+#undef ETHR_NAINT_T__
+#undef ETHR_NATMC_FUNC__
+#undef ETHR_NATMC_ADDR_FUNC__
+#undef ETHR_NATMC_BITS__
+#if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr
+typedef ethr_native_atomic64_t ethr_atomic_t;
+# define ETHR_NAINT_T__ ethr_sint64_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_BITS__ 64
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# ifndef ETHR_NEED_NATMC32_ADDR
+# define ETHR_NEED_NATMC32_ADDR
+# endif
+# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic32_addr
+typedef ethr_native_atomic32_t ethr_atomic_t;
+# define ETHR_NAINT_T__ ethr_sint32_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_NATMC_BITS__ 32
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+#ifdef ETHR_BIGENDIAN
+# define ETHR_NATMC_ADDR_FUNC__(VAR) \\
+ (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1)
+#else
+# define ETHR_NATMC_ADDR_FUNC__(VAR) \\
+ ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR)))
+#endif
+typedef ethr_native_atomic64_t ethr_atomic_t;
+# define ETHR_NATMC_T__ ethr_native_atomic64_t
+# define ETHR_NAINT_T__ ethr_sint64_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_BITS__ 64
+#else
+/*
+ * No native atomics usable for pointer size atomics :(
+ * Use fallback...
+ */
+
+# if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 2
+# define ETHR_AMC_SINT_T__ ethr_sint32_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic32_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+typedef struct {
+ ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__];
+} ethr_amc_t;
+typedef struct {
+ ethr_amc_t amc;
+ ethr_sint_t sint;
+} ethr_atomic_t;
+# else /* locked fallback */
+typedef ethr_sint_t ethr_atomic_t;
+# endif
+#endif
+
+#undef ETHR_ATMC_INLINE__
+#ifdef ETHR_NATMC_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_ATMC_INLINE__
+# endif
+# define ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_ATMC_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_ATMC_FUNC_NAME_
+# define ETHR_INLINE_ATMC_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_ATMC_FUNC__
+#define ETHR_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X)
+
+/* -- Double word atomics -- */
+
+#undef ETHR_SU_DW_NAINT_T__
+#undef ETHR_SU_DW_NATMC_FUNC__
+#undef ETHR_SU_DW_NATMC_ADDR_FUNC__
+#undef ETHR_DW_NATMC_FUNC__
+#undef ETHR_DW_NATMC_ADDR_FUNC__
+#undef ETHR_DW_NATMC_BITS__
+#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)
+# define ETHR_NEED_DW_NATMC_ADDR
+# define ETHR_DW_NATMC_ADDR_FUNC__ ethr_native_dw_atomic_addr
+# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_dw_atomic_t
+# define ETHR_DW_NATMC_FUNC__(X) ethr_native_dw_atomic_ ## X
+# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_su_dw_atomic_ ## X
+# if ETHR_SIZEOF_PTR == 8
+# define ETHR_DW_NATMC_BITS__ 128
+# elif ETHR_SIZEOF_PTR == 4
+# define ETHR_DW_NATMC_BITS__ 64
+# else
+# error \"Word size not supported\"
+# endif
+# ifdef ETHR_NATIVE_SU_DW_SINT_T
+# define ETHR_SU_DW_NAINT_T__ ETHR_NATIVE_SU_DW_SINT_T
+# endif
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+# define ETHR_DW_NATMC_ADDR_FUNC__(VAR) \\
+ ((ethr_dw_sint_t *) ethr_native_atomic64_addr((VAR)))
+# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_atomic64_t
+# define ETHR_SU_DW_NAINT_T__ ethr_sint64_t
+# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_DW_NATMC_BITS__ 64
+#endif
+
+#if defined(", ?DW_RTCHK_MACRO, ")
+#define ", ?DW_FUNC_MACRO, "(X) ethr_dw_atomic_ ## X ## _fallback__
+#else
+#define ", ?DW_FUNC_MACRO, "(X) ethr_dw_atomic_ ## X
+#endif
+
+#if !defined(ETHR_DW_NATMC_BITS__) || defined(", ?DW_RTCHK_MACRO, ")
+# define ETHR_NEED_DW_FALLBACK__
+#endif
+
+#if defined(ETHR_NEED_DW_FALLBACK__)
+/*
+ * No native atomics usable for double word atomics :(
+ * Use fallback...
+ */
+
+# ifndef ETHR_AMC_FALLBACK__
+# if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 1
+# define ETHR_AMC_SINT_T__ ethr_sint_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X)
+# elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 2
+# define ETHR_AMC_SINT_T__ ethr_sint32_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic32_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+# endif
+# ifdef ETHR_AMC_FALLBACK__
+typedef struct {
+ ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__];
+} ethr_amc_t;
+# endif
+# endif
+
+typedef struct {
+#ifdef ETHR_AMC_FALLBACK__
+ ethr_amc_t amc;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_fallback_t;
+
+#endif
+
+typedef union {
+#ifdef ETHR_NATIVE_DW_ATOMIC_T__
+ ETHR_NATIVE_DW_ATOMIC_T__ native;
+#endif
+#ifdef ETHR_NEED_DW_FALLBACK__
+ ethr_dw_atomic_fallback_t fallback;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_t;
+
+typedef union {
+#ifdef ETHR_SU_DW_NAINT_T__
+ ETHR_SU_DW_NAINT_T__ ", ?SU_DW_SINT_FIELD, ";
+#endif
+ ethr_sint_t ", ?DW_SINT_FIELD, "[2];
+} ethr_dw_sint_t;
+
+#ifdef ETHR_BIGENDIAN
+# define ETHR_DW_SINT_LOW_WORD 1
+# define ETHR_DW_SINT_HIGH_WORD 0
+#else
+# define ETHR_DW_SINT_LOW_WORD 0
+# define ETHR_DW_SINT_HIGH_WORD 1
+#endif
+
+#undef ETHR_DW_ATMC_INLINE__
+#ifdef ETHR_DW_NATMC_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_DW_ATMC_INLINE__
+# endif
+# define ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_DW_ATMC_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_DW_ATMC_FUNC_NAME_
+# define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_DW_ATMC_FUNC__
+#define ETHR_DW_ATMC_FUNC__(X) ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_dw_atomic_ ## X)
+
+#if defined(ETHR_NEED_DW_ATMC_PROTOTYPES__)
+int ethr_have_native_dw_atomic(void);
+#endif
+#if defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+static ETHR_INLINE int
+ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_have_native_dw_atomic)(void)
+{
+#if defined(", ?DW_RTCHK_MACRO, ")
+ return ", ?DW_RTCHK_MACRO, ";
+#elif defined(ETHR_DW_NATMC_BITS__)
+ return 1;
+#else
+ return 0;
+#endif
+}
+#endif
+
+/* -- Misc -- */
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+/*
+ * Unusual values are used by read() fallbacks implemented via cmpxchg().
+ * We want to use an unusual value in hope that it is more efficient
+ * not to match the value in memory.
+ *
+ * - Negative integer values are probably more unusual.
+ * - Very large absolute integer values are probably more unusual.
+ * - Odd pointers are probably more unusual (only char pointers can be odd).
+ */
+# define ETHR_UNUSUAL_SINT32_VAL__ ((ethr_sint32_t) 0x81818181)
+# if ETHR_SIZEOF_PTR == 4
+# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) ETHR_UNUSUAL_SINT32_VAL__)
+# elif ETHR_SIZEOF_PTR == 8
+# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) 0x8181818181818181L)
+# else
+# error \"Word size not supported\"
+# endif
+# if defined(ETHR_NEED_DW_NATMC_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR)
+# error \"No ethr_native_dw_atomic_addr() available\"
+# endif
+# if defined(ETHR_NEED_NATMC32_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR)
+# error \"No ethr_native_atomic32_addr() available\"
+# endif
+# if defined(ETHR_NEED_NATMC64_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR)
+# error \"No ethr_native_atomic64_addr() available\"
+# endif
+#endif
+
+#if defined(__GNUC__)
+# ifndef ETHR_COMPILER_BARRIER
+# define ETHR_COMPILER_BARRIER __asm__ __volatile__(\"\" : : : \"memory\")
+# endif
+#elif defined(ETHR_WIN32_THREADS)
+# ifndef ETHR_COMPILER_BARRIER
+# include <intrin.h>
+# pragma intrinsic(_ReadWriteBarrier)
+# define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
+# endif
+#endif
+
+void ethr_compiler_barrier_fallback(void);
+#ifndef ETHR_COMPILER_BARRIER
+# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback()
+#endif
+
+int ethr_init_atomics(void);
+
+/* info */
+char **ethr_native_atomic32_ops(void);
+char **ethr_native_atomic64_ops(void);
+char **ethr_native_dw_atomic_ops(void);
+char **ethr_native_su_dw_atomic_ops(void);
+
+#if !defined(ETHR_DW_NATMC_BITS__) && !defined(ETHR_NATMC_BITS__) && !defined(ETHR_NATMC32_BITS__)
+/*
+ * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only,
+ * i.e. when no native atomic implementation exists and only our
+ * lock based atomic fallback is used, a noop is sufficient.
+ */
+# undef ETHR_MEMORY_BARRIER
+# undef ETHR_WRITE_MEMORY_BARRIER
+# undef ETHR_READ_MEMORY_BARRIER
+# undef ETHR_READ_DEPEND_MEMORY_BARRIER
+# undef ETHR_MEMBAR
+# define ETHR_MEMBAR(B) do { } while (0)
+#endif
+
+#ifndef ETHR_MEMBAR
+# error \"No ETHR_MEMBAR defined\"
+#endif
+
+#define ETHR_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore)
+#define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMBAR(ETHR_StoreStore)
+#define ETHR_READ_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad)
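+/*
+ * If the platform defines its own ETHR_READ_DEPEND_MEMORY_BARRIER,
+ * dependent loads presumably need a real barrier instruction (as on
+ * e.g. Alpha); otherwise a compiler barrier suffices and
+ * ETHR_ORDERED_READ_DEPEND signals that dependent loads are ordered
+ * by the hardware.
+ */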
+#ifdef ETHR_READ_DEPEND_MEMORY_BARRIER
+# undef ETHR_ORDERED_READ_DEPEND
+#else
+# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER
+# define ETHR_ORDERED_READ_DEPEND
+#endif
+"].
+
+c_top() ->
+ ["
+
+#ifdef HAVE_CONFIG_H
+#include \"config.h\"
+#endif
+
+#define ETHR_TRY_INLINE_FUNCS
+#define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X ## __
+#define ETHR_INLINE_ATMC_FUNC_NAME_(X) X ## __
+#define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X ## __
+#define ETHR_ATOMIC_IMPL__
+
+#include \"ethread.h\"
+#include \"ethr_internal.h\"
+
+#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \\
+ || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS))
+/*
+ * Spinlock based fallback for atomics used in absence of a native
+ * implementation.
+ */
+
+#define ETHR_ATMC_FLLBK_ADDR_BITS ", ?ETHR_ATMC_FLLBK_ADDR_BITS, "
+#define ETHR_ATMC_FLLBK_ADDR_SHIFT ", ?ETHR_ATMC_FLLBK_ADDR_SHIFT, "
+
+typedef struct {
+ union {
+ ethr_spinlock_t lck;
+ char buf[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_spinlock_t))];
+ } u;
+} ethr_atomic_protection_t;
+
+extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS];
+
+#define ETHR_ATOMIC_PTR2LCK__(PTR) \\
+(&ethr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATMC_FLLBK_ADDR_SHIFT) \\
+ & ((1 << ETHR_ATMC_FLLBK_ADDR_BITS) - 1))].u.lck)
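+
+/*
+ * The address of the atomic variable, shifted and masked, selects
+ * one of the 2^ETHR_ATMC_FLLBK_ADDR_BITS spinlocks; distinct
+ * variables may hash to the same lock, which is safe but may add
+ * contention.
+ */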
+
+
+#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS) \\
+do { \\
+ ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP)); \\
+ ethr_spin_lock(slp__); \\
+ { EXPS; } \\
+ ethr_spin_unlock(slp__); \\
+} while (0)
+
+ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS];
+
+#endif
+
+", make_amc_fallback(), "
+
+int
+ethr_init_atomics(void)
+{
+#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \\
+ || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS))
+ int i;
+ for (i = 0; i < (1 << ETHR_ATMC_FLLBK_ADDR_BITS); i++) {
+ int res = ethr_spinlock_init(&ethr_atomic_protection__[i].u.lck);
+ if (res != 0)
+ return res;
+ }
+#endif
+ return 0;
+}
+"].
+
+make_amc_fallback() ->
+ ["
+#if defined(ETHR_AMC_FALLBACK__)
+
+/*
+ * Fallback for large sized (word and/or double word size) atomics using
+ * an \"Atomic Modification Counter\" based on smaller sized native atomics.
+ *
+ * We use a 63-bit modification counter and a one-bit exclusive flag.
+ * If 32-bit native atomics are used, we need two 32-bit native atomics.
+ * The exclusive flag is the least significant bit, or if multiple atomics
+ * are used, the least significant bit of the least significant atomic.
+ *
+ * When using the AMC fallback the following is true:
+ * - Reads of the same atomic variable can be done in parallel.
+ * - Uncontended reads don't cause any cache line invalidations,
+ * since no modifications are done.
+ * - Assuming that the AMC atomic(s) and the integer(s) containing the
+ * value of the implemented atomic reside in the same cache line,
+ * modifications will only invalidate one cache line.
+ *
+ * When using the spinlock based fallback none of the above is true,
+ * however, the spinlock based fallback consumes less memory.
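+ *
+ * In sketch, a reader (see amc_try_read() below) proceeds as follows:
+ *   1. Read the modification counter.
+ *   2. Abort if the exclusive flag is set.
+ *   3. Read the value of the implemented atomic.
+ *   4. Re-read the modification counter; the read is consistent
+ *      only if the counter is unchanged.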
+ */
+
+# if ETHR_AMC_NO_ATMCS__ != 1 && ETHR_AMC_NO_ATMCS__ != 2
+# error \"Not supported\"
+# endif
+# define ETHR_AMC_MAX_TRY_READ__ 10
+# ifdef ETHR_DEBUG
+# define ETHR_DBG_CHK_EXCL_STATE(ASP, S) \\
+do { \\
+ ETHR_AMC_SINT_T__ act = ETHR_AMC_ATMC_FUNC__(read)(&(ASP)->atomic[0]); \\
+ ETHR_ASSERT(act == (S) + 1); \\
+ ETHR_ASSERT(act & 1); \\
+} while (0)
+# else
+# define ETHR_DBG_CHK_EXCL_STATE(ASP, S)
+# endif
+
+static ETHR_INLINE void
+amc_init(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
+{
+ avar[0] = val[0];
+ if (dw)
+ avar[1] = val[1];
+#if ETHR_AMC_NO_ATMCS__ == 2
+ ETHR_AMC_ATMC_FUNC__(init)(&amc->atomic[1], 0);
+#endif
+ ETHR_AMC_ATMC_FUNC__(init_wb)(&amc->atomic[0], 0);
+}
+
+static ETHR_INLINE ETHR_AMC_SINT_T__
+amc_set_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ prev_state0)
+{
+ ETHR_AMC_SINT_T__ state0 = prev_state0;
+ /* Set exclusive flag. */
+ while (1) {
+ ETHR_AMC_SINT_T__ act_state0, new_state0;
+ while (state0 & 1) { /* Wait until exclusive bit has been cleared */
+ ETHR_SPIN_BODY;
+ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+ }
+ /* Try to set exclusive bit */
+ new_state0 = state0 + 1;
+ act_state0 = ETHR_AMC_ATMC_FUNC__(cmpxchg_acqb)(&amc->atomic[0],
+ new_state0,
+ state0);
+ if (state0 == act_state0)
+ return state0; /* old state0 */
+ state0 = act_state0;
+ }
+}
+
+static ETHR_INLINE void
+amc_inc_mc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0)
+{
+ ETHR_AMC_SINT_T__ state0 = old_state0;
+
+ /* Increment modification counter and reset exclusive flag. */
+
+ ETHR_DBG_CHK_EXCL_STATE(amc, state0);
+
+ state0 += 2;
+
+ ETHR_ASSERT((state0 & 1) == 0);
+
+#if ETHR_AMC_NO_ATMCS__ == 2
+ if (state0 == 0) {
+ /*
+ * state0 wrapped, so we need to increment state1. There is no need
+ * for an atomic inc op, since this is always done while holding the
+ * exclusive flag.
+ */
+ ETHR_AMC_SINT_T__ state1 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]);
+ state1++;
+ ETHR_AMC_ATMC_FUNC__(set)(&amc->atomic[1], state1);
+ }
+#endif
+ ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], state0);
+}
+
+static ETHR_INLINE void
+amc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0)
+{
+ ETHR_DBG_CHK_EXCL_STATE(amc, old_state0);
+ /*
+ * Reset exclusive flag, but leave modification counter unchanged,
+ * i.e., restore state to what it was before setting exclusive
+ * flag.
+ */
+ ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], old_state0);
+}
+
+static ETHR_INLINE void
+amc_set(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
+{
+ ETHR_AMC_SINT_T__ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+
+ state0 = amc_set_excl(amc, state0);
+
+ avar[0] = val[0];
+ if (dw)
+ avar[1] = val[1];
+
+ amc_inc_mc_unset_excl(amc, state0);
+}
+
+static ETHR_INLINE int
+amc_try_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar,
+ ethr_sint_t *val, ETHR_AMC_SINT_T__ *state0p)
+{
+ /* *state0p should contain last read value if aborting */
+ ETHR_AMC_SINT_T__ old_state0;
+#if ETHR_AMC_NO_ATMCS__ == 2
+ ETHR_AMC_SINT_T__ state1;
+ int abrt;
+#endif
+
+ *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]);
+ if ((*state0p) & 1)
+ return 0; /* exclusive flag set; abort */
+#if ETHR_AMC_NO_ATMCS__ == 2
+ state1 = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[1]);
+#else
+ ETHR_COMPILER_BARRIER;
+#endif
+
+ val[0] = avar[0];
+ if (dw)
+ val[1] = avar[1];
+
+ ETHR_READ_MEMORY_BARRIER;
+
+ /*
+ * Abort if the state has changed (i.e., either the exclusive
+ * flag is set, or the modification counter has changed).
+ */
+ old_state0 = *state0p;
+#if ETHR_AMC_NO_ATMCS__ == 2
+ *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]);
+ abrt = (old_state0 != *state0p);
+ abrt |= (state1 != ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]));
+ return abrt == 0;
+#else
+ *state0p = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+ return old_state0 == *state0p;
+#endif
+}
+
+static ETHR_INLINE void
+amc_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
+{
+ ETHR_AMC_SINT_T__ state0;
+ int i;
+
+#if ETHR_AMC_MAX_TRY_READ__ == 0
+ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
+#else
+ for (i = 0; i < ETHR_AMC_MAX_TRY_READ__; i++) {
+ if (amc_try_read(amc, dw, avar, val, &state0))
+ return; /* read success */
+ ETHR_SPIN_BODY;
+ }
+#endif
+
+ state0 = amc_set_excl(amc, state0);
+
+ val[0] = avar[0];
+ if (dw)
+ val[1] = avar[1];
+
+ amc_unset_excl(amc, state0);
+}
+
+static ETHR_INLINE int
+amc_cmpxchg(ethr_amc_t *amc, int dw, ethr_sint_t *avar,
+ ethr_sint_t *new, ethr_sint_t *xchg)
+{
+ ethr_sint_t val[2];
+ ETHR_AMC_SINT_T__ state0;
+
+ if (amc_try_read(amc, dw, avar, val, &state0)) {
+ if (val[0] != xchg[0] || (dw && val[1] != xchg[1])) {
+ xchg[0] = val[0];
+ if (dw)
+ xchg[1] = val[1];
+ return 0; /* failed */
+ }
+ /* Operation will succeed if not interrupted */
+ }
+
+ state0 = amc_set_excl(amc, state0);
+
+ if (xchg[0] != avar[0] || (dw && xchg[1] != avar[1])) {
+ xchg[0] = avar[0];
+ if (dw)
+ xchg[1] = avar[1];
+
+ ETHR_DBG_CHK_EXCL_STATE(amc, state0);
+
+ amc_unset_excl(amc, state0);
+ return 0; /* failed */
+ }
+
+ avar[0] = new[0];
+ if (dw)
+ avar[1] = new[1];
+
+ amc_inc_mc_unset_excl(amc, state0);
+ return 1;
+}
+
+
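+/*
+ * Perform the modification operations OPS while holding the
+ * exclusive flag; releasing the flag increments the modification
+ * counter, which invalidates any concurrent reads in progress.
+ */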
+#define ETHR_AMC_MODIFICATION_OPS__(AMC, OPS) \\
+do { \\
+ ETHR_AMC_SINT_T__ state0__; \\
+ state0__ = ETHR_AMC_ATMC_FUNC__(read)(&(AMC)->atomic[0]); \\
+ state0__ = amc_set_excl((AMC), state0__); \\
+ { OPS; } \\
+ amc_inc_mc_unset_excl((AMC), state0__); \\
+} while (0)
+
+#endif /* amc fallback */
+"].
+
diff --git a/erts/lib_src/win/ethr_event.c b/erts/lib_src/win/ethr_event.c
index 68f093f49c..bc2f635c26 100644
--- a/erts/lib_src/win/ethr_event.c
+++ b/erts/lib_src/win/ethr_event.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2009-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2009-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -28,13 +28,10 @@
/* --- Windows implementation of thread events ------------------------------ */
-#pragma intrinsic(_InterlockedExchangeAdd)
-#pragma intrinsic(_InterlockedCompareExchange)
-
int
ethr_event_init(ethr_event *e)
{
- e->state = ETHR_EVENT_OFF__;
+ ethr_atomic32_init(&e->state, ETHR_EVENT_OFF__);
e->handle = CreateEvent(NULL, FALSE, FALSE, NULL);
if (e->handle == INVALID_HANDLE_VALUE)
return ethr_win_get_errno__();
@@ -63,7 +60,6 @@ ethr_event_reset(ethr_event *e)
static ETHR_INLINE int
wait(ethr_event *e, int spincount)
{
- LONG state;
DWORD code;
int sc, res, until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
@@ -73,13 +69,9 @@ wait(ethr_event *e, int spincount)
sc = spincount;
while (1) {
- long on;
+ ethr_sint32_t state;
while (1) {
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
- state = e->state;
-#else
- state = _InterlockedExchangeAdd(&e->state, (LONG) 0);
-#endif
+ state = ethr_atomic32_read(&e->state);
if (state == ETHR_EVENT_ON__)
return 0;
if (sc == 0)
@@ -95,9 +87,9 @@ wait(ethr_event *e, int spincount)
}
if (state != ETHR_EVENT_OFF_WAITER__) {
- state = _InterlockedCompareExchange(&e->state,
- ETHR_EVENT_OFF_WAITER__,
- ETHR_EVENT_OFF__);
+ state = ethr_atomic32_cmpxchg(&e->state,
+ ETHR_EVENT_OFF_WAITER__,
+ ETHR_EVENT_OFF__);
if (state == ETHR_EVENT_ON__)
return 0;
ETHR_ASSERT(state == ETHR_EVENT_OFF__);
diff --git a/erts/lib_src/win/ethread.c b/erts/lib_src/win/ethread.c
index 789a360b11..3abda6de4c 100644
--- a/erts/lib_src/win/ethread.c
+++ b/erts/lib_src/win/ethread.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -35,6 +35,7 @@
#include <winerror.h>
#include <stdio.h>
#include <limits.h>
+#include <intrin.h>
#define ETHR_INLINE_FUNC_NAME_(X) X ## __
#define ETHREAD_IMPL__
@@ -158,6 +159,25 @@ ethr_abort__(void)
#endif
}
+#if defined(ETHR_X86_RUNTIME_CONF__)
+
+#pragma intrinsic(__cpuid)
+
+void
+ethr_x86_cpuid__(int *eax, int *ebx, int *ecx, int *edx)
+{
+ int CPUInfo[4];
+
+ __cpuid(CPUInfo, *eax);
+
+ *eax = CPUInfo[0];
+ *ebx = CPUInfo[1];
+ *ecx = CPUInfo[2];
+ *edx = CPUInfo[3];
+}
+
+#endif /* ETHR_X86_RUNTIME_CONF__ */
+
/*
* ----------------------------------------------------------------------------
* Exported functions
diff --git a/erts/test/ethread_SUITE.erl b/erts/test/ethread_SUITE.erl
index 71d8c1c679..4206bebfe7 100644
--- a/erts/test/ethread_SUITE.erl
+++ b/erts/test/ethread_SUITE.erl
@@ -47,14 +47,29 @@
spinlock/1,
rwspinlock/1,
rwmutex/1,
- atomic/1]).
+ atomic/1,
+ dw_atomic_massage/1]).
-include_lib("test_server/include/test_server.hrl").
-tests() ->
- [create_join_thread, equal_tids, mutex, try_lock_mutex,
- cond_wait, broadcast, detached_thread,
- max_threads, tsd, spinlock, rwspinlock, rwmutex, atomic].
+tests() ->
+ [create_join_thread,
+ equal_tids,
+ mutex,
+ try_lock_mutex,
+ cond_wait,
+ broadcast,
+ detached_thread,
+ max_threads,
+ tsd,
+ spinlock,
+ rwspinlock,
+ rwmutex,
+ atomic,
+ dw_atomic_massage].
+
+all(doc) -> [];
+all(suite) -> tests().
suite() -> [{ct_hooks,[ts_install_cth]}].
@@ -211,6 +226,13 @@ atomic(suite) ->
atomic(Config) ->
run_case(Config, "atomic", "").
+dw_atomic_massage(doc) ->
+ ["Massage double word atomics"];
+dw_atomic_massage(suite) ->
+ [];
+dw_atomic_massage(Config) ->
+ run_case(Config, "dw_atomic_massage", "").
+
%%
%%
%% Auxiliary functions
diff --git a/erts/test/ethread_SUITE_data/ethread_tests.c b/erts/test/ethread_SUITE_data/ethread_tests.c
index 0b59ff5aa6..7e7e133d6c 100644
--- a/erts/test/ethread_SUITE_data/ethread_tests.c
+++ b/erts/test/ethread_SUITE_data/ethread_tests.c
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2004-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2004-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -1310,6 +1310,9 @@ rwmutex_test(void)
* Tests atomics.
*/
+#define AT_AINT32_MAX 0x7fffffff
+#define AT_AINT32_MIN 0x80000000
+
#define AT_THREADS 4
#define AT_ITER 10000
@@ -1320,12 +1323,428 @@ static ethr_atomic_t at_go;
static ethr_atomic_t at_done;
static ethr_atomic_t at_data;
+#define AT_TEST_INIT(T, A, B) \
+do { \
+ ethr_ ## A ## _init ## B(&A, 17); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 17); \
+} while (0)
+
+#define AT_TEST_SET(T, A, B) \
+do { \
+ ethr_ ## A ## _set ## B(&A, 4711); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \
+} while (0)
+
+#define AT_TEST_XCHG(T, A, B) \
+do { \
+ ethr_ ## A ## _set ## B(&A, 4711); \
+ ASSERT(ethr_ ## A ## _xchg ## B(&A, 17) == 4711); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 17); \
+} while (0)
+
+#define AT_TEST_CMPXCHG(T, A, B) \
+do { \
+ ethr_ ## A ## _set ## B(&A, 4711); \
+ ASSERT(ethr_ ## A ## _cmpxchg ## B(&A, 17, 33) == 4711); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \
+ ASSERT(ethr_ ## A ## _cmpxchg ## B(&A, 17, 4711) == 4711); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 17); \
+} while (0)
+
+#define AT_TEST_ADD_READ(T, A, B) \
+do { \
+ T var_ = AT_AINT32_MAX; \
+ var_ += 4711; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \
+ ASSERT(ethr_ ## A ## _add_read ## B(&A, 4711) == var_); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ var_ = AT_AINT32_MIN; \
+ var_ -= 4711; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \
+ ASSERT(ethr_ ## A ## _add_read ## B(&A, -4711) == var_); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ ethr_ ## A ## _set ## B(&A, 4711); \
+ ASSERT(ethr_ ## A ## _add_read ## B(&A, 10) == 4721); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 4721); \
+} while (0)
+
+#define AT_TEST_ADD(T, A, B) \
+do { \
+ T var_ = AT_AINT32_MAX; \
+ var_ += 4711; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \
+ ethr_ ## A ## _add ## B(&A, 4711); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ var_ = AT_AINT32_MIN; \
+ var_ -= 4711; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \
+ ethr_ ## A ## _add ## B(&A, -4711); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ ethr_ ## A ## _set ## B(&A, 11); \
+ ethr_ ## A ## _add ## B(&A, 4700); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \
+} while (0)
+
+#define AT_TEST_INC_READ(T, A, B) \
+do { \
+ T var_ = AT_AINT32_MAX; \
+ var_++; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \
+ ASSERT(ethr_ ## A ## _inc_read ## B(&A) == var_); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ ethr_ ## A ## _set ## B(&A, 4710); \
+ ASSERT(ethr_ ## A ## _inc_read ## B(&A) == 4711); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \
+} while (0)
+
+#define AT_TEST_DEC_READ(T, A, B) \
+do { \
+ T var_ = AT_AINT32_MIN; \
+ var_--; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \
+ ASSERT(ethr_ ## A ## _dec_read ## B(&A) == var_); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ ethr_ ## A ## _set ## B(&A, 17); \
+ ASSERT(ethr_ ## A ## _dec_read ## B(&A) == 16); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 16); \
+} while (0)
+
+
+#define AT_TEST_INC(T, A, B) \
+do { \
+ T var_ = AT_AINT32_MAX; \
+ var_++; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MAX); \
+ ethr_ ## A ## _inc ## B(&A); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ ethr_ ## A ## _set ## B(&A, 4710); \
+ ethr_ ## A ## _inc ## B(&A); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 4711); \
+} while (0)
+
+#define AT_TEST_DEC(T, A, B) \
+do { \
+ T var_ = AT_AINT32_MIN; \
+ var_--; \
+ ethr_ ## A ## _set ## B(&A, AT_AINT32_MIN); \
+ ethr_ ## A ## _dec ## B(&A); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == var_); \
+ ethr_ ## A ## _set ## B(&A, 17); \
+ ethr_ ## A ## _dec ## B(&A); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 16); \
+} while (0)
+
+#define AT_TEST_READ_BAND(T, A, B) \
+do { \
+ ethr_ ## A ## _set ## B(&A, 0x13131313); \
+ ASSERT(ethr_ ## A ## _read_band ## B(&A, 0x31313131) == 0x13131313); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 0x11111111); \
+} while (0)
+
+#define AT_TEST_READ_BOR(T, A, B) \
+do { \
+ ethr_ ## A ## _set ## B(&A, 0x11111111); \
+ ASSERT(ethr_ ## A ## _read_bor ## B(&A, 0x23232323) == 0x11111111); \
+ ASSERT(ethr_ ## A ## _read ## B(&A) == 0x33333333); \
+} while (0)
+
+
+static void
+atomic_basic_test(void)
+{
+ /*
+ * Verify that each op does what it is expected
+ * to do for at least one input.
+ */
+ ethr_atomic32_t atomic32;
+ ethr_atomic_t atomic;
+
+ print_line("AT_AINT32_MAX=%d",AT_AINT32_MAX);
+ print_line("AT_AINT32_MIN=%d",AT_AINT32_MIN);
+
+ AT_TEST_INIT(ethr_sint32_t, atomic32, );
+ AT_TEST_SET(ethr_sint32_t, atomic32, );
+ AT_TEST_XCHG(ethr_sint32_t, atomic32, );
+ AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, );
+ AT_TEST_ADD_READ(ethr_sint32_t, atomic32, );
+ AT_TEST_ADD(ethr_sint32_t, atomic32, );
+ AT_TEST_INC_READ(ethr_sint32_t, atomic32, );
+ AT_TEST_DEC_READ(ethr_sint32_t, atomic32, );
+ AT_TEST_INC(ethr_sint32_t, atomic32, );
+ AT_TEST_DEC(ethr_sint32_t, atomic32, );
+ AT_TEST_READ_BAND(ethr_sint32_t, atomic32, );
+ AT_TEST_READ_BOR(ethr_sint32_t, atomic32, );
+
+ AT_TEST_INIT(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_SET(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_XCHG(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_ADD(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_INC_READ(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_INC(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_DEC(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _acqb);
+ AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _acqb);
+
+ AT_TEST_INIT(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_SET(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_XCHG(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_ADD(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_INC_READ(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_INC(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_DEC(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _relb);
+ AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _relb);
+
+ AT_TEST_INIT(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_SET(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_XCHG(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_ADD(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_INC_READ(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_INC(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_DEC(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _rb);
+ AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _rb);
+
+ AT_TEST_INIT(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_SET(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_XCHG(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_ADD(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_INC_READ(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_INC(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_DEC(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _wb);
+ AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _wb);
+
+ AT_TEST_INIT(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_SET(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_XCHG(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_ADD(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_INC_READ(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_INC(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_DEC(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _mb);
+ AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _mb);
+
+ AT_TEST_INIT(ethr_sint_t, atomic, );
+ AT_TEST_SET(ethr_sint_t, atomic, );
+ AT_TEST_XCHG(ethr_sint_t, atomic, );
+ AT_TEST_CMPXCHG(ethr_sint_t, atomic, );
+ AT_TEST_ADD_READ(ethr_sint_t, atomic, );
+ AT_TEST_ADD(ethr_sint_t, atomic, );
+ AT_TEST_INC_READ(ethr_sint_t, atomic, );
+ AT_TEST_DEC_READ(ethr_sint_t, atomic, );
+ AT_TEST_INC(ethr_sint_t, atomic, );
+ AT_TEST_DEC(ethr_sint_t, atomic, );
+ AT_TEST_READ_BAND(ethr_sint_t, atomic, );
+ AT_TEST_READ_BOR(ethr_sint_t, atomic, );
+
+ AT_TEST_INIT(ethr_sint_t, atomic, _acqb);
+ AT_TEST_SET(ethr_sint_t, atomic, _acqb);
+ AT_TEST_XCHG(ethr_sint_t, atomic, _acqb);
+ AT_TEST_CMPXCHG(ethr_sint_t, atomic, _acqb);
+ AT_TEST_ADD_READ(ethr_sint_t, atomic, _acqb);
+ AT_TEST_ADD(ethr_sint_t, atomic, _acqb);
+ AT_TEST_INC_READ(ethr_sint_t, atomic, _acqb);
+ AT_TEST_DEC_READ(ethr_sint_t, atomic, _acqb);
+ AT_TEST_INC(ethr_sint_t, atomic, _acqb);
+ AT_TEST_DEC(ethr_sint_t, atomic, _acqb);
+ AT_TEST_READ_BAND(ethr_sint_t, atomic, _acqb);
+ AT_TEST_READ_BOR(ethr_sint_t, atomic, _acqb);
+
+ AT_TEST_INIT(ethr_sint_t, atomic, _relb);
+ AT_TEST_SET(ethr_sint_t, atomic, _relb);
+ AT_TEST_XCHG(ethr_sint_t, atomic, _relb);
+ AT_TEST_CMPXCHG(ethr_sint_t, atomic, _relb);
+ AT_TEST_ADD_READ(ethr_sint_t, atomic, _relb);
+ AT_TEST_ADD(ethr_sint_t, atomic, _relb);
+ AT_TEST_INC_READ(ethr_sint_t, atomic, _relb);
+ AT_TEST_DEC_READ(ethr_sint_t, atomic, _relb);
+ AT_TEST_INC(ethr_sint_t, atomic, _relb);
+ AT_TEST_DEC(ethr_sint_t, atomic, _relb);
+ AT_TEST_READ_BAND(ethr_sint_t, atomic, _relb);
+ AT_TEST_READ_BOR(ethr_sint_t, atomic, _relb);
+
+ AT_TEST_INIT(ethr_sint_t, atomic, _rb);
+ AT_TEST_SET(ethr_sint_t, atomic, _rb);
+ AT_TEST_XCHG(ethr_sint_t, atomic, _rb);
+ AT_TEST_CMPXCHG(ethr_sint_t, atomic, _rb);
+ AT_TEST_ADD_READ(ethr_sint_t, atomic, _rb);
+ AT_TEST_ADD(ethr_sint_t, atomic, _rb);
+ AT_TEST_INC_READ(ethr_sint_t, atomic, _rb);
+ AT_TEST_DEC_READ(ethr_sint_t, atomic, _rb);
+ AT_TEST_INC(ethr_sint_t, atomic, _rb);
+ AT_TEST_DEC(ethr_sint_t, atomic, _rb);
+ AT_TEST_READ_BAND(ethr_sint_t, atomic, _rb);
+ AT_TEST_READ_BOR(ethr_sint_t, atomic, _rb);
+
+ AT_TEST_INIT(ethr_sint_t, atomic, _wb);
+ AT_TEST_SET(ethr_sint_t, atomic, _wb);
+ AT_TEST_XCHG(ethr_sint_t, atomic, _wb);
+ AT_TEST_CMPXCHG(ethr_sint_t, atomic, _wb);
+ AT_TEST_ADD_READ(ethr_sint_t, atomic, _wb);
+ AT_TEST_ADD(ethr_sint_t, atomic, _wb);
+ AT_TEST_INC_READ(ethr_sint_t, atomic, _wb);
+ AT_TEST_DEC_READ(ethr_sint_t, atomic, _wb);
+ AT_TEST_INC(ethr_sint_t, atomic, _wb);
+ AT_TEST_DEC(ethr_sint_t, atomic, _wb);
+ AT_TEST_READ_BAND(ethr_sint_t, atomic, _wb);
+ AT_TEST_READ_BOR(ethr_sint_t, atomic, _wb);
+
+ AT_TEST_INIT(ethr_sint_t, atomic, _mb);
+ AT_TEST_SET(ethr_sint_t, atomic, _mb);
+ AT_TEST_XCHG(ethr_sint_t, atomic, _mb);
+ AT_TEST_CMPXCHG(ethr_sint_t, atomic, _mb);
+ AT_TEST_ADD_READ(ethr_sint_t, atomic, _mb);
+ AT_TEST_ADD(ethr_sint_t, atomic, _mb);
+ AT_TEST_INC_READ(ethr_sint_t, atomic, _mb);
+ AT_TEST_DEC_READ(ethr_sint_t, atomic, _mb);
+ AT_TEST_INC(ethr_sint_t, atomic, _mb);
+ AT_TEST_DEC(ethr_sint_t, atomic, _mb);
+ AT_TEST_READ_BAND(ethr_sint_t, atomic, _mb);
+ AT_TEST_READ_BOR(ethr_sint_t, atomic, _mb);
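+ /*
+  * Each single-word operation above is exercised once per memory
+  * barrier variant: no barrier, _acqb (acquire), _relb (release),
+  * _rb (read), _wb (write), and _mb (full memory barrier), for both
+  * 32-bit (atomic32) and word-sized (atomic) atomics.
+  */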
+
+ /* Double word */
+ {
+ ethr_dw_atomic_t dw_atomic;
+ ethr_dw_sint_t dw0, dw1;
+ dw0.sint[0] = 4711;
+ dw0.sint[1] = 4712;
+
+ /* init */
+ ethr_dw_atomic_init(&dw_atomic, &dw0);
+ ethr_dw_atomic_read(&dw_atomic, &dw1);
+ ASSERT(dw1.sint[0] == 4711);
+ ASSERT(dw1.sint[1] == 4712);
+
+ /* set */
+ dw0.sint[0] = 42;
+ dw0.sint[1] = ~((ethr_sint_t) 0);
+ ethr_dw_atomic_set(&dw_atomic, &dw0);
+ ethr_dw_atomic_read(&dw_atomic, &dw1);
+ ASSERT(dw1.sint[0] == 42);
+ ASSERT(dw1.sint[1] == ~((ethr_sint_t) 0));
+
+ /* cmpxchg */
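+ /*
+  * ethr_dw_atomic_cmpxchg(var, new, exp) installs *new iff the current
+  * value equals *exp and returns non-zero on success.  The first call
+  * below is made with a stale expected value and must fail; the second,
+  * made with the freshly read value, must succeed.
+  */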
+ dw0.sint[0] = 17;
+ dw0.sint[1] = 18;
+ dw1.sint[0] = 19;
+ dw1.sint[1] = 20;
+ ASSERT(!ethr_dw_atomic_cmpxchg(&dw_atomic, &dw1, &dw0));
+ ethr_dw_atomic_read(&dw_atomic, &dw0);
+ ASSERT(dw0.sint[0] == 42);
+ ASSERT(dw0.sint[1] == ~((ethr_sint_t) 0));
+
+ ASSERT(ethr_dw_atomic_cmpxchg(&dw_atomic, &dw1, &dw0));
+
+ ethr_dw_atomic_read(&dw_atomic, &dw0);
+ ASSERT(dw0.sint[0] == 19);
+ ASSERT(dw0.sint[1] == 20);
+ }
+}
+
+
+#define AT_DW_MIN 12
+#define AT_DW_MAX 42
+#define AT_DW_THREADS (AT_DW_MAX - AT_DW_MIN + 1)
+
+#define AT_DW_LOOPS 200000
+#define AT_DW_R_LOOPS 10
+
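+ /*
+  * Massage test: one thread per value in [AT_DW_MIN, AT_DW_MAX].  Every
+  * thread writes double words whose bytes all carry its own value, so a
+  * torn (non-atomic) read or write shows up in at_dw_valid() as a word
+  * with mixed byte values or two differing halves.
+  */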
+ethr_dw_atomic_t at_dw_atomic;
+
+void
+at_dw_valid(ethr_dw_sint_t *dw)
+{
+ int i;
+ char c;
+ char *cp;
+
+ ASSERT(dw->sint[0] == dw->sint[1]);
+
+ cp = (char *) &dw->sint[0];
+ c = cp[0];
+
+ ASSERT(AT_DW_MIN <= c && c <= AT_DW_MAX);
+
+ for (i = 0; i < (int) sizeof(ethr_sint_t); i++)
+ ASSERT(c == cp[i]);
+}
+
+void *
+at_dw_thr(void *vval)
+{
+ int l, r;
+ ethr_sint_t val = (ethr_sint_t) vval;
+ ethr_dw_sint_t dw;
+ ethr_dw_sint_t my_dw;
+
+ my_dw.sint[0] = val;
+ my_dw.sint[1] = val;
+
+ ethr_dw_atomic_set(&at_dw_atomic, &my_dw);
+ for (l = 0; l < AT_DW_LOOPS; l++) {
+ for (r = 0; r < AT_DW_R_LOOPS; r++) {
+ ethr_dw_atomic_read(&at_dw_atomic, &dw);
+ at_dw_valid(&dw);
+ }
+ ethr_dw_atomic_set(&at_dw_atomic, &my_dw);
+ for (r = 0; r < AT_DW_R_LOOPS; r++) {
+ ethr_dw_atomic_read(&at_dw_atomic, &dw);
+ at_dw_valid(&dw);
+ }
+ dw.sint[0] = 0;
+ dw.sint[1] = 0;
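+ /*
+  * {0, 0} is never written by any thread once the test is running, so
+  * the first cmpxchg below is expected to fail; the retry loop relies
+  * on a failed cmpxchg writing the observed value back into dw, after
+  * which a subsequent attempt can succeed.
+  */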
+ while (1) {
+ if (ethr_dw_atomic_cmpxchg(&at_dw_atomic, &my_dw, &dw))
+ break;
+ }
+ }
+ return NULL;
+}
+
+static void
+dw_atomic_massage_test(void)
+{
+ int i, res;
+ ethr_tid tid[AT_DW_THREADS];
+ ethr_thr_opts thr_opts = ETHR_THR_OPTS_DEFAULT_INITER;
+ ethr_dw_sint_t dw;
+
+ dw.sint[0] = dw.sint[1] = 0;
+
+ ethr_dw_atomic_init(&at_dw_atomic, &dw);
+
+ for (i = AT_DW_MIN; i <= AT_DW_MAX; i++) {
+ ethr_sint_t val;
+ memset(&val, i, sizeof(ethr_sint_t));
+ res = ethr_thr_create(&tid[i-AT_DW_MIN], at_dw_thr, (void *) val, &thr_opts);
+ ASSERT(res == 0);
+ }
+ for (i = AT_DW_MIN; i <= AT_DW_MAX; i++) {
+ res = ethr_thr_join(tid[i-AT_DW_MIN], NULL);
+ ASSERT(res == 0);
+ }
+}
+
void *
at_thread(void *unused)
{
int i;
long val, go;
-
val = ethr_atomic_inc_read(&at_ready);
ASSERT(val > 0);
@@ -1373,7 +1792,6 @@ at_thread(void *unused)
return NULL;
}
-
static void
atomic_test(void)
{
@@ -1382,6 +1800,8 @@ atomic_test(void)
ethr_tid tid[AT_THREADS];
ethr_thr_opts thr_opts = ETHR_THR_OPTS_DEFAULT_INITER;
+ atomic_basic_test();
+
#if ETHR_SIZEOF_PTR > 4
at_rm_val = ((long) 1) << 57;
at_set_val = ((long) 1) << 60;
@@ -1493,6 +1913,8 @@ main(int argc, char *argv[])
rwmutex_test();
else if (strcmp(testcase, "atomic") == 0)
atomic_test();
+ else if (strcmp(testcase, "dw_atomic_massage") == 0)
+ dw_atomic_massage_test();
else
skip("Test case \"%s\" not implemented yet", testcase);