Diffstat (limited to 'erts/include/internal')
-rw-r--r--  erts/include/internal/ethr_atomics.h               8932
-rw-r--r--  erts/include/internal/ethr_internal.h                  6
-rw-r--r--  erts/include/internal/ethr_mutex.h                    30
-rw-r--r--  erts/include/internal/ethr_optimized_fallbacks.h     175
-rw-r--r--  erts/include/internal/ethread.h                      162
-rw-r--r--  erts/include/internal/ethread_header_config.h.in     116
-rw-r--r--  erts/include/internal/gcc/ethr_atomic.h              251
-rw-r--r--  erts/include/internal/gcc/ethr_dw_atomic.h           115
-rw-r--r--  erts/include/internal/gcc/ethr_membar.h               73
-rw-r--r--  erts/include/internal/gcc/ethread.h                   20
-rw-r--r--  erts/include/internal/i386/atomic.h                  334
-rw-r--r--  erts/include/internal/i386/ethr_dw_atomic.h          278
-rw-r--r--  erts/include/internal/i386/ethr_membar.h             114
-rw-r--r--  erts/include/internal/i386/ethread.h                   8
-rw-r--r--  erts/include/internal/i386/rwlock.h                    5
-rw-r--r--  erts/include/internal/i386/spinlock.h                 27
-rw-r--r--  erts/include/internal/libatomic_ops/ethr_atomic.h    298
-rw-r--r--  erts/include/internal/libatomic_ops/ethr_membar.h     75
-rw-r--r--  erts/include/internal/libatomic_ops/ethread.h         14
-rw-r--r--  erts/include/internal/ppc32/atomic.h                 148
-rw-r--r--  erts/include/internal/ppc32/ethr_membar.h             63
-rw-r--r--  erts/include/internal/ppc32/ethread.h                  5
-rw-r--r--  erts/include/internal/ppc32/rwlock.h                  15
-rw-r--r--  erts/include/internal/ppc32/spinlock.h                10
-rw-r--r--  erts/include/internal/pthread/ethr_event.h             8
-rw-r--r--  erts/include/internal/sparc32/atomic.h               194
-rw-r--r--  erts/include/internal/sparc32/ethr_membar.h          115
-rw-r--r--  erts/include/internal/sparc32/ethread.h                7
-rw-r--r--  erts/include/internal/sparc32/rwlock.h                17
-rw-r--r--  erts/include/internal/sparc32/spinlock.h              11
-rw-r--r--  erts/include/internal/tile/atomic.h                  136
-rw-r--r--  erts/include/internal/tile/ethr_membar.h              35
-rw-r--r--  erts/include/internal/tile/ethread.h                   5
-rw-r--r--  erts/include/internal/win/ethr_atomic.h              595
-rw-r--r--  erts/include/internal/win/ethr_dw_atomic.h           154
-rw-r--r--  erts/include/internal/win/ethr_event.h                16
-rw-r--r--  erts/include/internal/win/ethr_membar.h              145
-rw-r--r--  erts/include/internal/win/ethread.h                    4
38 files changed, 10971 insertions, 1745 deletions
diff --git a/erts/include/internal/ethr_atomics.h b/erts/include/internal/ethr_atomics.h
index 1caf4d0567..0f3c26f1df 100644
--- a/erts/include/internal/ethr_atomics.h
+++ b/erts/include/internal/ethr_atomics.h
@@ -1,7 +1,16 @@
/*
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ * This file was automatically generated by the
+ * $ERL_TOP/erts/lib_src/utils/make_atomics_api script.
+ * If you need to make changes, edit the script and
+ * regenerate this file.
+ * --------------- DO NOT EDIT THIS FILE! ---------------
+ */
+
+/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -18,709 +27,8818 @@
*/
/*
- * Description: The ethread atomic API
+ * Description: The ethread atomics API
* Author: Rickard Green
*/
-#ifndef ETHR_ATOMIC_H__
-#define ETHR_ATOMIC_H__
+/*
+ * This file maps native atomic implementations to ethread
+ * API atomics. If no native atomic implementation
+ * is available, a less efficient fallback is used instead.
+ * The API consists of 32-bit size, word size (pointer size),
+ * and double word size atomics.
+ *
+ * The following atomic operations are implemented for
+ * 32-bit size, and word size atomics:
+ * - cmpxchg
+ * - xchg
+ * - set
+ * - init
+ * - add_read
+ * - read
+ * - inc_read
+ * - dec_read
+ * - add
+ * - inc
+ * - dec
+ * - read_band
+ * - read_bor
+ *
+ * The following atomic operations are implemented for
+ * double word size atomics:
+ * - cmpxchg
+ * - set
+ * - read
+ * - init
+ *
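+ * For example, a minimal reference-count sketch using the word size
+ * operations above (the counter and destructor names are illustrative
+ * only):
+ *
+ *   ethr_atomic_t refc;
+ *   ethr_atomic_init(&refc, 1);
+ *   ethr_atomic_inc(&refc);
+ *   if (ethr_atomic_dec_read(&refc) == 0)
+ *       destroy_object();
+ *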
+ * Apart from a function implementing the atomic operation
+ * with unspecified memory barrier semantics, there are
+ * functions implementing each operation with the following
+ * memory barrier semantics:
+ * - rb (read barrier)
+ * - wb (write barrier)
+ * - acqb (acquire barrier)
+ * - relb (release barrier)
+ * - mb (full memory barrier)
+ *
+ * We implement all of these operation/barrier
+ * combinations, regardless of whether they are useful
+ * or not (some of them are useless).
+ *
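+ * For example, a release barrier store paired with an acquire barrier
+ * load (a sketch; 'flag' is illustrative): the writer publishes its
+ * data and then sets the flag with release semantics, while the
+ * reader spins on the flag with acquire semantics:
+ *
+ *   ethr_atomic32_set_relb(&flag, 1);
+ *
+ *   while (!ethr_atomic32_read_acqb(&flag))
+ *       ;
+ *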
+ * Double word size atomic functions have the following
+ * form:
+ *   ethr_dw_atomic_<OP>[_<BARRIER>]
+ *
+ * Word size atomic functions have the following
+ * form:
+ *   ethr_atomic_<OP>[_<BARRIER>]
+ *
+ * 32-bit size atomic functions have the following
+ * form:
+ *   ethr_atomic32_<OP>[_<BARRIER>]
+ *
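+ * For example, a read-modify-write retry loop built on the word size
+ * cmpxchg() (a sketch; cmpxchg() takes the new and the expected value
+ * and returns the value actually found in memory):
+ *
+ *   ethr_sint_t exp, act = ethr_atomic_read(&var);
+ *   do {
+ *       exp = act;
+ *       act = ethr_atomic_cmpxchg(&var, exp | mask, exp);
+ *   } while (act != exp);
+ *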
+ * Apart from the operation/barrier functions
+ * described above, 'addr' functions are also implemented;
+ * they return the actual memory address of the
+ * atomic variable. The 'addr' functions have no barrier
+ * versions.
+ *
+ * The native atomic implementation does not need to
+ * implement all operation/barrier combinations.
+ * Functions that have no native implementation will be
+ * constructed from existing native functionality. These
+ * functions will perform the wanted operation and will
+ * produce sufficient memory barriers, but may
+ * in some cases be less efficient than pure native
+ * versions.
+ *
+ * When we create ethread API operation/barrier functions by
+ * adding barriers before and after native operations it is
+ * assumed that:
+ * - A native read operation begins and ends with a load.
+ * - A native set operation begins and ends with a store.
+ * - An init operation begins with either a load or a store,
+ *   and ends with either a load or a store.
+ * - All other operations begin with a load, and end with
+ *   either a load or a store.
+ *
+ * This is the minimum functionality that a native
+ * implementation needs to provide:
+ *
+ * - Functions that need to be implemented:
+ *
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_addr
+ * - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>]
+ * (at least one cmpxchg, with any barrier variant)
+ *
+ * - Macros that need to be defined:
+ *
+ * A macro informing about the presence of the native
+ * implementation:
+ *
+ * - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]
+ *
+ * A macro naming (a string constant) the implementation:
+ *
+ * - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL
+ *
+ * Each implemented native atomic function has to
+ * be accompanied by a defined macro of the following
+ * form, informing about its presence:
+ *
+ * - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>]
+ *
+ * A (sparc-v9 style) membar macro:
+ *
+ * - ETHR_MEMBAR(B)
+ *
+ * which takes a combination of the following macros
+ * or'ed together (using |):
+ *
+ * - ETHR_LoadLoad
+ * - ETHR_LoadStore
+ * - ETHR_StoreLoad
+ * - ETHR_StoreStore
+ *
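+ * For instance, a hypothetical native 32-bit port might provide
+ * something like:
+ *
+ *   #define ETHR_HAVE_NATIVE_ATOMIC32 1
+ *   #define ETHR_NATIVE_ATOMIC32_IMPL "example-port"
+ *   #define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+ *   #define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
+ *   #define ETHR_MEMBAR(B) emit_fence_for(B)
+ *
+ * together with implementations of ethr_native_atomic32_addr() and
+ * ethr_native_atomic32_cmpxchg_mb(); all remaining operation/barrier
+ * combinations are then synthesized by this file.
+ *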
+ */
-#if !defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-# define ETHR_NEED_ATOMIC_PROTOTYPES__
-#endif
+#ifndef ETHR_ATOMICS_H__
+#define ETHR_ATOMICS_H__
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
+#undef ETHR_AMC_FALLBACK__
+#undef ETHR_AMC_NO_ATMCS__
+#undef ETHR_AMC_ATMC_T__
+#undef ETHR_AMC_ATMC_FUNC__
+
+/* -- 32-bit atomics -- */
+
+#undef ETHR_NAINT32_T__
+#undef ETHR_NATMC32_FUNC__
+#undef ETHR_NATMC32_ADDR_FUNC__
+#undef ETHR_NATMC32_BITS__
+#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# define ETHR_NEED_NATMC32_ADDR
+# define ETHR_NATMC32_ADDR_FUNC__ ethr_native_atomic32_addr
+typedef ethr_native_atomic32_t ethr_atomic32_t;
+# define ETHR_NAINT32_T__ ethr_sint32_t
+# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_NATMC32_BITS__ 32
+#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_NEED_NATMC64_ADDR
+#ifdef ETHR_BIGENDIAN
+# define ETHR_NATMC32_ADDR_FUNC__(VAR) \
+ (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1)
+#else
+# define ETHR_NATMC32_ADDR_FUNC__(VAR) \
+ ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR)))
+#endif
+typedef ethr_native_atomic64_t ethr_atomic32_t;
+# define ETHR_NAINT32_T__ ethr_sint64_t
+# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC32_BITS__ 64
+#else
/*
- * No native atomic implementation available. :(
+ * No native atomics usable for 32-bit atomics :(
* Use fallback...
*/
typedef ethr_sint32_t ethr_atomic32_t;
-typedef ethr_sint_t ethr_atomic_t;
-#else
-/*
- * Map ethread native atomics to ethread API atomics.
- *
- * We do at least have a native atomic implementation that
- * can handle integers of a size larger than or equal to
- * the size of pointers.
- */
+#endif
-/* -- Pointer size atomics -- */
+#undef ETHR_ATMC32_INLINE__
+#ifdef ETHR_NATMC32_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_ATMC32_INLINE__
+# endif
+# define ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_ATMC32_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_ATMC32_FUNC_NAME_
+# define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_ATMC32_FUNC__
+#define ETHR_ATMC32_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+
+
+/* -- Word size atomics -- */
+
+#undef ETHR_NEED_NATMC32_ADDR
+#undef ETHR_NEED_NATMC64_ADDR
#undef ETHR_NAINT_T__
#undef ETHR_NATMC_FUNC__
#undef ETHR_NATMC_ADDR_FUNC__
-#if ETHR_SIZEOF_PTR == 8
-# if defined(ETHR_HAVE_NATIVE_ATOMIC64)
-# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr
+#undef ETHR_NATMC_BITS__
+#if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic64_addr
typedef ethr_native_atomic64_t ethr_atomic_t;
-# define ETHR_NAINT_T__ ethr_sint64_t
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-# else
-# error "Missing native atomic implementation"
+# define ETHR_NAINT_T__ ethr_sint64_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_BITS__ 64
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# ifndef ETHR_NEED_NATMC32_ADDR
+# define ETHR_NEED_NATMC32_ADDR
# endif
-#elif ETHR_SIZEOF_PTR == 4
# define ETHR_NATMC_ADDR_FUNC__ ethr_native_atomic32_addr
-# ifdef ETHR_HAVE_NATIVE_ATOMIC32
typedef ethr_native_atomic32_t ethr_atomic_t;
-# define ETHR_NAINT_T__ ethr_sint32_t
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-# elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_NAINT_T__ ethr_sint32_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_NATMC_BITS__ 32
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+#ifdef ETHR_BIGENDIAN
+# define ETHR_NATMC_ADDR_FUNC__(VAR) \
+ (((ethr_sint32_t *) ethr_native_atomic64_addr((VAR))) + 1)
+#else
+# define ETHR_NATMC_ADDR_FUNC__(VAR) \
+ ((ethr_sint32_t *) ethr_native_atomic64_addr((VAR)))
+#endif
typedef ethr_native_atomic64_t ethr_atomic_t;
-# define ETHR_NATMC_T__ ethr_native_atomic64_t
-# define ETHR_NAINT_T__ ethr_sint64_t
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_T__ ethr_native_atomic64_t
+# define ETHR_NAINT_T__ ethr_sint64_t
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_NATMC_BITS__ 64
+#else
+/*
+ * No native atomics usable for pointer size atomics :(
+ * Use fallback...
+ */
+
+# if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 2
+# define ETHR_AMC_SINT_T__ ethr_sint32_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic32_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+typedef struct {
+ ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__];
+} ethr_amc_t;
+typedef struct {
+ ethr_amc_t amc;
+ ethr_sint_t sint;
+} ethr_atomic_t;
+# else /* locked fallback */
+typedef ethr_sint_t ethr_atomic_t;
+# endif
+#endif
+
+#undef ETHR_ATMC_INLINE__
+#ifdef ETHR_NATMC_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_ATMC_INLINE__
+# endif
+# define ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS
+#endif
+
+#if !defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_ATMC_PROTOTYPES__
+#endif
+
+#ifndef ETHR_INLINE_ATMC_FUNC_NAME_
+# define ETHR_INLINE_ATMC_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_ATMC_FUNC__
+#define ETHR_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X)
+
+/* -- Double word atomics -- */
+
+#undef ETHR_SU_DW_NAINT_T__
+#undef ETHR_SU_DW_NATMC_FUNC__
+#undef ETHR_SU_DW_NATMC_ADDR_FUNC__
+#undef ETHR_DW_NATMC_FUNC__
+#undef ETHR_DW_NATMC_ADDR_FUNC__
+#undef ETHR_DW_NATMC_BITS__
+#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)
+# define ETHR_NEED_DW_NATMC_ADDR
+# define ETHR_DW_NATMC_ADDR_FUNC__ ethr_native_dw_atomic_addr
+# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_dw_atomic_t
+# define ETHR_DW_NATMC_FUNC__(X) ethr_native_dw_atomic_ ## X
+# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_su_dw_atomic_ ## X
+# if ETHR_SIZEOF_PTR == 8
+# define ETHR_DW_NATMC_BITS__ 128
+# elif ETHR_SIZEOF_PTR == 4
+# define ETHR_DW_NATMC_BITS__ 64
# else
-# error "Missing native atomic implementation"
+# error "Word size not supported"
+# endif
+# ifdef ETHR_NATIVE_SU_DW_SINT_T
+# define ETHR_SU_DW_NAINT_T__ ETHR_NATIVE_SU_DW_SINT_T
# endif
+#elif ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)
+# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
+# ifndef ETHR_NEED_NATMC64_ADDR
+# define ETHR_NEED_NATMC64_ADDR
+# endif
+# define ETHR_DW_NATMC_ADDR_FUNC__(VAR) \
+ ((ethr_dw_sint_t *) ethr_native_atomic64_addr((VAR)))
+# define ETHR_NATIVE_DW_ATOMIC_T__ ethr_native_atomic64_t
+# define ETHR_SU_DW_NAINT_T__ ethr_sint64_t
+# define ETHR_SU_DW_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_DW_NATMC_BITS__ 64
#endif
-/* -- 32-bit atomics -- */
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+#define ETHR_DW_ATOMIC_FUNC__(X) ethr_dw_atomic_ ## X ## _fallback__
+#else
+#define ETHR_DW_ATOMIC_FUNC__(X) ethr_dw_atomic_ ## X
+#endif
-#undef ETHR_NAINT32_T__
-#undef ETHR_NATMC32_FUNC__
-#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
-typedef ethr_native_atomic32_t ethr_atomic32_t;
-# define ETHR_NAINT32_T__ ethr_sint32_t
-# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic32_ ## X
-#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
-typedef ethr_native_atomic64_t ethr_atomic32_t;
-# define ETHR_NAINT32_T__ ethr_sint64_t
-# define ETHR_NATMC32_FUNC__(X) ethr_native_atomic64_ ## X
+#if !defined(ETHR_DW_NATMC_BITS__) || defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+# define ETHR_NEED_DW_FALLBACK__
+#endif
+
+#if defined(ETHR_NEED_DW_FALLBACK__)
+/*
+ * No native atomics usable for double word atomics :(
+ * Use fallback...
+ */
+
+# ifndef ETHR_AMC_FALLBACK__
+# if ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 1
+# define ETHR_AMC_SINT_T__ ethr_sint_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC_FUNC_NAME_(ethr_atomic_ ## X)
+# elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
+# define ETHR_AMC_FALLBACK__
+# define ETHR_AMC_NO_ATMCS__ 2
+# define ETHR_AMC_SINT_T__ ethr_sint32_t
+# define ETHR_AMC_ATMC_T__ ethr_atomic32_t
+# define ETHR_AMC_ATMC_FUNC__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+# endif
+# ifdef ETHR_AMC_FALLBACK__
+typedef struct {
+ ETHR_AMC_ATMC_T__ atomic[ETHR_AMC_NO_ATMCS__];
+} ethr_amc_t;
+# endif
+# endif
+
+typedef struct {
+#ifdef ETHR_AMC_FALLBACK__
+ ethr_amc_t amc;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_fallback_t;
+
+#endif
+
+typedef union {
+#ifdef ETHR_NATIVE_DW_ATOMIC_T__
+ ETHR_NATIVE_DW_ATOMIC_T__ native;
+#endif
+#ifdef ETHR_NEED_DW_FALLBACK__
+ ethr_dw_atomic_fallback_t fallback;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_t;
+
+typedef union {
+#ifdef ETHR_SU_DW_NAINT_T__
+ ETHR_SU_DW_NAINT_T__ dw_sint;
+#endif
+ ethr_sint_t sint[2];
+} ethr_dw_sint_t;
+
+#ifdef ETHR_BIGENDIAN
+# define ETHR_DW_SINT_LOW_WORD 1
+# define ETHR_DW_SINT_HIGH_WORD 0
#else
-# error "Missing native atomic implementation"
+# define ETHR_DW_SINT_LOW_WORD 0
+# define ETHR_DW_SINT_HIGH_WORD 1
#endif
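+
+/*
+ * Example (a sketch) of composing a double word value from two words
+ * in an endian-neutral way using the index macros above:
+ *
+ *   ethr_dw_sint_t v;
+ *   v.sint[ETHR_DW_SINT_LOW_WORD] = low_word;
+ *   v.sint[ETHR_DW_SINT_HIGH_WORD] = high_word;
+ */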
+#undef ETHR_DW_ATMC_INLINE__
+#ifdef ETHR_DW_NATMC_BITS__
+# ifdef ETHR_TRY_INLINE_FUNCS
+# define ETHR_DW_ATMC_INLINE__
+# endif
+# define ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS
#endif
-#ifdef ETHR_NEED_ATOMIC_PROTOTYPES__
-ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *);
-void ethr_atomic_init(ethr_atomic_t *, ethr_sint_t);
-void ethr_atomic_set(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_read(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *);
-void ethr_atomic_inc(ethr_atomic_t *);
-void ethr_atomic_dec(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *, ethr_sint_t);
-void ethr_atomic_add(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *, ethr_sint_t);
-ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *, ethr_sint_t, ethr_sint_t);
-ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *);
-void ethr_atomic_set_relb(ethr_atomic_t *, ethr_sint_t);
-void ethr_atomic_dec_relb(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *);
-ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *, ethr_sint_t, ethr_sint_t);
-ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *, ethr_sint_t, ethr_sint_t);
-
-ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *);
-void ethr_atomic32_init(ethr_atomic32_t *, ethr_sint32_t);
-void ethr_atomic32_set(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *);
-void ethr_atomic32_inc(ethr_atomic32_t *);
-void ethr_atomic32_dec(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *, ethr_sint32_t);
-void ethr_atomic32_add(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *, ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *,
- ethr_sint32_t,
- ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *);
-void ethr_atomic32_set_relb(ethr_atomic32_t *, ethr_sint32_t);
-void ethr_atomic32_dec_relb(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *);
-ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *,
- ethr_sint32_t,
- ethr_sint32_t);
-ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *,
- ethr_sint32_t,
- ethr_sint32_t);
+#if !defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+# define ETHR_NEED_DW_ATMC_PROTOTYPES__
#endif
-int ethr_init_atomics(void);
+#ifndef ETHR_INLINE_DW_ATMC_FUNC_NAME_
+# define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X
+#endif
+
+#undef ETHR_DW_ATMC_FUNC__
+#define ETHR_DW_ATMC_FUNC__(X) ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_dw_atomic_ ## X)
+
+#if defined(ETHR_NEED_DW_ATMC_PROTOTYPES__)
+int ethr_have_native_dw_atomic(void);
+#endif
+#if defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)
+static ETHR_INLINE int
+ETHR_INLINE_DW_ATMC_FUNC_NAME_(ethr_have_native_dw_atomic)(void)
+{
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ return ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__;
+#elif defined(ETHR_DW_NATMC_BITS__)
+ return 1;
+#else
+ return 0;
+#endif
+}
+#endif
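+
+/*
+ * Intended use of the runtime check above (a sketch): callers may
+ * branch on it to select a double word atomic based fast path, e.g.:
+ *
+ *   if (ethr_have_native_dw_atomic())
+ *       take_dw_atomic_fast_path();
+ */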
+
+/* -- Misc -- */
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+/*
+ * Unusual values are used by read() fallbacks implemented via cmpxchg().
+ * We want to use an unusual value in the hope that it is more efficient
+ * not to match the value in memory.
+ *
+ * - Negative integer values are probably more unusual.
+ * - Very large absolute integer values are probably more unusual.
+ * - Odd pointers are probably more unusual (only char pointers can be odd).
+ */
+# define ETHR_UNUSUAL_SINT32_VAL__ ((ethr_sint32_t) 0x81818181)
+# if ETHR_SIZEOF_PTR == 4
+# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) ETHR_UNUSUAL_SINT32_VAL__)
+# elif ETHR_SIZEOF_PTR == 8
+# define ETHR_UNUSUAL_SINT_VAL__ ((ethr_sint_t) 0x8181818181818181L)
+# else
+# error "Word size not supported"
+# endif
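+/*
+ * Sketch of how such a read() fallback uses the unusual value
+ * (illustrative, not the literal generated code): the unusual value is
+ * passed as both the new and the expected value, so the cmpxchg is
+ * unlikely to write and returns the value actually in memory:
+ *
+ *   act = cmpxchg(var, ETHR_UNUSUAL_SINT_VAL__, ETHR_UNUSUAL_SINT_VAL__);
+ */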
+# if defined(ETHR_NEED_DW_NATMC_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR)
+# error "No ethr_native_dw_atomic_addr() available"
+# endif
+# if defined(ETHR_NEED_NATMC32_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR)
+# error "No ethr_native_atomic32_addr() available"
+# endif
+# if defined(ETHR_NEED_NATMC64_ADDR) && !defined(ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR)
+# error "No ethr_native_atomic64_addr() available"
+# endif
+#endif
+
+#if defined(__GNUC__)
+# ifndef ETHR_COMPILER_BARRIER
+# define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
+# endif
+#elif defined(ETHR_WIN32_THREADS)
+# ifndef ETHR_COMPILER_BARRIER
+# include <intrin.h>
+# pragma intrinsic(_ReadWriteBarrier)
+# define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
+# endif
+#endif
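+
+/*
+ * Note: ETHR_COMPILER_BARRIER only constrains compiler reordering; it
+ * emits no fence instruction. For example (a sketch):
+ *
+ *   a = 1;
+ *   ETHR_COMPILER_BARRIER;
+ *   b = 2;
+ *
+ * prevents the compiler, but not the CPU, from reordering the stores.
+ */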
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
+void ethr_compiler_barrier_fallback(void);
+#ifndef ETHR_COMPILER_BARRIER
+# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback()
+#endif
+
+int ethr_init_atomics(void);
+
+/* info */
+char **ethr_native_atomic32_ops(void);
+char **ethr_native_atomic64_ops(void);
+char **ethr_native_dw_atomic_ops(void);
+char **ethr_native_su_dw_atomic_ops(void);
+
+#if !defined(ETHR_DW_NATMC_BITS__) && !defined(ETHR_NATMC_BITS__) && !defined(ETHR_NATMC32_BITS__)
/*
- * Fallbacks for atomics used in absence of a native implementation.
+ * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only,
+ * i.e. when no native atomic implementation exists and only our
+ * lock-based atomic fallback is used, a noop is sufficient.
*/
+# undef ETHR_MEMORY_BARRIER
+# undef ETHR_WRITE_MEMORY_BARRIER
+# undef ETHR_READ_MEMORY_BARRIER
+# undef ETHR_READ_DEPEND_MEMORY_BARRIER
+# undef ETHR_MEMBAR
+# define ETHR_MEMBAR(B) do { } while (0)
+#endif
+
+#ifndef ETHR_MEMBAR
+# error "No ETHR_MEMBAR defined"
+#endif
-#define ETHR_ATOMIC_ADDR_BITS 10
-#define ETHR_ATOMIC_ADDR_SHIFT 6
+#define ETHR_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore)
+#define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMBAR(ETHR_StoreStore)
+#define ETHR_READ_MEMORY_BARRIER ETHR_MEMBAR(ETHR_LoadLoad)
+#ifdef ETHR_READ_DEPEND_MEMORY_BARRIER
+# undef ETHR_ORDERED_READ_DEPEND
+#else
+# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER
+# define ETHR_ORDERED_READ_DEPEND
+#endif
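+
+/*
+ * Illustrative use of the read-depend barrier (a sketch): it orders a
+ * load with later loads that depend on its result:
+ *
+ *   p = (ethr_sint_t *) ethr_atomic_read(&aptr);
+ *   ETHR_READ_DEPEND_MEMORY_BARRIER;
+ *   x = *p;
+ */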
-typedef struct {
- union {
- ethr_spinlock_t lck;
- char buf[ETHR_CACHE_LINE_SIZE];
- } u;
-} ethr_atomic_protection_t;
-extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATOMIC_ADDR_BITS];
+/* ---------- Double word size atomic implementation ---------- */
+
+
+#ifdef ETHR_NEED_DW_ATMC_PROTOTYPES__
+ethr_sint_t *ethr_dw_atomic_addr(ethr_dw_atomic_t *var);
+int ethr_dw_atomic_cmpxchg(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ethr_dw_atomic_cmpxchg_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+void ethr_dw_atomic_set(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_set_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_read_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ethr_dw_atomic_init_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ethr_sint_t *ETHR_DW_ATOMIC_FUNC__(addr)(ethr_dw_atomic_t *var);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+int ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val);
+void ETHR_DW_ATOMIC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+void ETHR_DW_ATOMIC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val);
+#endif
+#endif /* ETHR_NEED_DW_ATMC_PROTOTYPES__ */
+
+#if (defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) \
+ && (defined(ETHR_DW_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)))
+
+#if !defined(ETHR_DW_NATMC_BITS__)
+# error "Missing native atomic implementation"
+#elif defined(ETHR_HAVE_NATIVE_DW_ATOMIC) || defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG
+# define ETHR_HAVE_DW_NATMC_CMPXCHG 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_WB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_WB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_ACQB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RELB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RELB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
+# define ETHR_HAVE_DW_NATMC_CMPXCHG_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET
+# undef ETHR_HAVE_DW_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET
+# define ETHR_HAVE_DW_NATMC_SET 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET
+# define ETHR_HAVE_SU_DW_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RB
+# undef ETHR_HAVE_DW_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RB
+# define ETHR_HAVE_DW_NATMC_SET_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_WB
+# undef ETHR_HAVE_DW_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_WB
+# define ETHR_HAVE_DW_NATMC_SET_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_WB
+# define ETHR_HAVE_SU_DW_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_ACQB
+# undef ETHR_HAVE_DW_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_ACQB
+# define ETHR_HAVE_DW_NATMC_SET_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RELB
+# undef ETHR_HAVE_DW_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RELB
+# define ETHR_HAVE_DW_NATMC_SET_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RELB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_SET_MB
+# undef ETHR_HAVE_DW_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_MB
+# define ETHR_HAVE_DW_NATMC_SET_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_MB
+# define ETHR_HAVE_SU_DW_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ
+# undef ETHR_HAVE_DW_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ
+# define ETHR_HAVE_DW_NATMC_READ 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ
+# define ETHR_HAVE_SU_DW_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RB
+# undef ETHR_HAVE_DW_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RB
+# define ETHR_HAVE_DW_NATMC_READ_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_WB
+# undef ETHR_HAVE_DW_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_WB
+# define ETHR_HAVE_DW_NATMC_READ_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_WB
+# define ETHR_HAVE_SU_DW_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_ACQB
+# undef ETHR_HAVE_DW_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_ACQB
+# define ETHR_HAVE_DW_NATMC_READ_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RELB
+# undef ETHR_HAVE_DW_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RELB
+# define ETHR_HAVE_DW_NATMC_READ_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RELB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_READ_MB
+# undef ETHR_HAVE_DW_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_MB
+# define ETHR_HAVE_DW_NATMC_READ_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_MB
+# define ETHR_HAVE_SU_DW_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT
+# undef ETHR_HAVE_DW_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT
+# define ETHR_HAVE_DW_NATMC_INIT 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT
+# define ETHR_HAVE_SU_DW_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RB
+# undef ETHR_HAVE_DW_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RB
+# define ETHR_HAVE_DW_NATMC_INIT_RB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_WB
+# undef ETHR_HAVE_DW_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_WB
+# define ETHR_HAVE_DW_NATMC_INIT_WB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_WB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_ACQB
+# undef ETHR_HAVE_DW_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_ACQB
+# define ETHR_HAVE_DW_NATMC_INIT_ACQB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RELB
+# undef ETHR_HAVE_DW_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RELB
+# define ETHR_HAVE_DW_NATMC_INIT_RELB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RELB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_MB
+# undef ETHR_HAVE_DW_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_MB
+# define ETHR_HAVE_DW_NATMC_INIT_MB 1
+# endif
+# ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_MB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_MB 1
+# endif
+#elif ETHR_DW_NATMC_BITS__ == 64
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_WB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_CMPXCHG_MB
+# undef ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
+# define ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET
+# undef ETHR_HAVE_SU_DW_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
+# define ETHR_HAVE_SU_DW_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_RB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_WB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
+# define ETHR_HAVE_SU_DW_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+# define ETHR_HAVE_SU_DW_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_SET_MB
+# undef ETHR_HAVE_SU_DW_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+# define ETHR_HAVE_SU_DW_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ
+# undef ETHR_HAVE_SU_DW_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+# define ETHR_HAVE_SU_DW_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_RB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_WB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+# define ETHR_HAVE_SU_DW_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+# define ETHR_HAVE_SU_DW_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_READ_MB
+# undef ETHR_HAVE_SU_DW_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+# define ETHR_HAVE_SU_DW_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT
+# undef ETHR_HAVE_SU_DW_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+# define ETHR_HAVE_SU_DW_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_RB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_WB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_ACQB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_RELB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_DW_NATMC_INIT_MB
+# undef ETHR_HAVE_SU_DW_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+# define ETHR_HAVE_SU_DW_NATMC_INIT_MB 1
+# endif
+#else
+# error "Invalid native atomic size"
+#endif
+
-#define ETHR_ATOMIC_PTR2LCK__(PTR) \
-(&ethr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATOMIC_ADDR_SHIFT) \
- & ((1 << ETHR_ATOMIC_ADDR_BITS) - 1))].u.lck)
+#if defined(ETHR_HAVE_NATIVE_DW_ATOMIC)
+#if (!defined(ETHR_HAVE_DW_NATMC_CMPXCHG) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
+
+
+/*
+ * Read op used together with the cmpxchg() fallback when no native op is present.
+ */
+#if defined(ETHR_HAVE_DW_NATMC_READ)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ ETHR_DW_NATMC_FUNC__(read)(VAR, VAL)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ ETHR_DW_NATMC_FUNC__(read_rb)(VAR, VAL)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ ETHR_DW_NATMC_FUNC__(read_wb)(VAR, VAL)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ ETHR_DW_NATMC_FUNC__(read_acqb)(VAR, VAL)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ ETHR_DW_NATMC_FUNC__(read_relb)(VAR, VAL)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ ETHR_DW_NATMC_FUNC__(read_mb)(VAR, VAL)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+ VAL.dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the
+ * atomic's actual value returned from cmpxchg().
+ */
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, VAL) \
+do { \
+ VAL.sint[0] = (ethr_sint_t) 0; \
+ VAL.sint[1] = (ethr_sint_t) 0; \
+} while (0)
+#endif
-#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS) \
-do { \
- ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP)); \
- ethr_spin_lock(slp__); \
- { EXPS; } \
- ethr_spin_unlock(slp__); \
+/*
+ * Native cmpxchg() fallback used when no native op is present.
+ */
+#define ETHR_DW_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ int res__; \
+ ethr_dw_sint_t AVAL, exp_act__; \
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR, exp_act__); \
+ do { \
+ AVAL.sint[0] = exp_act__.sint[0]; \
+ AVAL.sint[1] = exp_act__.sint[1]; \
+ { OPS; } \
+ res__ = CMPXCHG(VAR, AVAL.sint, exp_act__.sint); \
+ } while (__builtin_expect(res__ == 0, 0)); \
} while (0)
+
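+/*
+ * Example expansion (a sketch, assuming a native cmpxchg_mb() exists):
+ * inside a set() implementation, the new value is copied into AVAL in
+ * the retry loop via the OPS argument:
+ *
+ *   ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb),
+ *                                    &var->native, aval,
+ *                                    aval.sint[0] = val->sint[0];
+ *                                    aval.sint[1] = val->sint[1]);
+ */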
+#elif defined(ETHR_HAVE_NATIVE_SU_DW_ATOMIC)
+
+#if (!defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
+
+
+/*
+ * Read op used together with the cmpxchg() fallback when no native op is present.
+ */
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_SU_DW_NATMC_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the
+ * atomic's actual value returned from cmpxchg().
+ */
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ((ETHR_SU_DW_NAINT_T__) 0)
+#endif
+
+/*
+ * Native cmpxchg() fallback used when no native op is present.
+ */
+#define ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ ETHR_SU_DW_NAINT_T__ AVAL; \
+ ETHR_SU_DW_NAINT_T__ new__, act__, exp__; \
+ act__ = ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK_READ__(VAR); \
+ do { \
+ exp__ = act__; \
+ AVAL = (ETHR_SU_DW_NAINT_T__) act__; \
+ { OPS; } \
+ new__ = (ETHR_SU_DW_NAINT_T__) AVAL; \
+ act__ = CMPXCHG(VAR, new__, exp__); \
+ } while (__builtin_expect(act__ != exp__, 0)); \
+} while (0)
+
+
+#else
+# error "Unexpected double word native atomic configuration"
+#endif
+
+
+
+/* --- addr() --- */
+
+static ETHR_INLINE ethr_sint_t *ETHR_DW_ATMC_FUNC__(addr)(ethr_dw_atomic_t *var)
+{
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+ return (ethr_sint_t *) ETHR_DW_NATMC_ADDR_FUNC__((&var->native));
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { return ETHR_DW_ATOMIC_FUNC__(addr)(var); }
+#endif
+
+}
+
+
+/* --- cmpxchg() --- */
+
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_rb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_StoreStore);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_wb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_acqb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_relb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
+static ETHR_INLINE int ETHR_DW_ATMC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
+{
+ int res;
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NAINT_T__ act;
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->dw_sint, old_val->dw_sint);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NAINT_T__ act;
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ act = ETHR_SU_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->dw_sint, old_val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ res = (act == old_val->dw_sint);
+ old_val->dw_sint = act;
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_mb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_relb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_acqb)(&var->native, val->sint, old_val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_wb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg_rb)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = ETHR_DW_NATMC_FUNC__(cmpxchg)(&var->native, val->sint, old_val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_cmpxchg_mb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { res = ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(var, val, old_val); }
+#endif
+
+ return res;
+}
+
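+/*
+ * All cmpxchg operations above return non-zero on success. On
+ * failure the value actually read is stored back into *old_val,
+ * so a caller can retry without re-reading the variable. A
+ * minimal usage sketch (illustration only; the variable and the
+ * update performed are made up):
+ *
+ *     ethr_dw_atomic_t var;
+ *     ethr_dw_sint_t old, new;
+ *     ethr_dw_atomic_read(&var, &old);
+ *     do {
+ *         new.sint[0] = old.sint[0] + 1;
+ *         new.sint[1] = old.sint[1];
+ *     } while (!ethr_dw_atomic_cmpxchg_mb(&var, &new, &old));
+ */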
+
+/* --- set() --- */
+
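+/*
+ * When no native set operation is available, set() falls back to a
+ * cmpxchg based retry loop by way of the
+ * ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__ and
+ * ETHR_DW_NATMC_CMPXCHG_FALLBACK__ macros (defined elsewhere in
+ * this generated header), which re-read the variable and retry the
+ * cmpxchg until it succeeds.
+ */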
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#else
+#error "Missing implementation of ethr_dw_atomic_set()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_rb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_rb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_wb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_wb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_acqb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_acqb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_relb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_relb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_SET_MB)
+ ETHR_SU_DW_NATMC_FUNC__(set_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(set_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(set)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_MB)
+ ETHR_DW_NATMC_FUNC__(set_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RELB)
+ ETHR_DW_NATMC_FUNC__(set_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_DW_NATMC_FUNC__(set_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(set)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_MB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RELB)
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval = val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_SU_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval = val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_MB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_mb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RELB)
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_relb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_acqb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_wb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg_rb), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_CMPXCHG_FALLBACK__(ETHR_DW_NATMC_FUNC__(cmpxchg), &var->native, aval, aval.sint[0] = val->sint[0]; aval.sint[1] = val->sint[1]);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_dw_atomic_set_mb()!"
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(set_mb)(var, val); }
+#endif
+
+}
+
+
+/* --- read() --- */
+
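+/*
+ * When no native read operation is available, read() is emulated
+ * with a cmpxchg where both the expected and the new value are set
+ * to ETHR_UNUSUAL_SINT_VAL__. The cmpxchg is normally expected to
+ * fail and thereby deposit the current value in *val; should it
+ * succeed, the variable actually contained the unusual value and
+ * is rewritten with the same bits, which is harmless.
+ */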
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_rb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_rb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_wb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_wb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_acqb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_acqb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_relb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_relb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_READ_MB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_mb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RELB)
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_relb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_acqb)(&var->native);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_wb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read_rb)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ val->dw_sint = ETHR_SU_DW_NATMC_FUNC__(read)(&var->native);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_MB)
+ ETHR_DW_NATMC_FUNC__(read_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RELB)
+ ETHR_DW_NATMC_FUNC__(read_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_DW_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(read)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ ethr_dw_sint_t tmp;
+ tmp.sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ tmp.sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[0] = ETHR_UNUSUAL_SINT_VAL__;
+ val->sint[1] = ETHR_UNUSUAL_SINT_VAL__;
+ (void) ETHR_DW_ATMC_FUNC__(cmpxchg_mb)(var, &tmp, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(read_mb)(var, val); }
+#endif
+
+}
+
+
+/* --- init() --- */
+
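+/*
+ * init() is intended for the first store to a variable, before it
+ * has been published to other threads. Where no native init
+ * operation exists, it simply degrades to the corresponding set()
+ * operation.
+ */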
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#else
+ ETHR_DW_ATMC_FUNC__(set)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_DW_ATMC_FUNC__(set_rb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_rb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+#else
+ ETHR_DW_ATMC_FUNC__(set_wb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_wb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_DW_ATMC_FUNC__(set_acqb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_acqb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#else
+ ETHR_DW_ATMC_FUNC__(set_relb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_relb)(var, val); }
+#endif
+
+}
+
+static ETHR_INLINE void ETHR_DW_ATMC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
+{
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
+#endif
+
+#if defined(ETHR_HAVE_SU_DW_NATMC_INIT_MB)
+ ETHR_SU_DW_NATMC_FUNC__(init_mb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RELB)
+ ETHR_SU_DW_NATMC_FUNC__(init_relb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_acqb)(&var->native, val->dw_sint);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_SU_DW_NATMC_FUNC__(init_wb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init_rb)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_SU_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_MB)
+ ETHR_DW_NATMC_FUNC__(init_mb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RELB)
+ ETHR_DW_NATMC_FUNC__(init_relb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_acqb)(&var->native, val->sint);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_DW_NATMC_FUNC__(init_wb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init_rb)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_DW_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_DW_NATMC_FUNC__(init)(&var->native, val->sint);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_DW_ATMC_FUNC__(set_mb)(var, val);
+#endif
+
+#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
+ } else { ETHR_DW_ATOMIC_FUNC__(init_mb)(var, val); }
+#endif
+
+}
+
+#endif /* ETHR_DW_ATMC_INLINE__ */
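
[editor's note] To make the preprocessor cascades above concrete: when a backend offers only a plain, unordered native init, the full-barrier variant is synthesized by bracketing that op with explicit membars. A hand expansion of ethr_dw_atomic_init_mb() under that assumption (illustrative only; the real selection is generated above):

    /* Assumes only ETHR_HAVE_SU_DW_NATMC_INIT is defined. */
    static ETHR_INLINE void
    example_dw_init_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
    {
        ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
        ETHR_SU_DW_NATMC_FUNC__(init)(&var->native, val->dw_sint);
        ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    }
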
+
+
+/* ---------- Word size atomic implementation ---------- */
+
+
+#ifdef ETHR_NEED_ATMC_PROTOTYPES__
+ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_rb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_wb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_cmpxchg_mb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val);
+ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_xchg_mb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_rb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_wb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_acqb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_set_mb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_rb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_wb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_acqb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_relb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_init_mb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_add_read_mb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_rb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_wb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_relb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_mb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_rb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_wb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_relb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_inc_read_mb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_rb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_wb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_acqb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_dec_read_mb(ethr_atomic_t *var);
+void ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_rb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_wb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_acqb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_relb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_add_mb(ethr_atomic_t *var, ethr_sint_t val);
+void ethr_atomic_inc(ethr_atomic_t *var);
+void ethr_atomic_inc_rb(ethr_atomic_t *var);
+void ethr_atomic_inc_wb(ethr_atomic_t *var);
+void ethr_atomic_inc_acqb(ethr_atomic_t *var);
+void ethr_atomic_inc_relb(ethr_atomic_t *var);
+void ethr_atomic_inc_mb(ethr_atomic_t *var);
+void ethr_atomic_dec(ethr_atomic_t *var);
+void ethr_atomic_dec_rb(ethr_atomic_t *var);
+void ethr_atomic_dec_wb(ethr_atomic_t *var);
+void ethr_atomic_dec_acqb(ethr_atomic_t *var);
+void ethr_atomic_dec_relb(ethr_atomic_t *var);
+void ethr_atomic_dec_mb(ethr_atomic_t *var);
+ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_band_mb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_rb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_wb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_acqb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_relb(ethr_atomic_t *var, ethr_sint_t val);
+ethr_sint_t ethr_atomic_read_bor_mb(ethr_atomic_t *var, ethr_sint_t val);
+#endif /* ETHR_NEED_ATMC_PROTOTYPES__ */
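
[editor's note] The prototypes above are the complete word size API: each operation comes in a relaxed form plus _rb, _wb, _acqb, _relb and _mb barrier variants. A minimal usage sketch (a shared reference counter; thread creation and error handling omitted):

    #include "ethread.h"   /* umbrella header; pulls in the atomics API */

    static ethr_atomic_t refc;

    void refc_setup(void)          /* called before the counter is shared */
    {
        ethr_atomic_init(&refc, 1);
    }

    void refc_keep(void)
    {
        ethr_atomic_inc(&refc);    /* relaxed; no ordering needed */
    }

    int refc_drop(void)            /* returns nonzero on the last drop */
    {
        /* _mb: the decrement is ordered against everything around it */
        return ethr_atomic_dec_read_mb(&refc) == 0;
    }
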
+
+#if (defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \
+ && (defined(ETHR_ATMC_INLINE__) || defined(ETHR_ATOMIC_IMPL__)))
+
+#if !defined(ETHR_NATMC_BITS__)
+# error "Missing native atomic implementation"
+#elif ETHR_NATMC_BITS__ == 64
+# undef ETHR_HAVE_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
+# define ETHR_HAVE_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
+# define ETHR_HAVE_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
+# define ETHR_HAVE_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
+# define ETHR_HAVE_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG
+# define ETHR_HAVE_NATMC_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB
+# define ETHR_HAVE_NATMC_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB
+# define ETHR_HAVE_NATMC_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB
+# define ETHR_HAVE_NATMC_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB
+# define ETHR_HAVE_NATMC_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB
+# define ETHR_HAVE_NATMC_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
+# define ETHR_HAVE_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
+# define ETHR_HAVE_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
+# define ETHR_HAVE_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+# define ETHR_HAVE_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+# define ETHR_HAVE_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+# define ETHR_HAVE_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+# define ETHR_HAVE_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+# define ETHR_HAVE_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+# define ETHR_HAVE_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+# define ETHR_HAVE_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+# define ETHR_HAVE_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+# define ETHR_HAVE_NATMC_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN
+# define ETHR_HAVE_NATMC_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+# define ETHR_HAVE_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+# define ETHR_HAVE_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+# define ETHR_HAVE_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+# define ETHR_HAVE_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+# define ETHR_HAVE_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+# define ETHR_HAVE_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN
+# define ETHR_HAVE_NATMC_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB
+# define ETHR_HAVE_NATMC_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB
+# define ETHR_HAVE_NATMC_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB
+# define ETHR_HAVE_NATMC_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN
+# define ETHR_HAVE_NATMC_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD
+# define ETHR_HAVE_NATMC_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB
+# define ETHR_HAVE_NATMC_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB
+# define ETHR_HAVE_NATMC_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB
+# define ETHR_HAVE_NATMC_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB
+# define ETHR_HAVE_NATMC_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB
+# define ETHR_HAVE_NATMC_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC
+# define ETHR_HAVE_NATMC_INC 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB
+# define ETHR_HAVE_NATMC_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB
+# define ETHR_HAVE_NATMC_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB
+# define ETHR_HAVE_NATMC_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB
+# define ETHR_HAVE_NATMC_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB
+# define ETHR_HAVE_NATMC_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC
+# define ETHR_HAVE_NATMC_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB
+# define ETHR_HAVE_NATMC_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB
+# define ETHR_HAVE_NATMC_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB
+# define ETHR_HAVE_NATMC_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB
+# define ETHR_HAVE_NATMC_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB
+# define ETHR_HAVE_NATMC_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD
+# define ETHR_HAVE_NATMC_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD
+# define ETHR_HAVE_NATMC_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC_OR_RETOLD_MB 1
+# endif
+#elif ETHR_NATMC_BITS__ == 32
+# undef ETHR_HAVE_NATMC_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG
+# define ETHR_HAVE_NATMC_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB
+# define ETHR_HAVE_NATMC_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB
+# define ETHR_HAVE_NATMC_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB
+# define ETHR_HAVE_NATMC_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG
+# define ETHR_HAVE_NATMC_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB
+# define ETHR_HAVE_NATMC_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB
+# define ETHR_HAVE_NATMC_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB
+# define ETHR_HAVE_NATMC_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB
+# define ETHR_HAVE_NATMC_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB
+# define ETHR_HAVE_NATMC_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET
+# define ETHR_HAVE_NATMC_SET 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB
+# define ETHR_HAVE_NATMC_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB
+# define ETHR_HAVE_NATMC_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB
+# define ETHR_HAVE_NATMC_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB
+# define ETHR_HAVE_NATMC_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB
+# define ETHR_HAVE_NATMC_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT
+# define ETHR_HAVE_NATMC_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB
+# define ETHR_HAVE_NATMC_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB
+# define ETHR_HAVE_NATMC_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB
+# define ETHR_HAVE_NATMC_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB
+# define ETHR_HAVE_NATMC_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB
+# define ETHR_HAVE_NATMC_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN
+# define ETHR_HAVE_NATMC_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ
+# define ETHR_HAVE_NATMC_READ 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB
+# define ETHR_HAVE_NATMC_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB
+# define ETHR_HAVE_NATMC_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB
+# define ETHR_HAVE_NATMC_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB
+# define ETHR_HAVE_NATMC_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB
+# define ETHR_HAVE_NATMC_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN
+# define ETHR_HAVE_NATMC_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB
+# define ETHR_HAVE_NATMC_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB
+# define ETHR_HAVE_NATMC_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB
+# define ETHR_HAVE_NATMC_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN
+# define ETHR_HAVE_NATMC_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD
+# define ETHR_HAVE_NATMC_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB
+# define ETHR_HAVE_NATMC_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB
+# define ETHR_HAVE_NATMC_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB
+# define ETHR_HAVE_NATMC_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB
+# define ETHR_HAVE_NATMC_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB
+# define ETHR_HAVE_NATMC_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC
+# define ETHR_HAVE_NATMC_INC 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB
+# define ETHR_HAVE_NATMC_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB
+# define ETHR_HAVE_NATMC_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB
+# define ETHR_HAVE_NATMC_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB
+# define ETHR_HAVE_NATMC_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB
+# define ETHR_HAVE_NATMC_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC
+# define ETHR_HAVE_NATMC_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB
+# define ETHR_HAVE_NATMC_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB
+# define ETHR_HAVE_NATMC_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB
+# define ETHR_HAVE_NATMC_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB
+# define ETHR_HAVE_NATMC_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB
+# define ETHR_HAVE_NATMC_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD
+# define ETHR_HAVE_NATMC_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD
+# define ETHR_HAVE_NATMC_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC_OR_RETOLD_MB 1
+# endif
+#else
+# error "Invalid native atomic size"
#endif
+#if (!defined(ETHR_HAVE_NATMC_CMPXCHG) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_NATMC_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
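
[editor's note] The #undef/#define ladder above only renames per-width feature test macros (ETHR_HAVE_ETHR_NATIVE_ATOMIC64_* or ..._ATOMIC32_*) to the width neutral ETHR_HAVE_NATMC_* names used by the wrappers below. A native backend advertises an op simply by defining the corresponding macro next to its implementation; a hypothetical 64-bit backend fragment (not part of this file; the counter member name is illustrative) might read:

    #define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1

    static ETHR_INLINE ethr_sint64_t
    ethr_native_atomic64_cmpxchg_mb(ethr_native_atomic64_t *var,
                                    ethr_sint64_t new_val,
                                    ethr_sint64_t exp_val)
    {
        /* gcc builtin; fully ordered, returns the previous value */
        return __sync_val_compare_and_swap(&var->counter, exp_val, new_val);
    }

With that single definition present, the mapping above turns on ETHR_HAVE_NATMC_CMPXCHG_MB, and every wrapper below derives the remaining barrier variants from it.
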
+
+
/*
- * --- Pointer size atomics ---------------------------------------------------
+ * Read op used together with the cmpxchg() fallback when no native op is present.
*/
+#if defined(ETHR_HAVE_NATMC_READ)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the
+ * atomic's actual value returned from cmpxchg().
+ */
+#define ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR) \
+ ((ETHR_NAINT_T__) 0)
+#endif
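
[editor's note] The zero guess is safe because the fallback loop (next macro) never trusts it: a wrong guess costs exactly one failed cmpxchg, whose return value is the actual content and seeds the retry. Tracing the first two iterations when *var happens to hold 17 (a sketch):

    /* guess:  act__ = 0
     * pass 1: exp__ = 0;  new__ = op(0);
     *         act__ = cmpxchg(var, new__, 0);    fails, returns 17
     * pass 2: exp__ = 17; new__ = op(17);
     *         act__ = cmpxchg(var, new__, 17);   succeeds
     */
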
+
+/*
+ * Native cmpxchg() fallback used when no native op is present.
+ */
+#define ETHR_NATMC_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ ethr_sint_t AVAL; \
+ ETHR_NAINT_T__ new__, act__, exp__; \
+ act__ = ETHR_NATMC_CMPXCHG_FALLBACK_READ__(VAR); \
+ do { \
+ exp__ = act__; \
+ AVAL = (ethr_sint_t) act__; \
+ { OPS; } \
+ new__ = (ETHR_NAINT_T__) AVAL; \
+ act__ = CMPXCHG(VAR, new__, exp__); \
+ } while (__builtin_expect(act__ != exp__, 0)); \
+} while (0)
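
[editor's note] This macro is how every read-modify-write op below is built from a bare cmpxchg. What ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val) — the xchg() fallback used later in this file — expands to, roughly (res is declared by the enclosing wrapper):

    ethr_sint_t aval;
    ETHR_NAINT_T__ new__, act__, exp__;
    act__ = ETHR_NATMC_CMPXCHG_FALLBACK_READ__(var);   /* initial guess */
    do {
        exp__ = act__;               /* expect what we last observed */
        aval = (ethr_sint_t) act__;
        { res = aval; aval = val; }  /* the op: old value out, new value in */
        new__ = (ETHR_NAINT_T__) aval;
        act__ = ETHR_NATMC_FUNC__(cmpxchg)(var, new__, exp__);
    } while (__builtin_expect(act__ != exp__, 0));     /* lost a race; retry */
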
+
+
+
+/* --- addr() --- */
-static ETHR_INLINE ethr_sint_t *
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_addr)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t *ETHR_ATMC_FUNC__(addr)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
return (ethr_sint_t *) ETHR_NATMC_ADDR_FUNC__(var);
+
+}
+
+
+/* --- cmpxchg() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
#else
- return (ethr_sint_t *) var;
+#error "Missing implementation of ethr_atomic_cmpxchg()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_init)(ethr_atomic_t *var, ethr_sint_t i)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_rb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) i);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic_cmpxchg_rb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_set)(ethr_atomic_t *var, ethr_sint_t i)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_wb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) i);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic_cmpxchg_wb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_acqb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic_cmpxchg_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_relb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#else
+#error "Missing implementation of ethr_atomic_cmpxchg_relb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(cmpxchg_mb)(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var, (ETHR_NAINT_T__) val, (ETHR_NAINT_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_cmpxchg_mb()!"
#endif
+ return res;
}
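
[editor's note] Every wrapper in the rest of this file repeats the selection pattern visible in the six cmpxchg variants above: use an exact native match if one exists; otherwise a differently ordered native op patched up with explicit ETHR_MEMBAR() calls, trying a cheap weakly ordered op plus a small barrier before a plain op plus a full barrier. Schematically, for an acquire-barrier op (macro and function names hypothetical):

    #if defined(HAVE_OP_ACQB)        /* exact match: use as is       */
        res = op_acqb(var, val);
    #elif defined(HAVE_OP_RB)        /* read barrier op + fix-up     */
        res = op_rb(var, val);
        ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    #elif defined(HAVE_OP)           /* unordered op + full fix-up   */
        res = op(var, val);
        ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    #endif
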
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_add)(ethr_atomic_t *var, ethr_sint_t incr)
+
+/* --- xchg() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) incr);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += incr);
+#error "Missing implementation of ethr_atomic_xchg()!"
#endif
-}
-
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_add_read)(ethr_atomic_t *var, ethr_sint_t i)
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_rb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) i);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
+#error "Missing implementation of ethr_atomic_xchg_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += i; res = *var);
+#if defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#else
+#error "Missing implementation of ethr_atomic_xchg_wb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_xchg_acqb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_relb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(inc)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#error "Missing implementation of ethr_atomic_xchg_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(xchg_mb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(dec)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_XCHG_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_xchg_mb()!"
+#endif
+ return res;
+}
+
+
+/* --- set() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET)
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(set_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_SET_MB)
+ ETHR_NATMC_FUNC__(set_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_RELB)
+ ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC_FUNC__(set_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(xchg_mb)(var, val);
+#endif
+}
+
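+/*
+ * A note on the pattern above: each barrier variant of set() is
+ * composed from the "nearest" native operation. If the exact native
+ * variant exists it is used directly; otherwise another native variant
+ * is combined with whatever ETHR_MEMBAR() is still needed (none when
+ * the chosen variant is already stronger, as with _mb), and as a last
+ * resort the operation falls back on the corresponding xchg() variant.
+ * A minimal sketch of one such composition (sketch_set_relb is a
+ * hypothetical name, not generated API):
+ *
+ *   static ETHR_INLINE void
+ *   sketch_set_relb(ethr_atomic_t *var, ethr_sint_t val)
+ *   {
+ *       ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ *       ETHR_NATMC_FUNC__(set)(var, (ETHR_NAINT_T__) val);
+ *   }
+ */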
+
+/* --- init() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#else
+ ETHR_ATMC_FUNC__(set)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ ETHR_ATMC_FUNC__(set_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+#else
+ ETHR_ATMC_FUNC__(set_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC_FUNC__(set_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#else
+ ETHR_ATMC_FUNC__(set_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(init_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_INIT_MB)
+ ETHR_NATMC_FUNC__(init_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_RELB)
+ ETHR_NATMC_FUNC__(init_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(init_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(init)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC_FUNC__(set_mb)(var, val);
+#endif
+}
+
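+/*
+ * init() mirrors set() case for case; only the final fallback differs,
+ * delegating to the ethread-level set() with the same barrier suffix
+ * rather than to xchg():
+ *
+ *   ETHR_ATMC_FUNC__(set_relb)(var, val);    final fallback of init_relb
+ */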
+
+/* --- add_read() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic_add_read()!"
+#endif
+ return res;
+}
+
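+/*
+ * When no native add_return() variant is available, add_read() falls
+ * back on ETHR_NATMC_CMPXCHG_FALLBACK__, i.e. a compare-and-exchange
+ * retry loop: read the current value, compute old + val, and try to
+ * install the result; if another thread changed the variable in the
+ * meantime, the cmpxchg fails and the loop retries. A hand-written
+ * sketch of the same idea (hypothetical helper, not the macro's
+ * literal expansion):
+ *
+ *   static ETHR_INLINE ethr_sint_t
+ *   sketch_add_read(ethr_atomic_t *var, ethr_sint_t val)
+ *   {
+ *       ETHR_NAINT_T__ exp, act = ETHR_NATMC_FUNC__(read)(var);
+ *       do {
+ *           exp = act;
+ *           act = ETHR_NATMC_FUNC__(cmpxchg)(var,
+ *                                            exp + (ETHR_NAINT_T__) val,
+ *                                            exp);
+ *       } while (act != exp);
+ *       return (ethr_sint_t) (exp + (ETHR_NAINT_T__) val);
+ *   }
+ */
+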
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic_add_read_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic_add_read_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_add_read_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+#error "Missing implementation of ethr_atomic_add_read_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(add_read_mb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_ADD_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(add_return)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic_add_read_mb()!"
+#endif
+ return res;
+}
+
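+/*
+ * Note the fence placement in the _mb compositions above: a fully
+ * barriered read-modify-write is built from a leading
+ * ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad), which orders all earlier
+ * loads and stores before the operation's load, plus a trailing full
+ * ETHR_MEMBAR, which orders the operation before everything that
+ * follows. The leading fence can drop the *Store halves because the
+ * RMW's own store cannot take effect before its load.
+ */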
+
+/* --- read() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#if defined(ETHR_HAVE_NATMC_READ)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_rb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_rb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_wb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_wb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_acqb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#if defined(ETHR_HAVE_NATMC_READ_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_acqb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_relb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_relb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_band)(ethr_atomic_t *var,
- ethr_sint_t mask)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_mb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var,
- (ETHR_NAINT_T__) mask);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_READ_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#else
+ res = ETHR_ATMC_FUNC__(cmpxchg_mb)(var, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__, (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+#endif
+ return res;
+}
+
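+/*
+ * The last-resort fallback for read() is worth spelling out: with no
+ * native read at all, the value is fetched with a cmpxchg whose new
+ * and expected arguments are both the same "unusual" constant,
+ *
+ *   res = ETHR_ATMC_FUNC__(cmpxchg)(var,
+ *                                   (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__,
+ *                                   (ethr_sint_t) ETHR_UNUSUAL_SINT_VAL__);
+ *
+ * cmpxchg always returns the value found in the variable, so this acts
+ * as a plain read. In the unlikely event that the variable actually
+ * contains ETHR_UNUSUAL_SINT_VAL__, the exchange succeeds but writes
+ * back the very same value, so the result is still correct.
+ */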
+
+/* --- inc_read() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= mask);
+#if defined(ETHR_HAVE_NATMC_INC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read)(var, (ethr_sint_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_rb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) 1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_bor)(ethr_atomic_t *var,
- ethr_sint_t mask)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_wb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var,
- (ETHR_NAINT_T__) mask);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
#else
+ res = ETHR_ATMC_FUNC__(add_read_wb)(var, (ethr_sint_t) 1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_acqb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_acqb)(var, (ethr_sint_t) 1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_relb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= mask);
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_relb)(var, (ethr_sint_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(inc_read_mb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_INC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_mb)(var, (ethr_sint_t) 1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_xchg)(ethr_atomic_t *var, ethr_sint_t new)
+
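+/*
+ * inc_read() above and dec_read() below add nothing of their own: when
+ * no native inc_return()/dec_return() variant exists they simply
+ * delegate to add_read() with a constant argument,
+ *
+ *   res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) 1);
+ *   res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) -1);
+ *
+ * and thereby inherit its barrier handling and its cmpxchg fallback.
+ */
+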
+/* --- dec_read() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(xchg)(var,
- (ETHR_NAINT_T__) new);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
#else
+ res = ETHR_ATMC_FUNC__(add_read)(var, (ethr_sint_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_rb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = new);
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_rb)(var, (ethr_sint_t) -1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_wb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_wb)(var, (ethr_sint_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(ethr_atomic_t *var,
- ethr_sint_t new,
- ethr_sint_t exp)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_acqb)(ethr_atomic_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg)(var,
- (ETHR_NAINT_T__) new,
- (ETHR_NAINT_T__) exp);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+ res = ETHR_ATMC_FUNC__(add_read_acqb)(var, (ethr_sint_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_relb)(ethr_atomic_t *var)
+{
ethr_sint_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var,
- {
- res = *var;
- if (__builtin_expect(res == exp, 1))
- *var = new;
- });
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_relb)(var, (ethr_sint_t) -1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(dec_read_mb)(ethr_atomic_t *var)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_DEC_RETURN_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC_FUNC__(add_read_mb)(var, (ethr_sint_t) -1);
#endif
+ return res;
}
-/*
- * Important memory barrier requirements.
- *
- * The following atomic operations *must* supply a memory barrier of
- * at least the type specified by its suffix:
- * _acqb = acquire barrier
- * _relb = release barrier
- */
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_read_acqb)(ethr_atomic_t *var)
+/* --- add() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(read_acqb)(var);
+#if defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_read)(var);
+ (void) ETHR_ATMC_FUNC__(add_read)(var, val);
#endif
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read_acqb)(ethr_atomic_t *var)
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_rb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(inc_return_acqb)(var);
+#if defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_inc_read)(var);
+ (void) ETHR_ATMC_FUNC__(add_read_rb)(var, val);
#endif
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_set_relb)(ethr_atomic_t *var,
- ethr_sint_t val)
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_wb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC_FUNC__(set_relb)(var, (ETHR_NAINT_T__) val);
+#if defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic_set)(var, val);
+ (void) ETHR_ATMC_FUNC__(add_read_wb)(var, val);
#endif
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_relb)(ethr_atomic_t *var)
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_acqb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
+#if defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(add_read_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#else
+ (void) ETHR_ATMC_FUNC__(add_read_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(add_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+#if defined(ETHR_HAVE_NATMC_ADD_MB)
+ ETHR_NATMC_FUNC__(add_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_RELB)
+ ETHR_NATMC_FUNC__(add_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(add)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(add_read_mb)(var, val);
+#endif
+}
+
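+/*
+ * The void-returning ops -- add() here, inc() and dec() below -- reuse
+ * the value-returning fallbacks and discard the result:
+ *
+ *   (void) ETHR_ATMC_FUNC__(add_read_mb)(var, val);
+ *
+ * Where a native add/inc/dec exists it is used directly, avoiding the
+ * cost of producing the new value; otherwise correctness is simply
+ * borrowed from the *_read() implementation.
+ */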
+
+/* --- inc() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC)
+ ETHR_NATMC_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_rb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_NATMC_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_wb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_wb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_acqb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_NATMC_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_acqb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_relb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_relb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(inc_mb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_INC_MB)
+ ETHR_NATMC_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_RELB)
+ ETHR_NATMC_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(inc_read_mb)(var);
+#endif
+}
+
+
+/* --- dec() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_NATMC_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_rb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_NATMC_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_wb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_wb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_acqb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_NATMC_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_acqb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_relb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_RELB)
+ ETHR_NATMC_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_relb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC_FUNC__(dec_mb)(ethr_atomic_t *var)
+{
+#if defined(ETHR_HAVE_NATMC_DEC_MB)
+ ETHR_NATMC_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_RELB)
ETHR_NATMC_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC_FUNC__(dec_read_mb)(var);
+#endif
+}
+
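+/*
+ * NOTE (illustrative sketch; not part of the generated output): the
+ * dec operations above all follow the same fallback scheme, which
+ * the remaining ops in this file repeat. A native op with exactly
+ * the requested barrier semantics is preferred; otherwise a native
+ * op with different semantics is used, strengthened where needed by
+ * explicit ETHR_MEMBAR() calls; as a last resort the value-returning
+ * variant is called and its result discarded, e.g.:
+ *
+ *     (void) ETHR_ATMC_FUNC__(dec_read_mb)(var);
+ */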
+
+/* --- read_band() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic_read_band()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic_read_band_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec)(var);
+#error "Missing implementation of ethr_atomic_read_band_wb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read_relb)(ethr_atomic_t *var)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_acqb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_dec_read)(var);
+#error "Missing implementation of ethr_atomic_read_band_acqb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg_acqb)(ethr_atomic_t *var,
- ethr_sint_t new,
- ethr_sint_t exp)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_relb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_acqb)(var,
- (ETHR_NAINT_T__) new,
- (ETHR_NAINT_T__) exp);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(var, new, exp);
+#error "Missing implementation of ethr_atomic_read_band_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg_relb)(ethr_atomic_t *var,
- ethr_sint_t new,
- ethr_sint_t exp)
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_band_mb)(ethr_atomic_t *var, ethr_sint_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint_t) ETHR_NATMC_FUNC__(cmpxchg_relb)(var,
- (ETHR_NAINT_T__) new,
- (ETHR_NAINT_T__) exp);
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_AND_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(and_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic_cmpxchg)(var, new, exp);
+#error "Missing implementation of ethr_atomic_read_band_mb()!"
#endif
+ return res;
}
+
+/* --- read_bor() --- */
+
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic_read_bor()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_rb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_wb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_acqb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_relb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_relb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint_t ETHR_ATMC_FUNC__(read_bor_mb)(ethr_atomic_t *var, ethr_sint_t val)
+{
+ ethr_sint_t res;
+#if defined(ETHR_HAVE_NATMC_OR_RETOLD_MB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_mb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RELB)
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_relb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_acqb)(var, (ETHR_NAINT_T__) val);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_wb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold_rb)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint_t) ETHR_NATMC_FUNC__(or_retold)(var, (ETHR_NAINT_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_MB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RELB)
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC_CMPXCHG_FALLBACK__(ETHR_NATMC_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic_read_bor_mb()!"
+#endif
+ return res;
+}
+
+#endif /* ETHR_ATMC_INLINE__ */
+
+
+/* ---------- 32-bit atomic implementation ---------- */
+
+
+#ifdef ETHR_NEED_ATMC32_PROTOTYPES__
+ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_rb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_wb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_cmpxchg_mb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val);
+ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_xchg_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_set_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_init_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_add_read_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_rb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_wb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_relb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_mb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_rb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_wb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_relb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_inc_read_mb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_rb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_wb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_acqb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_dec_read_mb(ethr_atomic32_t *var);
+void ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_add_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+void ethr_atomic32_inc(ethr_atomic32_t *var);
+void ethr_atomic32_inc_rb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_wb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_acqb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_relb(ethr_atomic32_t *var);
+void ethr_atomic32_inc_mb(ethr_atomic32_t *var);
+void ethr_atomic32_dec(ethr_atomic32_t *var);
+void ethr_atomic32_dec_rb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_wb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_acqb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_relb(ethr_atomic32_t *var);
+void ethr_atomic32_dec_mb(ethr_atomic32_t *var);
+ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_band_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_rb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_wb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_acqb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_relb(ethr_atomic32_t *var, ethr_sint32_t val);
+ethr_sint32_t ethr_atomic32_read_bor_mb(ethr_atomic32_t *var, ethr_sint32_t val);
+#endif /* ETHR_NEED_ATMC32_PROTOTYPES__ */
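+
+/*
+ * NOTE (illustrative sketch; not part of the generated output): a
+ * minimal usage example of the 32-bit API declared above, e.g. for
+ * reference counting; free_object() is a hypothetical helper:
+ *
+ *     ethr_atomic32_t refc;
+ *     ethr_atomic32_init(&refc, 1);
+ *     ethr_atomic32_inc(&refc);                  /+ take a reference +/
+ *     if (ethr_atomic32_dec_read_mb(&refc) == 0) /+ drop a reference +/
+ *         free_object();                         /+ last one is gone +/
+ */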
+
+#if (defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS) \
+ && (defined(ETHR_ATMC32_INLINE__) || defined(ETHR_ATOMIC_IMPL__)))
+
+#if !defined(ETHR_NATMC32_BITS__)
+# error "Missing native atomic implementation"
+#elif ETHR_NATMC32_BITS__ == 64
+# undef ETHR_HAVE_NATMC32_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
+# define ETHR_HAVE_NATMC32_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
+# define ETHR_HAVE_NATMC32_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC32_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
+# define ETHR_HAVE_NATMC32_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG
+# define ETHR_HAVE_NATMC32_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB
+# define ETHR_HAVE_NATMC32_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB
+# define ETHR_HAVE_NATMC32_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB
+# define ETHR_HAVE_NATMC32_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB
+# define ETHR_HAVE_NATMC32_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB
+# define ETHR_HAVE_NATMC32_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
+# define ETHR_HAVE_NATMC32_SET 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
+# define ETHR_HAVE_NATMC32_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
+# define ETHR_HAVE_NATMC32_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
+# define ETHR_HAVE_NATMC32_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
+# define ETHR_HAVE_NATMC32_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
+# define ETHR_HAVE_NATMC32_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
+# define ETHR_HAVE_NATMC32_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
+# define ETHR_HAVE_NATMC32_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
+# define ETHR_HAVE_NATMC32_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
+# define ETHR_HAVE_NATMC32_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
+# define ETHR_HAVE_NATMC32_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
+# define ETHR_HAVE_NATMC32_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN
+# define ETHR_HAVE_NATMC32_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
+# define ETHR_HAVE_NATMC32_READ 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
+# define ETHR_HAVE_NATMC32_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
+# define ETHR_HAVE_NATMC32_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
+# define ETHR_HAVE_NATMC32_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
+# define ETHR_HAVE_NATMC32_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
+# define ETHR_HAVE_NATMC32_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN
+# define ETHR_HAVE_NATMC32_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB
+# define ETHR_HAVE_NATMC32_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB
+# define ETHR_HAVE_NATMC32_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN
+# define ETHR_HAVE_NATMC32_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD
+# define ETHR_HAVE_NATMC32_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB
+# define ETHR_HAVE_NATMC32_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB
+# define ETHR_HAVE_NATMC32_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB
+# define ETHR_HAVE_NATMC32_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB
+# define ETHR_HAVE_NATMC32_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB
+# define ETHR_HAVE_NATMC32_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC
+# define ETHR_HAVE_NATMC32_INC 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB
+# define ETHR_HAVE_NATMC32_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB
+# define ETHR_HAVE_NATMC32_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB
+# define ETHR_HAVE_NATMC32_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB
+# define ETHR_HAVE_NATMC32_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB
+# define ETHR_HAVE_NATMC32_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC
+# define ETHR_HAVE_NATMC32_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB
+# define ETHR_HAVE_NATMC32_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB
+# define ETHR_HAVE_NATMC32_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB
+# define ETHR_HAVE_NATMC32_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB
+# define ETHR_HAVE_NATMC32_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB
+# define ETHR_HAVE_NATMC32_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD
+# define ETHR_HAVE_NATMC32_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD
+# define ETHR_HAVE_NATMC32_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_MB 1
+# endif
+#elif ETHR_NATMC32_BITS__ == 32
+# undef ETHR_HAVE_NATMC32_CMPXCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG
+# define ETHR_HAVE_NATMC32_CMPXCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB
+# define ETHR_HAVE_NATMC32_CMPXCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB
+# define ETHR_HAVE_NATMC32_CMPXCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB
+# define ETHR_HAVE_NATMC32_CMPXCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_CMPXCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB
+# define ETHR_HAVE_NATMC32_CMPXCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG
+# define ETHR_HAVE_NATMC32_XCHG 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB
+# define ETHR_HAVE_NATMC32_XCHG_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB
+# define ETHR_HAVE_NATMC32_XCHG_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB
+# define ETHR_HAVE_NATMC32_XCHG_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB
+# define ETHR_HAVE_NATMC32_XCHG_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_XCHG_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB
+# define ETHR_HAVE_NATMC32_XCHG_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET
+# define ETHR_HAVE_NATMC32_SET 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB
+# define ETHR_HAVE_NATMC32_SET_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB
+# define ETHR_HAVE_NATMC32_SET_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB
+# define ETHR_HAVE_NATMC32_SET_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB
+# define ETHR_HAVE_NATMC32_SET_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_SET_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB
+# define ETHR_HAVE_NATMC32_SET_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT
+# define ETHR_HAVE_NATMC32_INIT 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB
+# define ETHR_HAVE_NATMC32_INIT_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB
+# define ETHR_HAVE_NATMC32_INIT_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB
+# define ETHR_HAVE_NATMC32_INIT_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB
+# define ETHR_HAVE_NATMC32_INIT_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INIT_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB
+# define ETHR_HAVE_NATMC32_INIT_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN
+# define ETHR_HAVE_NATMC32_ADD_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB
+# define ETHR_HAVE_NATMC32_ADD_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ
+# define ETHR_HAVE_NATMC32_READ 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB
+# define ETHR_HAVE_NATMC32_READ_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB
+# define ETHR_HAVE_NATMC32_READ_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB
+# define ETHR_HAVE_NATMC32_READ_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB
+# define ETHR_HAVE_NATMC32_READ_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_READ_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB
+# define ETHR_HAVE_NATMC32_READ_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN
+# define ETHR_HAVE_NATMC32_INC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB
+# define ETHR_HAVE_NATMC32_INC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_INC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_INC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB
+# define ETHR_HAVE_NATMC32_INC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN
+# define ETHR_HAVE_NATMC32_DEC_RETURN 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RETURN_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB
+# define ETHR_HAVE_NATMC32_DEC_RETURN_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD
+# define ETHR_HAVE_NATMC32_ADD 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB
+# define ETHR_HAVE_NATMC32_ADD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB
+# define ETHR_HAVE_NATMC32_ADD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB
+# define ETHR_HAVE_NATMC32_ADD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB
+# define ETHR_HAVE_NATMC32_ADD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_ADD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB
+# define ETHR_HAVE_NATMC32_ADD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC
+# define ETHR_HAVE_NATMC32_INC 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB
+# define ETHR_HAVE_NATMC32_INC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB
+# define ETHR_HAVE_NATMC32_INC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB
+# define ETHR_HAVE_NATMC32_INC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB
+# define ETHR_HAVE_NATMC32_INC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_INC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB
+# define ETHR_HAVE_NATMC32_INC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC
+# define ETHR_HAVE_NATMC32_DEC 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB
+# define ETHR_HAVE_NATMC32_DEC_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB
+# define ETHR_HAVE_NATMC32_DEC_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB
+# define ETHR_HAVE_NATMC32_DEC_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB
+# define ETHR_HAVE_NATMC32_DEC_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_DEC_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB
+# define ETHR_HAVE_NATMC32_DEC_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD
+# define ETHR_HAVE_NATMC32_AND_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_AND_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB
+# define ETHR_HAVE_NATMC32_AND_RETOLD_MB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD
+# define ETHR_HAVE_NATMC32_OR_RETOLD 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_WB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_WB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_ACQB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_ACQB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_RELB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_RELB 1
+# endif
+# undef ETHR_HAVE_NATMC32_OR_RETOLD_MB
+# ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB
+# define ETHR_HAVE_NATMC32_OR_RETOLD_MB 1
+# endif
+#else
+# error "Invalid native atomic size"
+#endif
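+
+/*
+ * NOTE (explanatory comment; not part of the generated output): the
+ * block above derives each ETHR_HAVE_NATMC32_* flag from the native
+ * implementation actually present. With 32-bit native atomics the
+ * ETHR_HAVE_ETHR_NATIVE_ATOMIC32_* flags are used; when only 64-bit
+ * native atomics exist, the 32-bit API is mapped onto them via the
+ * ETHR_HAVE_ETHR_NATIVE_ATOMIC64_* flags.
+ */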
+
+#if (!defined(ETHR_HAVE_NATMC32_CMPXCHG) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_RB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_WB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB) \
+ && !defined(ETHR_HAVE_NATMC32_CMPXCHG_MB))
+# error "No native cmpxchg() op available"
+#endif
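+
+/*
+ * NOTE (explanatory comment; not part of the generated output):
+ * cmpxchg() is the only native op that is strictly required; every
+ * other 32-bit op can be synthesized from it using the
+ * ETHR_NATMC32_CMPXCHG_FALLBACK__ loop defined below.
+ */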
+
+
/*
- * --- 32-bit atomics ---------------------------------------------------------
+ * Read op used together with the cmpxchg() fallback when no
+ * native read op is present.
*/
+#if defined(ETHR_HAVE_NATMC32_READ)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_rb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_wb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_acqb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_relb)(VAR)
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ETHR_NATMC32_FUNC__(read_mb)(VAR)
+#else
+/*
+ * We have no native read() op; guess zero and then use the
+ * atomic's actual value returned from cmpxchg(). A wrong guess
+ * only costs one extra pass through the retry loop, since a
+ * failing cmpxchg() returns the actual value.
+ */
+#define ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR) \
+ ((ETHR_NAINT32_T__) 0)
+#endif
+
+/*
+ * Fallback built on native cmpxchg(), used when no native op is present.
+ */
+#define ETHR_NATMC32_CMPXCHG_FALLBACK__(CMPXCHG, VAR, AVAL, OPS) \
+do { \
+ ethr_sint32_t AVAL; \
+ ETHR_NAINT32_T__ new__, act__, exp__; \
+ act__ = ETHR_NATMC32_CMPXCHG_FALLBACK_READ__(VAR); \
+ do { \
+ exp__ = act__; \
+ AVAL = (ethr_sint32_t) act__; \
+ { OPS; } \
+ new__ = (ETHR_NAINT32_T__) AVAL; \
+ act__ = CMPXCHG(VAR, new__, exp__); \
+ } while (__builtin_expect(act__ != exp__, 0)); \
+} while (0)
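+
+/*
+ * Illustrative sketch (disabled): what one expansion of the
+ * fallback macro amounts to for an add-and-read operation,
+ * assuming a plain native cmpxchg() is available. The function
+ * name below is hypothetical, for illustration only; the real
+ * users of the macro follow further down in this file.
+ */
+#if 0
+static ETHR_INLINE ethr_sint32_t
+example_add_read_via_cmpxchg__(ethr_atomic32_t *var, ethr_sint32_t incr)
+{
+    ethr_sint32_t res;
+    /* aval is declared inside the macro; the OPS argument runs
+       with aval holding the currently observed value */
+    ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg),
+                                    var, aval, aval += incr; res = aval);
+    return res;
+}
+#endif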
-static ETHR_INLINE ethr_sint32_t *
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_addr)(ethr_atomic32_t *var)
+
+
+/* --- addr() --- */
+
+static ETHR_INLINE ethr_sint32_t *ETHR_ATMC32_FUNC__(addr)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return ethr_native_atomic32_addr(var);
+ return (ethr_sint32_t *) ETHR_NATMC32_ADDR_FUNC__(var);
+
+}
+
+
+/* --- cmpxchg() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
#else
- return (ethr_sint32_t *) var;
+#error "Missing implementation of ethr_atomic32_cmpxchg()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_init)(ethr_atomic32_t *var,
- ethr_sint32_t i)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_rb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) i);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic32_cmpxchg_rb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set)(ethr_atomic32_t *var, ethr_sint32_t i)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_wb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) i);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = i);
+#error "Missing implementation of ethr_atomic32_cmpxchg_wb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_acqb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic32_cmpxchg_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_relb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#else
+#error "Missing implementation of ethr_atomic32_cmpxchg_relb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(cmpxchg_mb)(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_mb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_relb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_wb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg_rb)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var, (ETHR_NAINT32_T__) val, (ETHR_NAINT32_T__) old_val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_cmpxchg_mb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_add)(ethr_atomic32_t *var,
- ethr_sint32_t incr)
+
+/* --- xchg() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) incr);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += incr);
+#error "Missing implementation of ethr_atomic32_xchg()!"
#endif
-}
-
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_add_read)(ethr_atomic32_t *var,
- ethr_sint32_t i)
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) i);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
+#error "Missing implementation of ethr_atomic32_xchg_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += i; res = *var);
+#if defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#else
+#error "Missing implementation of ethr_atomic32_xchg_wb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_xchg_acqb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(inc)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
+#error "Missing implementation of ethr_atomic32_xchg_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(xchg_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(dec)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_XCHG_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_XCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval = val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval = val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_xchg_mb()!"
+#endif
+ return res;
+}
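+
+/*
+ * Pattern note for the barrier variants above: each variant picks
+ * the closest native op available and patches in only the missing
+ * ordering via ETHR_MEMBAR(). For example, an _rb op built on a
+ * plain native xchg() becomes
+ *
+ *     res = xchg(var, val);
+ *     ETHR_MEMBAR(ETHR_LoadLoad);
+ *
+ * while a variant built on a _mb native needs no extra barrier,
+ * since a full memory barrier implies all weaker orderings.
+ */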
+
+
+/* --- set() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg)(var, val);
+#endif
+}
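+
+/*
+ * With no native set() available, the store is done with xchg()
+ * and the returned old value is discarded; the exchange still
+ * performs the store atomically.
+ */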
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(set_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_SET_MB)
+ ETHR_NATMC32_FUNC__(set_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_RELB)
+ ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_ACQB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_SET_WB)
+ ETHR_MEMBAR(ETHR_LoadStore);
+ ETHR_NATMC32_FUNC__(set_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET_RB)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_SET)
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(set)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(xchg_mb)(var, val);
+#endif
+}
+
+
+/* --- init() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
#else
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
+ ETHR_ATMC32_FUNC__(set)(var, val);
#endif
}
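+
+/*
+ * init() falls back to set() when no native init op exists; a
+ * set() provides at least the guarantees init() needs, though a
+ * dedicated native init may be cheaper.
+ */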
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read)(ethr_atomic32_t *var)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#if defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
+ ETHR_ATMC32_FUNC__(set_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+#else
+ ETHR_ATMC32_FUNC__(set_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC32_FUNC__(set_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#else
+ ETHR_ATMC32_FUNC__(set_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(init_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_INIT_MB)
+ ETHR_NATMC32_FUNC__(init_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RELB)
+ ETHR_NATMC32_FUNC__(init_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_INIT_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(init_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INIT)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(init)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ ETHR_ATMC32_FUNC__(set_mb)(var, val);
+#endif
+}
+
+
+/* --- add_read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic32_add_read()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic32_add_read_rb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
#else
+#error "Missing implementation of ethr_atomic32_add_read_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_add_read_acqb()!"
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#else
+#error "Missing implementation of ethr_atomic32_add_read_relb()!"
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_band)(ethr_atomic32_t *var,
- ethr_sint32_t mask)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(add_read_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) mask);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_ADD_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(add_return)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, aval += val; res = aval);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, aval += val; res = aval);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
+#error "Missing implementation of ethr_atomic32_add_read_mb()!"
+#endif
+ return res;
+}
+
+
+/* --- read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= mask);
+#if defined(ETHR_HAVE_NATMC32_READ)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
return res;
+}
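+
+/*
+ * Fallback note: with no native read op at all, read() above is
+ * emulated as cmpxchg(var, X, X) with X an "unusual" value
+ * (ETHR_UNUSUAL_SINT32_VAL__). The cmpxchg returns the value
+ * found in the variable; in the unlikely case that it equals X,
+ * the "successful" exchange rewrites the same value, so the
+ * operation still behaves as a plain read.
+ */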
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_rb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_rb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_bor)(ethr_atomic32_t *var,
- ethr_sint32_t mask)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_wb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return
- (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var,
- (ETHR_NAINT32_T__) mask);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_wb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_acqb)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= mask);
+#if defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_acqb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_relb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_relb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_xchg)(ethr_atomic32_t *var,
- ethr_sint32_t new)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_mb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(xchg)(var,
- (ETHR_NAINT32_T__) new);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_READ_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_READ_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore);
+#elif defined(ETHR_HAVE_NATMC32_READ)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(read)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#else
+ res = ETHR_ATMC32_FUNC__(cmpxchg_mb)(var, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__, (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__);
+#endif
+ return res;
+}
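
Every operation in this generated file follows the selection pattern visible above: prefer an exact native match, otherwise pad the closest weaker native primitive with an explicit ETHR_MEMBAR, and as a last resort read through a cmpxchg against ETHR_UNUSUAL_SINT32_VAL__. A minimal sketch of that last trick, assuming ethr_atomic32_cmpxchg_rb is the generated entry point (this is not code from the patch):

/* A cmpxchg with an "unusual" expected value degenerates into an
 * atomic read: if *var differs from the unusual value (the common
 * case) the cmpxchg fails and returns the current value; if it
 * happens to match, the same value is written back, so *var is
 * unchanged either way. The chosen cmpxchg variant supplies the
 * required barrier. */
static ethr_sint32_t
sketch_read_rb(ethr_atomic32_t *var)
{
    ethr_sint32_t unusual = (ethr_sint32_t) ETHR_UNUSUAL_SINT32_VAL__;
    return ethr_atomic32_cmpxchg_rb(var, unusual, unusual);
}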
+
+
+/* --- inc_read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = new);
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read)(var, (ethr_sint32_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_rb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_rb)(var, (ethr_sint32_t) 1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_wb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(cmpxchg)(var,
- (ETHR_NAINT32_T__) new,
- (ETHR_NAINT32_T__) exp);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
#else
+ res = ETHR_ATMC32_FUNC__(add_read_wb)(var, (ethr_sint32_t) 1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_acqb)(ethr_atomic32_t *var)
+{
ethr_sint32_t res;
- ETHR_ATOMIC_OP_FALLBACK_IMPL__(var,
- {
- res = *var;
- if (__builtin_expect(res == exp, 1))
- *var = new;
- });
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_acqb)(var, (ethr_sint32_t) 1);
+#endif
return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_relb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_relb)(var, (ethr_sint32_t) 1);
#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(inc_read_mb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_INC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_mb)(var, (ethr_sint32_t) 1);
+#endif
+ return res;
}
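
When no native inc_return variant exists at all, inc_read devolves to add_read(var, 1), as the final #else branch of each function above shows. A hedged usage sketch (the identifiers below are illustrative, not from this patch):

/* Unique-id allocation is a typical inc_read caller; no particular
 * ordering is needed, so the cheapest mapping above is enough. */
static ethr_atomic32_t next_id; /* assumed set up elsewhere with
                                 * ethr_atomic32_init(&next_id, 0) */

static ethr_sint32_t
sketch_next_id(void)
{
    return ethr_atomic32_inc_read(&next_id);
}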
-/*
- * Important memory barrier requirements.
- *
- * The following atomic operations *must* supply a memory barrier of
- * at least the type specified by its suffix:
- * _acqb = acquire barrier
- * _relb = release barrier
- */
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read_acqb)(ethr_atomic32_t *var)
+/* --- dec_read() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(read_acqb)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_read)(var);
+ res = ETHR_ATMC32_FUNC__(add_read)(var, (ethr_sint32_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read_acqb)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_rb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(inc_return_acqb)(var);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_inc_read)(var);
+ res = ETHR_ATMC32_FUNC__(add_read_rb)(var, (ethr_sint32_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set_relb)(ethr_atomic32_t *var,
- ethr_sint32_t val)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_wb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- ETHR_NATMC32_FUNC__(set_relb)(var, (ETHR_NAINT32_T__) val);
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_wb)(var, (ethr_sint32_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_acqb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic32_set)(var, val);
+ res = ETHR_ATMC32_FUNC__(add_read_acqb)(var, (ethr_sint32_t) -1);
#endif
+ return res;
}
-static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_relb)(ethr_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_relb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_relb)(var, (ethr_sint32_t) -1);
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(dec_read_mb)(ethr_atomic32_t *var)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_DEC_RETURN_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RETURN)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ res = ETHR_ATMC32_FUNC__(add_read_mb)(var, (ethr_sint32_t) -1);
+#endif
+ return res;
+}
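
The barrier-qualified dec_read variants exist largely for reference counting, where dropping a reference needs release semantics and the thread that observes zero needs acquire semantics before tearing the object down. A sketch under those assumptions (my_obj_t and my_obj_destroy are hypothetical):

typedef struct { ethr_atomic32_t refc; /* ... */ } my_obj_t;
void my_obj_destroy(my_obj_t *); /* hypothetical */

static void
sketch_drop_ref(my_obj_t *obj)
{
    /* release: this thread's prior writes to *obj are ordered
     * before the decrement */
    if (ethr_atomic32_dec_read_relb(&obj->refc) == 0) {
        /* acquire: order the zero observation before destruction;
         * ETHR_LoadLoad|ETHR_LoadStore matches the acquire
         * composition used throughout this file */
        ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
        my_obj_destroy(obj);
    }
}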
+
+
+/* --- add() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_rb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_wb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_acqb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_relb)(var, val);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(add_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+#if defined(ETHR_HAVE_NATMC32_ADD_MB)
+ ETHR_NATMC32_FUNC__(add_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RELB)
+ ETHR_NATMC32_FUNC__(add_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_ADD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_ADD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(add)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(add_read_mb)(var, val);
+#endif
+}
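
The void-returning add() family deliberately casts the add_read fallback to (void): when no native fire-and-forget add exists, the operation is still a full read-modify-write, merely with the old value discarded. A typical statistics-counter use (sketch; the counter name is illustrative):

static ethr_atomic32_t bytes_sent; /* assumed initialized to 0 */

static void
sketch_account_send(ethr_sint32_t nbytes)
{
    /* a pure statistics counter needs no ordering, so the plain
     * variant is the right choice */
    ethr_atomic32_add(&bytes_sent, nbytes);
}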
+
+
+/* --- inc() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_NATMC32_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_rb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_NATMC32_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_wb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_wb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_acqb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_NATMC32_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_acqb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_relb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_relb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(inc_mb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_INC_MB)
+ ETHR_NATMC32_FUNC__(inc_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_RELB)
+ ETHR_NATMC32_FUNC__(inc_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_INC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_INC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(inc)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+ (void) ETHR_ATMC32_FUNC__(inc_read_mb)(var);
+#endif
+}
+
+
+/* --- dec() --- */
+
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
+#if defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_NATMC32_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#else
+ (void) ETHR_ATMC32_FUNC__(dec_read)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_rb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_NATMC32_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+ (void) ETHR_ATMC32_FUNC__(dec_read_rb)(var);
+#endif
+}
+
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_wb)(ethr_atomic32_t *var)
+{
+#if defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
ETHR_NATMC32_FUNC__(dec_relb)(var);
#else
- ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec)(var);
+ (void) ETHR_ATMC32_FUNC__(dec_read_wb)(var);
#endif
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read_relb)(ethr_atomic32_t *var)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_acqb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t) ETHR_NATMC32_FUNC__(dec_return_relb)(var);
+#if defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_NATMC32_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_dec_read)(var);
+ (void) ETHR_ATMC32_FUNC__(dec_read_acqb)(var);
#endif
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg_acqb)(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_relb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(cmpxchg_acqb)(var,
- (ETHR_NAINT32_T__) new,
- (ETHR_NAINT32_T__) exp);
+#if defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(var, new, exp);
+ (void) ETHR_ATMC32_FUNC__(dec_read_relb)(var);
#endif
}
-static ETHR_INLINE ethr_sint32_t
-ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg_relb)(ethr_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
+static ETHR_INLINE void ETHR_ATMC32_FUNC__(dec_mb)(ethr_atomic32_t *var)
{
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
- return (ethr_sint32_t)
- ETHR_NATMC32_FUNC__(cmpxchg_relb)(var,
- (ETHR_NAINT32_T__) new,
- (ETHR_NAINT32_T__) exp);
+#if defined(ETHR_HAVE_NATMC32_DEC_MB)
+ ETHR_NATMC32_FUNC__(dec_mb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RELB)
+ ETHR_NATMC32_FUNC__(dec_relb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_acqb)(var);
+#elif defined(ETHR_HAVE_NATMC32_DEC_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_wb)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec_rb)(var);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_DEC)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_FUNC__(dec)(var);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
- return ETHR_INLINE_FUNC_NAME_(ethr_atomic32_cmpxchg)(var, new, exp);
+ (void) ETHR_ATMC32_FUNC__(dec_read_mb)(var);
#endif
}
-#endif /* ETHR_TRY_INLINE_FUNCS */
-#undef ETHR_NAINT_T__
-#undef ETHR_NATMC_FUNC__
-#undef ETHR_NATMC_ADDR_FUNC__
-#undef ETHR_NAINT32_T__
-#undef ETHR_NATMC32_FUNC__
+/* --- read_band() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_band()!"
+#endif
+ return res;
+}
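
read_band() has no arithmetic fallback of its own: when only a native cmpxchg exists, ETHR_NATMC32_CMPXCHG_FALLBACK__ applies the supplied statements ("res = aval; aval &= val") to a snapshot and retries until the cmpxchg commits the modified value. The macro is defined earlier in this generated file; the following is only a plausible sketch of its shape, not the actual expansion:

/* Sketch of a cmpxchg retry loop equivalent to what the fallback
 * macro performs (names are illustrative): */
static ethr_sint32_t
sketch_read_band(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res, aval, exp, act;
    act = ethr_atomic32_read(var);
    do {
        exp = act;               /* value we think is current */
        res = exp;               /* old value to return */
        aval = exp & val;        /* modification to commit */
        act = ethr_atomic32_cmpxchg(var, aval, exp);
    } while (act != exp);        /* lost a race; retry on new value */
    return res;
}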
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_relb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_band_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_AND_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_AND_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(and_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval &= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval &= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_band_mb()!"
+#endif
+ return res;
+}
+
+
+/* --- read_bor() --- */
+
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor()!"
+#endif
+ return res;
+}
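
read_bor() pairs naturally with read_band() for flag words: bor sets bits and returns the previous mask, band clears them. A sketch (the flag constant and helpers are illustrative):

#define SKETCH_FLG_ACTIVE ((ethr_sint32_t) (1 << 0)) /* illustrative */

static int
sketch_set_active(ethr_atomic32_t *flags)
{
    /* nonzero result means the flag was already set */
    return (int) (ethr_atomic32_read_bor(flags, SKETCH_FLG_ACTIVE)
                  & SKETCH_FLG_ACTIVE);
}

static void
sketch_clear_active(ethr_atomic32_t *flags)
{
    (void) ethr_atomic32_read_band(flags, ~SKETCH_FLG_ACTIVE);
}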
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_rb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_rb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_wb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_MEMBAR(ETHR_StoreStore);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_wb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_acqb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_acqb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_relb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_relb()!"
+#endif
+ return res;
+}
+
+static ETHR_INLINE ethr_sint32_t ETHR_ATMC32_FUNC__(read_bor_mb)(ethr_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+#if defined(ETHR_HAVE_NATMC32_OR_RETOLD_MB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_mb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RELB)
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_relb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_acqb)(var, (ETHR_NAINT32_T__) val);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_wb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold_rb)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_OR_RETOLD)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ res = (ethr_sint32_t) ETHR_NATMC32_FUNC__(or_retold)(var, (ETHR_NAINT32_T__) val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_MB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_mb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RELB)
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_relb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_ACQB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_acqb), var, aval, res = aval; aval |= val);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_WB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_wb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG_RB)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg_rb), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#elif defined(ETHR_HAVE_NATMC32_CMPXCHG)
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
+ ETHR_NATMC32_CMPXCHG_FALLBACK__(ETHR_NATMC32_FUNC__(cmpxchg), var, aval, res = aval; aval |= val);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
+#else
+#error "Missing implementation of ethr_atomic32_read_bor_mb()!"
#endif
+ return res;
+}
+
+#endif /* ETHR_ATMC32_INLINE__ */
+
+#endif /* ETHR_ATOMICS_H__ */
diff --git a/erts/include/internal/ethr_internal.h b/erts/include/internal/ethr_internal.h
index e9c3daf783..c9b1db5b46 100644
--- a/erts/include/internal/ethr_internal.h
+++ b/erts/include/internal/ethr_internal.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -63,5 +63,9 @@ int ethr_late_init_common__(ethr_late_init_data *lid);
void ethr_run_exit_handlers__(void);
void ethr_ts_event_destructor__(void *vtsep);
+#if defined(ETHR_X86_RUNTIME_CONF__)
+int ethr_x86_have_cpuid__(void);
+void ethr_x86_cpuid__(int *eax, int *ebx, int *ecx, int *edx);
+#endif
#endif
diff --git a/erts/include/internal/ethr_mutex.h b/erts/include/internal/ethr_mutex.h
index fadaf1e2a4..a0685ea3c0 100644
--- a/erts/include/internal/ethr_mutex.h
+++ b/erts/include/internal/ethr_mutex.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -54,6 +54,10 @@
# endif
#endif
+#ifndef ETHR_INLINE_MTX_FUNC_NAME_
+# define ETHR_INLINE_MTX_FUNC_NAME_(X) X
+#endif
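+
+/* ETHR_INLINE_MTX_FUNC_NAME_() allows the inline lock operations
+ * below to be instantiated under alternative names (compare
+ * ETHR_INLINE_FUNC_NAME_); by default the plain names are used. */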
+
#if defined(ETHR_USE_OWN_RWMTX_IMPL__) || defined(ETHR_USE_OWN_MTX_IMPL__)
#ifdef ETHR_DEBUG
@@ -505,7 +509,7 @@ void ethr_mutex_lock_wait__(ethr_mutex *, ethr_sint32_t);
void ethr_mutex_unlock_wake__(ethr_mutex *, ethr_sint32_t);
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
{
ethr_sint32_t act;
int res;
@@ -529,7 +533,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
{
ethr_sint32_t act;
ETHR_MTX_HARD_DEBUG_FENCE_CHK(mtx);
@@ -549,7 +553,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
{
ethr_sint32_t act;
ETHR_COMPILER_BARRIER;
@@ -574,7 +578,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_MUTEX_IMPL__)
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
{
int res;
res = pthread_mutex_trylock(&mtx->pt_mtx);
@@ -584,7 +588,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_trylock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
{
int res = pthread_mutex_lock(&mtx->pt_mtx);
if (res != 0)
@@ -592,7 +596,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_lock)(ethr_mutex *mtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
{
int res = pthread_mutex_unlock(&mtx->pt_mtx);
if (res != 0)
@@ -615,7 +619,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_mutex_unlock)(ethr_mutex *mtx)
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_MUTEX_IMPL__)
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_tryrdlock(&rwmtx->pt_rwlock);
if (res != 0 && res != EBUSY)
@@ -624,7 +628,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_rdlock(&rwmtx->pt_rwlock);
if (res != 0)
@@ -632,7 +636,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_unlock(&rwmtx->pt_rwlock);
if (res != 0)
@@ -640,7 +644,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_runlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE int
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_trywrlock(&rwmtx->pt_rwlock);
if (res != 0 && res != EBUSY)
@@ -649,7 +653,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_tryrwlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_wrlock(&rwmtx->pt_rwlock);
if (res != 0)
@@ -657,7 +661,7 @@ ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwlock)(ethr_rwmutex *rwmtx)
}
static ETHR_INLINE void
-ETHR_INLINE_FUNC_NAME_(ethr_rwmutex_rwunlock)(ethr_rwmutex *rwmtx)
+ETHR_INLINE_MTX_FUNC_NAME_(ethr_rwmutex_rwunlock)(ethr_rwmutex *rwmtx)
{
int res = pthread_rwlock_unlock(&rwmtx->pt_rwlock);
if (res != 0)
diff --git a/erts/include/internal/ethr_optimized_fallbacks.h b/erts/include/internal/ethr_optimized_fallbacks.h
index 8e04692856..45399d18e6 100644
--- a/erts/include/internal/ethr_optimized_fallbacks.h
+++ b/erts/include/internal/ethr_optimized_fallbacks.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -18,152 +18,172 @@
*/
/*
- * Description: "Optimized" fallbacks used when native ops are missing
+ * Description: Optimized fallbacks used when native ops are missing
* Author: Rickard Green
*/
#ifndef ETHR_OPTIMIZED_FALLBACKS_H__
#define ETHR_OPTIMIZED_FALLBACKS_H__
-#ifdef ETHR_HAVE_NATIVE_ATOMICS
-#define ETHR_HAVE_OPTIMIZED_ATOMIC_OPS 1
+#if defined(ETHR_HAVE_NATIVE_SPINLOCKS)
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+
+static ETHR_INLINE int
+ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock)
+{
+ return 0;
+}
+
#endif
-#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
-#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1
#elif defined(ETHR_HAVE_PTHREAD_SPIN_LOCK)
-/* --- Optimized spinlocks using pthread spinlocks -------------------------- */
-#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1
+/* --- Native spinlocks using pthread spinlocks -------------------------- */
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+
+#define ETHR_NATIVE_SPINLOCK_IMPL "pthread"
-typedef pthread_spinlock_t ethr_opt_spinlock_t;
+typedef pthread_spinlock_t ethr_native_spinlock_t;
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
-static ETHR_INLINE int
-ethr_opt_spinlock_init(ethr_opt_spinlock_t *lock)
+static ETHR_INLINE void
+ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
{
- return pthread_spin_init((pthread_spinlock_t *) lock, 0);
+ int err = pthread_spin_init((pthread_spinlock_t *) lock, 0);
+ if (err)
+ ETHR_FATAL_ERROR__(err);
}
static ETHR_INLINE int
-ethr_opt_spinlock_destroy(ethr_opt_spinlock_t *lock)
+ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock)
{
return pthread_spin_destroy((pthread_spinlock_t *) lock);
}
-
-static ETHR_INLINE int
-ethr_opt_spin_unlock(ethr_opt_spinlock_t *lock)
+static ETHR_INLINE void
+ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- return pthread_spin_unlock((pthread_spinlock_t *) lock);
+ int err = pthread_spin_unlock((pthread_spinlock_t *) lock);
+ if (err)
+ ETHR_FATAL_ERROR__(err);
}
-static ETHR_INLINE int
-ethr_opt_spin_lock(ethr_opt_spinlock_t *lock)
+static ETHR_INLINE void
+ethr_native_spin_lock(ethr_native_spinlock_t *lock)
{
- return pthread_spin_lock((pthread_spinlock_t *) lock);
+ int err = pthread_spin_lock((pthread_spinlock_t *) lock);
+ if (err)
+ ETHR_FATAL_ERROR__(err);
}
#endif
-#elif defined(ETHR_HAVE_NATIVE_ATOMICS)
+#elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
/* --- Native spinlocks using native atomics -------------------------------- */
#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_OPTIMIZED_SPINLOCKS 1
-
-#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
-typedef ethr_native_atomic32_t ethr_native_spinlock_t;
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
-typedef ethr_native_atomic64_t ethr_native_spinlock_t;
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#else
-# error "Missing native atomic implementation"
-#endif
+
+#define ETHR_NATIVE_SPINLOCK_IMPL "native-atomics"
+
+typedef ethr_atomic32_t ethr_native_spinlock_t;
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+#undef ETHR_NSPN_AOP__
+#define ETHR_NSPN_AOP__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+
static ETHR_INLINE void
ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
{
- ETHR_NATMC_FUNC__(init)(lock, 0);
+ ETHR_NSPN_AOP__(init)(lock, 0);
+}
+
+static ETHR_INLINE int
+ethr_native_spinlock_destroy(ethr_native_spinlock_t *lock)
+{
+ return ETHR_NSPN_AOP__(read)(lock) == 0 ? 0 : EBUSY;
}
static ETHR_INLINE void
ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- ETHR_COMPILER_BARRIER;
- ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) == 1);
- ETHR_NATMC_FUNC__(set_relb)(lock, 0);
+ ETHR_ASSERT(ETHR_NSPN_AOP__(read)(lock) == 1);
+ ETHR_NSPN_AOP__(set_relb)(lock, 0);
}
static ETHR_INLINE void
ethr_native_spin_lock(ethr_native_spinlock_t *lock)
{
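+    /* Test-and-test-and-set: retry the locking cmpxchg only after a
+     * plain read has seen the lock free, which keeps the spin loop
+     * from generating bus traffic while the lock is held. */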
- while (ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, 1, 0) != 0) {
- while (ETHR_NATMC_FUNC__(read)(lock) != 0)
+ while (ETHR_NSPN_AOP__(cmpxchg_acqb)(lock, 1, 0) != 0) {
+ while (ETHR_NSPN_AOP__(read)(lock) != 0)
ETHR_SPIN_BODY;
}
ETHR_COMPILER_BARRIER;
}
-#endif
+#undef ETHR_NSPN_AOP__
-#undef ETHR_NATMC_FUNC__
+#endif
#endif
-#ifdef ETHR_HAVE_NATIVE_RWSPINLOCKS
-#define ETHR_HAVE_OPTIMIZED_RWSPINLOCKS 1
-#elif defined(ETHR_HAVE_NATIVE_ATOMICS)
+#if defined(ETHR_HAVE_NATIVE_RWSPINLOCKS)
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+
+static ETHR_INLINE int
+ethr_native_rwlock_destroy(ethr_native_rwlock_t *lock)
+{
+ return 0;
+}
+
+#endif
+
+#elif defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
/* --- Native rwspinlocks using native atomics ------------------------------ */
#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-#define ETHR_HAVE_OPTIMIZED_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "native-atomics"
-#if defined(ETHR_HAVE_NATIVE_ATOMIC32)
-typedef ethr_native_atomic32_t ethr_native_rwlock_t;
-# define ETHR_NAINT_T__ ethr_sint32_t
+typedef ethr_atomic32_t ethr_native_rwlock_t;
# define ETHR_WLOCK_FLAG__ (((ethr_sint32_t) 1) << 30)
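+/* Bit 30 marks a held (or pending) write lock; the bits below it
+ * hold the reader count. */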
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#elif defined(ETHR_HAVE_NATIVE_ATOMIC64)
-typedef ethr_native_atomic64_t ethr_native_rwlock_t;
-# define ETHR_NAINT_T__ ethr_sint64_t
-# define ETHR_WLOCK_FLAG__ (((ethr_sint64_t) 1) << 62)
-# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#else
-# error "Missing native atomic implementation"
-#endif
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
+#undef ETHR_NRWSPN_AOP__
+#define ETHR_NRWSPN_AOP__(X) ETHR_INLINE_ATMC32_FUNC_NAME_(ethr_atomic32_ ## X)
+
static ETHR_INLINE void
ethr_native_rwlock_init(ethr_native_rwlock_t *lock)
{
- ETHR_NATMC_FUNC__(init)(lock, 0);
+ ETHR_NRWSPN_AOP__(init)(lock, 0);
+}
+
+static ETHR_INLINE int
+ethr_native_rwlock_destroy(ethr_native_rwlock_t *lock)
+{
+ return ETHR_NRWSPN_AOP__(read)(lock) == 0 ? 0 : EBUSY;
}
static ETHR_INLINE void
ethr_native_read_unlock(ethr_native_rwlock_t *lock)
{
- ETHR_COMPILER_BARRIER;
-#ifdef DEBUG
- ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) >= 0);
-#endif
- ETHR_NATMC_FUNC__(dec_relb)(lock);
+ ETHR_ASSERT(ETHR_NRWSPN_AOP__(read)(lock) >= 0);
+ ETHR_NRWSPN_AOP__(dec_relb)(lock);
}
static ETHR_INLINE void
ethr_native_read_lock(ethr_native_rwlock_t *lock)
{
- ETHR_NAINT_T__ act, exp = 0;
+ ethr_sint32_t act, exp = 0;
while (1) {
- act = ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, exp+1, exp);
+ act = ETHR_NRWSPN_AOP__(cmpxchg_acqb)(lock, exp+1, exp);
if (act == exp)
break;
+ /* Wait for writer to leave */
while (act & ETHR_WLOCK_FLAG__) {
ETHR_SPIN_BODY;
- act = ETHR_NATMC_FUNC__(read)(lock);
+ act = ETHR_NRWSPN_AOP__(read)(lock);
}
exp = act;
}
@@ -173,36 +193,37 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
static ETHR_INLINE void
ethr_native_write_unlock(ethr_native_rwlock_t *lock)
{
- ETHR_COMPILER_BARRIER;
- ETHR_ASSERT(ETHR_NATMC_FUNC__(read)(lock) == ETHR_WLOCK_FLAG__);
- ETHR_NATMC_FUNC__(set_relb)(lock, 0);
+ ETHR_ASSERT(ETHR_NRWSPN_AOP__(read)(lock) == ETHR_WLOCK_FLAG__);
+ ETHR_NRWSPN_AOP__(set_relb)(lock, 0);
}
static ETHR_INLINE void
ethr_native_write_lock(ethr_native_rwlock_t *lock)
{
- ETHR_NAINT_T__ act, exp = 0;
+ ethr_sint32_t act, exp = 0;
while (1) {
- act = ETHR_NATMC_FUNC__(cmpxchg_acqb)(lock, exp|ETHR_WLOCK_FLAG__, exp);
+ act = ETHR_NRWSPN_AOP__(cmpxchg_acqb)(lock, exp|ETHR_WLOCK_FLAG__, exp);
if (act == exp)
break;
- ETHR_SPIN_BODY;
- exp = act & ~ETHR_WLOCK_FLAG__;
+ /* Wait for writer to leave */
+ while (act & ETHR_WLOCK_FLAG__) {
+ ETHR_SPIN_BODY;
+ act = ETHR_NRWSPN_AOP__(read)(lock);
+ }
+ exp = act;
}
act |= ETHR_WLOCK_FLAG__;
/* Wait for readers to leave */
while (act != ETHR_WLOCK_FLAG__) {
ETHR_SPIN_BODY;
- act = ETHR_NATMC_FUNC__(read_acqb)(lock);
+ act = ETHR_NRWSPN_AOP__(read_acqb)(lock);
}
ETHR_COMPILER_BARRIER;
}
-#endif
+#undef ETHR_NRWSPN_AOP__
-#undef ETHR_NAINT_T__
-#undef ETHR_NATMC_FUNC__
-#undef ETHR_WLOCK_FLAG__
+#endif
#endif
diff --git a/erts/include/internal/ethread.h b/erts/include/internal/ethread.h
index 4cd95faf6a..8ad0ded144 100644
--- a/erts/include/internal/ethread.h
+++ b/erts/include/internal/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2004-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2004-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -33,10 +33,6 @@
#include <stdlib.h>
#include "erl_errno.h"
-#undef ETHR_HAVE_OPTIMIZED_ATOMIC_OPS
-#undef ETHR_HAVE_OPTIMIZED_SPINLOCK
-#undef ETHR_HAVE_OPTIMIZED_RWSPINLOCK
-
#if defined(DEBUG)
# define ETHR_DEBUG
#endif
@@ -68,7 +64,7 @@
#endif
/* Assume 64-byte cache line size */
-#define ETHR_CACHE_LINE_SIZE ((ethr_uint_t) 64)
+#define ETHR_CACHE_LINE_SIZE 64
#define ETHR_CACHE_LINE_MASK (ETHR_CACHE_LINE_SIZE - 1)
#define ETHR_CACHE_LINE_ALIGN_SIZE(SZ) \
@@ -251,13 +247,90 @@ typedef ethr_sint64_t ethr_sint_t;
typedef ethr_uint64_t ethr_uint_t;
#endif
-/* __builtin_expect() is needed by both native atomics code
- * and the fallback code */
-#if !defined(__GNUC__) || (__GNUC__ < 2) || (__GNUC__ == 2 && __GNUC_MINOR__ < 96)
+#if defined(ETHR_SIZEOF___INT128_T) && ETHR_SIZEOF___INT128_T == 16
+#define ETHR_HAVE_INT128_T
+typedef __int128_t ethr_sint128_t;
+typedef __uint128_t ethr_uint128_t;
+#endif
+
+#define ETHR_FATAL_ERROR__(ERR) \
+ ethr_fatal_error__(__FILE__, __LINE__, __func__, (ERR))
+
+ETHR_PROTO_NORETURN__ ethr_fatal_error__(const char *file,
+ int line,
+ const char *func,
+ int err);
+
+#if !defined(__GNUC__)
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) 0
+#elif !defined(__GNUC_MINOR__)
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \
+ ((__GNUC__ << 24) >= (((MAJ) << 24) | ((MIN) << 12) | (PL)))
+#elif !defined(__GNUC_PATCHLEVEL__)
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \
+ (((__GNUC__ << 24) | (__GNUC_MINOR__ << 12)) >= (((MAJ) << 24) | ((MIN) << 12) | (PL)))
+#else
+# define ETHR_AT_LEAST_GCC_VSN__(MAJ, MIN, PL) \
+ (((__GNUC__ << 24) | (__GNUC_MINOR__ << 12) | __GNUC_PATCHLEVEL__) >= (((MAJ) << 24) | ((MIN) << 12) | (PL)))
+#endif
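+
+/*
+ * Example: with gcc 4.2.1 the packed version number is
+ * (4 << 24) | (2 << 12) | 1 = 0x04002001, so
+ * ETHR_AT_LEAST_GCC_VSN__(4, 1, 2) evaluates
+ * 0x04002001 >= 0x04001002, i.e. true.
+ */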
+
+#if !ETHR_AT_LEAST_GCC_VSN__(2, 96, 0)
#define __builtin_expect(X, Y) (X)
#endif
-/* For CPU-optimised atomics, spinlocks, and rwlocks. */
+#if ETHR_AT_LEAST_GCC_VSN__(3, 1, 1)
+# define ETHR_CHOOSE_EXPR __builtin_choose_expr
+#else
+# define ETHR_CHOOSE_EXPR(B, E1, E2) ((B) ? (E1) : (E2))
+#endif
+
+#if ((defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))) \
+ || (defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_AMD64))))
+# define ETHR_X86_RUNTIME_CONF__
+
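+/* Branch prediction hints: the runtime check is expected to find
+ * the double word cmpxchg instruction and SSE2 present on the vast
+ * majority of machines, so the "have" tests are marked likely and
+ * the "have no" tests unlikely. */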
+# define ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__ \
+ (__builtin_expect(ethr_runtime__.conf.have_dw_cmpxchg != 0, 1))
+# define ETHR_X86_RUNTIME_CONF_HAVE_NO_DW_CMPXCHG__ \
+ (__builtin_expect(ethr_runtime__.conf.have_dw_cmpxchg == 0, 0))
+# define ETHR_X86_RUNTIME_CONF_HAVE_SSE2__ \
+ (__builtin_expect(ethr_runtime__.conf.have_sse2 != 0, 1))
+# define ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__ \
+ (__builtin_expect(ethr_runtime__.conf.have_sse2 == 0, 0))
+#endif
+
+#if (defined(__GNUC__) \
+ && !defined(ETHR_PPC_HAVE_LWSYNC) \
+ && !defined(ETHR_PPC_HAVE_NO_LWSYNC) \
+ && (defined(__powerpc__) || defined(__ppc__) || defined(__powerpc64__)))
+# define ETHR_PPC_RUNTIME_CONF__
+
+# define ETHR_PPC_RUNTIME_CONF_HAVE_LWSYNC__ \
+ (__builtin_expect(ethr_runtime__.conf.have_lwsync != 0, 1))
+# define ETHR_PPC_RUNTIME_CONF_HAVE_NO_LWSYNC__ \
+ (__builtin_expect(ethr_runtime__.conf.have_lwsync == 0, 0))
+#endif
+
+typedef struct {
+#if defined(ETHR_X86_RUNTIME_CONF__)
+ int have_dw_cmpxchg;
+ int have_sse2;
+#endif
+#if defined(ETHR_PPC_RUNTIME_CONF__)
+ int have_lwsync;
+#endif
+ int dummy;
+} ethr_runtime_conf_t;
+
+
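+/* Padded out to more than a cache line so that the read-mostly
+ * configuration flags never share a cache line with unrelated
+ * mutable data (avoids false sharing). */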
+typedef union {
+ ethr_runtime_conf_t conf;
+ char pad__[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_runtime_conf_t))+ETHR_CACHE_LINE_SIZE];
+} ethr_runtime_t;
+
+
+extern ethr_runtime_t ethr_runtime__;
+
+/* For native CPU-optimised atomics, spinlocks, and rwlocks. */
#if !defined(ETHR_DISABLE_NATIVE_IMPLS)
# if defined(__GNUC__)
# if defined(ETHR_PREFER_GCC_NATIVE_IMPLS)
@@ -265,7 +338,7 @@ typedef ethr_uint64_t ethr_uint_t;
# elif defined(ETHR_PREFER_LIBATOMIC_OPS_NATIVE_IMPLS)
# include "libatomic_ops/ethread.h"
# endif
-# ifndef ETHR_HAVE_NATIVE_ATOMICS
+# if !defined(ETHR_HAVE_NATIVE_ATOMIC32) && !defined(ETHR_HAVE_NATIVE_ATOMIC64)
# if ETHR_SIZEOF_PTR == 4
# if defined(__i386__)
# include "i386/ethread.h"
@@ -283,8 +356,10 @@ typedef ethr_uint64_t ethr_uint_t;
# include "sparc64/ethread.h"
# endif
# endif
+#if 0
# include "gcc/ethread.h"
# include "libatomic_ops/ethread.h"
+#endif
# endif
# elif defined(ETHR_HAVE_LIBATOMIC_OPS)
# include "libatomic_ops/ethread.h"
@@ -293,10 +368,9 @@ typedef ethr_uint64_t ethr_uint_t;
# endif
#endif /* !ETHR_DISABLE_NATIVE_IMPLS */
+#include "ethr_atomics.h" /* The atomics API */
+
#if defined(__GNUC__)
-# ifndef ETHR_COMPILER_BARRIER
-# define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
-# endif
# ifndef ETHR_SPIN_BODY
# if defined(__i386__) || defined(__x86_64__)
# define ETHR_SPIN_BODY __asm__ __volatile__("rep;nop" : : : "memory")
@@ -309,11 +383,6 @@ typedef ethr_uint64_t ethr_uint_t;
# endif
# endif
#elif defined(ETHR_WIN32_THREADS)
-# ifndef ETHR_COMPILER_BARRIER
-# include <intrin.h>
-# pragma intrinsic(_ReadWriteBarrier)
-# define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
-# endif
# ifndef ETHR_SPIN_BODY
# define ETHR_SPIN_BODY do {YieldProcessor();ETHR_COMPILER_BARRIER;} while(0)
# endif
@@ -321,43 +390,6 @@ typedef ethr_uint64_t ethr_uint_t;
#define ETHR_YIELD_AFTER_BUSY_LOOPS 50
-#ifndef ETHR_HAVE_NATIVE_ATOMICS
-/*
- * ETHR_*MEMORY_BARRIER orders between locked and atomic accesses only,
- * i.e. when our lock based atomic fallback is used, a noop is sufficient.
- */
-#define ETHR_MEMORY_BARRIER do { } while (0)
-#define ETHR_WRITE_MEMORY_BARRIER do { } while (0)
-#define ETHR_READ_MEMORY_BARRIER do { } while (0)
-#define ETHR_READ_DEPEND_MEMORY_BARRIER do { } while (0)
-#endif
-
-#ifndef ETHR_WRITE_MEMORY_BARRIER
-# define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-# define ETHR_WRITE_MEMORY_BARRIER_IS_FULL
-#endif
-#ifndef ETHR_READ_MEMORY_BARRIER
-# define ETHR_READ_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-# define ETHR_READ_MEMORY_BARRIER_IS_FULL
-#endif
-#ifndef ETHR_READ_DEPEND_MEMORY_BARRIER
-# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER
-# define ETHR_READ_DEPEND_MEMORY_BARRIER_IS_COMPILER_BARRIER
-#endif
-
-#define ETHR_FATAL_ERROR__(ERR) \
- ethr_fatal_error__(__FILE__, __LINE__, __func__, (ERR))
-
-ETHR_PROTO_NORETURN__ ethr_fatal_error__(const char *file,
- int line,
- const char *func,
- int err);
-
-void ethr_compiler_barrier_fallback(void);
-#ifndef ETHR_COMPILER_BARRIER
-# define ETHR_COMPILER_BARRIER ethr_compiler_barrier_fallback()
-#endif
-
#ifndef ETHR_SPIN_BODY
# define ETHR_SPIN_BODY ETHR_COMPILER_BARRIER
#endif
@@ -460,8 +492,6 @@ void ethr_compiler_barrier(void);
#if defined(ETHR_HAVE_NATIVE_SPINLOCKS)
typedef ethr_native_spinlock_t ethr_spinlock_t;
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
-typedef ethr_opt_spinlock_t ethr_spinlock_t;
#elif defined(__WIN32__)
typedef CRITICAL_SECTION ethr_spinlock_t;
#else
@@ -483,8 +513,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spinlock_init)(ethr_spinlock_t *lock)
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
ethr_native_spinlock_init(lock);
return 0;
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- return ethr_opt_spinlock_init((ethr_opt_spinlock_t *) lock);
#elif defined(__WIN32__)
if (!InitializeCriticalSectionAndSpinCount((CRITICAL_SECTION *) lock, INT_MAX))
return ethr_win_get_errno__();
@@ -498,9 +526,7 @@ static ETHR_INLINE int
ETHR_INLINE_FUNC_NAME_(ethr_spinlock_destroy)(ethr_spinlock_t *lock)
{
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
- return 0;
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- return ethr_opt_spinlock_destroy((ethr_opt_spinlock_t *) lock);
+ return ethr_native_spinlock_destroy(lock);
#elif defined(__WIN32__)
DeleteCriticalSection((CRITICAL_SECTION *) lock);
return 0;
@@ -514,10 +540,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_unlock)(ethr_spinlock_t *lock)
{
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
ethr_native_spin_unlock(lock);
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- int err = ethr_opt_spin_unlock((ethr_opt_spinlock_t *) lock);
- if (err)
- ETHR_FATAL_ERROR__(err);
#elif defined(__WIN32__)
LeaveCriticalSection((CRITICAL_SECTION *) lock);
#else
@@ -532,10 +554,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_lock)(ethr_spinlock_t *lock)
{
#ifdef ETHR_HAVE_NATIVE_SPINLOCKS
ethr_native_spin_lock(lock);
-#elif defined(ETHR_HAVE_OPTIMIZED_SPINLOCKS)
- int err = ethr_opt_spin_lock((ethr_opt_spinlock_t *) lock);
- if (err)
- ETHR_FATAL_ERROR__(err);
#elif defined(__WIN32__)
EnterCriticalSection((CRITICAL_SECTION *) lock);
#else
@@ -547,8 +565,6 @@ ETHR_INLINE_FUNC_NAME_(ethr_spin_lock)(ethr_spinlock_t *lock)
#endif /* ETHR_TRY_INLINE_FUNCS */
-#include "ethr_atomics.h"
-
typedef struct ethr_ts_event_ ethr_ts_event; /* Needed by ethr_mutex.h */
#if defined(ETHR_WIN32_THREADS)
diff --git a/erts/include/internal/ethread_header_config.h.in b/erts/include/internal/ethread_header_config.h.in
index 1f82db8693..dd3599f86d 100644
--- a/erts/include/internal/ethread_header_config.h.in
+++ b/erts/include/internal/ethread_header_config.h.in
@@ -32,6 +32,9 @@
/* Define to the size of __int64 */
#undef ETHR_SIZEOF___INT64
+/* Define to the size of __int128_t */
+#undef ETHR_SIZEOF___INT128_T
+
/* Define if bigendian */
#undef ETHR_BIGENDIAN
@@ -69,8 +72,44 @@
/* Define if you have a linux futex implementation. */
#undef ETHR_HAVE_LINUX_FUTEX
-/* Define if you have gcc atomic operations */
-#undef ETHR_HAVE_GCC_ATOMIC_OPS
+/* Define if x86/x86_64 out-of-order instructions should be synchronized */
+#undef ETHR_X86_OUT_OF_ORDER
+
+/* Define if only running in Sparc TSO mode */
+#undef ETHR_SPARC_TSO
+
+/* Define if only running in Sparc PSO or TSO mode */
+#undef ETHR_SPARC_PSO
+
+/* Define if running in Sparc RMO, PSO, or TSO mode */
+#undef ETHR_SPARC_RMO
+
+/* Define if you have __sync_add_and_fetch() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_ADD_AND_FETCH32
+
+/* Define if you have __sync_add_and_fetch() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_ADD_AND_FETCH64
+
+/* Define if you have __sync_fetch_and_and() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_AND32
+
+/* Define if you have __sync_fetch_and_and() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_AND64
+
+/* Define if you have __sync_fetch_and_or() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_OR32
+
+/* Define if you have __sync_fetch_and_or() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_FETCH_AND_OR64
+
+/* Define if you have __sync_val_compare_and_swap() for 32-bit integers */
+#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32
+
+/* Define if you have __sync_val_compare_and_swap() for 64-bit integers */
+#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64
+
+/* Define if you have __sync_val_compare_and_swap() for 128-bit integers */
+#undef ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128
/* Define if you prefer gcc native ethread implementations */
#undef ETHR_PREFER_GCC_NATIVE_IMPLS
@@ -90,8 +129,14 @@
/* Define if sched_yield() returns an int. */
#undef ETHR_SCHED_YIELD_RET_INT
-/* Define if you want compatibility with x86 processors before pentium4. */
-#undef ETHR_PRE_PENTIUM4_COMPAT
+/* Define if you use a gcc that supports -msse2 and understands sse2-specific asm statements */
+#undef ETHR_GCC_HAVE_SSE2_ASM_SUPPORT
+
+/* Define if you use a gcc that supports the double word cmpxchg instruction */
+#undef ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT
+
+/* Define if you get a register shortage with cmpxchg8b and position independent code */
+#undef ETHR_CMPXCHG8B_REGISTER_SHORTAGE
/* Define if you have the pthread_rwlockattr_setkind_np() function. */
#undef ETHR_HAVE_PTHREAD_RWLOCKATTR_SETKIND_NP
@@ -115,23 +160,74 @@
/* Define to the size of AO_t if libatomic_ops is used */
#undef ETHR_SIZEOF_AO_T
+/* Define if you have _InterlockedAnd() */
+#undef ETHR_HAVE__INTERLOCKEDAND
+
+/* Define if you have _InterlockedAnd64() */
+#undef ETHR_HAVE__INTERLOCKEDAND64
+
+/* Define if you have _InterlockedCompareExchange() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE
+
/* Define if you have _InterlockedCompareExchange64() */
#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64
+/* Define if you have _InterlockedCompareExchange64_acq() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ
+
+/* Define if you have _InterlockedCompareExchange64_rel() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL
+
+/* Define if you have _InterlockedCompareExchange_acq() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ
+
+/* Define if you have _InterlockedCompareExchange_rel() */
+#undef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL
+
+/* Define if you have _InterlockedDecrement() */
+#undef ETHR_HAVE__INTERLOCKEDDECREMENT
+
/* Define if you have _InterlockedDecrement64() */
#undef ETHR_HAVE__INTERLOCKEDDECREMENT64
-/* Define if you have _InterlockedIncrement64() */
-#undef ETHR_HAVE__INTERLOCKEDINCREMENT64
+/* Define if you have _InterlockedDecrement64_rel() */
+#undef ETHR_HAVE__INTERLOCKEDDECREMENT64_REL
-/* Define if you have _InterlockedExchangeAdd64() */
-#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
+/* Define if you have _InterlockedDecrement_rel() */
+#undef ETHR_HAVE__INTERLOCKEDDECREMENT_REL
+
+/* Define if you have _InterlockedExchange() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGE
/* Define if you have _InterlockedExchange64() */
#undef ETHR_HAVE__INTERLOCKEDEXCHANGE64
-/* Define if you have _InterlockedAnd64() */
-#undef ETHR_HAVE__INTERLOCKEDAND64
+/* Define if you have _InterlockedExchangeAdd() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD
+
+/* Define if you have _InterlockedExchangeAdd64() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
+
+/* Define if you have _InterlockedExchangeAdd64_acq() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ
+
+/* Define if you have _InterlockedExchangeAdd_acq() */
+#undef ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ
+
+/* Define if you have _InterlockedIncrement() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT
+
+/* Define if you have _InterlockedIncrement64() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT64
+
+/* Define if you have _InterlockedIncrement64_acq() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ
+
+/* Define if you have _InterlockedIncrement_acq() */
+#undef ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ
+
+/* Define if you have _InterlockedOr() */
+#undef ETHR_HAVE__INTERLOCKEDOR
/* Define if you have _InterlockedOr64() */
#undef ETHR_HAVE__INTERLOCKEDOR64
diff --git a/erts/include/internal/gcc/ethr_atomic.h b/erts/include/internal/gcc/ethr_atomic.h
index 16935084b1..f598f8537b 100644
--- a/erts/include/internal/gcc/ethr_atomic.h
+++ b/erts/include/internal/gcc/ethr_atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,11 +25,15 @@
#undef ETHR_INCLUDE_ATOMIC_IMPL__
#if !defined(ETHR_GCC_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
#define ETHR_GCC_ATOMIC32_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32)
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+#endif
#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
#elif !defined(ETHR_GCC_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
#define ETHR_GCC_ATOMIC64_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64)
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+#endif
#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
@@ -45,58 +49,38 @@
# define ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ 1
#endif
-#if defined(__x86_64__) || (defined(__i386__) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 1
-#else
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 0
-#endif
-
-/*
- * According to the documentation this is what we want:
- * #define ETHR_MEMORY_BARRIER __sync_synchronize()
- * However, __sync_synchronize() is known to erroneously be
- * a noop on at least some platforms with some gcc versions.
- * This has suposedly been fixed in some gcc version, but we
- * don't know from which version. Therefore, we only use
- * it when it has been verified to work. Otherwise
- * we use a workaround.
- */
-#if defined(__mips__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 2))
-/* __sync_synchronize() has been verified to work here */
-#define ETHR_MEMORY_BARRIER __sync_synchronize()
-#define ETHR_READ_DEPEND_MEMORY_BARRIER __sync_synchronize()
-#elif defined(__x86_64__) || (defined(__i386__) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-/* Use fence instructions directly instead of workaround */
-#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory")
-#define ETHR_WRITE_MEMORY_BARRIER __asm__ __volatile__("sfence" : : : "memory")
-#define ETHR_READ_MEMORY_BARRIER __asm__ __volatile__("lfence" : : : "memory")
-#define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("" : : : "memory")
-#else
-/* Workaround */
-#define ETHR_MEMORY_BARRIER \
-do { \
- volatile ethr_sint32_t x___ = 0; \
- (void) __sync_val_compare_and_swap(&x___, (ethr_sint32_t) 0, (ethr_sint32_t) 1); \
-} while (0)
-#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-#endif
-
-#define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
-
#endif /* ETHR_GCC_ATOMIC_COMMON__ */
#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "gcc"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic32_t
#define ETHR_AINT_T__ ethr_sint32_t
+#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH32)
+# define ETHR_HAVE___SYNC_ADD_AND_FETCH
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND32)
+# define ETHR_HAVE___SYNC_FETCH_AND_AND
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR32)
+# define ETHR_HAVE___SYNC_FETCH_AND_OR
+#endif
#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATIVE_ATOMIC64_IMPL "gcc"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic64_t
#define ETHR_AINT_T__ ethr_sint64_t
+#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH64)
+# define ETHR_HAVE___SYNC_ADD_AND_FETCH
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND64)
+# define ETHR_HAVE___SYNC_FETCH_AND_AND
+#endif
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR64)
+# define ETHR_HAVE___SYNC_FETCH_AND_OR
+#endif
#else
#error "Unsupported integer size"
#endif
@@ -108,183 +92,120 @@ typedef struct {
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
-{
#if ETHR_READ_AND_SET_WITHOUT_SYNC_OP__
- var->counter = value;
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
#else
- /*
- * Unfortunately no __sync_store() or similar exist in the gcc atomic
- * op interface. We therefore have to simulate it this way...
- */
- ETHR_AINT_T__ act = 0, exp;
- do {
- exp = act;
- act = __sync_val_compare_and_swap(&var->counter, exp, value);
- } while (act != exp);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
#endif
-}
static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
{
- ETHR_NATMC_FUNC__(set)(var, value);
+ var->counter = value;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
-{
+#endif /* ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ */
+
#if ETHR_READ_AND_SET_WITHOUT_SYNC_OP__
- return var->counter;
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
#else
- /*
- * Unfortunately no __sync_fetch() or similar exist in the gcc atomic
- * op interface. We therefore have to simulate it this way...
- */
- return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 0);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
#endif
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- (void) __sync_add_and_fetch(&var->counter, incr);
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
- return __sync_add_and_fetch(&var->counter, incr);
+ return var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void) __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
-}
+#endif /* ETHR_READ_AND_SET_WITHOUT_SYNC_OP__ */
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void) __sync_sub_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
-}
+#if defined(ETHR_HAVE___SYNC_ADD_AND_FETCH)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- return __sync_sub_and_fetch(&var->counter, (ETHR_AINT_T__) 1);
+ return __sync_add_and_fetch(&var->counter, incr);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- return __sync_fetch_and_and(&var->counter, mask);
-}
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- return (ETHR_AINT_T__) __sync_fetch_and_or(&var->counter, mask);
-}
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_AND)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
-{
- return __sync_val_compare_and_swap(&var->counter, old, new);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
+ETHR_NATMC_FUNC__(and_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
- ETHR_AINT_T__ exp, act = 0;
- do {
- exp = act;
- act = __sync_val_compare_and_swap(&var->counter, exp, new);
- } while (act != exp);
- return act;
+ return __sync_fetch_and_and(&var->counter, mask);
}
-/*
- * Atomic ops with at least specified barriers.
- */
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_AINT_T__ val = var->counter;
- ETHR_COMPILER_BARRIER;
- return val;
-#else
- return __sync_add_and_fetch(&var->counter, (ETHR_AINT_T__) 0);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_COMPILER_BARRIER;
- var->counter = i;
+#if defined(ETHR_HAVE___SYNC_FETCH_AND_OR)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB 1
#else
- (void) ETHR_NATMC_FUNC__(xchg)(var, i);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(inc_return)(var);
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(or_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
- ETHR_NATMC_FUNC__(dec)(var);
+ return (ETHR_AINT_T__) __sync_fetch_and_or(&var->counter, mask);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(dec_return)(var);
-}
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
-{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ return __sync_val_compare_and_swap(&var->counter, old, new);
}
-#endif
+#endif /* ETHR_TRY_INLINE_FUNCS */
#undef ETHR_NATMC_FUNC__
#undef ETHR_ATMC_T__
#undef ETHR_AINT_T__
#undef ETHR_AINT_SUFFIX__
+#undef ETHR_HAVE___SYNC_ADD_AND_FETCH
+#undef ETHR_HAVE___SYNC_FETCH_AND_AND
+#undef ETHR_HAVE___SYNC_FETCH_AND_OR
#endif
diff --git a/erts/include/internal/gcc/ethr_dw_atomic.h b/erts/include/internal/gcc/ethr_dw_atomic.h
new file mode 100644
index 0000000000..6736f9c547
--- /dev/null
+++ b/erts/include/internal/gcc/ethr_dw_atomic.h
@@ -0,0 +1,115 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Native double word atomics using gcc's builtins
+ * Author: Rickard Green
+ */
+
+#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+#ifndef ETHR_GCC_DW_ATOMIC_H__
+# define ETHR_GCC_DW_ATOMIC_H__
+# if ((ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64)) \
+ || (ETHR_SIZEOF_PTR == 8 \
+ && defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP128) \
+ && defined(ETHR_HAVE_INT128_T)))
+# define ETHR_INCLUDE_DW_ATOMIC_IMPL__
+# endif
+#endif
+
+#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
+# define ETHR_NATIVE_DW_ATOMIC_IMPL "gcc"
+
+# if defined(__i386__) || defined(__x86_64__)
+/*
+ * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it will be used
+ * at runtime in order to determine whether the native or the fallback
+ * implementation should be used.
+ */
+# define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \
+ ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__
+# endif
+
+# if ETHR_SIZEOF_PTR == 4
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
+# elif ETHR_SIZEOF_PTR == 8
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint128_t
+# endif
+
+typedef volatile ETHR_NATIVE_SU_DW_SINT_T * ethr_native_dw_ptr_t;
+
+/*
+ * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
+ * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
+ * not common, and at least some glibc malloc implementations
+ * only guarantee 4 byte alignment in 32-bit mode.
+ *
+ * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
+ * aligned memory in 32-bit mode. A malloc implementation that does
+ * not adhere to these alignment requirements is seriously broken,
+ * and we won't bother trying to work around it.
+ *
+ * Since the memory alignment may be off by one word, we need to
+ * align at runtime, and therefore need one extra word allocated.
+ */
+#define ETHR_DW_NATMC_MEM__(VAR) \
+    (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
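+
+/*
+ * Worked example (illustrative): on a 32-bit system where malloc()
+ * only guarantees 4 byte alignment, &var->c[0] may end up at an
+ * address like 0x1004. Masking with ETHR_DW_NATMC_ALIGN_MASK__ (0x7)
+ * gives offset 4, so the macro selects &var->c[4] at 0x1008, which
+ * is 8 byte aligned; the extra word in the union below absorbs the
+ * offset.
+ */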
+typedef union {
+ volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
+ volatile ethr_sint_t sint[3];
+ volatile char c[ETHR_SIZEOF_PTR*3];
+} ethr_native_dw_atomic_t;
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+# ifdef ETHR_DEBUG
+# define ETHR_DW_DBG_ALIGNED__(PTR) \
+ ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
+# else
+# define ETHR_DW_DBG_ALIGNED__(PTR)
+# endif
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR
+static ETHR_INLINE ethr_sint_t *
+ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
+{
+ return (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
+}
+
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
+
+static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T
+ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ETHR_NATIVE_SU_DW_SINT_T new,
+ ETHR_NATIVE_SU_DW_SINT_T old)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return __sync_val_compare_and_swap(p, old, new);
+}
+
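+/*
+ * Sketch of a typical caller (illustrative only, not part of this
+ * header): a double word read-modify-write is built as a cmpxchg
+ * retry loop on top of the operation above, where initial_read and
+ * modify() are stand-ins for caller-specific code:
+ *
+ *     ETHR_NATIVE_SU_DW_SINT_T old, act = initial_read;
+ *     do {
+ *         old = act;
+ *         act = ethr_native_su_dw_atomic_cmpxchg_mb(var, modify(old), old);
+ *     } while (act != old);
+ */
+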
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#endif /* ETHR_GCC_DW_ATOMIC_H__ */
+
diff --git a/erts/include/internal/gcc/ethr_membar.h b/erts/include/internal/gcc/ethr_membar.h
new file mode 100644
index 0000000000..7d428fc68e
--- /dev/null
+++ b/erts/include/internal/gcc/ethr_membar.h
@@ -0,0 +1,73 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers when using gcc's builtins
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_GCC_MEMBAR_H__
+#define ETHR_GCC_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+/*
+ * According to the documentation __sync_synchronize() will
+ * issue a full memory barrier. However, __sync_synchronize()
+ * is known to erroneously be a noop on at least some
+ * platforms with some gcc versions. This has supposedly been
+ * fixed in some gcc version, but we don't know from which
+ * version. Therefore, we only use it when it has been
+ * verified to work. Otherwise we use the workaround
+ * below.
+ */
+
+#if defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP32)
+# define ETHR_MB_T__ ethr_sint32_t
+#elif defined(ETHR_HAVE___SYNC_VAL_COMPARE_AND_SWAP64)
+# define ETHR_MB_T__ ethr_sint64_t
+#else
+# error "No __sync_val_compare_and_swap"
+#endif
+#define ETHR_SYNC_SYNCHRONIZE_WORKAROUND__ \
+do { \
+ volatile ETHR_MB_T__ x___ = 0; \
+ (void) __sync_val_compare_and_swap(&x___, (ETHR_MB_T__) 0, (ETHR_MB_T__) 1); \
+} while (0)
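+
+/*
+ * The workaround relies on __sync_val_compare_and_swap() being
+ * specified to imply a full memory barrier: the compare-and-swap on
+ * the local dummy variable always succeeds (0 -> 1), its result is
+ * discarded, and only the barrier side effect remains.
+ */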
+
+#define ETHR_COMPILER_BARRIER __asm__ __volatile__("" : : : "memory")
+
+#if defined(__mips__) && ETHR_AT_LEAST_GCC_VSN__(4, 2, 0)
+# define ETHR_MEMBAR(B) __sync_synchronize()
+# define ETHR_READ_DEPEND_MEMORY_BARRIER __sync_synchronize()
+#elif ((defined(__powerpc__) || defined(__ppc__)) \
+ && ETHR_AT_LEAST_GCC_VSN__(4, 1, 2))
+# define ETHR_MEMBAR(B) __sync_synchronize()
+#else /* Use workaround */
+# define ETHR_MEMBAR(B) \
+ ETHR_SYNC_SYNCHRONIZE_WORKAROUND__
+# define ETHR_READ_DEPEND_MEMORY_BARRIER \
+ ETHR_SYNC_SYNCHRONIZE_WORKAROUND__
+#endif
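+
+/*
+ * Callers request ordering by OR-ing constraint flags, e.g.
+ * ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad). With gcc's builtins the
+ * argument is ignored and a full barrier is always issued, which is
+ * correct (if pessimistic) for every combination of flags.
+ */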
+
+
+#endif /* ETHR_GCC_MEMBAR_H__ */
diff --git a/erts/include/internal/gcc/ethread.h b/erts/include/internal/gcc/ethread.h
index 392a1aa2b2..fcfdc39441 100644
--- a/erts/include/internal/gcc/ethread.h
+++ b/erts/include/internal/gcc/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,16 +25,24 @@
#ifndef ETHREAD_GCC_H__
#define ETHREAD_GCC_H__
-#if !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_GCC_ATOMIC_OPS)
-#define ETHR_HAVE_NATIVE_ATOMICS 1
+#ifndef ETHR_MEMBAR
+# include "ethr_membar.h"
+#endif
+
+#if !defined(ETHR_HAVE_NATIVE_ATOMIC32)
+# define ETHR_ATOMIC_WANT_32BIT_IMPL__
+# include "ethr_atomic.h"
+#endif
-#define ETHR_ATOMIC_WANT_32BIT_IMPL__
-#include "ethr_atomic.h"
-#if ETHR_SIZEOF_PTR == 8
+#if ETHR_SIZEOF_PTR == 8 && !defined(ETHR_HAVE_NATIVE_ATOMIC64)
# define ETHR_ATOMIC_WANT_64BIT_IMPL__
# include "ethr_atomic.h"
#endif
+#if (!defined(ETHR_HAVE_NATIVE_DW_ATOMIC) \
+ && !(ETHR_SIZEOF_PTR == 4 && defined(ETHR_HAVE_NATIVE_ATOMIC64)) \
+ && !(ETHR_SIZEOF_PTR == 8 && defined(ETHR_HAVE_NATIVE_ATOMIC128)))
+# include "ethr_dw_atomic.h"
#endif
#endif
diff --git a/erts/include/internal/i386/atomic.h b/erts/include/internal/i386/atomic.h
index 4e402f261a..fc1b619935 100644
--- a/erts/include/internal/i386/atomic.h
+++ b/erts/include/internal/i386/atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,53 +25,42 @@
*/
#undef ETHR_INCLUDE_ATOMIC_IMPL__
-#if !defined(ETHR_X86_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
-#define ETHR_X86_ATOMIC32_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
-#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
-#elif !defined(ETHR_X86_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
-#define ETHR_X86_ATOMIC64_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
-#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
+#if !defined(ETHR_X86_ATOMIC32_H__) \
+ && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
+# define ETHR_X86_ATOMIC32_H__
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+# undef ETHR_ATOMIC_WANT_32BIT_IMPL__
+#elif !defined(ETHR_X86_ATOMIC64_H__) \
+ && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
+# define ETHR_X86_ATOMIC64_H__
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+# undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
#ifdef ETHR_INCLUDE_ATOMIC_IMPL__
-#ifndef ETHR_X86_ATOMIC_COMMON__
-#define ETHR_X86_ATOMIC_COMMON__
-
-#define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1
-
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
-#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory")
-#define ETHR_WRITE_MEMORY_BARRIER __asm__ __volatile__("sfence" : : : "memory")
-#define ETHR_READ_MEMORY_BARRIER __asm__ __volatile__("lfence" : : : "memory")
-#define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("" : : : "memory")
-#else
-#define ETHR_MEMORY_BARRIER \
-do { \
- volatile ethr_sint32_t x___ = 0; \
- __asm__ __volatile__("lock; incl %0" : "=m"(x___) : "m"(x___) : "memory"); \
-} while (0)
-#endif
-
-#endif /* ETHR_X86_ATOMIC_COMMON__ */
-
-#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
-#define ETHR_HAVE_NATIVE_ATOMIC32 1
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic32_t
-#define ETHR_AINT_T__ ethr_sint32_t
-#define ETHR_AINT_SUFFIX__ "l"
-#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
-#define ETHR_HAVE_NATIVE_ATOMIC64 1
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic64_t
-#define ETHR_AINT_T__ ethr_sint64_t
-#define ETHR_AINT_SUFFIX__ "q"
-#else
-#error "Unsupported integer size"
-#endif
+# ifndef ETHR_X86_ATOMIC_COMMON__
+# define ETHR_X86_ATOMIC_COMMON__
+# define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1
+# endif /* ETHR_X86_ATOMIC_COMMON__ */
+
+# if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_NATIVE_ATOMIC32 1
+# define ETHR_NATIVE_ATOMIC32_IMPL "ethread"
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic32_t
+# define ETHR_AINT_T__ ethr_sint32_t
+# define ETHR_AINT_SUFFIX__ "l"
+# elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
+# define ETHR_HAVE_NATIVE_ATOMIC64 1
+# define ETHR_NATIVE_ATOMIC64_IMPL "ethread"
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic64_t
+# define ETHR_AINT_T__ ethr_sint64_t
+# define ETHR_AINT_SUFFIX__ "q"
+# else
+# error "Unsupported integer size"
+# endif
/* An atomic is an aligned ETHR_AINT_T__ accessed via locked operations.
*/
@@ -81,87 +70,28 @@ typedef struct {
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- var->counter = i;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- var->counter = i;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
-{
- return var->counter;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- __asm__ __volatile__(
- "lock; add" ETHR_AINT_SUFFIX__ " %1, %0"
- : "=m"(var->counter)
- : "ir"(incr), "m"(var->counter));
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- __asm__ __volatile__(
- "lock; inc" ETHR_AINT_SUFFIX__ " %0"
- : "=m"(var->counter)
- : "m"(var->counter));
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- __asm__ __volatile__(
- "lock; dec" ETHR_AINT_SUFFIX__ " %0"
- : "=m"(var->counter)
- : "m"(var->counter));
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- ETHR_AINT_T__ tmp;
-
- tmp = incr;
- __asm__ __volatile__(
- "lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */
- : "=r"(tmp)
- : "m"(var->counter), "0"(tmp));
- /* now tmp is the atomic's previous value */
- return tmp + incr;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) 1);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) -1);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
{
__asm__ __volatile__(
"lock; cmpxchg" ETHR_AINT_SUFFIX__ " %2, %3"
@@ -171,110 +101,148 @@ ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
return old;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ tmp, old;
-
- tmp = var->counter;
- do {
- old = tmp;
- tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp & mask, tmp);
- } while (__builtin_expect(tmp != old, 0));
- /* now tmp is the atomic's previous value */
- return tmp;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ tmp, old;
-
- tmp = var->counter;
- do {
- old = tmp;
- tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp | mask, tmp);
- } while (__builtin_expect(tmp != old, 0));
- /* now tmp is the atomic's previous value */
- return tmp;
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
+ETHR_NATMC_FUNC__(xchg_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
{
ETHR_AINT_T__ tmp = val;
__asm__ __volatile__(
"xchg" ETHR_AINT_SUFFIX__ " %0, %1"
: "=r"(tmp)
- : "m"(var->counter), "0"(tmp));
+ : "m"(var->counter), "0"(tmp)
+ : "memory");
/* now tmp is the atomic's previous value */
return tmp;
}
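
A note on the instruction choice above (an x86 architectural fact, not something stated in the patch): xchg with a memory operand asserts the bus lock implicitly, so it acts as a full memory barrier even without a lock prefix, which is what makes the _mb suffix accurate here.
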
-/*
- * Atomic ops with at least specified barriers.
- */
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
- ETHR_AINT_T__ val;
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
- val = var->counter;
+ var->counter = i;
+}
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1
#else
- val = ETHR_NATMC_FUNC__(add_return)(var, 0);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1
#endif
- __asm__ __volatile__("" : : : "memory");
- return val;
-}
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
- __asm__ __volatile__("" : : : "memory");
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
- var->counter = i;
+#if !defined(__x86_64__)
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ (void) ETHR_NATMC_FUNC__(xchg_mb)(var, i);
+ else
+#endif /* !__x86_64__ */
+ {
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ var->counter = i;
+ }
+}
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB 1
#else
- (void) ETHR_NATMC_FUNC__(xchg)(var, i);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB 1
#endif
+
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ (void) ETHR_NATMC_FUNC__(xchg_mb)(var, i);
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
- __asm__ __volatile__("" : : : "memory");
- return res;
+ return var->counter;
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB 1
+#endif
+
static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(add_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- __asm__ __volatile__("" : : : "memory");
- ETHR_NATMC_FUNC__(dec)(var);
-}
+ __asm__ __volatile__(
+ "lock; add" ETHR_AINT_SUFFIX__ " %1, %0"
+ : "=m"(var->counter)
+ : "ir"(incr), "m"(var->counter)
+ : "memory");
+}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB 1
+#endif
+
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(inc_mb)(ETHR_ATMC_T__ *var)
{
- __asm__ __volatile__("" : : : "memory");
- return ETHR_NATMC_FUNC__(dec_return)(var);
+ __asm__ __volatile__(
+ "lock; inc" ETHR_AINT_SUFFIX__ " %0"
+ : "=m"(var->counter)
+ : "m"(var->counter)
+ : "memory");
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB 1
+#endif
+
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(dec_mb)(ETHR_ATMC_T__ *var)
{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ __asm__ __volatile__(
+ "lock; dec" ETHR_AINT_SUFFIX__ " %0"
+ : "=m"(var->counter)
+ : "m"(var->counter)
+ : "memory");
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
+ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ ETHR_AINT_T__ tmp;
+
+ tmp = incr;
+ __asm__ __volatile__(
+ "lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */
+ : "=r"(tmp)
+ : "m"(var->counter), "0"(tmp)
+ : "memory");
+ /* now tmp is the atomic's previous value */
+ return tmp + incr;
}
#endif /* ETHR_TRY_INLINE_FUNCS */
diff --git a/erts/include/internal/i386/ethr_dw_atomic.h b/erts/include/internal/i386/ethr_dw_atomic.h
new file mode 100644
index 0000000000..9fb89bbe43
--- /dev/null
+++ b/erts/include/internal/i386/ethr_dw_atomic.h
@@ -0,0 +1,278 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Native double word atomics for x86/x86_64
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_X86_DW_ATOMIC_H__
+#define ETHR_X86_DW_ATOMIC_H__
+
+#ifdef ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT
+
+#define ETHR_HAVE_NATIVE_DW_ATOMIC
+#define ETHR_NATIVE_DW_ATOMIC_IMPL "ethread"
+
+/*
+ * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it is evaluated
+ * at runtime in order to determine whether the native or the fallback
+ * implementation should be used.
+ */
+#define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \
+ ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__
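
A hedged sketch of how a consumer might use this runtime check; the dispatch function and the fallback name below are hypothetical, and the real consumer is the generated ethread atomics layer:

/* Hypothetical consumer of ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__. */
static int
dw_cmpxchg_dispatch__(ethr_native_dw_atomic_t *var,
                      ethr_sint_t *new, ethr_sint_t *xchg)
{
    if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
        /* cmpxchg8b/cmpxchg16b available: use the native op below */
        return ethr_native_dw_atomic_cmpxchg_mb(var, new, xchg);
    /* hypothetical lock-based fallback */
    return ethr_dw_atomic_fallback_cmpxchg__(var, new, xchg);
}
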
+
+#if ETHR_SIZEOF_PTR == 4
+typedef volatile ethr_sint64_t * ethr_native_dw_ptr_t;
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
+# define ETHR_DW_CMPXCHG_SFX__ "8b"
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
+#else
+#ifdef ETHR_HAVE_INT128_T
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint128_t
+typedef volatile ethr_sint128_t * ethr_native_dw_ptr_t;
+#else
+typedef struct {
+ ethr_sint64_t sint64[2];
+} ethr_native_sint128_t__;
+typedef volatile ethr_native_sint128_t__ * ethr_native_dw_ptr_t;
+#endif
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
+# define ETHR_DW_CMPXCHG_SFX__ "16b"
+#endif
+
+/*
+ * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
+ * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
+ * not common, and at least some glibc malloc implementations
+ * only guarantee 4 byte alignment in 32-bit mode.
+ *
+ * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
+ * aligned memory in 32-bit mode. A malloc implementation that does
+ * not adhere to these alignment requirements is seriously broken,
+ * and we won't bother trying to work around it.
+ *
+ * Since the memory alignment may be off by one word, we need to align
+ * at runtime; we therefore allocate one extra word.
+ */
+#define ETHR_DW_NATMC_MEM__(VAR) \
+    (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
+typedef union {
+#ifdef ETHR_NATIVE_SU_DW_SINT_T
+ volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
+#endif
+ volatile ethr_sint_t sint[3];
+ volatile char c[ETHR_SIZEOF_PTR*3];
+} ethr_native_dw_atomic_t;
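
A worked example of the alignment trick above, with an illustrative address:

/*
 * Suppose, on a 32-bit system, the union starts at 0x1004: 4-byte
 * aligned, but not the required 8. Then
 *     0x1004 & ETHR_DW_NATMC_ALIGN_MASK__ == 0x1004 & 0x7 == 4,
 * so ETHR_DW_NATMC_MEM__ yields &c[4] == 0x1008, which is 8-byte
 * aligned; the extra third word provides the needed slack. If the
 * union already starts 8-byte aligned, the mask is 0 and &c[0] is
 * used as-is.
 */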
+
+
+#if (defined(ETHR_TRY_INLINE_FUNCS) \
+ || defined(ETHR_ATOMIC_IMPL__) \
+ || defined(ETHR_X86_SSE2_ASM_C__)) \
+ && ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT)
+ethr_sint64_t
+ethr_sse2_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var);
+void
+ethr_sse2_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t val);
+#endif
+
+#if (defined(ETHR_TRY_INLINE_FUNCS) \
+ || defined(ETHR_ATOMIC_IMPL__) \
+ || defined(ETHR_X86_SSE2_ASM_C__))
+# ifdef ETHR_DEBUG
+# define ETHR_DW_DBG_ALIGNED__(PTR) \
+ ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
+# else
+# define ETHR_DW_DBG_ALIGNED__(PTR)
+# endif
+#endif
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR
+static ETHR_INLINE ethr_sint_t *
+ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
+{
+ return (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
+}
+
+#if ETHR_SIZEOF_PTR == 4 && defined(__PIC__) && __PIC__
+/*
+ * When position independent code is used in 32-bit mode, the EBX register
+ * is used for storage of global offset table address, and we may not
+ * use it as input or output in an asm. We need to save and restore the
+ * EBX register explicitly (for some reason gcc doesn't provide this
+ * service to us).
+ */
+# define ETHR_NO_CLOBBER_EBX__ 1
+#else
+# define ETHR_NO_CLOBBER_EBX__ 0
+#endif
+
+#if ETHR_NO_CLOBBER_EBX__ && !defined(ETHR_CMPXCHG8B_REGISTER_SHORTAGE)
+/* When no optimization is on, we'll run into a register shortage */
+# if defined(ETHR_DEBUG) || defined(DEBUG) || defined(VALGRIND) \
+ || defined(GCOV) || defined(PURIFY) || defined(PURECOV)
+# define ETHR_CMPXCHG8B_REGISTER_SHORTAGE 1
+# else
+# define ETHR_CMPXCHG8B_REGISTER_SHORTAGE 0
+# endif
+#endif
+
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
+
+static ETHR_INLINE int
+ethr_native_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ethr_sint_t *new,
+ ethr_sint_t *xchg)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ char xchgd;
+
+ ETHR_DW_DBG_ALIGNED__(p);
+
+ __asm__ __volatile__(
+#if ETHR_NO_CLOBBER_EBX__
+ "pushl %%ebx\n\t"
+# if ETHR_CMPXCHG8B_REGISTER_SHORTAGE
+ "movl (%7), %%ebx\n\t"
+ "movl 4(%7), %%ecx\n\t"
+# else
+ "movl %8, %%ebx\n\t"
+# endif
+#endif
+ "lock; cmpxchg" ETHR_DW_CMPXCHG_SFX__ " %0\n\t"
+ "setz %3\n\t"
+#if ETHR_NO_CLOBBER_EBX__
+ "popl %%ebx\n\t"
+#endif
+ : "=m"(*p), "=d"(xchg[1]), "=a"(xchg[0]), "=c"(xchgd)
+ : "m"(*p), "1"(xchg[1]), "2"(xchg[0]),
+#if ETHR_NO_CLOBBER_EBX__
+# if ETHR_CMPXCHG8B_REGISTER_SHORTAGE
+ "3"(new)
+# else
+ "3"(new[1]),
+ "r"(new[0])
+# endif
+#else
+ "3"(new[1]),
+ "b"(new[0])
+#endif
+ : "cc", "memory");
+
+ return (int) xchgd;
+}
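
A minimal usage sketch for the primitive above (the caller below is hypothetical; the loop shape matches the fallback paths later in this file). On failure the asm writes the currently stored value back into xchg, so the retry recomputes from what was observed:

/* Hypothetical caller: atomically increment the high word of a pair. */
static void
dw_inc_hi__(ethr_native_dw_atomic_t *var)
{
    ethr_sint_t xchg[2] = {0, 0}; /* guess; refreshed on each failure */
    ethr_sint_t new[2];
    do {
        new[0] = xchg[0];
        new[1] = xchg[1] + 1;
    } while (!ethr_native_dw_atomic_cmpxchg_mb(var, new, xchg));
}
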
+
+#undef ETHR_NO_CLOBBER_EBX__
+
+#if ETHR_SIZEOF_PTR == 4 && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT)
+
+typedef union {
+ ethr_sint64_t sint64;
+ ethr_sint_t sint[2];
+} ethr_dw_atomic_no_sse2_convert_t;
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ
+
+static ETHR_INLINE ethr_sint64_t
+ethr_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var)
+{
+ if (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__)
+ return ethr_sse2_native_su_dw_atomic_read(var);
+ else {
+ ethr_sint_t new[2];
+ ethr_dw_atomic_no_sse2_convert_t xchg;
+ new[0] = new[1] = xchg.sint[0] = xchg.sint[1] = 0x83838383;
+ (void) ethr_native_dw_atomic_cmpxchg_mb(var, new, xchg.sint);
+ return xchg.sint64;
+ }
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET
+
+static ETHR_INLINE void
+ethr_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t val)
+{
+ if (ETHR_X86_RUNTIME_CONF_HAVE_SSE2__)
+ ethr_sse2_native_su_dw_atomic_set(var, val);
+ else {
+ ethr_sint_t xchg[2] = {0, 0};
+ ethr_dw_atomic_no_sse2_convert_t new;
+ new.sint64 = val;
+ while (!ethr_native_dw_atomic_cmpxchg_mb(var, new.sint, xchg));
+ }
+}
+
+#endif /* ETHR_SIZEOF_PTR == 4 */
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#if defined(ETHR_X86_SSE2_ASM_C__) \
+ && ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_GCC_HAVE_SSE2_ASM_SUPPORT)
+
+/*
+ * 8-byte aligned loads and stores of 64-bit values are atomic from
+ * pentium and forward. An ordinary volatile load or store in 32-bit
+ * mode generates two 32-bit operations (at least with gcc-4.1.2 using
+ * -msse2). In order to guarantee one 64-bit load/store operation
+ * from/to memory we load/store via an xmm register using movq.
+ *
+ * Load/store can be achieved using cmpxchg8b; however, using movq is
+ * much faster. Unfortunately, we cannot do the same thing in 64-bit
+ * mode; instead, we have to do loads and stores via cmpxchg16b.
+ *
+ * We do not inline these, but instead compile them into a separate
+ * object file using -msse2, since we don't want to use -msse2 for
+ * the whole system. If we detect sse2 support (pentium4 and forward)
+ * at runtime, we use them; otherwise, we fall back to using cmpxchg8b
+ * for loads and stores. This way the binary can be moved between
+ * processors with and without sse2 support.
+ */
+
+ethr_sint64_t
+ethr_sse2_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ethr_sint64_t val;
+ ETHR_DW_DBG_ALIGNED__(p);
+ __asm__ __volatile__("movq %1, %0\n\t" : "=x"(val) : "m"(*p) : "memory");
+ return val;
+}
+
+void
+ethr_sse2_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t val)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ __asm__ __volatile__("movq %1, %0\n\t" : "=m"(*p) : "x"(val) : "memory");
+}
+
+#endif /* ETHR_X86_SSE2_ASM_C__ */
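
A plausible build-rule sketch for the separately compiled object mentioned in the comment above; the file name and exact flags are assumptions, not taken from this patch:

/*
 *   cc -msse2 -DETHR_X86_SSE2_ASM_C__ -c ethr_x86_sse2_asm.c
 *
 * That is, one small translation unit defines ETHR_X86_SSE2_ASM_C__
 * before including this header, so only the two movq-based functions
 * above are compiled with -msse2 while the rest of the system is not.
 */
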
+
+#endif /* ETHR_GCC_HAVE_DW_CMPXCHG_ASM_SUPPORT */
+
+#endif /* ETHR_X86_DW_ATOMIC_H__ */
+
diff --git a/erts/include/internal/i386/ethr_membar.h b/erts/include/internal/i386/ethr_membar.h
new file mode 100644
index 0000000000..92d9de7f3f
--- /dev/null
+++ b/erts/include/internal/i386/ethr_membar.h
@@ -0,0 +1,114 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for x86/x86-64
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_X86_MEMBAR_H__
+#define ETHR_X86_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#define ETHR_NO_SSE2_MEMORY_BARRIER__ \
+do { \
+ volatile ethr_sint32_t x__ = 0; \
+ __asm__ __volatile__ ("lock; orl $0x0, %0\n\t" \
+ : "=m"(x__) \
+ : "m"(x__) \
+ : "memory"); \
+} while (0)
+
+static __inline__ void
+ethr_cfence__(void)
+{
+ __asm__ __volatile__ ("" : : : "memory");
+}
+
+static __inline__ void
+ethr_mfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MEMORY_BARRIER__;
+ else
+#endif
+ __asm__ __volatile__ ("mfence\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_sfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MEMORY_BARRIER__;
+ else
+#endif
+ __asm__ __volatile__ ("sfence\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_lfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MEMORY_BARRIER__;
+ else
+#endif
+ __asm__ __volatile__ ("lfence\n\t" : : : "memory");
+}
+
+#define ETHR_X86_OUT_OF_ORDER_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \
+ ethr_sfence__(), \
+ ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \
+ ethr_lfence__(), \
+ ethr_mfence__()))
+
+#ifdef ETHR_X86_OUT_OF_ORDER
+
+#define ETHR_MEMBAR(B) \
+ ETHR_X86_OUT_OF_ORDER_MEMBAR((B))
+
+#else /* !ETHR_X86_OUT_OF_ORDER (the default) */
+
+/*
+ * We assume that only stores before loads may be reordered. That is,
+ * we assume that *no* instructions like these are used:
+ * - CLFLUSH,
+ * - streaming stores executed with non-temporal move,
+ * - string operations, or
+ * - other instructions which aren't LoadLoad, LoadStore, and StoreStore
+ * ordered by themselves
+ * If such instructions are used, either insert memory barriers
+ * using ETHR_X86_OUT_OF_ORDER_MEMBAR() at appropriate places, or
+ * define ETHR_X86_OUT_OF_ORDER. For more info see Intel 64 and IA-32
+ * Architectures Software Developer's Manual; Vol 3A; Chapter 8.2.2.
+ */
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_mfence__(), ethr_cfence__())
+
+#endif /* !ETHR_X86_OUT_OF_ORDER */
+
+#endif /* ETHR_X86_MEMBAR_H__ */
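
A minimal usage sketch for ETHR_MEMBAR (flag names are hypothetical): under the default assumptions above, only a request containing StoreLoad emits a real fence (mfence, or a locked instruction on pre-SSE2 IA-32); every other request compiles to a plain compiler barrier.

static volatile int flag0, flag1;

static int
try_enter0__(void)
{
    flag0 = 1;                     /* publish my intent */
    ETHR_MEMBAR(ETHR_StoreLoad);   /* order the store above vs. load below */
    return flag1 == 0;             /* Dekker-style check of the peer flag */
}
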
diff --git a/erts/include/internal/i386/ethread.h b/erts/include/internal/i386/ethread.h
index b5a17caefb..80e4dc7b99 100644
--- a/erts/include/internal/i386/ethread.h
+++ b/erts/include/internal/i386/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,17 +24,15 @@
#ifndef ETHREAD_I386_ETHREAD_H
#define ETHREAD_I386_ETHREAD_H
+#include "ethr_membar.h"
#define ETHR_ATOMIC_WANT_32BIT_IMPL__
#include "atomic.h"
#if ETHR_SIZEOF_PTR == 8
# define ETHR_ATOMIC_WANT_64BIT_IMPL__
# include "atomic.h"
#endif
+#include "ethr_dw_atomic.h"
#include "spinlock.h"
#include "rwlock.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-
#endif /* ETHREAD_I386_ETHREAD_H */
diff --git a/erts/include/internal/i386/rwlock.h b/erts/include/internal/i386/rwlock.h
index be47f459ce..1a8cd7da0c 100644
--- a/erts/include/internal/i386/rwlock.h
+++ b/erts/include/internal/i386/rwlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -26,6 +26,9 @@
#ifndef ETHREAD_I386_RWLOCK_H
#define ETHREAD_I386_RWLOCK_H
+#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread"
+
/* XXX: describe the algorithm */
typedef struct {
volatile int lock;
diff --git a/erts/include/internal/i386/spinlock.h b/erts/include/internal/i386/spinlock.h
index 0325324895..a84fba91b1 100644
--- a/erts/include/internal/i386/spinlock.h
+++ b/erts/include/internal/i386/spinlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
#ifndef ETHREAD_I386_SPINLOCK_H
#define ETHREAD_I386_SPINLOCK_H
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+#define ETHR_NATIVE_SPINLOCK_IMPL "ethread"
+
/* A spinlock is the low byte of an aligned 32-bit integer.
* A non-zero value means that the lock is locked.
*/
@@ -46,16 +49,20 @@ ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
* On i386 this needs to be a locked operation
* to avoid Pentium Pro errata 66 and 92.
*/
-#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
- __asm__ __volatile__("" : : : "memory");
- *(unsigned char*)&lock->lock = 0;
-#else
- char tmp = 0;
- __asm__ __volatile__(
- "xchgb %b0, %1"
- : "=q"(tmp), "=m"(lock->lock)
- : "0"(tmp) : "memory");
+#if !defined(__x86_64__)
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__) {
+ char tmp = 0;
+ __asm__ __volatile__(
+ "xchgb %b0, %1"
+ : "=q"(tmp), "=m"(lock->lock)
+ : "0"(tmp) : "memory");
+ }
+ else
#endif
+ {
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ *(unsigned char*)&lock->lock = 0;
+ }
}
static ETHR_INLINE int
diff --git a/erts/include/internal/libatomic_ops/ethr_atomic.h b/erts/include/internal/libatomic_ops/ethr_atomic.h
index 93bc06036f..fb1288c330 100644
--- a/erts/include/internal/libatomic_ops/ethr_atomic.h
+++ b/erts/include/internal/libatomic_ops/ethr_atomic.h
@@ -25,16 +25,6 @@
#ifndef ETHR_LIBATOMIC_OPS_ATOMIC_H__
#define ETHR_LIBATOMIC_OPS_ATOMIC_H__
-#if !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_LIBATOMIC_OPS)
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-
-#if (defined(__i386__) && !defined(ETHR_PRE_PENTIUM4_COMPAT)) \
- || defined(__x86_64__)
-#define AO_USE_PENTIUM4_INSTRS
-#endif
-
-#include "atomic_ops.h"
-
/*
* libatomic_ops can be downloaded from:
* http://www.hpl.hp.com/research/linux/atomic_ops/
@@ -46,21 +36,18 @@
* - AO_store()
* - AO_compare_and_swap()
*
- * The `AO_t' type also have to be at least as large as the `void *' type.
*/
-#if ETHR_SIZEOF_AO_T < ETHR_SIZEOF_PTR
-#error The AO_t type is too small
-#endif
-
#if ETHR_SIZEOF_AO_T == 4
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "libatomic_ops"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic32_t
#define ETHR_AINT_T__ ethr_sint32_t
#define ETHR_AINT_SUFFIX__ "l"
#elif ETHR_SIZEOF_AO_T == 8
#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATIVE_ATOMIC64_IMPL "libatomic_ops"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic64_t
#define ETHR_AINT_T__ ethr_sint64_t
@@ -69,61 +56,29 @@
#error "Unsupported integer size"
#endif
-#if ETHR_SIZEOF_AO_T == 8
-typedef union {
- volatile AO_t counter;
- ethr_sint32_t sint32[2];
-} ETHR_ATMC_T__;
-#else
typedef struct {
volatile AO_t counter;
} ETHR_ATMC_T__;
-#endif
-#define ETHR_MEMORY_BARRIER AO_nop_full()
-#ifdef AO_HAVE_nop_write
-# define ETHR_WRITE_MEMORY_BARRIER AO_nop_write()
-#else
-# define ETHR_WRITE_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-#endif
-#ifdef AO_HAVE_nop_read
-# define ETHR_READ_MEMORY_BARRIER AO_nop_read()
-#else
-# define ETHR_READ_MEMORY_BARRIER ETHR_MEMORY_BARRIER
-#endif
-#ifdef AO_NO_DD_ORDERING
-# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_READ_MEMORY_BARRIER
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
#else
-# define ETHR_READ_DEPEND_MEMORY_BARRIER AO_compiler_barrier()
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
#endif
-#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-#if ETHR_SIZEOF_AO_T == 8
-/*
- * We also need to provide an ethr_native_atomic32_addr(), since
- * this 64-bit implementation will be used implementing 32-bit
- * native atomics.
- */
-
-static ETHR_INLINE ethr_sint32_t *
-ethr_native_atomic32_addr(ETHR_ATMC_T__ *var)
-{
- ETHR_ASSERT(((void *) &var->sint32[0]) == ((void *) &var->counter));
-#ifdef ETHR_BIGENDIAN
- return &var->sint32[1];
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
#else
- return &var->sint32[0];
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
#endif
-}
-
-#endif /* ETHR_SIZEOF_AO_T == 8 */
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
@@ -131,197 +86,198 @@ ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
AO_store(&var->counter, (AO_t) value);
}
+#ifdef AO_HAVE_store_release
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1
+#endif
+
static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
+ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
{
- ETHR_NATMC_FUNC__(set)(var, value);
+ AO_store_release(&var->counter, (AO_t) value);
}
+#endif
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__) AO_load(&var->counter);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
-#ifdef AO_HAVE_fetch_and_add_full
- return ((ETHR_AINT_T__) AO_fetch_and_add_full(&var->counter, (AO_t) incr)) + incr;
+#ifdef AO_HAVE_load_acquire
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB 1
#else
- while (1) {
- AO_t exp = AO_load(&var->counter);
- AO_t new = exp + (AO_t) incr;
- if (AO_compare_and_swap_full(&var->counter, exp, new))
- return (ETHR_AINT_T__) new;
- }
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB 1
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
{
- (void) ETHR_NATMC_FUNC__(add_return)(var, incr);
+ return (ETHR_AINT_T__) AO_load_acquire(&var->counter);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
-#ifdef AO_HAVE_fetch_and_add1_full
- return ((ETHR_AINT_T__) AO_fetch_and_add1_full(&var->counter)) + 1;
-#else
- return ETHR_NATMC_FUNC__(add_return)(var, 1);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_NATMC_FUNC__(inc_return)(var);
-}
+#ifdef AO_HAVE_fetch_and_add
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
-{
-#ifdef AO_HAVE_fetch_and_sub1_full
- return ((ETHR_AINT_T__) AO_fetch_and_sub1_full(&var->counter)) - 1;
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1
#else
- return ETHR_NATMC_FUNC__(add_return)(var, -1);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN 1
#endif
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_NATMC_FUNC__(dec_return)(var);
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
+ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- while (1) {
- AO_t exp = AO_load(&var->counter);
- AO_t new = exp & ((AO_t) mask);
- if (AO_compare_and_swap_full(&var->counter, exp, new))
- return (ETHR_AINT_T__) exp;
- }
+ return ((ETHR_AINT_T__) AO_fetch_and_add(&var->counter, (AO_t) incr)) + incr;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- while (1) {
- AO_t exp = AO_load(&var->counter);
- AO_t new = exp | ((AO_t) mask);
- if (AO_compare_and_swap_full(&var->counter, exp, new))
- return (ETHR_AINT_T__) exp;
- }
-}
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ exp)
-{
- ETHR_AINT_T__ act;
- do {
- if (AO_compare_and_swap_full(&var->counter, (AO_t) exp, (AO_t) new))
- return exp;
- act = (ETHR_AINT_T__) AO_load(&var->counter);
- } while (act == exp);
- return act;
-}
+#ifdef AO_HAVE_fetch_and_add1
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
+ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
{
- while (1) {
- AO_t exp = AO_load(&var->counter);
- if (AO_compare_and_swap_full(&var->counter, exp, (AO_t) new))
- return (ETHR_AINT_T__) exp;
- }
+ return ((ETHR_AINT_T__) AO_fetch_and_add1(&var->counter)) + 1;
}
-/*
- * Atomic ops with at least specified barriers.
- */
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
-#ifdef AO_HAVE_load_acquire
- return (ETHR_AINT_T__) AO_load_acquire(&var->counter);
+#ifdef AO_HAVE_fetch_and_add1_acquire
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1
#else
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var);
- ETHR_MEMORY_BARRIER;
- return res;
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_fetch_and_add1_acquire
return ((ETHR_AINT_T__) AO_fetch_and_add1_acquire(&var->counter)) + 1;
+}
+
+#endif
+
+#ifdef AO_HAVE_fetch_and_sub1
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN 1
#else
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(add_return)(var, 1);
- return res;
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN 1
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_store_release
- AO_store_release(&var->counter, (AO_t) value);
+ return ((ETHR_AINT_T__) AO_fetch_and_sub1(&var->counter)) - 1;
+}
+
+#endif
+
+#ifdef AO_HAVE_fetch_and_sub1_release
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB 1
#else
- ETHR_MEMORY_BARRIER;
- ETHR_NATMC_FUNC__(set)(var, value);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_fetch_and_sub1_release
return ((ETHR_AINT_T__) AO_fetch_and_sub1_release(&var->counter)) - 1;
+}
+
+#endif
+
+#ifdef AO_HAVE_compare_and_swap
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1
#else
- return ETHR_NATMC_FUNC__(dec_return)(var);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG 1
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ exp)
{
- (void) ETHR_NATMC_FUNC__(dec_return_relb)(var);
+ ETHR_AINT_T__ act;
+ do {
+ if (AO_compare_and_swap(&var->counter, (AO_t) exp, (AO_t) new))
+ return exp;
+ act = (ETHR_AINT_T__) AO_load(&var->counter);
+ } while (act == exp);
+ return act;
}
+#endif
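
The loop above adapts libatomic_ops' boolean CAS to the return-old-value convention used by the ethread API. A hypothetical caller, assuming AO_HAVE_compare_and_swap and a 32-bit AO_t:

/* Hypothetical: increment, but never above a given ceiling. */
static ethr_sint32_t
sat_inc__(ethr_native_atomic32_t *var, ethr_sint32_t max)
{
    ethr_sint32_t exp = ethr_native_atomic32_read(var);
    while (exp < max) {
        ethr_sint32_t act = ethr_native_atomic32_cmpxchg(var, exp + 1, exp);
        if (act == exp)
            break;      /* CAS succeeded; exp is the previous value */
        exp = act;      /* lost the race; retry from the observed value */
    }
    return exp;
}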
+
+#ifdef AO_HAVE_compare_and_swap_acquire
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
ETHR_AINT_T__ new,
ETHR_AINT_T__ exp)
{
-#ifdef AO_HAVE_compare_and_swap_acquire
ETHR_AINT_T__ act;
do {
if (AO_compare_and_swap_acquire(&var->counter, (AO_t) exp, (AO_t) new))
return exp;
+#ifdef AO_HAVE_load_acquire
+ act = (ETHR_AINT_T__) AO_load_acquire(&var->counter);
+#else
act = (ETHR_AINT_T__) AO_load(&var->counter);
+#endif
} while (act == exp);
+#ifndef AO_HAVE_load_acquire
AO_nop_full();
+#endif
return act;
+}
+
+#endif
+
+#ifdef AO_HAVE_compare_and_swap_release
+
+#if ETHR_SIZEOF_AO_T == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB 1
#else
- ETHR_AINT_T__ act = ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
- return act;
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
ETHR_AINT_T__ new,
ETHR_AINT_T__ exp)
{
-#ifdef AO_HAVE_compare_and_swap_release
ETHR_AINT_T__ act;
do {
if (AO_compare_and_swap_release(&var->counter, (AO_t) exp, (AO_t) new))
@@ -329,11 +285,9 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
act = (ETHR_AINT_T__) AO_load(&var->counter);
} while (act == exp);
return act;
-#else
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
-#endif
}
+#endif
#endif /* ETHR_TRY_INLINE_FUNCS */
@@ -341,6 +295,4 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
#undef ETHR_ATMC_T__
#undef ETHR_AINT_T__
-#endif /* !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_LIBATOMIC_OPS) */
-
#endif /* ETHR_LIBATOMIC_OPS_ATOMIC_H__ */
diff --git a/erts/include/internal/libatomic_ops/ethr_membar.h b/erts/include/internal/libatomic_ops/ethr_membar.h
new file mode 100644
index 0000000000..b8530a0094
--- /dev/null
+++ b/erts/include/internal/libatomic_ops/ethr_membar.h
@@ -0,0 +1,75 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers when using libatomic_ops
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_LIBATOMIC_OPS_MEMBAR_H__
+#define ETHR_LIBATOMIC_OPS_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#ifndef AO_HAVE_nop_full
+# error "No AO_nop_full()"
+#endif
+
+static __inline__ void
+ethr_mb__(void)
+{
+ AO_nop_full();
+}
+
+static __inline__ void
+ethr_rb__(void)
+{
+#ifdef AO_HAVE_nop_read
+ AO_nop_read();
+#else
+ AO_nop_full();
+#endif
+}
+
+static __inline__ void
+ethr_wb__(void)
+{
+#ifdef AO_HAVE_nop_write
+ AO_nop_write();
+#else
+ AO_nop_full();
+#endif
+}
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \
+ ethr_wb__(), \
+ ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \
+ ethr_rb__(), \
+ ethr_mb__()))
+
+#define ETHR_COMPILER_BARRIER AO_compiler_barrier()
+#ifdef AO_NO_DD_ORDERING
+# define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_READ_MEMORY_BARRIER
+#endif
+
+#endif /* ETHR_LIBATOMIC_OPS_MEMBAR_H__ */
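
Illustrative expansions of the selection macro above, assuming AO_nop_read and AO_nop_write are both available:

/*
 * ETHR_MEMBAR(ETHR_StoreStore)               -> AO_nop_write()
 * ETHR_MEMBAR(ETHR_LoadLoad)                 -> AO_nop_read()
 * ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreStore) -> AO_nop_full()
 *
 * Note that only an exact StoreStore or LoadLoad request takes the
 * cheaper path; any combination falls through to the full barrier.
 */
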
diff --git a/erts/include/internal/libatomic_ops/ethread.h b/erts/include/internal/libatomic_ops/ethread.h
index ee73ba73bc..e1fdd588bb 100644
--- a/erts/include/internal/libatomic_ops/ethread.h
+++ b/erts/include/internal/libatomic_ops/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,6 +25,18 @@
#ifndef ETHREAD_LIBATOMIC_OPS_H__
#define ETHREAD_LIBATOMIC_OPS_H__
+#if (defined(ETHR_HAVE_LIBATOMIC_OPS) \
+ && ((ETHR_SIZEOF_AO_T == 4 && !defined(ETHR_HAVE_NATIVE_ATOMIC32)) \
+ || (ETHR_SIZEOF_AO_T == 8 && !defined(ETHR_HAVE_NATIVE_ATOMIC64))))
+
+#if defined(__x86_64__)
+#define AO_USE_PENTIUM4_INSTRS
+#endif
+
+#include "atomic_ops.h"
+#include "ethr_membar.h"
#include "ethr_atomic.h"
#endif
+
+#endif
diff --git a/erts/include/internal/ppc32/atomic.h b/erts/include/internal/ppc32/atomic.h
index 522f433649..6001620677 100644
--- a/erts/include/internal/ppc32/atomic.h
+++ b/erts/include/internal/ppc32/atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -29,27 +29,31 @@
#define ETHREAD_PPC_ATOMIC_H
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "ethread"
typedef struct {
volatile ethr_sint32_t counter;
} ethr_native_atomic32_t;
-#define ETHR_MEMORY_BARRIER __asm__ __volatile__("sync" : : : "memory")
-
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+
static ETHR_INLINE ethr_sint32_t *
ethr_native_atomic32_addr(ethr_native_atomic32_t *var)
{
return (ethr_sint32_t *) &var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
+
static ETHR_INLINE void
-ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i)
+ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i)
{
var->counter = i;
}
-#define ethr_native_atomic32_set(v, i) ethr_native_atomic32_init((v), (i))
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_read(ethr_native_atomic32_t *var)
@@ -57,57 +61,68 @@ ethr_native_atomic32_read(ethr_native_atomic32_t *var)
return var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"add %0,%2,%0\n\t"
"stwcx. %0,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter), "r"(incr)
: "cc", "memory");
return tmp;
}
-static ETHR_INLINE void
-ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_add_return_acqb(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
- /* XXX: could use weaker version here w/o eieio+isync */
- (void)ethr_native_atomic32_add_return(var, incr);
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_add_return(var, incr);
+ __asm__ __volatile("isync\n\t" : : : "memory");
+ return res;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var)
{
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"addic %0,%0,1\n\t" /* due to addi's (rA|0) behaviour */
"stwcx. %0,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter)
: "cc", "memory");
return tmp;
}
-static ETHR_INLINE void
-ethr_native_atomic32_inc(ethr_native_atomic32_t *var)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var)
{
- /* XXX: could use weaker version here w/o eieio+isync */
- (void)ethr_native_atomic32_inc_return(var);
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_inc_return(var);
+ __asm__ __volatile("isync\n\t" : : : "memory");
+ return res;
}
+
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
@@ -115,82 +130,120 @@ ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"addic %0,%0,-1\n\t"
"stwcx. %0,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter)
: "cc", "memory");
return tmp;
}
-static ETHR_INLINE void
-ethr_native_atomic32_dec(ethr_native_atomic32_t *var)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_dec_return_acqb(ethr_native_atomic32_t *var)
{
- /* XXX: could use weaker version here w/o eieio+isync */
- (void)ethr_native_atomic32_dec_return(var);
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_dec_return(var);
+ __asm__ __volatile("isync\n\t" : : : "memory");
+ return res;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
ethr_sint32_t old, new;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%2\n\t"
"and %1,%0,%3\n\t"
"stwcx. %1,0,%2\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(old), "=&r"(new)
: "r"(&var->counter), "r"(mask)
: "cc", "memory");
return old;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_and_retold_acqb(ethr_native_atomic32_t *var, ethr_sint32_t mask)
+{
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_and_retold(var, mask);
+ __asm__ __volatile("isync\n\t" : : : "memory");
+ return res;
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
ethr_sint32_t old, new;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%2\n\t"
"or %1,%0,%3\n\t"
"stwcx. %1,0,%2\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(old), "=&r"(new)
: "r"(&var->counter), "r"(mask)
: "cc", "memory");
return old;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_or_retold_acqb(ethr_native_atomic32_t *var, ethr_sint32_t mask)
+{
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_or_retold(var, mask);
+ __asm__ __volatile("isync\n\t" : : : "memory");
+ return res;
+}
+
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val)
{
ethr_sint32_t tmp;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"stwcx. %2,0,%1\n\t"
"bne- 1b\n\t"
- "isync"
: "=&r"(tmp)
: "r"(&var->counter), "r"(val)
: "cc", "memory");
return tmp;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_xchg_acqb(ethr_native_atomic32_t *var, ethr_sint32_t val)
+{
+ ethr_sint32_t res;
+ res = ethr_native_atomic32_xchg(var, val);
+ __asm__ __volatile("isync\n\t" : : : "memory");
+ return res;
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
ethr_sint32_t new,
@@ -199,14 +252,12 @@ ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
ethr_sint32_t old;
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%2\n\t"
"cmpw 0,%0,%3\n\t"
"bne 2f\n\t"
"stwcx. %1,0,%2\n\t"
"bne- 1b\n\t"
- "isync\n"
"2:"
: "=&r"(old)
: "r"(new), "r"(&var->counter), "r"(expected)
@@ -215,25 +266,30 @@ ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
return old;
}
-/*
- * Atomic ops with at least specified barriers.
- */
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
-static ETHR_INLINE long
-ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var,
+ ethr_sint32_t new,
+ ethr_sint32_t expected)
{
- long res = ethr_native_atomic32_read(var);
- ETHR_MEMORY_BARRIER;
- return res;
-}
+ ethr_sint32_t old;
-#define ethr_native_atomic32_set_relb ethr_native_atomic32_xchg
-#define ethr_native_atomic32_inc_return_acqb ethr_native_atomic32_inc_return
-#define ethr_native_atomic32_dec_relb ethr_native_atomic32_dec_return
-#define ethr_native_atomic32_dec_return_relb ethr_native_atomic32_dec_return
+ __asm__ __volatile__(
+ "1:\t"
+ "lwarx %0,0,%2\n\t"
+ "cmpw 0,%0,%3\n\t"
+ "bne 2f\n\t"
+ "stwcx. %1,0,%2\n\t"
+ "bne- 1b\n\t"
+ "isync\n"
+ "2:"
+ : "=&r"(old)
+ : "r"(new), "r"(&var->counter), "r"(expected)
+ : "cc", "memory");
-#define ethr_native_atomic32_cmpxchg_acqb ethr_native_atomic32_cmpxchg
-#define ethr_native_atomic32_cmpxchg_relb ethr_native_atomic32_cmpxchg
+ return old;
+}
#endif /* ETHR_TRY_INLINE_FUNCS */
diff --git a/erts/include/internal/ppc32/ethr_membar.h b/erts/include/internal/ppc32/ethr_membar.h
new file mode 100644
index 0000000000..ff5cc86bfb
--- /dev/null
+++ b/erts/include/internal/ppc32/ethr_membar.h
@@ -0,0 +1,63 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for PowerPC
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_PPC_MEMBAR_H__
+#define ETHR_PPC_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+static __inline__ void
+ethr_lwsync__(void)
+{
+#ifdef ETHR_PPC_HAVE_NO_LWSYNC
+ __asm__ __volatile__ ("sync\n\t" : : : "memory");
+#else
+#ifndef ETHR_PPC_HAVE_LWSYNC
+ if (ETHR_PPC_RUNTIME_CONF_HAVE_NO_LWSYNC__)
+ __asm__ __volatile__ ("sync\n\t" : : : "memory");
+ else
+#endif
+ __asm__ __volatile__ ("lwsync\n\t" : : : "memory");
+#endif
+}
+
+static __inline__ void
+ethr_sync__(void)
+{
+ __asm__ __volatile__ ("sync\n\t" : : : "memory");
+}
+
+/*
+ * According to the "memory barrier intstructions" section of
+ * http://www.ibm.com/developerworks/systems/articles/powerpc.html
+ * we want to use sync when a StoreLoad is needed and lwsync for
+ * everything else.
+ */
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_sync__(), ethr_lwsync__())
+
+#endif
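
Illustrative expansions of the rule above:

/*
 * ETHR_MEMBAR(ETHR_LoadLoad)                  -> lwsync (or sync when
 *                                                lwsync is unavailable)
 * ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore) -> lwsync
 * ETHR_MEMBAR(ETHR_StoreLoad)                 -> sync
 */
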
diff --git a/erts/include/internal/ppc32/ethread.h b/erts/include/internal/ppc32/ethread.h
index 3b619e9d01..e41c83c5da 100644
--- a/erts/include/internal/ppc32/ethread.h
+++ b/erts/include/internal/ppc32/ethread.h
@@ -24,12 +24,9 @@
#ifndef ETHREAD_PPC32_ETHREAD_H
#define ETHREAD_PPC32_ETHREAD_H
+#include "ethr_membar.h"
#include "atomic.h"
#include "spinlock.h"
#include "rwlock.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-
#endif /* ETHREAD_PPC32_ETHREAD_H */
diff --git a/erts/include/internal/ppc32/rwlock.h b/erts/include/internal/ppc32/rwlock.h
index 19ec26ab68..311f000b69 100644
--- a/erts/include/internal/ppc32/rwlock.h
+++ b/erts/include/internal/ppc32/rwlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -23,12 +23,14 @@
*
* Based on the examples in Appendix E of Motorola's
* "Programming Environments Manual For 32-Bit Implementations
- * of the PowerPC Architecture". Uses eieio instead of sync
- * in the unlock sequence, as suggested in the manual.
+ * of the PowerPC Architecture".
*/
#ifndef ETHREAD_PPC_RWLOCK_H
#define ETHREAD_PPC_RWLOCK_H
+#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread"
+
/* Unlocked if zero, read-locked if negative, write-locked if +1. */
typedef struct {
volatile int lock;
@@ -47,9 +49,10 @@ ethr_native_read_unlock(ethr_native_rwlock_t *lock)
{
int tmp;
- /* this is eieio + ethr_native_atomic_inc() - isync */
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+
+ /* this is ethr_native_atomic_inc() - isync */
__asm__ __volatile__(
- "eieio\n\t"
"1:\t"
"lwarx %0,0,%1\n\t"
"addic %0,%0,1\n\t"
@@ -105,7 +108,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
static ETHR_INLINE void
ethr_native_write_unlock(ethr_native_rwlock_t *lock)
{
- __asm__ __volatile__("eieio" : : : "memory");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
diff --git a/erts/include/internal/ppc32/spinlock.h b/erts/include/internal/ppc32/spinlock.h
index c8460a3e8a..4c95ec9efb 100644
--- a/erts/include/internal/ppc32/spinlock.h
+++ b/erts/include/internal/ppc32/spinlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -23,12 +23,14 @@
*
* Based on the examples in Appendix E of Motorola's
* "Programming Environments Manual For 32-Bit Implementations
- * of the PowerPC Architecture". Uses eieio instead of sync
- * in the unlock sequence, as suggested in the manual.
+ * of the PowerPC Architecture".
*/
#ifndef ETHREAD_PPC_SPINLOCK_H
#define ETHREAD_PPC_SPINLOCK_H
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+#define ETHR_NATIVE_SPINLOCK_IMPL "ethread"
+
/* Unlocked if zero, locked if non-zero. */
typedef struct {
volatile unsigned int lock;
@@ -45,7 +47,7 @@ ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
static ETHR_INLINE void
ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- __asm__ __volatile__("eieio" : : : "memory");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
diff --git a/erts/include/internal/pthread/ethr_event.h b/erts/include/internal/pthread/ethr_event.h
index 4c29b28536..d0a77990cc 100644
--- a/erts/include/internal/pthread/ethr_event.h
+++ b/erts/include/internal/pthread/ethr_event.h
@@ -21,7 +21,7 @@
* Author: Rickard Green
*/
-#if defined(ETHR_HAVE_LINUX_FUTEX) && defined(ETHR_HAVE_NATIVE_ATOMICS)
+#if defined(ETHR_HAVE_LINUX_FUTEX) && defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
/* --- Linux futex implementation of ethread events ------------------------- */
#define ETHR_LINUX_FUTEX_IMPL__
@@ -62,8 +62,7 @@ static void ETHR_INLINE
ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
{
ethr_sint32_t val;
- ETHR_MEMORY_BARRIER;
- val = ethr_atomic32_xchg(&e->futex, ETHR_EVENT_ON__);
+ val = ethr_atomic32_xchg_mb(&e->futex, ETHR_EVENT_ON__);
if (val == ETHR_EVENT_OFF_WAITER__) {
int res = ETHR_FUTEX__(&e->futex, ETHR_FUTEX_WAKE__, 1);
if (res != 0)
@@ -99,8 +98,7 @@ static void ETHR_INLINE
ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
{
ethr_sint32_t val;
- ETHR_MEMORY_BARRIER;
- val = ethr_atomic32_xchg(&e->state, ETHR_EVENT_ON__);
+ val = ethr_atomic32_xchg_mb(&e->state, ETHR_EVENT_ON__);
if (val == ETHR_EVENT_OFF_WAITER__) {
int res = pthread_mutex_lock(&e->mtx);
if (res != 0)
diff --git a/erts/include/internal/sparc32/atomic.h b/erts/include/internal/sparc32/atomic.h
index c297522ab1..fe1daaa9cf 100644
--- a/erts/include/internal/sparc32/atomic.h
+++ b/erts/include/internal/sparc32/atomic.h
@@ -35,23 +35,16 @@
#ifdef ETHR_INCLUDE_ATOMIC_IMPL__
-#ifndef ETHR_SPARC_V9_ATOMIC_COMMON__
-#define ETHR_SPARC_V9_ATOMIC_COMMON__
-
-#define ETHR_MEMORY_BARRIER \
- __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreLoad|#StoreStore\n" \
- : : : "memory")
-
-#endif /* ETHR_SPARC_V9_ATOMIC_COMMON__ */
-
#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "ethread"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic32_t
#define ETHR_AINT_T__ ethr_sint32_t
#define ETHR_CAS__ "cas"
#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATIVE_ATOMIC64_IMPL "ethread"
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
#define ETHR_ATMC_T__ ethr_native_atomic64_t
#define ETHR_AINT_T__ ethr_sint64_t
@@ -66,17 +59,23 @@ typedef struct {
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
return (ETHR_AINT_T__ *) &var->counter;
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- var->counter = i;
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
+#endif
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
@@ -84,182 +83,49 @@ ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
var->counter = i;
}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
return var->counter;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- ETHR_AINT_T__ old, tmp;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
- do {
- old = var->counter;
- tmp = old+incr;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(tmp)
- : "r"(old), "r"(&var->counter), "0"(tmp)
- : "memory");
- } while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old+incr;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- (void)ETHR_NATMC_FUNC__(add_return)(var, incr);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, 1);
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void)ETHR_NATMC_FUNC__(add_return)(var, 1);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(add_return)(var, -1);
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void)ETHR_NATMC_FUNC__(add_return)(var, -1);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ old, tmp;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
- do {
- old = var->counter;
- tmp = old & mask;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(tmp)
- : "r"(old), "r"(&var->counter), "0"(tmp)
- : "memory");
- } while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
-{
- ETHR_AINT_T__ old, tmp;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
- do {
- old = var->counter;
- tmp = old | mask;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(tmp)
- : "r"(old), "r"(&var->counter), "0"(tmp)
- : "memory");
- } while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old;
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
-{
- ETHR_AINT_T__ old, new;
-
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad" : : : "memory");
- do {
- old = var->counter;
- new = val;
- __asm__ __volatile__(
- ETHR_CAS__ " [%2], %1, %0"
- : "=&r"(new)
- : "r"(old), "r"(&var->counter), "0"(new)
- : "memory");
- } while (__builtin_expect(old != new, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
- return old;
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
{
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
__asm__ __volatile__(
ETHR_CAS__ " [%2], %1, %0"
: "=&r"(new)
: "r"(old), "r"(&var->counter), "0"(new)
: "memory");
- __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
return new;
}
-/*
- * Atomic ops with at least specified barriers.
- */
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var);
- __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
- return res;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
- __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
- ETHR_NATMC_FUNC__(set)(var, i);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
-{
- ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
- return res;
-}
-
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
-{
- ETHR_NATMC_FUNC__(dec)(var);
-}
-
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_NATMC_FUNC__(dec_return)(var);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
{
ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
return res;
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
-{
- return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
-}
-
#endif /* ETHR_TRY_INLINE_FUNCS */
#undef ETHR_NATMC_FUNC__
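The net effect of this rewrite: the SPARC natives now expose only a bare, unordered cas plus one acquire variant, and the add/inc/dec/xchg natives deleted above are instead synthesized by the generated ethr_atomics layer from this primitive and ETHR_MEMBAR. A minimal sketch of such a synthesized variant, assuming the 32-bit expansion of the macros above; the _sketch suffix marks a hypothetical name, not the generated code:

static ETHR_INLINE ethr_sint32_t
cmpxchg_relb_sketch(ethr_native_atomic32_t *var,
                    ethr_sint32_t new, ethr_sint32_t old)
{
    /* release: order all earlier loads and stores before the CAS */
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    return ethr_native_atomic32_cmpxchg(var, new, old);
}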
diff --git a/erts/include/internal/sparc32/ethr_membar.h b/erts/include/internal/sparc32/ethr_membar.h
new file mode 100644
index 0000000000..6eb0c5a1d6
--- /dev/null
+++ b/erts/include/internal/sparc32/ethr_membar.h
@@ -0,0 +1,115 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for sparc-v9
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_SPARC_V9_MEMBAR_H__
+#define ETHR_SPARC_V9_MEMBAR_H__
+
+#if defined(ETHR_SPARC_TSO)
+/* --- Total Store Order ------------------------------------------------ */
+
+#define ETHR_LoadLoad 0
+#define ETHR_LoadStore 0
+#define ETHR_StoreLoad 1
+#define ETHR_StoreStore 0
+
+static __inline__ void
+ethr_cb__(void)
+{
+ __asm__ __volatile__ ("" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreLoad__(void)
+{
+ __asm__ __volatile__ ("membar #StoreLoad\n\t" : : : "memory");
+}
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B), ethr_StoreLoad__(), ethr_cb__())
+
+#elif defined(ETHR_SPARC_PSO)
+/* --- Partial Store Order ---------------------------------------------- */
+
+#define ETHR_LoadLoad 0
+#define ETHR_LoadStore 0
+#define ETHR_StoreLoad (1 << 0)
+#define ETHR_StoreStore (1 << 1)
+
+static __inline__ void
+ethr_cb__(void)
+{
+ __asm__ __volatile__ ("" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreLoad__(void)
+{
+ __asm__ __volatile__ ("membar #StoreLoad\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreStore__(void)
+{
+ __asm__ __volatile__ ("membar #StoreStore\n\t" : : : "memory");
+}
+
+static __inline__ void
+ethr_StoreLoad_StoreStore__(void)
+{
+ __asm__ __volatile__ ("membar #StoreLoad|#StoreStore\n\t" : : : "memory");
+}
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR( \
+ (B) == ETHR_StoreLoad, \
+ ethr_StoreLoad__(), \
+ ETHR_CHOOSE_EXPR( \
+ (B) == ETHR_StoreStore, \
+ ethr_StoreStore__(), \
+ ETHR_CHOOSE_EXPR( \
+ (B) == (ETHR_StoreLoad|ETHR_StoreStore), \
+ ethr_StoreLoad_StoreStore__(), \
+ ethr_cb__())))
+
+#elif defined(ETHR_SPARC_RMO)
+/* --- Relaxed Memory Order --------------------------------------------- */
+
+# define ETHR_LoadLoad #LoadLoad
+# define ETHR_LoadStore #LoadStore
+# define ETHR_StoreLoad #StoreLoad
+# define ETHR_StoreStore #StoreStore
+
+# define ETHR_MEMBAR_AUX__(B) \
+ __asm__ __volatile__("membar " #B "\n\t" : : : "memory")
+
+# define ETHR_MEMBAR(B) ETHR_MEMBAR_AUX__(B)
+
+#else
+
+# error "No memory order defined"
+
+#endif
+
+#endif
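All three memory-model branches above share one interface: each defines the four ordering-constraint macros, setting to zero the constraints the hardware already enforces, so ETHR_CHOOSE_EXPR can resolve ETHR_MEMBAR to the cheapest sufficient instruction at compile time. A usage sketch with a hypothetical helper, not part of the file:

/* Release store: under ETHR_SPARC_TSO both constraint bits below are 0,
 * so ETHR_MEMBAR collapses to ethr_cb__(), a pure compiler barrier;
 * under ETHR_SPARC_RMO it emits "membar #LoadStore|#StoreStore". */
static __inline__ void
store_relb_sketch(volatile ethr_sint32_t *p, ethr_sint32_t v)
{
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    *p = v;
}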
diff --git a/erts/include/internal/sparc32/ethread.h b/erts/include/internal/sparc32/ethread.h
index aea9794390..5ad92d3da7 100644
--- a/erts/include/internal/sparc32/ethread.h
+++ b/erts/include/internal/sparc32/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,7 @@
#ifndef ETHREAD_SPARC32_ETHREAD_H
#define ETHREAD_SPARC32_ETHREAD_H
+#include "ethr_membar.h"
#define ETHR_ATOMIC_WANT_32BIT_IMPL__
#include "atomic.h"
#if ETHR_SIZEOF_PTR == 8
@@ -33,8 +34,4 @@
#include "spinlock.h"
#include "rwlock.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-#define ETHR_HAVE_NATIVE_SPINLOCKS 1
-#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
-
#endif /* ETHREAD_SPARC32_ETHREAD_H */
diff --git a/erts/include/internal/sparc32/rwlock.h b/erts/include/internal/sparc32/rwlock.h
index 465ec96866..8b6f2e9c57 100644
--- a/erts/include/internal/sparc32/rwlock.h
+++ b/erts/include/internal/sparc32/rwlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
#ifndef ETHREAD_SPARC32_RWLOCK_H
#define ETHREAD_SPARC32_RWLOCK_H
+#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
+#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread"
+
/* Unlocked if zero, read-locked if positive, write-locked if -1. */
typedef struct {
volatile int lock;
@@ -42,7 +45,7 @@ ethr_native_read_unlock(ethr_native_rwlock_t *lock)
{
unsigned int old, new;
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
do {
old = lock->lock;
new = old-1;
@@ -70,7 +73,7 @@ ethr_native_read_trylock(ethr_native_rwlock_t *lock)
: "r"(old), "r"(&lock->lock), "0"(new)
: "memory");
} while (__builtin_expect(old != new, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
return 1;
}
@@ -87,7 +90,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
if (__builtin_expect(ethr_native_read_trylock(lock) != 0, 1))
break;
do {
- __asm__ __volatile__("membar #LoadLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad);
} while (ethr_native_read_is_locked(lock));
}
}
@@ -95,7 +98,7 @@ ethr_native_read_lock(ethr_native_rwlock_t *lock)
static ETHR_INLINE void
ethr_native_write_unlock(ethr_native_rwlock_t *lock)
{
- __asm__ __volatile__("membar #LoadStore|#StoreStore");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
@@ -115,7 +118,7 @@ ethr_native_write_trylock(ethr_native_rwlock_t *lock)
: "r"(old), "r"(&lock->lock), "0"(new)
: "memory");
} while (__builtin_expect(old != new, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
return 1;
}
@@ -132,7 +135,7 @@ ethr_native_write_lock(ethr_native_rwlock_t *lock)
if (__builtin_expect(ethr_native_write_trylock(lock) != 0, 1))
break;
do {
- __asm__ __volatile__("membar #LoadLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad);
} while (ethr_native_write_is_locked(lock));
}
}
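The rwlock packs its whole state into a single integer (0 unlocked, a positive reader count, -1 write-locked), so every transition above is one CAS loop. The same reader path in portable C11, as an illustrative sketch rather than the SPARC code:

#include <stdatomic.h>

typedef struct { atomic_int lock; } rwlock_sketch_t;

static int
read_trylock_sketch(rwlock_sketch_t *l)
{
    int old = atomic_load_explicit(&l->lock, memory_order_relaxed);
    /* fail while write-locked (-1); otherwise try to bump the count */
    return old >= 0 &&
        atomic_compare_exchange_strong_explicit(&l->lock, &old, old + 1,
                                                memory_order_acquire,
                                                memory_order_relaxed);
}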
diff --git a/erts/include/internal/sparc32/spinlock.h b/erts/include/internal/sparc32/spinlock.h
index 493d514210..d4e36e09cf 100644
--- a/erts/include/internal/sparc32/spinlock.h
+++ b/erts/include/internal/sparc32/spinlock.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
#ifndef ETHR_SPARC32_SPINLOCK_H
#define ETHR_SPARC32_SPINLOCK_H
+#define ETHR_HAVE_NATIVE_SPINLOCKS 1
+#define ETHR_NATIVE_SPINLOCK_IMPL "ethread"
+
/* Locked with ldstub, so unlocked when 0 and locked when non-zero. */
typedef struct {
volatile unsigned char lock;
@@ -40,7 +43,7 @@ ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
static ETHR_INLINE void
ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
- __asm__ __volatile__("membar #LoadStore|#StoreStore");
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
lock->lock = 0;
}
@@ -51,10 +54,10 @@ ethr_native_spin_trylock(ethr_native_spinlock_t *lock)
__asm__ __volatile__(
"ldstub [%1], %0\n\t"
- "membar #StoreLoad|#StoreStore"
: "=r"(prev)
: "r"(&lock->lock)
: "memory");
+ ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
return prev == 0;
}
@@ -71,7 +74,7 @@ ethr_native_spin_lock(ethr_native_spinlock_t *lock)
if (__builtin_expect(ethr_native_spin_trylock(lock) != 0, 1))
break;
do {
- __asm__ __volatile__("membar #LoadLoad");
+ ETHR_MEMBAR(ETHR_LoadLoad);
} while (ethr_native_spin_is_locked(lock));
}
}
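ldstub atomically loads a byte and stores all ones into it, which is precisely C11's atomic_flag_test_and_set. A portable sketch of the same lock shape, illustrative rather than the ethread code:

#include <stdatomic.h>

static void
spin_lock_sketch(atomic_flag *lock)
{
    /* the test-and-set plays the role of ldstub: it returns the
     * previous value and leaves the flag set */
    while (atomic_flag_test_and_set_explicit(lock, memory_order_acquire))
        ; /* spin */
}

static void
spin_unlock_sketch(atomic_flag *lock)
{
    /* memory_order_release corresponds to the
     * ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore) before the store */
    atomic_flag_clear_explicit(lock, memory_order_release);
}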
diff --git a/erts/include/internal/tile/atomic.h b/erts/include/internal/tile/atomic.h
index 5697afda25..1f1553c346 100644
--- a/erts/include/internal/tile/atomic.h
+++ b/erts/include/internal/tile/atomic.h
@@ -25,6 +25,7 @@
#define ETHREAD_TILE_ATOMIC_H
#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATIVE_ATOMIC32_IMPL "tilera"
#include <atomic.h>
@@ -34,27 +35,25 @@ typedef struct {
volatile ethr_sint32_t counter;
} ethr_native_atomic32_t;
-#define ETHR_MEMORY_BARRIER __insn_mf()
-
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
+
static ETHR_INLINE ethr_sint32_t *
ethr_native_atomic32_addr(ethr_native_atomic32_t *var)
{
return (ethr_sint32_t *) &var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT 1
+
static ETHR_INLINE void
ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i)
{
var->counter = i;
}
-static ETHR_INLINE void
-ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i)
-{
- atomic_exchange_acq(&var->counter, i);
-}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_read(ethr_native_atomic32_t *var)
@@ -62,139 +61,80 @@ ethr_native_atomic32_read(ethr_native_atomic32_t *var)
return var->counter;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB 1
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
+{
+ return atomic_compare_and_exchange_val_acq(&var->counter,
+ 0x81818181,
+ 0x81818181);
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD 1
+
static ETHR_INLINE void
ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
- ETHR_MEMORY_BARRIER;
atomic_add(&var->counter, incr);
- ETHR_MEMORY_BARRIER;
-}
-
+}
+
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC 1
+
static ETHR_INLINE void
ethr_native_atomic32_inc(ethr_native_atomic32_t *var)
{
- ETHR_MEMORY_BARRIER;
atomic_increment(&var->counter);
- ETHR_MEMORY_BARRIER;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC 1
+
static ETHR_INLINE void
ethr_native_atomic32_dec(ethr_native_atomic32_t *var)
{
- ETHR_MEMORY_BARRIER;
atomic_decrement(&var->counter);
- ETHR_MEMORY_BARRIER;
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
- ethr_sint32_t res;
- ETHR_MEMORY_BARRIER;
- res = atomic_exchange_and_add(&var->counter, incr) + incr;
- ETHR_MEMORY_BARRIER;
- return res;
+ return atomic_exchange_and_add(&var->counter, incr) + incr;
}
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_add_return(var, 1);
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_add_return(var, -1);
-}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
- ethr_sint32_t res;
- ETHR_MEMORY_BARRIER;
- res = atomic_and_val(&var->counter, mask);
- ETHR_MEMORY_BARRIER;
- return res;
+ return atomic_and_val(&var->counter, mask);
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD 1
+
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
- ethr_sint32_t res;
- ETHR_MEMORY_BARRIER;
- res = atomic_or_val(&var->counter, mask);
- ETHR_MEMORY_BARRIER;
- return res;
+ return atomic_or_val(&var->counter, mask);
}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB 1
+
static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val)
+ethr_native_atomic32_xchg_acqb(ethr_native_atomic32_t *var, ethr_sint32_t val)
{
- ETHR_MEMORY_BARRIER;
return atomic_exchange_acq(&var->counter, val);
}
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t expected)
-{
- ETHR_MEMORY_BARRIER;
- return atomic_compare_and_exchange_val_acq(&var->counter, new, expected);
-}
-
-/*
- * Atomic ops with at least specified barriers.
- */
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
-{
- ethr_sint32_t res = ethr_native_atomic32_read(var);
- ETHR_MEMORY_BARRIER;
- return res;
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_inc_return(var);
-}
-
-static ETHR_INLINE void
-ethr_native_atomic32_set_relb(ethr_native_atomic32_t *var, ethr_sint32_t val)
-{
- ETHR_MEMORY_BARRIER;
- ethr_native_atomic32_set(var, val);
-}
-
-static ETHR_INLINE void
-ethr_native_atomic32_dec_relb(ethr_native_atomic32_t *var)
-{
- ethr_native_atomic32_dec(var);
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_dec_return_relb(ethr_native_atomic32_t *var)
-{
- return ethr_native_atomic32_dec_return(var);
-}
+#define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var,
ethr_sint32_t new,
- ethr_sint32_t exp)
+ ethr_sint32_t expected)
{
- return ethr_native_atomic32_cmpxchg(var, new, exp);
-}
-
-static ETHR_INLINE ethr_sint32_t
-ethr_native_atomic32_cmpxchg_relb(ethr_native_atomic32_t *var,
- ethr_sint32_t new,
- ethr_sint32_t exp)
-{
- return ethr_native_atomic32_cmpxchg(var, new, exp);
+ return atomic_compare_and_exchange_val_acq(&var->counter, new, expected);
}
#endif /* ETHR_TRY_INLINE_FUNCS */
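The read_acqb above relies on a small trick: a compare-and-exchange whose comparand and exchange value are the same improbable constant never changes memory (a mismatch fails the CAS; a match rewrites the value already there), yet it still returns the current contents with at least acquire ordering. The same idea expressed with GCC's legacy __sync builtins, as a sketch:

static inline int
read_acqb_sketch(volatile int *p)
{
    const int MAGIC = (int) 0x81818181; /* a value never (or rarely) stored */
    /* if *p != MAGIC the CAS fails and returns the observed value;
     * if *p == MAGIC it "succeeds" by writing MAGIC over MAGIC */
    return __sync_val_compare_and_swap(p, MAGIC, MAGIC);
}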
diff --git a/erts/include/internal/tile/ethr_membar.h b/erts/include/internal/tile/ethr_membar.h
new file mode 100644
index 0000000000..7cb4f3cf9a
--- /dev/null
+++ b/erts/include/internal/tile/ethr_membar.h
@@ -0,0 +1,35 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for TILE64/TILEPro
+ * Author: Rickard Green
+ */
+
+#ifndef ETHR_TILE_MEMBAR_H__
+#define ETHR_TILE_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#define ETHR_MEMBAR(B) __insn_mf()
+
+#endif
diff --git a/erts/include/internal/tile/ethread.h b/erts/include/internal/tile/ethread.h
index 2de4d42bc6..7f579b50e7 100644
--- a/erts/include/internal/tile/ethread.h
+++ b/erts/include/internal/tile/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2009. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -23,8 +23,7 @@
#ifndef ETHREAD_TILE_ETHREAD_H
#define ETHREAD_TILE_ETHREAD_H
+#include "ethr_membar.h"
#include "atomic.h"
-#define ETHR_HAVE_NATIVE_ATOMICS 1
-
#endif /* ETHREAD_TILE_ETHREAD_H */
diff --git a/erts/include/internal/win/ethr_atomic.h b/erts/include/internal/win/ethr_atomic.h
index 60def01a7e..e11f1abf47 100644
--- a/erts/include/internal/win/ethr_atomic.h
+++ b/erts/include/internal/win/ethr_atomic.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,364 +24,408 @@
#undef ETHR_INCLUDE_ATOMIC_IMPL__
#if !defined(ETHR_WIN_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
-#define ETHR_WIN_ATOMIC32_H__
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
-#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
+# define ETHR_WIN_ATOMIC32_H__
+# if (defined(ETHR_MEMBAR) \
+ && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE) \
+ && defined(ETHR_HAVE__INTERLOCKEDEXCHANGE))
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+# endif
+# undef ETHR_ATOMIC_WANT_32BIT_IMPL__
#elif !defined(ETHR_WIN_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
-#define ETHR_WIN_ATOMIC64_H__
-#ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64
-/* _InterlockedCompareExchange64() required... */
-#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+# define ETHR_WIN_ATOMIC64_H__
+# if (defined(ETHR_MEMBAR) \
+ && (defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64) \
+ || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ) \
+ || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL)))
+# define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+# endif
+# undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
-#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
+
+#if !defined(_MSC_VER) || _MSC_VER < 1400
+# undef ETHR_INCLUDE_ATOMIC_IMPL__
#endif
#ifdef ETHR_INCLUDE_ATOMIC_IMPL__
-#if defined(_MSC_VER) && _MSC_VER >= 1400
+# ifndef ETHR_WIN_ATOMIC_COMMON__
+# define ETHR_WIN_ATOMIC_COMMON__
+
+# if defined(_M_IX86) || defined(_M_AMD64) || defined(_M_IA64)
+# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 1
+# else
+# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 0
+# endif
+
+# endif /* ETHR_WIN_ATOMIC_COMMON__ */
+
+# if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+
+# define ETHR_HAVE_NATIVE_ATOMIC32 1
+# define ETHR_NATIVE_ATOMIC32_IMPL "windows-interlocked"
+
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT
+# define ETHR_WIN_HAVE_DEC
+# pragma intrinsic(_InterlockedDecrement)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT_REL
+# define ETHR_WIN_HAVE_DEC_REL
+# pragma intrinsic(_InterlockedDecrement_rel)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT
+# define ETHR_WIN_HAVE_INC
+# pragma intrinsic(_InterlockedIncrement)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT_ACQ
+# define ETHR_WIN_HAVE_INC_ACQ
+# pragma intrinsic(_InterlockedIncrement_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD
+# define ETHR_WIN_HAVE_XCHG_ADD
+# pragma intrinsic(_InterlockedExchangeAdd)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD_ACQ
+# define ETHR_WIN_HAVE_XCHG_ADD_ACQ
+# pragma intrinsic(_InterlockedExchangeAdd_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE
+# define ETHR_WIN_HAVE_XCHG
+# pragma intrinsic(_InterlockedExchange)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDAND
+# define ETHR_WIN_HAVE_AND
+# pragma intrinsic(_InterlockedAnd)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDOR
+# define ETHR_WIN_HAVE_OR
+# pragma intrinsic(_InterlockedOr)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE
+# define ETHR_WIN_HAVE_CMPXCHG
+# pragma intrinsic(_InterlockedCompareExchange)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_ACQ
+# define ETHR_WIN_HAVE_CMPXCHG_ACQ
+# pragma intrinsic(_InterlockedCompareExchange_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE_REL
+# define ETHR_WIN_HAVE_CMPXCHG_REL
+# pragma intrinsic(_InterlockedCompareExchange_rel)
+# endif
+
+# define ETHR_ILCKD__(X) _Interlocked ## X
+# define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## _acq
+# define ETHR_ILCKD_REL__(X) _Interlocked ## X ## _rel
+
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic32_t
+# define ETHR_AINT_T__ ethr_sint32_t
+
+# elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
+
+# define ETHR_HAVE_NATIVE_ATOMIC64 1
+# define ETHR_NATIVE_ATOMIC64_IMPL "windows-interlocked"
+
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64
+# define ETHR_WIN_HAVE_DEC
+# pragma intrinsic(_InterlockedDecrement64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64_REL
+# define ETHR_WIN_HAVE_DEC_REL
+# pragma intrinsic(_InterlockedDecrement64_rel)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64_ACQ
+# define ETHR_WIN_HAVE_INC_ACQ
+# pragma intrinsic(_InterlockedIncrement64_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64
+# define ETHR_WIN_HAVE_INC
+# pragma intrinsic(_InterlockedIncrement64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
+# define ETHR_WIN_HAVE_XCHG_ADD
+# pragma intrinsic(_InterlockedExchangeAdd64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64_ACQ
+# define ETHR_WIN_HAVE_XCHG_ADD_ACQ
+# pragma intrinsic(_InterlockedExchangeAdd64_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE64
+# define ETHR_WIN_HAVE_XCHG
+# pragma intrinsic(_InterlockedExchange64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDAND64
+# define ETHR_WIN_HAVE_AND
+# pragma intrinsic(_InterlockedAnd64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDOR64
+# define ETHR_WIN_HAVE_OR
+# pragma intrinsic(_InterlockedOr64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64
+# define ETHR_WIN_HAVE_CMPXCHG
+# pragma intrinsic(_InterlockedCompareExchange64)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_ACQ
+# define ETHR_WIN_HAVE_CMPXCHG_ACQ
+# pragma intrinsic(_InterlockedCompareExchange64_acq)
+# endif
+# ifdef ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64_REL
+# define ETHR_WIN_HAVE_CMPXCHG_REL
+# pragma intrinsic(_InterlockedCompareExchange64_rel)
+# endif
+
+# define ETHR_ILCKD__(X) _Interlocked ## X ## 64
+# define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64_acq
+# define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64_rel
+
+# define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+# define ETHR_ATMC_T__ ethr_native_atomic64_t
+# define ETHR_AINT_T__ ethr_sint64_t
+
+# else
+# error "Unsupported integer size"
+# endif
-#ifndef ETHR_WIN_ATOMIC_COMMON__
-#define ETHR_WIN_ATOMIC_COMMON__
+typedef struct {
+ volatile ETHR_AINT_T__ value;
+} ETHR_ATMC_T__;
-#define ETHR_HAVE_NATIVE_ATOMICS 1
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-#if defined(_M_IX86) || defined(_M_AMD64) || defined(_M_IA64)
-# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 1
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
#else
-# define ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ 0
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
#endif
-#if defined(_M_AMD64) || (defined(_M_IX86) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 1
-#else
-# define ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__ 0
-#endif
-/*
- * No configure test checking for interlocked acquire/release
- * versions have been written, yet. It should define
- * ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS if, and
- * only if, all used interlocked operations with barriers
- * exists.
- *
- * Note, that these are pure optimizations for the itanium
- * processor.
- */
+static ETHR_INLINE ETHR_AINT_T__ *
+ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
+{
+ return (ETHR_AINT_T__ *) &var->value;
+}
-#include <intrin.h>
-#undef ETHR_COMPILER_BARRIER
-#define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
-#pragma intrinsic(_ReadWriteBarrier)
-#pragma intrinsic(_InterlockedCompareExchange)
-
-#if defined(_M_AMD64) || (defined(_M_IX86) \
- && !defined(ETHR_PRE_PENTIUM4_COMPAT))
-#include <emmintrin.h>
-#include <mmintrin.h>
-#pragma intrinsic(_mm_mfence)
-#define ETHR_MEMORY_BARRIER _mm_mfence()
-#pragma intrinsic(_mm_sfence)
-#define ETHR_WRITE_MEMORY_BARRIER _mm_sfence()
-#pragma intrinsic(_mm_lfence)
-#define ETHR_READ_MEMORY_BARRIER _mm_lfence()
-#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_COMPILER_BARRIER
+#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
#else
-
-#define ETHR_MEMORY_BARRIER \
-do { \
- volatile long x___ = 0; \
- _InterlockedCompareExchange(&x___, (long) 1, (long) 0); \
-} while (0)
-
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
#endif
-#endif /* ETHR_WIN_ATOMIC_COMMON__ */
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ var->value = i;
+}
#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1
+#endif
-#define ETHR_HAVE_NATIVE_ATOMIC32 1
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+#if defined(_M_IX86)
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ (void) ETHR_ILCKD__(Exchange)(&var->value, i);
+ else
+#endif /* _M_IX86 */
+ {
+ ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
+ var->value = i;
+ }
+}
-/*
- * All used operations available as 32-bit intrinsics
- */
+#endif /* ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ */
-#pragma intrinsic(_InterlockedDecrement)
-#pragma intrinsic(_InterlockedIncrement)
-#pragma intrinsic(_InterlockedExchangeAdd)
-#pragma intrinsic(_InterlockedExchange)
-#pragma intrinsic(_InterlockedAnd)
-#pragma intrinsic(_InterlockedOr)
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#pragma intrinsic(_InterlockedExchangeAdd_acq)
-#pragma intrinsic(_InterlockedIncrement_acq)
-#pragma intrinsic(_InterlockedDecrement_rel)
-#pragma intrinsic(_InterlockedCompareExchange_acq)
-#pragma intrinsic(_InterlockedCompareExchange_rel)
-#endif
+#if defined(ETHR_WIN_HAVE_XCHG)
-#define ETHR_ILCKD__(X) _Interlocked ## X
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## _acq
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## _rel
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB 1
#else
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB 1
#endif
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic32_t
-#define ETHR_AINT_T__ ethr_sint32_t
-
-#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ (void) ETHR_ILCKD__(Exchange)(&var->value, i);
+}
-#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#endif
-/*
- * _InterlockedCompareExchange64() is required. The other may not
- * be available, but if so, we can generate them.
- */
-#pragma intrinsic(_InterlockedCompareExchange64)
+#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
-#define ETHR_OWN_ILCKD_INIT_VAL__(PTR) *(PTR)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
#else
-#define ETHR_OWN_ILCKD_INIT_VAL__(PTR) (__int64) 0
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
#endif
-#define ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, PTR, NEW, ACT, EXP, OPS, RET) \
-{ \
- __int64 NEW, ACT, EXP; \
- ACT = ETHR_OWN_ILCKD_INIT_VAL__(PTR); \
- do { \
- EXP = ACT; \
- { OPS; } \
- ACT = _InterlockedCompareExchange64(PTR, NEW, EXP); \
- } while (ACT != EXP); \
- return RET; \
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
+{
+ return var->value;
}
-#define ETHR_OWN_ILCKD_1_IMPL__(FUNC, NEW, ACT, EXP, OPS, RET) \
-static __forceinline __int64 \
-FUNC(__int64 volatile *ptr) \
-ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, ptr, NEW, ACT, EXP, OPS, RET)
-
-#define ETHR_OWN_ILCKD_2_IMPL__(FUNC, NEW, ACT, EXP, OPS, ARG, RET) \
-static __forceinline __int64 \
-FUNC(__int64 volatile *ptr, __int64 ARG) \
-ETHR_OWN_ILCKD_BODY_IMPL__(FUNC, ptr, NEW, ACT, EXP, OPS, RET)
+#endif /* ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__ */
+#if defined(ETHR_WIN_HAVE_XCHG_ADD)
-#ifdef ETHR_HAVE__INTERLOCKEDDECREMENT64
-#pragma intrinsic(_InterlockedDecrement64)
-#else
-ETHR_OWN_ILCKD_1_IMPL__(_InterlockedDecrement64, new, act, exp,
- new = act - 1, new)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDINCREMENT64
-#pragma intrinsic(_InterlockedIncrement64)
-#else
-ETHR_OWN_ILCKD_1_IMPL__(_InterlockedIncrement64, new, act, exp,
- new = act + 1, new)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDEXCHANGEADD64
-#pragma intrinsic(_InterlockedExchangeAdd64)
-#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedExchangeAdd64, new, act, exp,
- new = act + arg, arg, act)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDEXCHANGE64
-#pragma intrinsic(_InterlockedExchange64)
-#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedExchange64, new, act, exp,
- new = arg, arg, act)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDAND64
-#pragma intrinsic(_InterlockedAnd64)
-#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedAnd64, new, act, exp,
- new = act & arg, arg, act)
-#endif
-#ifdef ETHR_HAVE__INTERLOCKEDOR64
-#pragma intrinsic(_InterlockedOr64)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1
#else
-ETHR_OWN_ILCKD_2_IMPL__(_InterlockedOr64, new, act, exp,
- new = act | arg, arg, act)
-#endif
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#pragma intrinsic(_InterlockedExchangeAdd64_acq)
-#pragma intrinsic(_InterlockedIncrement64_acq)
-#pragma intrinsic(_InterlockedDecrement64_rel)
-#pragma intrinsic(_InterlockedCompareExchange64_acq)
-#pragma intrinsic(_InterlockedCompareExchange64_rel)
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1
#endif
-#define ETHR_ILCKD__(X) _Interlocked ## X ## 64
-#ifdef ETHR_HAVE_INTERLOCKED_ACQUIRE_RELEASE_BARRIERS
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64_acq
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64_rel
-#else
-#define ETHR_ILCKD_ACQ__(X) _Interlocked ## X ## 64
-#define ETHR_ILCKD_REL__(X) _Interlocked ## X ## 64
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ return ETHR_ILCKD__(ExchangeAdd)(&var->value, i) + i;
+}
+
#endif
-#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
-#define ETHR_ATMC_T__ ethr_native_atomic64_t
-#define ETHR_AINT_T__ ethr_sint64_t
+#if defined(ETHR_WIN_HAVE_INC)
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB 1
#else
-#error "Unsupported integer size"
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB 1
#endif
-typedef struct {
- volatile ETHR_AINT_T__ value;
-} ETHR_ATMC_T__;
-
-#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
-
-static ETHR_INLINE ETHR_AINT_T__ *
-ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(inc_return_mb)(ETHR_ATMC_T__ *var)
{
- return (ETHR_AINT_T__ *) &var->value;
+ return ETHR_ILCKD__(Increment)(&var->value);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
- var->value = i;
-#else
- (void) ETHR_ILCKD__(Exchange)(&var->value, i);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
- var->value = i;
+#if defined(ETHR_WIN_HAVE_INC_ACQ)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB 1
#else
- (void) ETHR_ILCKD__(Exchange)(&var->value, i);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
{
-#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
- return var->value;
-#else
- return ETHR_ILCKD__(ExchangeAdd)(&var->value, (ETHR_AINT_T__) 0);
-#endif
+ return ETHR_ILCKD_ACQ__(Increment)(&var->value);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
-{
- (void) ETHR_ILCKD__(ExchangeAdd)(&var->value, incr);
-}
+#endif
+
+#if defined(ETHR_WIN_HAVE_DEC)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+ETHR_NATMC_FUNC__(dec_return_mb)(ETHR_ATMC_T__ *var)
{
- return ETHR_ILCKD__(ExchangeAdd)(&var->value, i) + i;
+ return ETHR_ILCKD__(Decrement)(&var->value);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_ILCKD__(Increment)(&var->value);
-}
+#endif
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_ILCKD__(Decrement)(&var->value);
-}
+#if defined(ETHR_WIN_HAVE_DEC_REL)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
-{
- return ETHR_ILCKD__(Increment)(&var->value);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
{
- return ETHR_ILCKD__(Decrement)(&var->value);
+ return ETHR_ILCKD_REL__(Decrement)(&var->value);
}
+#endif
+
+#if defined(ETHR_WIN_HAVE_AND)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
+ETHR_NATMC_FUNC__(and_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
return ETHR_ILCKD__(And)(&var->value, mask);
}
+#endif
+
+#if defined(ETHR_WIN_HAVE_OR)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
+ETHR_NATMC_FUNC__(or_retold_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
return ETHR_ILCKD__(Or)(&var->value, mask);
}
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
- ETHR_AINT_T__ new,
- ETHR_AINT_T__ old)
-{
- return ETHR_ILCKD__(CompareExchange)(&var->value, new, old);
-}
+#endif
+#if defined(ETHR_WIN_HAVE_XCHG)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
+ETHR_NATMC_FUNC__(xchg_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
{
return ETHR_ILCKD__(Exchange)(&var->value, new);
}
-/*
- * Atomic ops with at least specified barriers.
- */
+#endif
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_AINT_T__ val = var->value;
- ETHR_COMPILER_BARRIER;
- return val;
+#if defined(ETHR_WIN_HAVE_CMPXCHG)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
#else
- return ETHR_ILCKD_ACQ__(ExchangeAdd)(&var->value, (ETHR_AINT_T__) 0);
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1
#endif
-}
static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
+ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
{
- return ETHR_ILCKD_ACQ__(Increment)(&var->value);
+ return ETHR_ILCKD__(CompareExchange)(&var->value, new, old);
}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
-{
-#if ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
- ETHR_COMPILER_BARRIER;
- var->value = i;
-#else
- (void) ETHR_ILCKD_REL__(Exchange)(&var->value, i);
#endif
-}
-static ETHR_INLINE void
-ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
-{
- (void) ETHR_ILCKD_REL__(Decrement)(&var->value);
-}
+#if defined(ETHR_WIN_HAVE_CMPXCHG_ACQ)
-static ETHR_INLINE ETHR_AINT_T__
-ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
-{
- return ETHR_ILCKD_REL__(Decrement)(&var->value);
-}
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB 1
+#endif
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
@@ -391,6 +435,16 @@ ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
return ETHR_ILCKD_ACQ__(CompareExchange)(&var->value, new, old);
}
+#endif
+
+#if defined(ETHR_WIN_HAVE_CMPXCHG_REL)
+
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB 1
+#else
+# define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB 1
+#endif
+
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
ETHR_AINT_T__ new,
@@ -399,6 +453,8 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
return ETHR_ILCKD_REL__(CompareExchange)(&var->value, new, old);
}
+#endif
+
#endif /* ETHR_TRY_INLINE_FUNCS */
#undef ETHR_ILCKD__
@@ -408,8 +464,17 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
#undef ETHR_ATMC_T__
#undef ETHR_AINT_T__
#undef ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
-#undef ETHR_READ_ACQB_AND_SET_RELB_COMPILER_BARRIER_ONLY__
-
-#endif /* _MSC_VER */
+#undef ETHR_WIN_HAVE_CMPXCHG
+#undef ETHR_WIN_HAVE_DEC
+#undef ETHR_WIN_HAVE_INC
+#undef ETHR_WIN_HAVE_XCHG_ADD
+#undef ETHR_WIN_HAVE_XCHG
+#undef ETHR_WIN_HAVE_AND
+#undef ETHR_WIN_HAVE_OR
+#undef ETHR_WIN_HAVE_XCHG_ADD_ACQ
+#undef ETHR_WIN_HAVE_INC_ACQ
+#undef ETHR_WIN_HAVE_DEC_REL
+#undef ETHR_WIN_HAVE_CMPXCHG_ACQ
+#undef ETHR_WIN_HAVE_CMPXCHG_REL
#endif /* ETHR_INCLUDE_ATOMIC_IMPL__ */
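The structural change here mirrors the other platforms: instead of one blanket ETHR_HAVE_NATIVE_ATOMICS flag, every operation/barrier combination the compiler actually provides announces itself through an ETHR_HAVE_ETHR_NATIVE_ATOMIC{32,64}_<OP>[_<BARRIER>] define, and the generated ethr_atomics layer fills the gaps from whatever is present. For instance, a missing add_return could be derived from cmpxchg_mb roughly as below; a sketch, not the generated code, and it assumes the plain read above is available:

static ETHR_INLINE ethr_sint32_t
add_return_mb_sketch(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
    ethr_sint32_t exp, act = ethr_native_atomic32_read(var);
    do {
        exp = act;
        act = ethr_native_atomic32_cmpxchg_mb(var, exp + incr, exp);
    } while (act != exp);
    return exp + incr;
}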
diff --git a/erts/include/internal/win/ethr_dw_atomic.h b/erts/include/internal/win/ethr_dw_atomic.h
new file mode 100644
index 0000000000..a3e7ffc3aa
--- /dev/null
+++ b/erts/include/internal/win/ethr_dw_atomic.h
@@ -0,0 +1,154 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Native double word atomics for windows
+ * Author: Rickard Green
+ */
+
+#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+#ifndef ETHR_WIN_DW_ATOMIC_H__
+# define ETHR_WIN_DW_ATOMIC_H__
+# if ((ETHR_SIZEOF_PTR == 4 \
+ && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64)) \
+ || (ETHR_SIZEOF_PTR == 8 \
+ && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE128)))
+# define ETHR_INCLUDE_DW_ATOMIC_IMPL__
+# endif
+#endif
+
+#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__
+
+# if ETHR_SIZEOF_PTR == 4
+# define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
+# else
+# define ETHR_HAVE_NATIVE_DW_ATOMIC
+# endif
+# define ETHR_NATIVE_DW_ATOMIC_IMPL "windows-interlocked"
+
+# if defined(_M_IX86) || defined(_M_AMD64)
+/*
+ * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it will be used
+ * at runtime in order to determine if native or fallback implementation
+ * should be used.
+ */
+# define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \
+ ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__
+# endif
+
+# include <intrin.h>
+# if ETHR_SIZEOF_PTR == 4
+# pragma intrinsic(_InterlockedCompareExchange64)
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
+# define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
+# else
+# pragma intrinsic(_InterlockedCompareExchange128)
+# define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
+# endif
+
+typedef volatile __int64 * ethr_native_dw_ptr_t;
+
+/*
+ * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
+ * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
+ * not common, and at least some glibc malloc implementations
+ * align to only 4 bytes in 32-bit mode.
+ *
+ * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
+ * aligned memory in 32-bit mode. A malloc implementation that does
+ * not adhere to these alignment requirements is seriously broken,
+ * and we won't bother trying to work around it.
+ *
+ * Since memory alignment may be off by one word we need to align at
+ * runtime. We, therefore, need an extra word allocated.
+ */
+#define ETHR_DW_NATMC_MEM__(VAR) \
+ (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
+typedef union {
+#ifdef ETHR_NATIVE_SU_DW_SINT_T
+ volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
+#endif
+ volatile ethr_sint_t sint[3];
+ volatile char c[ETHR_SIZEOF_PTR*3];
+} ethr_native_dw_atomic_t;
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+#ifdef ETHR_DEBUG
+# define ETHR_DW_DBG_ALIGNED__(PTR) \
+ ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
+#else
+# define ETHR_DW_DBG_ALIGNED__(PTR)
+#endif
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR
+
+static ETHR_INLINE ethr_sint_t *
+ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
+{
+ ethr_sint_t *p = (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return p;
+}
+
+#if ETHR_SIZEOF_PTR == 4
+
+#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
+
+static ETHR_INLINE ethr_sint64_t
+ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ethr_sint64_t new,
+ ethr_sint64_t exp)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return (ethr_sint64_t) _InterlockedCompareExchange64(p, new, exp);
+}
+
+#elif ETHR_SIZEOF_PTR == 8
+
+#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
+
+#ifdef ETHR_BIGENDIAN
+# define ETHR_WIN_LOW_WORD__ 1
+# define ETHR_WIN_HIGH_WORD__ 0
+#else
+# define ETHR_WIN_LOW_WORD__ 0
+# define ETHR_WIN_HIGH_WORD__ 1
+#endif
+
+static ETHR_INLINE int
+ethr_native_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
+ ethr_sint_t *new,
+ ethr_sint_t *xchg)
+{
+ ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
+ ETHR_DW_DBG_ALIGNED__(p);
+ return (int) _InterlockedCompareExchange128(p,
+ new[ETHR_WIN_HIGH_WORD__],
+ new[ETHR_WIN_LOW_WORD__],
+ xchg);
+}
+
+#endif
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#endif /* ETHR_INCLUDE_DW_ATOMIC_IMPL__ */
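The ETHR_DW_NATMC_MEM__ trick deserves spelling out: malloc is assumed to provide half the required alignment (8 of 16 bytes in 64-bit mode), so the base address modulo 16 is either 0 or 8, and adding that remainder back onto the address always lands on a 16-byte boundary inside the three-word union. A standalone sketch of the same computation, assuming 64-bit mode:

#include <stdint.h>

static inline volatile int64_t *
dw_mem_sketch(volatile char *c) /* base of an 8-byte aligned 3-word block */
{
    uintptr_t off = ((uintptr_t) &c[0]) & 0xf; /* 0 or 8 */
    return (volatile int64_t *) &c[off];       /* 16-byte aligned */
}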
diff --git a/erts/include/internal/win/ethr_event.h b/erts/include/internal/win/ethr_event.h
index 598816b2c6..6363174a74 100644
--- a/erts/include/internal/win/ethr_event.h
+++ b/erts/include/internal/win/ethr_event.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2009-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2009-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -21,12 +21,12 @@
* Author: Rickard Green
*/
-#define ETHR_EVENT_OFF_WAITER__ ((long) -1)
-#define ETHR_EVENT_OFF__ ((long) 1)
-#define ETHR_EVENT_ON__ ((long) 0)
+#define ETHR_EVENT_OFF_WAITER__ ((ethr_sint32_t) -1)
+#define ETHR_EVENT_OFF__ ((ethr_sint32_t) 1)
+#define ETHR_EVENT_ON__ ((ethr_sint32_t) 0)
typedef struct {
- volatile long state;
+ ethr_atomic32_t state;
HANDLE handle;
} ethr_event;
@@ -38,7 +38,7 @@ static ETHR_INLINE void
ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
{
     /* _InterlockedExchange() implies a full memory barrier, which is important */
- long state = _InterlockedExchange(&e->state, ETHR_EVENT_ON__);
+ ethr_sint32_t state = ethr_atomic32_xchg_wb(&e->state, ETHR_EVENT_ON__);
if (state == ETHR_EVENT_OFF_WAITER__) {
if (!SetEvent(e->handle))
ETHR_FATAL_ERROR__(ethr_win_get_errno__());
@@ -48,8 +48,8 @@ ETHR_INLINE_FUNC_NAME_(ethr_event_set)(ethr_event *e)
static ETHR_INLINE void
ETHR_INLINE_FUNC_NAME_(ethr_event_reset)(ethr_event *e)
{
- /* _InterlockedExchange() imply a full memory barrier which is important */
- InterlockedExchange(&e->state, ETHR_EVENT_OFF__);
+ ethr_atomic32_set(&e->state, ETHR_EVENT_OFF__);
+ ETHR_MEMORY_BARRIER;
}
#endif
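With the three states ON = 0, OFF = 1, and OFF_WAITER = -1, the set side stays cheap: the full-barrier exchange to ON touches the kernel object only when a waiter has actually parked. The waiter side (not shown in this hunk) would advertise itself by moving OFF to OFF_WAITER before blocking. A simplified sketch, assuming the ethr_atomic32_cmpxchg(var, new, expected) API from ethr_atomics.h; the real code must also loop to handle spurious wakeups:

static void
event_wait_sketch(ethr_event *e)
{
    ethr_sint32_t state = ethr_atomic32_cmpxchg(&e->state,
                                                ETHR_EVENT_OFF_WAITER__,
                                                ETHR_EVENT_OFF__);
    /* ON: the event was set, return at once; otherwise a waiter is
     * now registered and it is safe to block on the kernel event */
    if (state != ETHR_EVENT_ON__)
        WaitForSingleObject(e->handle, INFINITE);
}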
diff --git a/erts/include/internal/win/ethr_membar.h b/erts/include/internal/win/ethr_membar.h
new file mode 100644
index 0000000000..8237660b2c
--- /dev/null
+++ b/erts/include/internal/win/ethr_membar.h
@@ -0,0 +1,145 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2011. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Description: Memory barriers for Windows
+ * Author: Rickard Green
+ */
+
+#if (!defined(ETHR_WIN_MEMBAR_H__) \
+ && (defined(_MSC_VER) && _MSC_VER >= 1400) \
+ && (defined(_M_AMD64) \
+ || defined(_M_IA64) \
+ || defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE)))
+#define ETHR_WIN_MEMBAR_H__
+
+#define ETHR_LoadLoad (1 << 0)
+#define ETHR_LoadStore (1 << 1)
+#define ETHR_StoreLoad (1 << 2)
+#define ETHR_StoreStore (1 << 3)
+
+#include <intrin.h>
+#undef ETHR_COMPILER_BARRIER
+#define ETHR_COMPILER_BARRIER _ReadWriteBarrier()
+#pragma intrinsic(_ReadWriteBarrier)
+
+#pragma intrinsic(_InterlockedCompareExchange)
+
+#define ETHR_MB_USING_INTERLOCKED__ \
+do { \
+ volatile long x___ = 0; \
+ (void) _InterlockedCompareExchange(&x___, 2, 1); \
+} while (0)
+
+#if defined(_M_IA64)
+
+#define ETHR_MEMBAR(B) __mf()
+
+#elif defined(_M_AMD64) || defined(_M_IX86)
+
+#include <emmintrin.h>
+#include <mmintrin.h>
+
+#if ETHR_SIZEOF_PTR == 4
+# define ETHR_NO_SSE2_MB__ ETHR_MB_USING_INTERLOCKED__
+#endif
+#pragma intrinsic(_mm_mfence)
+#pragma intrinsic(_mm_sfence)
+#pragma intrinsic(_mm_lfence)
+
+static __forceinline void
+ethr_cfence__(void)
+{
+ _ReadWriteBarrier();
+}
+
+static __forceinline void
+ethr_mfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MB__;
+ else
+#endif
+ _mm_mfence();
+}
+
+static __forceinline void
+ethr_sfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MB__;
+ else
+#endif
+ _mm_sfence();
+}
+
+static __forceinline void
+ethr_lfence__(void)
+{
+#if ETHR_SIZEOF_PTR == 4
+ if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
+ ETHR_NO_SSE2_MB__;
+ else
+#endif
+ _mm_lfence();
+}
+
+#define ETHR_X86_OUT_OF_ORDER_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore, \
+ ethr_sfence__(), \
+ ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad, \
+ ethr_lfence__(), \
+ ethr_mfence__()))
+
+#ifdef ETHR_X86_OUT_OF_ORDER
+
+#define ETHR_MEMBAR(B) \
+ ETHR_X86_OUT_OF_ORDER_MEMBAR((B))
+
+#else /* !ETHR_X86_OUT_OF_ORDER (the default) */
+
+/*
+ * We assume that only stores before loads may be reordered. That is,
+ * we assume that *no* instructions like these are used:
+ * - CLFLUSH,
+ * - streaming stores executed with non-temporal move,
+ * - string operations, or
+ * - other instructions which aren't LoadLoad, LoadStore, and StoreStore
+ * ordered by themselves
+ * If such instructions are used, either insert memory barriers
+ * using ETHR_X86_OUT_OF_ORDER_MEMBAR() at appropriate places, or
+ * define ETHR_X86_OUT_OF_ORDER. For more info see Intel 64 and IA-32
+ * Architectures Software Developer's Manual; Vol 3A; Chapter 8.2.2.
+ */
+
+#define ETHR_MEMBAR(B) \
+ ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_mfence__(), ethr_cfence__())
+
+#endif
+
+#else /* No knowledge about platform; use interlocked fallback */
+
+#define ETHR_MEMBAR(B) ETHR_MB_USING_INTERLOCKED__
+#define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_MB_USING_INTERLOCKED__
+
+#endif
+
+#endif /* ETHR_WIN_MEMBAR_H__ */
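Under the default x86/amd64 model above, only barrier requests that include ETHR_StoreLoad cost a real fence (mfence, or the interlocked fallback when SSE2 is absent); every other combination compiles to _ReadWriteBarrier() alone, constraining only the compiler. An illustrative sketch:

static __forceinline void
store_relb_sketch(volatile long *p, long v)
{
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore); /* compiler barrier only */
    *p = v;
}

static __forceinline void
store_mb_sketch(volatile long *p, long v)
{
    *p = v;
    ETHR_MEMBAR(ETHR_StoreLoad); /* real fence: ethr_mfence__() */
}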
diff --git a/erts/include/internal/win/ethread.h b/erts/include/internal/win/ethread.h
index c01b17cf14..8be35e810e 100644
--- a/erts/include/internal/win/ethread.h
+++ b/erts/include/internal/win/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2010. All Rights Reserved.
+ * Copyright Ericsson AB 2010-2011. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -25,11 +25,13 @@
#ifndef ETHREAD_WIN_H__
#define ETHREAD_WIN_H__
+#include "ethr_membar.h"
#define ETHR_ATOMIC_WANT_32BIT_IMPL__
#include "ethr_atomic.h"
#if ETHR_SIZEOF_PTR == 8
# define ETHR_ATOMIC_WANT_64BIT_IMPL__
# include "ethr_atomic.h"
#endif
+#include "ethr_dw_atomic.h"
#endif