/*
 * --------------- DO NOT EDIT THIS FILE! ---------------
 * This file was automatically generated by the
 * $ERL_TOP/erts/lib_src/utils/make_atomics_api script.
 * If you need to make changes, edit the script and
 * regenerate this file.
 * --------------- DO NOT EDIT THIS FILE! ---------------
 */

/*
 * %CopyrightBegin%
 *
 * Copyright Ericsson AB 2011-2012. All Rights Reserved.
 *
 * The contents of this file are subject to the Erlang Public License,
 * Version 1.1, (the "License"); you may not use this file except in
 * compliance with the License. You should have received a copy of the
 * Erlang Public License along with this software. If not, it can be
 * retrieved online at http://www.erlang.org/.
 *
 * Software distributed under the License is distributed on an "AS IS"
 * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
 * the License for the specific language governing rights and limitations
 * under the License.
 *
 * %CopyrightEnd%
 */

/*
 * Description: The ethread atomics API
 * Author: Rickard Green
 */

/*
 * This file maps native atomic implementations to ethread
 * API atomics. If no native atomic implementation
 * is available, a less efficient fallback is used instead.
 * The API consists of 32-bit size, word size (pointer size),
 * and double word size atomics.
 *
 * The following atomic operations are implemented for
 * 32-bit size and word size atomics:
 * - cmpxchg
 * - xchg
 * - set
 * - init
 * - add_read
 * - read
 * - inc_read
 * - dec_read
 * - add
 * - inc
 * - dec
 * - read_band
 * - read_bor
 *
 * The following atomic operations are implemented for
 * double word size atomics:
 * - cmpxchg
 * - set
 * - read
 * - init
 *
 * Apart from a function implementing the atomic operation
 * with unspecified memory barrier semantics, there are
 * functions implementing each operation with the following
 * implied memory barrier semantics:
 * - mb   - Full memory barrier. Orders both loads and
 *          stores, before and after the atomic operation.
 *          No load or store is allowed to be reordered
 *          over the atomic operation.
 * - relb - Release barrier. Orders both loads and
 *          stores appearing *before* the atomic
 *          operation. These are not allowed to be
 *          reordered past the atomic operation.
 * - acqb - Acquire barrier. Orders both loads and stores
 *          appearing *after* the atomic operation. These
 *          are not allowed to be reordered before the
 *          atomic operation.
 * - wb   - Write barrier. Orders *only* stores. These are
 *          not allowed to be reordered over the barrier.
 *          The store in the atomic operation is ordered
 *          *after* the barrier.
 * - rb   - Read barrier. Orders *only* loads. These are
 *          not allowed to be reordered over the barrier.
 *          The load in the atomic operation is ordered
 *          *before* the barrier.
 * - ddrb - Data dependency read barrier. Orders *only*
 *          loads according to data dependency across the
 *          barrier. The load in the atomic operation is
 *          ordered before the barrier.
 *
 * We implement all of these operation/barrier
 * combinations, regardless of whether they are useful
 * or not (some of them are useless).
 *
 * Double word size atomic functions have the following
 * form:
 *   ethr_dw_atomic_<OP>[_<BARRIER>]
 *
 * Word size atomic functions have the following
 * form:
 *   ethr_atomic_<OP>[_<BARRIER>]
 *
 * 32-bit size atomic functions have the following
 * form:
 *   ethr_atomic32_<OP>[_<BARRIER>]
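 *
 * A few illustrative examples of names produced by this scheme
 * (the variable names used here are hypothetical):
 *
 *   ethr_atomic32_inc_read(&a32);                32-bit size, no barrier
 *   ethr_atomic_cmpxchg_acqb(&a, new_val, exp);  word size, acquire barrier
 *   ethr_dw_atomic_read_rb(&dwa, &val);          double word size, read barrier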
 *
 * In addition to the operation/barrier functions
 * described above, 'addr' functions are implemented
 * which return the actual memory address used by the
 * atomic variable. The 'addr' functions have no barrier
 * versions.
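 *
 * For example (with a hypothetical word size atomic 'a'), the
 * underlying storage can be obtained as:
 *
 *   ethr_sint_t *p = ethr_atomic_addr(&a);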
 *
 * The native atomic implementation does not need to
 * implement all operation/barrier combinations.
 * Functions that have no native implementation will be
 * constructed from existing native functionality. These
 * functions will perform the wanted operation and will
 * produce sufficient memory barriers, but may
 * in some cases be less efficient than pure native
 * versions.
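 *
 * As an example of such a construction (a sketch only; 'native_cmpxchg'
 * is a hypothetical placeholder for whatever native cmpxchg is
 * available), a cmpxchg_mb variant built from a plain native cmpxchg
 * would roughly look like:
 *
 *   ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
 *   res = native_cmpxchg(var, val, old_val);
 *   ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);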
 *
 * When we create ethread API operation/barrier functions by
 * adding barriers before and after native operations it is
 * assumed that:
 * - A native read operation begins and ends with a load.
 * - A native set operation begins and ends with a store.
 * - An init operation begins with either a load or a store,
 *   and ends with either a load or a store.
 * - All other operations begin with a load, and end with
 *   either a load or a store.
 *
 * This is the minimum functionality that a native
 * implementation needs to provide:
 *
 * - Functions that need to be implemented:
 *
 *   - ethr_native_[dw_|su_dw_]atomic[BITS]_addr
 *   - ethr_native_[dw_|su_dw_]atomic[BITS]_cmpxchg[_<BARRIER>]
 *     (at least one cmpxchg variant, with or without a barrier suffix)
 *
 * - Macros that need to be defined:
 *
 *   A macro informing about the presence of the native
 *   implementation:
 *
 *   - ETHR_HAVE_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]
 *
 *   A macro naming (a string constant) the implementation:
 *
 *   - ETHR_NATIVE_[DW_]ATOMIC[BITS]_IMPL
 *
 *   Each implemented native atomic function has to
 *   be accompanied by a defined macro of the following
 *   form, informing about its presence:
 *
 *   - ETHR_HAVE_ETHR_NATIVE_[DW_|SU_DW_]ATOMIC[BITS]_<OP>[_<BARRIER>]
 *
 *   A (sparc-v9 style) membar macro:
 *
 *   - ETHR_MEMBAR(B)
 *
 *     which takes a combination of the following macros
 *     OR:ed together (using |):
 *
 *     - ETHR_LoadLoad
 *     - ETHR_LoadStore
 *     - ETHR_StoreLoad
 *     - ETHR_StoreStore
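 *
 *     For example, a full memory barrier is expressed as (as is
 *     also done in the fallback code below):
 *
 *       ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);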
 *
 */


#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#define ETHR_TRY_INLINE_FUNCS
#define ETHR_INLINE_DW_ATMC_FUNC_NAME_(X) X ## __
#define ETHR_INLINE_ATMC_FUNC_NAME_(X) X ## __
#define ETHR_INLINE_ATMC32_FUNC_NAME_(X) X ## __
#define ETHR_ATOMIC_IMPL__

#include "ethread.h"
#include "ethr_internal.h"

#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \
     || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS))
/*
 * Spinlock based fallback for atomics used in absence of a native
 * implementation.
 */

#define ETHR_ATMC_FLLBK_ADDR_BITS 10
#define ETHR_ATMC_FLLBK_ADDR_SHIFT 6

typedef struct {
    union {
	ethr_spinlock_t lck;
	char buf[ETHR_CACHE_LINE_ALIGN_SIZE(sizeof(ethr_spinlock_t))];
    } u;
} ethr_atomic_protection_t;

extern ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS];

#define ETHR_ATOMIC_PTR2LCK__(PTR) \
(&ethr_atomic_protection__[((((ethr_uint_t) (PTR)) >> ETHR_ATMC_FLLBK_ADDR_SHIFT) \
			   & ((1 << ETHR_ATMC_FLLBK_ADDR_BITS) - 1))].u.lck)


#define ETHR_ATOMIC_OP_FALLBACK_IMPL__(AP, EXPS)			\
do {									\
    ethr_spinlock_t *slp__ = ETHR_ATOMIC_PTR2LCK__((AP));		\
    ethr_spin_lock(slp__);						\
    { EXPS; }								\
    ethr_spin_unlock(slp__);						\
} while (0)
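
/*
 * Illustration (not compiled): the fallback macro is used further
 * down in this form, evaluating the expressions while holding the
 * spinlock that guards the variable's address, e.g. for xchg:
 *
 *   ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
 */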

ethr_atomic_protection_t ethr_atomic_protection__[1 << ETHR_ATMC_FLLBK_ADDR_BITS];

#endif


#if defined(ETHR_AMC_FALLBACK__)

/*
 * Fallback for large sized (word and/or double word size) atomics using
 * an "Atomic Modification Counter" based on smaller sized native atomics.
 *
 * We use a 63-bit modification counter and a one bit exclusive flag.
 * If 32-bit native atomics are used, we need two 32-bit native atomics.
 * The exclusive flag is the least significant bit, or if multiple atomics
 * are used, the least significant bit of the least significant atomic.
 *
 * When using the AMC fallback the following holds:
 * - Reads of the same atomic variable can be done in parallel.
 * - Uncontended reads don't cause any cache line invalidations,
 *   since no modifications are done.
 * - Assuming that the AMC atomic(s) and the integer(s) containing the
 *   value of the implemented atomic reside in the same cache line,
 *   modifications will only cause invalidation of one cache line.
 *
 * When using the spinlock based fallback none of the above holds;
 * the spinlock based fallback does, however, consume less memory.
 */
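
/*
 * Sketch of the state encoding assumed by the code below: bit 0 of
 * atomic[0] is the exclusive flag, and the remaining bits (extended
 * by atomic[1] when two native atomics are used) hold the
 * modification counter:
 *
 *   exclusive flag       = state0 & 1
 *   modification counter = state0 >> 1   (possibly extended by state1)
 *
 * A reader retries, or falls back to taking the exclusive flag,
 * whenever the flag is set or the counter changes during the read.
 */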

#  if ETHR_AMC_NO_ATMCS__ != 1  && ETHR_AMC_NO_ATMCS__ != 2
#    error "Not supported"
#  endif
#  define ETHR_AMC_MAX_TRY_READ__ 10
#  ifdef ETHR_DEBUG
#    define ETHR_DBG_CHK_EXCL_STATE(ASP, S) \
do { \
    ETHR_AMC_SINT_T__ act = ETHR_AMC_ATMC_FUNC__(read)(&(ASP)->atomic[0]); \
    ETHR_ASSERT(act == (S) + 1); \
    ETHR_ASSERT(act & 1); \
} while (0)
#  else
#    define ETHR_DBG_CHK_EXCL_STATE(ASP, S)
#  endif

static ETHR_INLINE void
amc_init(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
{
    avar[0] = val[0];
    if (dw)
	avar[1] = val[1];
#if ETHR_AMC_NO_ATMCS__ == 2
    ETHR_AMC_ATMC_FUNC__(init)(&amc->atomic[1], 0);
#endif
    ETHR_AMC_ATMC_FUNC__(init_wb)(&amc->atomic[0], 0);
}

static ETHR_INLINE ETHR_AMC_SINT_T__
amc_set_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ prev_state0)
{
    ETHR_AMC_SINT_T__ state0 = prev_state0;
    /* Set exclusive flag. */
    while (1) {
	ETHR_AMC_SINT_T__ act_state0, new_state0;
	while (state0 & 1) { /* Wait until exclusive bit has been cleared */
	    ETHR_SPIN_BODY;
	    state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
	}
	/* Try to set exclusive bit */
	new_state0 = state0 + 1;
	act_state0 = ETHR_AMC_ATMC_FUNC__(cmpxchg_acqb)(&amc->atomic[0],
							new_state0,
							state0);
	if (state0 == act_state0)
	    return state0; /* old state0 */
	state0 = act_state0;
    }
}

static ETHR_INLINE void
amc_inc_mc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0)
{
    ETHR_AMC_SINT_T__ state0 = old_state0;

    /* Increment modification counter and reset exclusive flag. */

    ETHR_DBG_CHK_EXCL_STATE(amc, state0);

    state0 += 2;

    ETHR_ASSERT((state0 & 1) == 0);

#if ETHR_AMC_NO_ATMCS__ == 2
    if (state0 == 0) {
	/*
	 * state0 wrapped, so we need to increment state1. There is no need
	 * for an atomic inc op, since this is always done while the
	 * exclusive flag is held.
	 */
	ETHR_AMC_SINT_T__ state1 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]);
	state1++;
	ETHR_AMC_ATMC_FUNC__(set)(&amc->atomic[1], state1);
    }
#endif
    ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], state0);
}

static ETHR_INLINE void
amc_unset_excl(ethr_amc_t *amc, ETHR_AMC_SINT_T__ old_state0)
{
    ETHR_DBG_CHK_EXCL_STATE(amc, old_state0);
    /*
     * Reset exclusive flag, but leave modification counter unchanged,
     * i.e., restore state to what it was before setting exclusive
     * flag.
     */
    ETHR_AMC_ATMC_FUNC__(set_relb)(&amc->atomic[0], old_state0);
}

static ETHR_INLINE void
amc_set(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
{
    ETHR_AMC_SINT_T__ state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);

    state0 = amc_set_excl(amc, state0);

    avar[0] = val[0];
    if (dw)
	avar[1] = val[1];

    amc_inc_mc_unset_excl(amc, state0);
}

static ETHR_INLINE int
amc_try_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar,
	     ethr_sint_t *val, ETHR_AMC_SINT_T__ *state0p)
{
    /* *state0p should contain last read value if aborting */
    ETHR_AMC_SINT_T__ old_state0;
#if ETHR_AMC_NO_ATMCS__ == 2
    ETHR_AMC_SINT_T__ state1;
    int abrt;
#endif

    *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]);
    if ((*state0p) & 1) 
	return 0; /* exclusive flag set; abort */
#if ETHR_AMC_NO_ATMCS__ == 2
    state1 = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[1]);
#else
    ETHR_COMPILER_BARRIER;
#endif

    val[0] = avar[0];
    if (dw)
	val[1] = avar[1];

    ETHR_READ_MEMORY_BARRIER;

    /*
     * Abort if the state has changed (i.e., either the exclusive
     * flag is set, or the modification counter has changed).
     */
    old_state0 = *state0p;
#if ETHR_AMC_NO_ATMCS__ == 2
    *state0p = ETHR_AMC_ATMC_FUNC__(read_rb)(&amc->atomic[0]);
    abrt = (old_state0 != *state0p);
    abrt |= (state1 != ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[1]));
    return abrt == 0;
#else
    *state0p = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
    return old_state0 == *state0p;
#endif
}

static ETHR_INLINE void
amc_read(ethr_amc_t *amc, int dw, ethr_sint_t *avar, ethr_sint_t *val)
{
    ETHR_AMC_SINT_T__ state0;
    int i;

#if ETHR_AMC_MAX_TRY_READ__ == 0
    state0 = ETHR_AMC_ATMC_FUNC__(read)(&amc->atomic[0]);
#else
    for (i = 0; i < ETHR_AMC_MAX_TRY_READ__; i++) {
	if (amc_try_read(amc, dw, avar, val, &state0))
	    return; /* read success */
	ETHR_SPIN_BODY;
    }
#endif

    state0 = amc_set_excl(amc, state0);

    val[0] = avar[0];
    if (dw)
	val[1] = avar[1];

    amc_unset_excl(amc, state0);
}

static ETHR_INLINE int
amc_cmpxchg(ethr_amc_t *amc, int dw, ethr_sint_t *avar,
	    ethr_sint_t *new, ethr_sint_t *xchg)
{
    ethr_sint_t val[2];
    ETHR_AMC_SINT_T__ state0;

    if (amc_try_read(amc, dw, avar, val, &state0)) {
	if (val[0] != xchg[0] || (dw && val[1] != xchg[1])) {
	    xchg[0] = val[0];
	    if (dw)
		xchg[1] = val[1];
	    return 0; /* failed */
	}
	/* Operation will succeed if not interrupted */
    }

    state0 = amc_set_excl(amc, state0);

    if (xchg[0] != avar[0] || (dw && xchg[1] != avar[1])) {
	xchg[0] = avar[0];
	if (dw)
	    xchg[1] = avar[1];

	ETHR_DBG_CHK_EXCL_STATE(amc, state0);

	amc_unset_excl(amc, state0);
	return 0; /* failed */
    }

    avar[0] = new[0];
    if (dw)
	avar[1] = new[1];

    amc_inc_mc_unset_excl(amc, state0);
    return 1;
}


#define ETHR_AMC_MODIFICATION_OPS__(AMC, OPS)			\
do {								\
    ETHR_AMC_SINT_T__ state0__;					\
    state0__ = ETHR_AMC_ATMC_FUNC__(read)(&(AMC)->atomic[0]);	\
    state0__ = amc_set_excl((AMC), state0__);			\
    { OPS; }							\
    amc_inc_mc_unset_excl((AMC), state0__);			\
} while (0)
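
/*
 * Illustration (not compiled): the macro is used further down in
 * the word size fallbacks, e.g. for xchg, executing the expressions
 * while the exclusive flag is held and bumping the modification
 * counter when it is released:
 *
 *   ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
 */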

#endif /* amc fallback */


int
ethr_init_atomics(void)
{
#if (!defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS) \
     || !defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS))
    int i;
    for (i = 0; i < (1 << ETHR_ATMC_FLLBK_ADDR_BITS); i++) {
	int res = ethr_spinlock_init(&ethr_atomic_protection__[i].u.lck);
	if (res != 0)
	    return res;
    }
#endif
    return 0;
}


/* ---------- Double word size atomic implementation ---------- */



/*
 * Double word atomics need runtime test.
 */

int ethr_have_native_dw_atomic(void)
{
    return ethr_have_native_dw_atomic__();
}
     

/* --- addr() --- */

ethr_sint_t *ETHR_DW_ATOMIC_FUNC__(addr)(ethr_dw_atomic_t *var)
{
    ethr_sint_t *res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    res =  ethr_dw_atomic_addr__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    res = (ethr_sint_t *) ((&var->fallback))->sint;
#else
    res = (ethr_sint_t *) (&var->fallback);
#endif
    return res;
}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
ethr_sint_t *ethr_dw_atomic_addr(ethr_dw_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_dw_atomic_addr__(var);
}
#endif


/* -- cmpxchg() -- */


int ETHR_DW_ATOMIC_FUNC__(cmpxchg)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    int res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    res =  ethr_dw_atomic_cmpxchg__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), 
	{
	    res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
	    if (res) {
		(&var->fallback)->sint[0] = val->sint[0];
		(&var->fallback)->sint[1] = val->sint[1];
	    }
	    else {
		old_val->sint[0] = (&var->fallback)->sint[0];
		old_val->sint[1] = (&var->fallback)->sint[1];
	    }
	});
#endif
    return res;
}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
int ethr_dw_atomic_cmpxchg(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_dw_atomic_cmpxchg__(var, val, old_val);
}
#endif

int ethr_dw_atomic_cmpxchg_ddrb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_dw_atomic_cmpxchg_ddrb__(var, val, old_val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_dw_atomic_cmpxchg(var, val, old_val);
#else
    return ethr_dw_atomic_cmpxchg_rb(var, val, old_val);
#endif
}

int ETHR_DW_ATOMIC_FUNC__(cmpxchg_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    int res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    res =  ethr_dw_atomic_cmpxchg_rb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), 
	{
	    res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
	    if (res) {
		(&var->fallback)->sint[0] = val->sint[0];
		(&var->fallback)->sint[1] = val->sint[1];
	    }
	    else {
		old_val->sint[0] = (&var->fallback)->sint[0];
		old_val->sint[1] = (&var->fallback)->sint[1];
	    }
	});
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
int ethr_dw_atomic_cmpxchg_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_dw_atomic_cmpxchg_rb__(var, val, old_val);
}
#endif

int ETHR_DW_ATOMIC_FUNC__(cmpxchg_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    int res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    res =  ethr_dw_atomic_cmpxchg_wb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), 
	{
	    res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
	    if (res) {
		(&var->fallback)->sint[0] = val->sint[0];
		(&var->fallback)->sint[1] = val->sint[1];
	    }
	    else {
		old_val->sint[0] = (&var->fallback)->sint[0];
		old_val->sint[1] = (&var->fallback)->sint[1];
	    }
	});
#endif
    return res;
}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
int ethr_dw_atomic_cmpxchg_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_dw_atomic_cmpxchg_wb__(var, val, old_val);
}
#endif

int ETHR_DW_ATOMIC_FUNC__(cmpxchg_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    int res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    res =  ethr_dw_atomic_cmpxchg_acqb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), 
	{
	    res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
	    if (res) {
		(&var->fallback)->sint[0] = val->sint[0];
		(&var->fallback)->sint[1] = val->sint[1];
	    }
	    else {
		old_val->sint[0] = (&var->fallback)->sint[0];
		old_val->sint[1] = (&var->fallback)->sint[1];
	    }
	});
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
int ethr_dw_atomic_cmpxchg_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_dw_atomic_cmpxchg_acqb__(var, val, old_val);
}
#endif

int ETHR_DW_ATOMIC_FUNC__(cmpxchg_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    int res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    res =  ethr_dw_atomic_cmpxchg_relb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), 
	{
	    res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
	    if (res) {
		(&var->fallback)->sint[0] = val->sint[0];
		(&var->fallback)->sint[1] = val->sint[1];
	    }
	    else {
		old_val->sint[0] = (&var->fallback)->sint[0];
		old_val->sint[1] = (&var->fallback)->sint[1];
	    }
	});
#endif
    return res;
}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
int ethr_dw_atomic_cmpxchg_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_dw_atomic_cmpxchg_relb__(var, val, old_val);
}
#endif

int ETHR_DW_ATOMIC_FUNC__(cmpxchg_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    int res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    res =  ethr_dw_atomic_cmpxchg_mb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    res = amc_cmpxchg(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint, old_val->sint);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), 
	{
	    res = ((&var->fallback)->sint[0] == old_val->sint[0] && (&var->fallback)->sint[1] == old_val->sint[1]);
	    if (res) {
		(&var->fallback)->sint[0] = val->sint[0];
		(&var->fallback)->sint[1] = val->sint[1];
	    }
	    else {
		old_val->sint[0] = (&var->fallback)->sint[0];
		old_val->sint[1] = (&var->fallback)->sint[1];
	    }
	});
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
int ethr_dw_atomic_cmpxchg_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val, ethr_dw_sint_t *old_val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_dw_atomic_cmpxchg_mb__(var, val, old_val);
}
#endif


/* -- set() -- */


void ETHR_DW_ATOMIC_FUNC__(set)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_set__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_set(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_set__(var, val);
}
#endif

void ethr_dw_atomic_set_ddrb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_dw_atomic_set_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_dw_atomic_set(var, val);
#else
    ethr_dw_atomic_set_rb(var, val);
#endif
}

void ETHR_DW_ATOMIC_FUNC__(set_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_set_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_set_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_set_rb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(set_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_set_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_set_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_set_wb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(set_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_set_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_set_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_set_acqb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(set_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_set_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_set_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_set_relb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(set_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_set_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    amc_set(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_set_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_set_mb__(var, val);
}
#endif


/* -- read() -- */


void ETHR_DW_ATOMIC_FUNC__(read)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_read__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_read(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_read__(var, val);
}
#endif

void ethr_dw_atomic_read_ddrb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_dw_atomic_read_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_dw_atomic_read(var, val);
#else
    ethr_dw_atomic_read_rb(var, val);
#endif
}

void ETHR_DW_ATOMIC_FUNC__(read_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_read_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_read_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_read_rb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(read_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_read_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_read_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_read_wb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(read_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_read_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_LoadStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_read_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_read_acqb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(read_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_read_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_read_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_read_relb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(read_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_read_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    amc_read(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_LoadStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), val->sint[0] = (&var->fallback)->sint[0]; val->sint[1] = (&var->fallback)->sint[1]);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_read_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_dw_atomic_read_mb__(var, val);
}
#endif


/* -- init() -- */


void ETHR_DW_ATOMIC_FUNC__(init)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_init__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_init(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
    ethr_dw_atomic_init__(var, val);
}
#endif

void ethr_dw_atomic_init_ddrb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_dw_atomic_init_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_dw_atomic_init(var, val);
#else
    ethr_dw_atomic_init_rb(var, val);
#endif
}

void ETHR_DW_ATOMIC_FUNC__(init_rb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_init_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_init_rb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
    ethr_dw_atomic_init_rb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(init_wb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_init_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_init_wb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
    ethr_dw_atomic_init_wb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(init_acqb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_init_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_init_acqb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
    ethr_dw_atomic_init_acqb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(init_relb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_init_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_init_relb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
    ethr_dw_atomic_init_relb__(var, val);
}
#endif

void ETHR_DW_ATOMIC_FUNC__(init_mb)(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_DOUBLE_WORD_SZ_NATIVE_ATOMIC_OPS) && !defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    ethr_dw_atomic_init_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    amc_init(&(&var->fallback)->amc, 1, (&var->fallback)->sint, val->sint);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__((&var->fallback), (&var->fallback)->sint[0] = val->sint[0]; (&var->fallback)->sint[1] = val->sint[1]);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

#ifdef ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__
void ethr_dw_atomic_init_mb(ethr_dw_atomic_t *var, ethr_dw_sint_t *val)
{
    ETHR_ASSERT(var);
    ethr_dw_atomic_init_mb__(var, val);
}
#endif


/* ---------- Word size atomic implementation ---------- */




/* --- addr() --- */

ethr_sint_t *ethr_atomic_addr(ethr_atomic_t *var)
{
    ethr_sint_t *res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_addr__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    res = (ethr_sint_t *) (var)->sint;
#else
    res = (ethr_sint_t *) var;
#endif
    return res;
}


/* -- cmpxchg() -- */


ethr_sint_t ethr_atomic_cmpxchg(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_cmpxchg__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    res = old_val;
    (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_cmpxchg_ddrb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_cmpxchg_ddrb__(var, val, old_val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_cmpxchg(var, val, old_val);
#else
    return ethr_atomic_cmpxchg_rb(var, val, old_val);
#endif
}

ethr_sint_t ethr_atomic_cmpxchg_rb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_cmpxchg_rb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    res = old_val;
    (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_cmpxchg_wb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_cmpxchg_wb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    res = old_val;
    (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_cmpxchg_acqb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_cmpxchg_acqb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    res = old_val;
    (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_cmpxchg_relb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_cmpxchg_relb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    res = old_val;
    (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_cmpxchg_mb(ethr_atomic_t *var, ethr_sint_t val, ethr_sint_t old_val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_cmpxchg_mb__(var, val, old_val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    res = old_val;
    (void) amc_cmpxchg(&var->amc, 0, &var->sint, &val, &res);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- xchg() -- */


ethr_sint_t ethr_atomic_xchg(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_xchg__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_xchg_ddrb(ethr_atomic_t *var, ethr_sint_t val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_xchg_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_xchg(var, val);
#else
    return ethr_atomic_xchg_rb(var, val);
#endif
}

ethr_sint_t ethr_atomic_xchg_rb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_xchg_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_xchg_wb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_xchg_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_xchg_acqb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_xchg_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_xchg_relb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_xchg_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_xchg_mb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_xchg_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- set() -- */


void ethr_atomic_set(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_set__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_set(&var->amc, 0, &var->sint, &val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic_set_ddrb(ethr_atomic_t *var, ethr_sint_t val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_set_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic_set(var, val);
#else
    ethr_atomic_set_rb(var, val);
#endif
}

void ethr_atomic_set_rb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_set_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_set(&var->amc, 0, &var->sint, &val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic_set_wb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_set_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    amc_set(&var->amc, 0, &var->sint, &val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic_set_acqb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_set_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_set(&var->amc, 0, &var->sint, &val);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic_set_relb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_set_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    amc_set(&var->amc, 0, &var->sint, &val);
#else
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic_set_mb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_set_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    amc_set(&var->amc, 0, &var->sint, &val);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- init() -- */


void ethr_atomic_init(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_init__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_init(&var->amc, 0, &var->sint, &val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic_init_ddrb(ethr_atomic_t *var, ethr_sint_t val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_init_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic_init(var, val);
#else
    ethr_atomic_init_rb(var, val);
#endif
}

void ethr_atomic_init_rb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_init_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_init(&var->amc, 0, &var->sint, &val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic_init_wb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_init_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    amc_init(&var->amc, 0, &var->sint, &val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic_init_acqb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_init_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_init(&var->amc, 0, &var->sint, &val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic_init_relb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_init_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    amc_init(&var->amc, 0, &var->sint, &val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic_init_mb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_init_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    amc_init(&var->amc, 0, &var->sint, &val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- add_read() -- */


ethr_sint_t ethr_atomic_add_read(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_add_read__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
#endif
    return res;
}

ethr_sint_t ethr_atomic_add_read_ddrb(ethr_atomic_t *var, ethr_sint_t val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_add_read_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_add_read(var, val);
#else
    return ethr_atomic_add_read_rb(var, val);
#endif
}

ethr_sint_t ethr_atomic_add_read_rb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_add_read_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_add_read_wb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_add_read_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
#endif
    return res;
}

ethr_sint_t ethr_atomic_add_read_acqb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_add_read_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_add_read_relb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_add_read_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
#endif
    return res;
}

ethr_sint_t ethr_atomic_add_read_mb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_add_read_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val; res = var->sint);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- read() -- */


ethr_sint_t ethr_atomic_read(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_read(&var->amc, 0, &var->sint, &res);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_ddrb(ethr_atomic_t *var)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_read_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_read(var);
#else
    return ethr_atomic_read_rb(var);
#endif
}

ethr_sint_t ethr_atomic_read_rb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_rb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_read(&var->amc, 0, &var->sint, &res);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_wb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_wb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    amc_read(&var->amc, 0, &var->sint, &res);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_acqb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_acqb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    amc_read(&var->amc, 0, &var->sint, &res);
    ETHR_MEMBAR(ETHR_LoadStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_relb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_relb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    amc_read(&var->amc, 0, &var->sint, &res);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_mb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_mb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    amc_read(&var->amc, 0, &var->sint, &res);
    ETHR_MEMBAR(ETHR_LoadStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#endif
    return res;
}


/* -- inc_read() -- */
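/*
 * ethr_atomic_inc_read() atomically increments the variable and
 * returns the resulting (incremented) value.
 */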


ethr_sint_t ethr_atomic_inc_read(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_inc_read__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_inc_read_ddrb(ethr_atomic_t *var)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_inc_read_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_inc_read(var);
#else
    return ethr_atomic_inc_read_rb(var);
#endif
}

ethr_sint_t ethr_atomic_inc_read_rb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_inc_read_rb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_inc_read_wb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_inc_read_wb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_inc_read_acqb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_inc_read_acqb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_inc_read_relb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_inc_read_relb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_inc_read_mb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_inc_read_mb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = ++(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- dec_read() -- */
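/*
 * ethr_atomic_dec_read() atomically decrements the variable and
 * returns the resulting (decremented) value.
 */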


ethr_sint_t ethr_atomic_dec_read(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_dec_read__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_dec_read_ddrb(ethr_atomic_t *var)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_dec_read_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_dec_read(var);
#else
    return ethr_atomic_dec_read_rb(var);
#endif
}

ethr_sint_t ethr_atomic_dec_read_rb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_dec_read_rb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_dec_read_wb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_dec_read_wb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_dec_read_acqb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_dec_read_acqb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_dec_read_relb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_dec_read_relb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
#endif
    return res;
}

ethr_sint_t ethr_atomic_dec_read_mb(ethr_atomic_t *var)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_dec_read_mb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = --(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- add() -- */


void ethr_atomic_add(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_add__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
#endif

}

void ethr_atomic_add_ddrb(ethr_atomic_t *var, ethr_sint_t val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_add_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic_add(var, val);
#else
    ethr_atomic_add_rb(var, val);
#endif
}

void ethr_atomic_add_rb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_add_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic_add_wb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_add_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
#endif

}

void ethr_atomic_add_acqb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_add_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic_add_relb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_add_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
#endif

}

void ethr_atomic_add_mb(ethr_atomic_t *var, ethr_sint_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_add_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, var->sint += val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- inc() -- */


void ethr_atomic_inc(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_inc__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
#endif

}

void ethr_atomic_inc_ddrb(ethr_atomic_t *var)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_inc_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic_inc(var);
#else
    ethr_atomic_inc_rb(var);
#endif
}

void ethr_atomic_inc_rb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_inc_rb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic_inc_wb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_inc_wb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
#endif

}

void ethr_atomic_inc_acqb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_inc_acqb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic_inc_relb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_inc_relb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
#endif

}

void ethr_atomic_inc_mb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_inc_mb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, ++(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- dec() -- */


void ethr_atomic_dec(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_dec__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
#endif

}

void ethr_atomic_dec_ddrb(ethr_atomic_t *var)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_dec_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic_dec(var);
#else
    ethr_atomic_dec_rb(var);
#endif
}

void ethr_atomic_dec_rb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_dec_rb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic_dec_wb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_dec_wb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
#endif

}

void ethr_atomic_dec_acqb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_dec_acqb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic_dec_relb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_dec_relb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
#endif

}

void ethr_atomic_dec_mb(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    ethr_atomic_dec_mb__(var);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, --(var->sint));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- read_band() -- */
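/*
 * ethr_atomic_read_band() atomically reads the variable, bitwise
 * ANDs 'val' into it, and returns the value read *before* the
 * modification.
 */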


ethr_sint_t ethr_atomic_read_band(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_band__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_band_ddrb(ethr_atomic_t *var, ethr_sint_t val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_read_band_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_read_band(var, val);
#else
    return ethr_atomic_read_band_rb(var, val);
#endif
}

ethr_sint_t ethr_atomic_read_band_rb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_band_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_band_wb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_band_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_band_acqb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_band_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_band_relb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_band_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_band_mb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_band_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint &= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- read_bor() -- */
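/*
 * ethr_atomic_read_bor() atomically reads the variable, bitwise
 * ORs 'val' into it, and returns the value read *before* the
 * modification.
 */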


ethr_sint_t ethr_atomic_read_bor(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_bor__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_bor_ddrb(ethr_atomic_t *var, ethr_sint_t val)
{
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    return ethr_atomic_read_bor_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic_read_bor(var, val);
#else
    return ethr_atomic_read_bor_rb(var, val);
#endif
}

ethr_sint_t ethr_atomic_read_bor_rb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_bor_rb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_bor_wb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_bor_wb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_bor_acqb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_bor_acqb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_bor_relb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_bor_relb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
#endif
    return res;
}

ethr_sint_t ethr_atomic_read_bor_mb(ethr_atomic_t *var, ethr_sint_t val)
{
    ethr_sint_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_WORD_SZ_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic_read_bor_mb__(var, val);
#elif defined(ETHR_AMC_FALLBACK__)
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_AMC_MODIFICATION_OPS__(&var->amc, res = var->sint; var->sint |= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* ---------- 32-bit atomic implementation ---------- */
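/*
 * The 32-bit operations below mirror the word size operations
 * above, but operate on ethr_atomic32_t/ethr_sint32_t and, when no
 * native 32-bit implementation is available
 * (ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS), fall back directly to
 * ETHR_ATOMIC_OP_FALLBACK_IMPL__; no AMC fallback is used here.
 */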




/* --- addr() --- */
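/*
 * ethr_atomic32_addr() returns a pointer to the 32-bit value
 * storage of the atomic variable; with a native implementation the
 * native addr function is used, otherwise the variable itself is
 * the storage.
 */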

ethr_sint32_t *ethr_atomic32_addr(ethr_atomic32_t *var)
{
    ethr_sint32_t *res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_addr__(var);
#else
    res = (ethr_sint32_t *) var;
#endif
    return res;
}


/* -- cmpxchg() -- */
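/*
 * ethr_atomic32_cmpxchg() atomically compares the variable with
 * 'old_val' and, if they are equal, replaces it with 'val'. The
 * value held before the operation is returned, so a return value
 * equal to 'old_val' indicates that the exchange took place.
 */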


ethr_sint32_t ethr_atomic32_cmpxchg(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_cmpxchg__(var, val, old_val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_cmpxchg_ddrb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_cmpxchg_ddrb__(var, val, old_val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_cmpxchg(var, val, old_val);
#else
    return ethr_atomic32_cmpxchg_rb(var, val, old_val);
#endif
}

ethr_sint32_t ethr_atomic32_cmpxchg_rb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_cmpxchg_rb__(var, val, old_val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_cmpxchg_wb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_cmpxchg_wb__(var, val, old_val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_cmpxchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_cmpxchg_acqb__(var, val, old_val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_cmpxchg_relb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_cmpxchg_relb__(var, val, old_val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_cmpxchg_mb(ethr_atomic32_t *var, ethr_sint32_t val, ethr_sint32_t old_val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_cmpxchg_mb__(var, val, old_val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = (*var == old_val ? (*var = val, old_val) : *var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- xchg() -- */
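/*
 * ethr_atomic32_xchg() atomically replaces the value of the
 * variable with 'val' and returns the previous value.
 */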


ethr_sint32_t ethr_atomic32_xchg(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_xchg__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_xchg_ddrb(ethr_atomic32_t *var, ethr_sint32_t val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_xchg_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_xchg(var, val);
#else
    return ethr_atomic32_xchg_rb(var, val);
#endif
}

ethr_sint32_t ethr_atomic32_xchg_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_xchg_rb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_xchg_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_xchg_wb__(var, val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_xchg_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_xchg_acqb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_xchg_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_xchg_relb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_xchg_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_xchg_mb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- set() -- */


void ethr_atomic32_set(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_set__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic32_set_ddrb(ethr_atomic32_t *var, ethr_sint32_t val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_set_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic32_set(var, val);
#else
    ethr_atomic32_set_rb(var, val);
#endif
}

void ethr_atomic32_set_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_set_rb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic32_set_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_set_wb__(var, val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic32_set_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_set_acqb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic32_set_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_set_relb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic32_set_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_set_mb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- init() -- */
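/*
 * ethr_atomic32_init() sets the initial value of the variable.
 * Unlike the other operations it only asserts that 'var' is
 * non-NULL; it does not assert that the ethread library has been
 * initialized.
 */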


void ethr_atomic32_init(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_init__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic32_init_ddrb(ethr_atomic32_t *var, ethr_sint32_t val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_init_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic32_init(var, val);
#else
    ethr_atomic32_init_rb(var, val);
#endif
}

void ethr_atomic32_init_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_init_rb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic32_init_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_init_wb__(var, val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic32_init_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_init_acqb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic32_init_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_init_relb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
#endif

}

void ethr_atomic32_init_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_init_mb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var = val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- add_read() -- */


ethr_sint32_t ethr_atomic32_add_read(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_add_read__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_add_read_ddrb(ethr_atomic32_t *var, ethr_sint32_t val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_add_read_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_add_read(var, val);
#else
    return ethr_atomic32_add_read_rb(var, val);
#endif
}

ethr_sint32_t ethr_atomic32_add_read_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_add_read_rb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_add_read_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_add_read_wb__(var, val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_add_read_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_add_read_acqb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_add_read_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_add_read_relb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_add_read_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_add_read_mb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val; res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- read() -- */


ethr_sint32_t ethr_atomic32_read(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_ddrb(ethr_atomic32_t *var)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_read_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_read(var);
#else
    return ethr_atomic32_read_rb(var);
#endif
}

ethr_sint32_t ethr_atomic32_read_rb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_rb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_wb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_wb__(var);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_acqb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_acqb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_relb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_relb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_mb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_mb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
#endif
    return res;
}


/* -- inc_read() -- */


ethr_sint32_t ethr_atomic32_inc_read(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_inc_read__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_inc_read_ddrb(ethr_atomic32_t *var)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_inc_read_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_inc_read(var);
#else
    return ethr_atomic32_inc_read_rb(var);
#endif
}

ethr_sint32_t ethr_atomic32_inc_read_rb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_inc_read_rb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_inc_read_wb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_inc_read_wb__(var);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_inc_read_acqb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_inc_read_acqb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_inc_read_relb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_inc_read_relb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_inc_read_mb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_inc_read_mb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- dec_read() -- */


ethr_sint32_t ethr_atomic32_dec_read(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_dec_read__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_dec_read_ddrb(ethr_atomic32_t *var)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_dec_read_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_dec_read(var);
#else
    return ethr_atomic32_dec_read_rb(var);
#endif
}

ethr_sint32_t ethr_atomic32_dec_read_rb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_dec_read_rb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_dec_read_wb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_dec_read_wb__(var);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_dec_read_acqb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_dec_read_acqb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_dec_read_relb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_dec_read_relb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_dec_read_mb(ethr_atomic32_t *var)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_dec_read_mb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}


/* -- add() -- */


void ethr_atomic32_add(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_add__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
#endif

}

void ethr_atomic32_add_ddrb(ethr_atomic32_t *var, ethr_sint32_t val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_add_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic32_add(var, val);
#else
    ethr_atomic32_add_rb(var, val);
#endif
}

void ethr_atomic32_add_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_add_rb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic32_add_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_add_wb__(var, val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
#endif

}

void ethr_atomic32_add_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_add_acqb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic32_add_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_add_relb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
#endif

}

void ethr_atomic32_add_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_add_mb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, *var += val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- inc() -- */


void ethr_atomic32_inc(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_inc__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
#endif

}

void ethr_atomic32_inc_ddrb(ethr_atomic32_t *var)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_inc_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic32_inc(var);
#else
    ethr_atomic32_inc_rb(var);
#endif
}

void ethr_atomic32_inc_rb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_inc_rb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic32_inc_wb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_inc_wb__(var);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
#endif

}

void ethr_atomic32_inc_acqb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_inc_acqb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic32_inc_relb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_inc_relb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
#endif

}

void ethr_atomic32_inc_mb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_inc_mb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, ++(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- dec() -- */


void ethr_atomic32_dec(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_dec__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
#endif

}

void ethr_atomic32_dec_ddrb(ethr_atomic32_t *var)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_dec_ddrb__(var);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    ethr_atomic32_dec(var);
#else
    ethr_atomic32_dec_rb(var);
#endif
}

void ethr_atomic32_dec_rb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_dec_rb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif

}

void ethr_atomic32_dec_wb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_dec_wb__(var);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
#endif

}

void ethr_atomic32_dec_acqb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_dec_acqb__(var);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}

void ethr_atomic32_dec_relb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_dec_relb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
#endif

}

void ethr_atomic32_dec_mb(ethr_atomic32_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    ethr_atomic32_dec_mb__(var);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, --(*var));
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif

}


/* -- read_band() -- */


ethr_sint32_t ethr_atomic32_read_band(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_band__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_band_ddrb(ethr_atomic32_t *var, ethr_sint32_t val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_read_band_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_read_band(var, val);
#else
    return ethr_atomic32_read_band_rb(var, val);
#endif
}

ethr_sint32_t ethr_atomic32_read_band_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_band_rb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_band_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_band_wb__(var, val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_band_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_band_acqb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_band_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_band_relb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_band_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_band_mb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var &= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}
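
/*
 * Illustrative usage sketch (not part of the generated API): read_band
 * atomically ANDs a mask into the variable and returns the *previous*
 * value, so a caller can clear flag bits and still see which of them
 * were set. The flag constant and variable below are hypothetical.
 *
 *     #define MY_DIRTY_FLAG ((ethr_sint32_t) 1)
 *
 *     ethr_atomic32_t flags; // initialized elsewhere
 *     ethr_sint32_t old = ethr_atomic32_read_band(&flags, ~MY_DIRTY_FLAG);
 *     if (old & MY_DIRTY_FLAG) {
 *         // the flag was set before we cleared it
 *     }
 */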


/* -- read_bor() -- */


ethr_sint32_t ethr_atomic32_read_bor(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_bor__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_bor_ddrb(ethr_atomic32_t *var, ethr_sint32_t val)
{
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    return ethr_atomic32_read_bor_ddrb__(var, val);
#elif defined(ETHR_ORDERED_READ_DEPEND)
    return ethr_atomic32_read_bor(var, val);
#else
    return ethr_atomic32_read_bor_rb(var, val);
#endif
}

ethr_sint32_t ethr_atomic32_read_bor_rb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_bor_rb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
    ETHR_MEMBAR(ETHR_LoadLoad);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_bor_wb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_bor_wb__(var, val);
#else
    ETHR_MEMBAR(ETHR_StoreStore);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_bor_acqb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_bor_acqb__(var, val);
#else
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_bor_relb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_bor_relb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
#endif
    return res;
}

ethr_sint32_t ethr_atomic32_read_bor_mb(ethr_atomic32_t *var, ethr_sint32_t val)
{
    ethr_sint32_t res;
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
#if defined(ETHR_HAVE_32BIT_NATIVE_ATOMIC_OPS)
    res =  ethr_atomic32_read_bor_mb__(var, val);
#else
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_StoreLoad);
    ETHR_ATOMIC_OP_FALLBACK_IMPL__(var, res = *var; *var |= val);
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore|ETHR_StoreLoad|ETHR_StoreStore);
#endif
    return res;
}
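
/*
 * Illustrative usage sketch (not part of the generated API): read_bor
 * atomically ORs bits into the variable and returns the previous value,
 * so a caller can both set a flag and learn whether it was already set.
 * The flag constant and variable below are hypothetical.
 *
 *     #define MY_INIT_DONE ((ethr_sint32_t) 1)
 *
 *     ethr_atomic32_t state; // initialized elsewhere
 *     ethr_sint32_t old = ethr_atomic32_read_bor_mb(&state, MY_INIT_DONE);
 *     if (!(old & MY_INIT_DONE)) {
 *         // this thread observed the bit as previously clear
 *     }
 */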



/* --------- Info functions --------- */
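
/*
 * The info functions below each return a NULL-terminated array with the
 * names of the native atomic operations that were detected at compile
 * time for the corresponding atomic type. When the native double word
 * implementation is disabled at runtime, the double word variants
 * return the empty list (zero_ops) instead.
 */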

#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
static char *zero_ops[] = {NULL};
#endif


static char *native_su_dw_atomic_ops[] = {
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG
    "cmpxchg",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RB
    "cmpxchg_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_WB
    "cmpxchg_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_ACQB
    "cmpxchg_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_RELB
    "cmpxchg_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB
    "cmpxchg_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET
    "set",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RB
    "set_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_WB
    "set_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_ACQB
    "set_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RELB
    "set_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_MB
    "set_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ
    "read",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RB
    "read_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_WB
    "read_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_ACQB
    "read_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_RELB
    "read_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_MB
    "read_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT
    "init",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RB
    "init_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_WB
    "init_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_ACQB
    "init_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_RELB
    "init_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_INIT_MB
    "init_mb",
#endif
    NULL
};

char **
ethr_native_su_dw_atomic_ops(void)
{
#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    if (!ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
	return &zero_ops[0];
#endif
    return &native_su_dw_atomic_ops[0];
}


static char *native_dw_atomic_ops[] = {
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG
    "cmpxchg",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RB
    "cmpxchg_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_WB
    "cmpxchg_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_ACQB
    "cmpxchg_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_RELB
    "cmpxchg_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB
    "cmpxchg_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET
    "set",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RB
    "set_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_WB
    "set_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_ACQB
    "set_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_RELB
    "set_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_SET_MB
    "set_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ
    "read",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RB
    "read_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_WB
    "read_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_ACQB
    "read_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_RELB
    "read_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_READ_MB
    "read_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT
    "init",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RB
    "init_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_WB
    "init_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_ACQB
    "init_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_RELB
    "init_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_INIT_MB
    "init_mb",
#endif
    NULL
};

char **
ethr_native_dw_atomic_ops(void)
{
#if defined(ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
    if (!ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__)
	return &zero_ops[0];
#endif
    return &native_dw_atomic_ops[0];
}


static char *native_atomic64_ops[] = {
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG
    "cmpxchg",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RB
    "cmpxchg_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_WB
    "cmpxchg_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_ACQB
    "cmpxchg_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_RELB
    "cmpxchg_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB
    "cmpxchg_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG
    "xchg",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RB
    "xchg_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_WB
    "xchg_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_ACQB
    "xchg_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_RELB
    "xchg_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB
    "xchg_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET
    "set",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RB
    "set_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_WB
    "set_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_ACQB
    "set_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB
    "set_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB
    "set_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT
    "init",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RB
    "init_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_WB
    "init_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_ACQB
    "init_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_RELB
    "init_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INIT_MB
    "init_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN
    "add_return",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RB
    "add_return_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_WB
    "add_return_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_ACQB
    "add_return_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_RELB
    "add_return_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB
    "add_return_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ
    "read",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RB
    "read_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_WB
    "read_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_ACQB
    "read_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_RELB
    "read_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ_MB
    "read_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN
    "inc_return",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RB
    "inc_return_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_WB
    "inc_return_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_ACQB
    "inc_return_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_RELB
    "inc_return_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RETURN_MB
    "inc_return_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN
    "dec_return",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RB
    "dec_return_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_WB
    "dec_return_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_ACQB
    "dec_return_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_RELB
    "dec_return_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RETURN_MB
    "dec_return_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD
    "add",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RB
    "add_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_WB
    "add_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_ACQB
    "add_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RELB
    "add_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB
    "add_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC
    "inc",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RB
    "inc_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_WB
    "inc_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_ACQB
    "inc_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_RELB
    "inc_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB
    "inc_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC
    "dec",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RB
    "dec_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_WB
    "dec_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_ACQB
    "dec_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_RELB
    "dec_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB
    "dec_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD
    "and_retold",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RB
    "and_retold_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_WB
    "and_retold_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_ACQB
    "and_retold_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_RELB
    "and_retold_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_AND_RETOLD_MB
    "and_retold_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD
    "or_retold",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RB
    "or_retold_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_WB
    "or_retold_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_ACQB
    "or_retold_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_RELB
    "or_retold_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC64_OR_RETOLD_MB
    "or_retold_mb",
#endif
    NULL
};

char **
ethr_native_atomic64_ops(void)
{
    return &native_atomic64_ops[0];
}


static char *native_atomic32_ops[] = {
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG
    "cmpxchg",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RB
    "cmpxchg_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_WB
    "cmpxchg_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_ACQB
    "cmpxchg_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_RELB
    "cmpxchg_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB
    "cmpxchg_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG
    "xchg",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RB
    "xchg_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_WB
    "xchg_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_ACQB
    "xchg_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_RELB
    "xchg_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB
    "xchg_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET
    "set",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RB
    "set_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_WB
    "set_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_ACQB
    "set_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB
    "set_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB
    "set_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT
    "init",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RB
    "init_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_WB
    "init_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_ACQB
    "init_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_RELB
    "init_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INIT_MB
    "init_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN
    "add_return",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RB
    "add_return_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_WB
    "add_return_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_ACQB
    "add_return_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_RELB
    "add_return_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB
    "add_return_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ
    "read",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RB
    "read_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_WB
    "read_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_ACQB
    "read_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_RELB
    "read_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ_MB
    "read_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN
    "inc_return",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RB
    "inc_return_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_WB
    "inc_return_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_ACQB
    "inc_return_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_RELB
    "inc_return_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RETURN_MB
    "inc_return_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN
    "dec_return",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RB
    "dec_return_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_WB
    "dec_return_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_ACQB
    "dec_return_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_RELB
    "dec_return_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RETURN_MB
    "dec_return_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD
    "add",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RB
    "add_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_WB
    "add_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_ACQB
    "add_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RELB
    "add_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB
    "add_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC
    "inc",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RB
    "inc_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_WB
    "inc_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_ACQB
    "inc_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_RELB
    "inc_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB
    "inc_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC
    "dec",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RB
    "dec_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_WB
    "dec_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_ACQB
    "dec_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_RELB
    "dec_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB
    "dec_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD
    "and_retold",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RB
    "and_retold_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_WB
    "and_retold_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_ACQB
    "and_retold_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_RELB
    "and_retold_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_AND_RETOLD_MB
    "and_retold_mb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD
    "or_retold",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RB
    "or_retold_rb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_WB
    "or_retold_wb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_ACQB
    "or_retold_acqb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_RELB
    "or_retold_relb",
#endif
#ifdef ETHR_HAVE_ETHR_NATIVE_ATOMIC32_OR_RETOLD_MB
    "or_retold_mb",
#endif
    NULL
};

char **
ethr_native_atomic32_ops(void)
{
    return &native_atomic32_ops[0];
}
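
/*
 * Illustrative usage sketch (not part of the generated API): callers can
 * walk any of the NULL-terminated arrays returned above, e.g. to report
 * which native atomic operations this build provides.
 *
 *     #include <stdio.h>
 *
 *     char **ops = ethr_native_atomic32_ops();
 *     int i;
 *     for (i = 0; ops[i]; i++)
 *         printf("native atomic32 op: %s\n", ops[i]);
 */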