| author | Rickard Green <[email protected]> | 2010-12-10 23:06:27 +0100 |
| --- | --- | --- |
| committer | Rickard Green <[email protected]> | 2010-12-15 14:59:16 +0100 |
| commit | 076f6b11e76fd6c7978950749410dc5bccd02b7f (patch) | |
| tree | 22f906d74d18bf7d9a409ba06f93a03b3e6104c2 /erts/include/internal/libatomic_ops/ethr_atomic.h | |
| parent | 4708dddcac0932beaa0f396baf0de11e956a05d2 (diff) | |
Add support for 32-bit atomics
Diffstat (limited to 'erts/include/internal/libatomic_ops/ethr_atomic.h')
-rw-r--r-- | erts/include/internal/libatomic_ops/ethr_atomic.h | 201 |
1 file changed, 127 insertions(+), 74 deletions(-)
diff --git a/erts/include/internal/libatomic_ops/ethr_atomic.h b/erts/include/internal/libatomic_ops/ethr_atomic.h
index f244e9c626..d56693dbf8 100644
--- a/erts/include/internal/libatomic_ops/ethr_atomic.h
+++ b/erts/include/internal/libatomic_ops/ethr_atomic.h
@@ -53,9 +53,32 @@
 #error The AO_t type is too small
 #endif
 
+#if ETHR_SIZEOF_AO_T == 4
+#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+#define ETHR_ATMC_T__ ethr_native_atomic32_t
+#define ETHR_AINT_T__ ethr_sint32_t
+#define ETHR_AINT_SUFFIX__ "l"
+#elif ETHR_SIZEOF_AO_T == 8
+#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+#define ETHR_ATMC_T__ ethr_native_atomic64_t
+#define ETHR_AINT_T__ ethr_sint64_t
+#define ETHR_AINT_SUFFIX__ "q"
+#else
+#error "Unsupported integer size"
+#endif
+
+#if ETHR_SIZEOF_AO_T == 8
+typedef union {
+    volatile AO_t counter;
+    ethr_sint32_t sint32[2];
+} ETHR_ATMC_T__;
+#else
 typedef struct {
     volatile AO_t counter;
-} ethr_native_atomic_t;
+} ETHR_ATMC_T__;
+#endif
 
 #define ETHR_MEMORY_BARRIER AO_nop_full()
 #ifdef AO_HAVE_nop_write
@@ -71,125 +94,151 @@ typedef struct {
 #ifdef AO_NO_DD_ORDERING
 # define ETHR_READ_DEPEND_MEMORY_BARRIER ETHR_READ_MEMORY_BARRIER
 #else
-# define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("":::"memory")
+# define ETHR_READ_DEPEND_MEMORY_BARRIER AO_compiler_barrier()
 #endif
 
 #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
 
+static ETHR_INLINE ETHR_AINT_T__ *
+ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
+{
+    return (ETHR_AINT_T__ *) &var->counter;
+}
+
+#if ETHR_SIZEOF_AO_T == 8
+/*
+ * We also need to provide an ethr_native_atomic32_addr(), since
+ * this 64-bit implementation will be used implementing 32-bit
+ * native atomics.
+ */
+
+static ETHR_INLINE ethr_sint32_t *
+ethr_native_atomic32_addr(ETHR_ATMC_T__ *var)
+{
+    ETHR_ASSERT(((void *) &var->sint32[0]) == ((void *) &var->counter));
+#ifdef ETHR_BIGENDIAN
+    return &var->sint32[1];
+#else
+    return &var->sint32[0];
+#endif
+}
+
+#endif /* ETHR_SIZEOF_AO_T == 8 */
+
 static ETHR_INLINE void
-ethr_native_atomic_set(ethr_native_atomic_t *var, ethr_sint_t value)
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
 {
     AO_store(&var->counter, (AO_t) value);
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_init(ethr_native_atomic_t *var, ethr_sint_t value)
+ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
 {
-    ethr_native_atomic_set(var, value);
+    ETHR_NATMC_FUNC__(set)(var, value);
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_read(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
 {
-    return (ethr_sint_t) AO_load(&var->counter);
+    return (ETHR_AINT_T__) AO_load(&var->counter);
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_add_return(ethr_native_atomic_t *var, ethr_sint_t incr)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
 {
 #ifdef AO_HAVE_fetch_and_add
-    return ((ethr_sint_t) AO_fetch_and_add(&var->counter, (AO_t) incr)) + incr;
+    return ((ETHR_AINT_T__) AO_fetch_and_add(&var->counter, (AO_t) incr)) + incr;
 #else
     while (1) {
         AO_t exp = AO_load(&var->counter);
         AO_t new = exp + (AO_t) incr;
         if (AO_compare_and_swap(&var->counter, exp, new))
-            return (ethr_sint_t) new;
+            return (ETHR_AINT_T__) new;
     }
 #endif
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_add(ethr_native_atomic_t *var, ethr_sint_t incr)
+ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
 {
-    (void) ethr_native_atomic_add_return(var, incr);
+    (void) ETHR_NATMC_FUNC__(add_return)(var, incr);
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_inc_return(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
 {
 #ifdef AO_HAVE_fetch_and_add1
-    return ((ethr_sint_t) AO_fetch_and_add1(&var->counter)) + 1;
+    return ((ETHR_AINT_T__) AO_fetch_and_add1(&var->counter)) + 1;
 #else
-    return ethr_native_atomic_add_return(var, 1);
+    return ETHR_NATMC_FUNC__(add_return)(var, 1);
 #endif
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_inc(ethr_native_atomic_t *var)
+ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
 {
-    (void) ethr_native_atomic_inc_return(var);
+    (void) ETHR_NATMC_FUNC__(inc_return)(var);
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_dec_return(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
 {
 #ifdef AO_HAVE_fetch_and_sub1
-    return ((ethr_sint_t) AO_fetch_and_sub1(&var->counter)) - 1;
+    return ((ETHR_AINT_T__) AO_fetch_and_sub1(&var->counter)) - 1;
 #else
-    return ethr_native_atomic_add_return(var, -1);
+    return ETHR_NATMC_FUNC__(add_return)(var, -1);
 #endif
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_dec(ethr_native_atomic_t *var)
+ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
 {
-    (void) ethr_native_atomic_dec_return(var);
+    (void) ETHR_NATMC_FUNC__(dec_return)(var);
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_and_retold(ethr_native_atomic_t *var, ethr_sint_t mask)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
 {
     while (1) {
         AO_t exp = AO_load(&var->counter);
         AO_t new = exp & ((AO_t) mask);
         if (AO_compare_and_swap(&var->counter, exp, new))
-            return (ethr_sint_t) exp;
+            return (ETHR_AINT_T__) exp;
     }
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_or_retold(ethr_native_atomic_t *var, ethr_sint_t mask)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
 {
     while (1) {
         AO_t exp = AO_load(&var->counter);
         AO_t new = exp | ((AO_t) mask);
         if (AO_compare_and_swap(&var->counter, exp, new))
-            return (ethr_sint_t) exp;
+            return (ETHR_AINT_T__) exp;
     }
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_cmpxchg(ethr_native_atomic_t *var,
-                           ethr_sint_t new,
-                           ethr_sint_t exp)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
+                           ETHR_AINT_T__ new,
+                           ETHR_AINT_T__ exp)
 {
-    ethr_sint_t act;
+    ETHR_AINT_T__ act;
     do {
         if (AO_compare_and_swap(&var->counter, (AO_t) exp, (AO_t) new))
             return exp;
-        act = (ethr_sint_t) AO_load(&var->counter);
+        act = (ETHR_AINT_T__) AO_load(&var->counter);
     } while (act == exp);
     return act;
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_xchg(ethr_native_atomic_t *var, ethr_sint_t new)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
 {
     while (1) {
         AO_t exp = AO_load(&var->counter);
         if (AO_compare_and_swap(&var->counter, exp, (AO_t) new))
-            return (ethr_sint_t) exp;
+            return (ETHR_AINT_T__) exp;
     }
 }
 
@@ -197,101 +246,105 @@ ethr_native_atomic_xchg(ethr_native_atomic_t *var, ethr_sint_t new)
 /*
  * Atomic ops with at least specified barriers.
  */
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_read_acqb(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
 {
 #ifdef AO_HAVE_load_acquire
-    return (ethr_sint_t) AO_load_acquire(&var->counter);
+    return (ETHR_AINT_T__) AO_load_acquire(&var->counter);
 #else
-    ethr_sint_t res = ethr_native_atomic_read(var);
+    ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var);
     ETHR_MEMORY_BARRIER;
     return res;
 #endif
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_inc_return_acqb(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
 {
 #ifdef AO_HAVE_fetch_and_add1_acquire
-    return ((ethr_sint_t) AO_fetch_and_add1_acquire(&var->counter)) + 1;
+    return ((ETHR_AINT_T__) AO_fetch_and_add1_acquire(&var->counter)) + 1;
 #else
-    ethr_sint_t res = ethr_native_atomic_add_return(var, 1);
+    ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(add_return)(var, 1);
     ETHR_MEMORY_BARRIER;
     return res;
 #endif
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_set_relb(ethr_native_atomic_t *var, ethr_sint_t value)
+ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ value)
 {
 #ifdef AO_HAVE_store_release
     AO_store_release(&var->counter, (AO_t) value);
 #else
     ETHR_MEMORY_BARRIER;
-    ethr_native_atomic_set(var, value);
+    ETHR_NATMC_FUNC__(set)(var, value);
 #endif
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_dec_return_relb(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
 {
 #ifdef AO_HAVE_fetch_and_sub1_release
-    return ((ethr_sint_t) AO_fetch_and_sub1_release(&var->counter)) - 1;
+    return ((ETHR_AINT_T__) AO_fetch_and_sub1_release(&var->counter)) - 1;
 #else
     ETHR_MEMORY_BARRIER;
-    return ethr_native_atomic_dec_return(var);
+    return ETHR_NATMC_FUNC__(dec_return)(var);
 #endif
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_dec_relb(ethr_native_atomic_t *var)
+ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
 {
-    (void) ethr_native_atomic_dec_return_relb(var);
+    (void) ETHR_NATMC_FUNC__(dec_return_relb)(var);
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_cmpxchg_acqb(ethr_native_atomic_t *var,
-                                ethr_sint_t new,
-                                ethr_sint_t exp)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
+                                ETHR_AINT_T__ new,
+                                ETHR_AINT_T__ exp)
 {
 #ifdef AO_HAVE_compare_and_swap_acquire
-    ethr_sint_t act;
+    ETHR_AINT_T__ act;
     do {
         if (AO_compare_and_swap_acquire(&var->counter, (AO_t) exp, (AO_t) new))
             return exp;
-        act = (ethr_sint_t) AO_load(&var->counter);
+        act = (ETHR_AINT_T__) AO_load(&var->counter);
     } while (act == exp);
     AO_nop_full();
     return act;
 #else
-    ethr_sint_t act = ethr_native_atomic_cmpxchg(var, new, exp);
+    ETHR_AINT_T__ act = ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
    ETHR_MEMORY_BARRIER;
     return act;
 #endif
 }
 
-static ETHR_INLINE ethr_sint_t
-ethr_native_atomic_cmpxchg_relb(ethr_native_atomic_t *var,
-                                ethr_sint_t new,
-                                ethr_sint_t exp)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
+                                ETHR_AINT_T__ new,
+                                ETHR_AINT_T__ exp)
 {
 #ifdef AO_HAVE_compare_and_swap_release
-    ethr_sint_t act;
+    ETHR_AINT_T__ act;
     do {
         if (AO_compare_and_swap_release(&var->counter, (AO_t) exp, (AO_t) new))
             return exp;
-        act = (ethr_sint_t) AO_load(&var->counter);
+        act = (ETHR_AINT_T__) AO_load(&var->counter);
     } while (act == exp);
     return act;
 #else
     ETHR_MEMORY_BARRIER;
-    return ethr_native_atomic_cmpxchg(var, new, exp);
+    return ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
 #endif
 }
 
-#endif
+#endif /* ETHR_TRY_INLINE_FUNCS */
 
-#endif
+#undef ETHR_NATMC_FUNC__
+#undef ETHR_ATMC_T__
+#undef ETHR_AINT_T__
 
-#endif
+#endif /* !defined(ETHR_HAVE_NATIVE_ATOMICS) && defined(ETHR_HAVE_LIBATOMIC_OPS) */
+
+#endif /* ETHR_LIBATOMIC_OPS_ATOMIC_H__ */
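The most subtle part of the patch is the 64-bit case: when `AO_t` is 8 bytes, the 32-bit native atomic API is layered on top of the 64-bit storage, so `ETHR_ATMC_T__` becomes a union of the `AO_t` counter and two 32-bit halves, and `ethr_native_atomic32_addr()` returns the half that holds the low-order 32 bits, which depends on byte order (`ETHR_BIGENDIAN`). Below is a minimal, self-contained sketch of that aliasing idea; the names `demo_atomic64_t`, `demo_atomic32_addr()` and the `DEMO_BIGENDIAN` macro are hypothetical stand-ins, not part of the patch or the ethread API.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-in for ETHR_ATMC_T__ when ETHR_SIZEOF_AO_T == 8: the 64-bit
 * counter and its two 32-bit halves share the same storage. */
typedef union {
    volatile uint64_t counter;   /* plays the role of the AO_t counter */
    int32_t sint32[2];           /* the two 32-bit halves */
} demo_atomic64_t;

/* Return the address of the half holding the low-order 32 bits,
 * mirroring the ETHR_BIGENDIAN selection in ethr_native_atomic32_addr(). */
static int32_t *
demo_atomic32_addr(demo_atomic64_t *var)
{
#ifdef DEMO_BIGENDIAN
    return &var->sint32[1];      /* big endian: low half is the second word */
#else
    return &var->sint32[0];      /* little endian: low half is the first word */
#endif
}

int main(void)
{
    demo_atomic64_t a;
    a.counter = 0x11223344u;
    /* On a little-endian build this prints 0x11223344. */
    printf("low 32 bits: 0x%x\n", (unsigned) *demo_atomic32_addr(&a));
    return 0;
}
```

Reading one union member through another is the same aliasing the patch relies on; the `ETHR_ASSERT` in the real code additionally checks that `sint32[0]` starts at the same address as `counter`.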