Diffstat (limited to 'erts/include/internal/tile/atomic.h')
-rw-r--r-- | erts/include/internal/tile/atomic.h | 156 |
1 file changed, 115 insertions, 41 deletions
diff --git a/erts/include/internal/tile/atomic.h b/erts/include/internal/tile/atomic.h
index 59a9250e7c..5697afda25 100644
--- a/erts/include/internal/tile/atomic.h
+++ b/erts/include/internal/tile/atomic.h
@@ -1,7 +1,7 @@
 /*
  * %CopyrightBegin%
  *
- * Copyright Ericsson AB 2008-2010. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2011. All Rights Reserved.
  *
  * The contents of this file are subject to the Erlang Public License,
  * Version 1.1, (the "License"); you may not use this file except in
@@ -24,105 +24,179 @@
 #ifndef ETHREAD_TILE_ATOMIC_H
 #define ETHREAD_TILE_ATOMIC_H
 
+#define ETHR_HAVE_NATIVE_ATOMIC32 1
+
 #include <atomic.h>
 
 /* An atomic is an aligned int accessed via locked operations. */
 
 typedef struct {
-    volatile long counter;
-} ethr_native_atomic_t;
+    volatile ethr_sint32_t counter;
+} ethr_native_atomic32_t;
+
+#define ETHR_MEMORY_BARRIER __insn_mf()
 
-#ifdef ETHR_TRY_INLINE_FUNCS
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+static ETHR_INLINE ethr_sint32_t *
+ethr_native_atomic32_addr(ethr_native_atomic32_t *var)
+{
+    return (ethr_sint32_t *) &var->counter;
+}
 
 static ETHR_INLINE void
-ethr_native_atomic_init(ethr_native_atomic_t *var, long i)
+ethr_native_atomic32_init(ethr_native_atomic32_t *var, ethr_sint32_t i)
 {
     var->counter = i;
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_set(ethr_native_atomic_t *var, long i)
+ethr_native_atomic32_set(ethr_native_atomic32_t *var, ethr_sint32_t i)
 {
-    __insn_mf();
     atomic_exchange_acq(&var->counter, i);
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_read(ethr_native_atomic_t *var)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_read(ethr_native_atomic32_t *var)
 {
     return var->counter;
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_add(ethr_native_atomic_t *var, long incr)
+ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr)
 {
-    __insn_mf();
+    ETHR_MEMORY_BARRIER;
     atomic_add(&var->counter, incr);
+    ETHR_MEMORY_BARRIER;
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_inc(ethr_native_atomic_t *var)
+ethr_native_atomic32_inc(ethr_native_atomic32_t *var)
 {
-    __insn_mf();
+    ETHR_MEMORY_BARRIER;
    atomic_increment(&var->counter);
+    ETHR_MEMORY_BARRIER;
 }
 
 static ETHR_INLINE void
-ethr_native_atomic_dec(ethr_native_atomic_t *var)
+ethr_native_atomic32_dec(ethr_native_atomic32_t *var)
 {
-    __insn_mf();
+    ETHR_MEMORY_BARRIER;
     atomic_decrement(&var->counter);
+    ETHR_MEMORY_BARRIER;
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_add_return(ethr_native_atomic_t *var, long incr)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr)
 {
-    __insn_mf();
-    return atomic_exchange_and_add(&var->counter, incr) + incr;
+    ethr_sint32_t res;
+    ETHR_MEMORY_BARRIER;
+    res = atomic_exchange_and_add(&var->counter, incr) + incr;
+    ETHR_MEMORY_BARRIER;
+    return res;
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_inc_return(ethr_native_atomic_t *var)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_inc_return(ethr_native_atomic32_t *var)
 {
-    return ethr_native_atomic_add_return(var, 1);
+    return ethr_native_atomic32_add_return(var, 1);
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_dec_return(ethr_native_atomic_t *var)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
 {
-    return ethr_native_atomic_add_return(var, -1);
+    return ethr_native_atomic32_add_return(var, -1);
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_and_retold(ethr_native_atomic_t *var, long mask)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
 {
-    /* Implement a barrier suitable for a mutex unlock. */
-    __insn_mf();
-    return atomic_and_val(&var->counter, mask);
+    ethr_sint32_t res;
+    ETHR_MEMORY_BARRIER;
+    res = atomic_and_val(&var->counter, mask);
+    ETHR_MEMORY_BARRIER;
+    return res;
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_or_retold(ethr_native_atomic_t *var, long mask)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
 {
-    __insn_mf();
-    return atomic_or_val(&var->counter, mask);
+    ethr_sint32_t res;
+    ETHR_MEMORY_BARRIER;
+    res = atomic_or_val(&var->counter, mask);
+    ETHR_MEMORY_BARRIER;
+    return res;
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_xchg(ethr_native_atomic_t *var, long val)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val)
 {
-    __insn_mf();
+    ETHR_MEMORY_BARRIER;
     return atomic_exchange_acq(&var->counter, val);
 }
 
-static ETHR_INLINE long
-ethr_native_atomic_cmpxchg(ethr_native_atomic_t *var, long new, long expected)
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
+                             ethr_sint32_t new,
+                             ethr_sint32_t expected)
 {
-    /* Implement a barrier suitable for a mutex unlock. */
-    __insn_mf();
+    ETHR_MEMORY_BARRIER;
     return atomic_compare_and_exchange_val_acq(&var->counter, new, expected);
 }
 
+/*
+ * Atomic ops with at least specified barriers.
+ */
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
+{
+    ethr_sint32_t res = ethr_native_atomic32_read(var);
+    ETHR_MEMORY_BARRIER;
+    return res;
+}
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var)
+{
+    return ethr_native_atomic32_inc_return(var);
+}
+
+static ETHR_INLINE void
+ethr_native_atomic32_set_relb(ethr_native_atomic32_t *var, ethr_sint32_t val)
+{
+    ETHR_MEMORY_BARRIER;
+    ethr_native_atomic32_set(var, val);
+}
+
+static ETHR_INLINE void
+ethr_native_atomic32_dec_relb(ethr_native_atomic32_t *var)
+{
+    ethr_native_atomic32_dec(var);
+}
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_dec_return_relb(ethr_native_atomic32_t *var)
+{
+    return ethr_native_atomic32_dec_return(var);
+}
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_cmpxchg_acqb(ethr_native_atomic32_t *var,
+                                  ethr_sint32_t new,
+                                  ethr_sint32_t exp)
+{
+    return ethr_native_atomic32_cmpxchg(var, new, exp);
+}
+
+static ETHR_INLINE ethr_sint32_t
+ethr_native_atomic32_cmpxchg_relb(ethr_native_atomic32_t *var,
+                                  ethr_sint32_t new,
+                                  ethr_sint32_t exp)
+{
+    return ethr_native_atomic32_cmpxchg(var, new, exp);
+}
+
 #endif /* ETHR_TRY_INLINE_FUNCS */
 
 #endif /* ETHREAD_TILE_ATOMIC_H */
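For orientation, the sketch below shows how the ethr_native_atomic32_* operations introduced by this patch could be exercised. It is illustrative only and not part of the change: the "ethread.h" include, the refc variable, and the refc_example() function are assumptions, and it presumes ethr_sint32_t and the inline definitions above are visible (e.g. ETHR_TRY_INLINE_FUNCS is defined).

    /* Illustrative sketch only (not from the patch): exercises the
     * ethr_native_atomic32_* operations defined in the diff above.
     * Assumes the surrounding ethread headers provide ethr_sint32_t
     * and that the inline definitions are visible. */
    #include "ethread.h"            /* assumed umbrella header */

    static ethr_native_atomic32_t refc;

    static void
    refc_example(void)
    {
        ethr_sint32_t old;

        ethr_native_atomic32_init(&refc, 1);    /* plain, unordered store */

        /* Full-barrier increment; returns the new value. */
        (void) ethr_native_atomic32_inc_return(&refc);

        /* Compare-and-swap: returns the previous value, so the swap
         * succeeded only if the return value equals the expected one. */
        old = ethr_native_atomic32_cmpxchg(&refc, 3, 2);
        if (old == 2) {
            /* counter is now 3 */
        }

        /* Release-barrier decrement, e.g. when dropping a reference. */
        ethr_native_atomic32_dec_relb(&refc);
    }

Note that the *_acqb and *_relb variants added at the end of the diff simply delegate to the plain operations; that is sufficient here because the plain operations are already bracketed by ETHR_MEMORY_BARRIER (__insn_mf()), a full memory fence on the Tile architecture.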