Diffstat (limited to 'erts')
-rw-r--r--  erts/aclocal.m4                                    |  2
-rw-r--r--  erts/include/internal/libatomic_ops/ethr_atomic.h  | 26
-rw-r--r--  erts/include/internal/sparc32/atomic.h             | 29
-rw-r--r--  erts/include/internal/tile/atomic.h                | 33
4 files changed, 47 insertions(+), 43 deletions(-)
diff --git a/erts/aclocal.m4 b/erts/aclocal.m4
index a1211bbf0c..f7e5c31910 100644
--- a/erts/aclocal.m4
+++ b/erts/aclocal.m4
@@ -1153,7 +1153,7 @@ case "$THR_LIB_NAME" in
AO_nop_full();
AO_store(&x, (AO_t) 0);
z = AO_load(&x);
- z = AO_compare_and_swap(&x, (AO_t) 0, (AO_t) 1);
+ z = AO_compare_and_swap_full(&x, (AO_t) 0, (AO_t) 1);
],
[ethr_have_native_atomics=yes
ethr_have_libatomic_ops=yes])
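
The probe now requires the fully fenced AO_compare_and_swap_full(), so a libatomic_ops build that only provides the unfenced variant no longer passes as having usable native atomics. A minimal standalone version of the probe (a sketch, assuming libatomic_ops and its <atomic_ops.h> header are installed; not part of this patch):

#include <atomic_ops.h>

int main(void)
{
    volatile AO_t x = (AO_t) 0;
    AO_t z;

    AO_nop_full();                 /* full memory barrier */
    AO_store(&x, (AO_t) 0);        /* atomic store        */
    z = AO_load(&x);               /* atomic load         */
    /* fully fenced compare-and-swap; linking fails if the
       _full variant is not provided for this target */
    z = (AO_t) AO_compare_and_swap_full(&x, (AO_t) 0, (AO_t) 1);
    return (int) z;
}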
diff --git a/erts/include/internal/libatomic_ops/ethr_atomic.h b/erts/include/internal/libatomic_ops/ethr_atomic.h
index d56693dbf8..2fc82c99a8 100644
--- a/erts/include/internal/libatomic_ops/ethr_atomic.h
+++ b/erts/include/internal/libatomic_ops/ethr_atomic.h
@@ -146,13 +146,13 @@ ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
-#ifdef AO_HAVE_fetch_and_add
- return ((ETHR_AINT_T__) AO_fetch_and_add(&var->counter, (AO_t) incr)) + incr;
+#ifdef AO_HAVE_fetch_and_add_full
+ return ((ETHR_AINT_T__) AO_fetch_and_add_full(&var->counter, (AO_t) incr)) + incr;
#else
while (1) {
AO_t exp = AO_load(&var->counter);
AO_t new = exp + (AO_t) incr;
- if (AO_compare_and_swap(&var->counter, exp, new))
+ if (AO_compare_and_swap_full(&var->counter, exp, new))
return (ETHR_AINT_T__) new;
}
#endif
@@ -167,8 +167,8 @@ ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_fetch_and_add1
- return ((ETHR_AINT_T__) AO_fetch_and_add1(&var->counter)) + 1;
+#ifdef AO_HAVE_fetch_and_add1_full
+ return ((ETHR_AINT_T__) AO_fetch_and_add1_full(&var->counter)) + 1;
#else
return ETHR_NATMC_FUNC__(add_return)(var, 1);
#endif
@@ -183,8 +183,8 @@ ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
{
-#ifdef AO_HAVE_fetch_and_sub1
- return ((ETHR_AINT_T__) AO_fetch_and_sub1(&var->counter)) - 1;
+#ifdef AO_HAVE_fetch_and_sub1_full
+ return ((ETHR_AINT_T__) AO_fetch_and_sub1_full(&var->counter)) - 1;
#else
return ETHR_NATMC_FUNC__(add_return)(var, -1);
#endif
@@ -202,7 +202,7 @@ ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
while (1) {
AO_t exp = AO_load(&var->counter);
AO_t new = exp & ((AO_t) mask);
- if (AO_compare_and_swap(&var->counter, exp, new))
+ if (AO_compare_and_swap_full(&var->counter, exp, new))
return (ETHR_AINT_T__) exp;
}
}
@@ -213,7 +213,7 @@ ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
while (1) {
AO_t exp = AO_load(&var->counter);
AO_t new = exp | ((AO_t) mask);
- if (AO_compare_and_swap(&var->counter, exp, new))
+ if (AO_compare_and_swap_full(&var->counter, exp, new))
return (ETHR_AINT_T__) exp;
}
}
@@ -225,7 +225,7 @@ ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
{
ETHR_AINT_T__ act;
do {
- if (AO_compare_and_swap(&var->counter, (AO_t) exp, (AO_t) new))
+ if (AO_compare_and_swap_full(&var->counter, (AO_t) exp, (AO_t) new))
return exp;
act = (ETHR_AINT_T__) AO_load(&var->counter);
} while (act == exp);
@@ -237,7 +237,7 @@ ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new)
{
while (1) {
AO_t exp = AO_load(&var->counter);
- if (AO_compare_and_swap(&var->counter, exp, (AO_t) new))
+ if (AO_compare_and_swap_full(&var->counter, exp, (AO_t) new))
return (ETHR_AINT_T__) exp;
}
}
@@ -265,7 +265,6 @@ ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
return ((ETHR_AINT_T__) AO_fetch_and_add1_acquire(&var->counter)) + 1;
#else
ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(add_return)(var, 1);
- ETHR_MEMORY_BARRIER;
return res;
#endif
}
@@ -287,7 +286,6 @@ ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
#ifdef AO_HAVE_fetch_and_sub1_release
return ((ETHR_AINT_T__) AO_fetch_and_sub1_release(&var->counter)) - 1;
#else
- ETHR_MEMORY_BARRIER;
return ETHR_NATMC_FUNC__(dec_return)(var);
#endif
}
@@ -314,7 +312,6 @@ ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
return act;
#else
ETHR_AINT_T__ act = ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
- ETHR_MEMORY_BARRIER;
return act;
#endif
}
@@ -333,7 +330,6 @@ ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
} while (act == exp);
return act;
#else
- ETHR_MEMORY_BARRIER;
return ETHR_NATMC_FUNC__(cmpxchg)(var, new, exp);
#endif
}
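
The hunks above switch every read-modify-write to the *_full libatomic_ops variants, which imply a full memory barrier; that is why the separate ETHR_MEMORY_BARRIER in the _acqb/_relb fallbacks can be dropped. A sketch of the same fallback pattern outside the ETHR_NATMC_FUNC__ macro machinery (assuming <atomic_ops.h>; illustrative only):

#include <atomic_ops.h>

/* add_return fallback: retry a fully fenced CAS until it succeeds,
   then return the post-increment value, as the hunk above does. */
static long
ao_add_return_full(volatile AO_t *counter, long incr)
{
    for (;;) {
        AO_t exp = AO_load(counter);
        AO_t new = exp + (AO_t) incr;
        if (AO_compare_and_swap_full(counter, exp, new))
            return (long) new;
    }
}

/* acquire-flavoured wrapper: the fully fenced base op already gives
   at least acquire semantics, so no extra barrier is needed. */
static long
ao_inc_return_acqb(volatile AO_t *counter)
{
    return ao_add_return_full(counter, 1);
}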
diff --git a/erts/include/internal/sparc32/atomic.h b/erts/include/internal/sparc32/atomic.h
index 00380dbf07..16182f8b01 100644
--- a/erts/include/internal/sparc32/atomic.h
+++ b/erts/include/internal/sparc32/atomic.h
@@ -95,7 +95,7 @@ ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
ETHR_AINT_T__ old, tmp;
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+ __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
do {
old = var->counter;
tmp = old+incr;
@@ -105,7 +105,7 @@ ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
: "r"(old), "r"(&var->counter), "0"(tmp)
: "memory");
} while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
return old+incr;
}
@@ -144,7 +144,7 @@ ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
ETHR_AINT_T__ old, tmp;
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+ __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
do {
old = var->counter;
tmp = old & mask;
@@ -154,7 +154,7 @@ ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
: "r"(old), "r"(&var->counter), "0"(tmp)
: "memory");
} while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
return old;
}
@@ -163,7 +163,7 @@ ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
ETHR_AINT_T__ old, tmp;
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+ __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
do {
old = var->counter;
tmp = old | mask;
@@ -173,7 +173,7 @@ ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
: "r"(old), "r"(&var->counter), "0"(tmp)
: "memory");
} while (__builtin_expect(old != tmp, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
return old;
}
@@ -182,7 +182,7 @@ ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
{
ETHR_AINT_T__ old, new;
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad");
+ __asm__ __volatile__("membar #LoadLoad|#StoreLoad" : : : "memory");
do {
old = var->counter;
new = val;
@@ -192,20 +192,20 @@ ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
: "r"(old), "r"(&var->counter), "0"(new)
: "memory");
} while (__builtin_expect(old != new, 0));
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
return old;
}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
{
- __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+ __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
__asm__ __volatile__(
ETHR_CAS__ " [%2], %1, %0"
: "=&r"(new)
: "r"(old), "r"(&var->counter), "0"(new)
: "memory");
- __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+ __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
return new;
}
@@ -213,13 +213,11 @@ ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__
/*
 * Atomic ops with at least specified barriers.
 */
-/* TODO: relax acquire barriers */
-
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
{
ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var);
- __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreLoad|#StoreStore" : : : "memory");
+ __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
return res;
}
@@ -234,21 +232,18 @@ static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
{
ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
- __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
return res;
}
static ETHR_INLINE void
ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
{
- __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
ETHR_NATMC_FUNC__(dec)(var);
}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
{
- __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
return ETHR_NATMC_FUNC__(dec_return)(var);
}
@@ -256,14 +251,12 @@ static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
{
ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
- __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
return res;
}
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
{
- __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
}
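
Two things change here: every membar asm gains a "memory" clobber, so the compiler can no longer reorder or cache memory accesses across the hardware barrier, and the extra barriers in the *_acqb/*_relb wrappers go away because the base ops already carry a leading #LoadLoad|#StoreLoad and a trailing #StoreLoad|#StoreStore membar. A sketch of the resulting read_acqb shape (SPARC V9 and GCC-style inline asm assumed; illustrative only):

/* read_acqb: a plain load followed by an acquire barrier; the
   "memory" clobber makes the membar a compiler barrier as well. */
static inline long
sparc_read_acqb(volatile long *p)
{
    long res = *p;
    __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
    return res;
}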
diff --git a/erts/include/internal/tile/atomic.h b/erts/include/internal/tile/atomic.h
index 48e4c0c6c8..0c7b597a6b 100644
--- a/erts/include/internal/tile/atomic.h
+++ b/erts/include/internal/tile/atomic.h
@@ -65,25 +65,35 @@ ethr_native_atomic32_read(ethr_native_atomic32_t *var)
static ETHR_INLINE void
ethr_native_atomic32_add(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
+ ETHR_MEMORY_BARRIER;
atomic_add(&var->counter, incr);
+ ETHR_MEMORY_BARRIER;
}
static ETHR_INLINE void
ethr_native_atomic32_inc(ethr_native_atomic32_t *var)
{
+ ETHR_MEMORY_BARRIER;
atomic_increment(&var->counter);
+ ETHR_MEMORY_BARRIER;
}
static ETHR_INLINE void
ethr_native_atomic32_dec(ethr_native_atomic32_t *var)
{
+ ETHR_MEMORY_BARRIER;
atomic_decrement(&var->counter);
+ ETHR_MEMORY_BARRIER;
}
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_add_return(ethr_native_atomic32_t *var, ethr_sint32_t incr)
{
- return atomic_exchange_and_add(&var->counter, incr) + incr;
+ ethr_sint32_t res;
+ ETHR_MEMORY_BARRIER;
+ res = atomic_exchange_and_add(&var->counter, incr) + incr;
+ ETHR_MEMORY_BARRIER;
+ return res;
}
static ETHR_INLINE ethr_sint32_t
@@ -101,18 +111,27 @@ ethr_native_atomic32_dec_return(ethr_native_atomic32_t *var)
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_and_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
- return atomic_and_val(&var->counter, mask);
+ ethr_sint32_t res;
+ ETHR_MEMORY_BARRIER;
+ res = atomic_and_val(&var->counter, mask);
+ ETHR_MEMORY_BARRIER;
+ return res;
}
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_or_retold(ethr_native_atomic32_t *var, ethr_sint32_t mask)
{
- return atomic_or_val(&var->counter, mask);
+ ethr_sint32_t res;
+ ETHR_MEMORY_BARRIER;
+ res = atomic_or_val(&var->counter, mask);
+ ETHR_MEMORY_BARRIER;
+ return res;
}
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_xchg(ethr_native_atomic32_t *var, ethr_sint32_t val)
{
+ ETHR_MEMORY_BARRIER;
return atomic_exchange_acq(&var->counter, val);
}
@@ -121,6 +140,7 @@ ethr_native_atomic32_cmpxchg(ethr_native_atomic32_t *var,
ethr_sint32_t new,
ethr_sint32_t expected)
{
+ ETHR_MEMORY_BARRIER;
return atomic_compare_and_exchange_val_acq(&var->counter, new, expected);
}
@@ -139,9 +159,7 @@ ethr_native_atomic32_read_acqb(ethr_native_atomic32_t *var)
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_inc_return_acqb(ethr_native_atomic32_t *var)
{
- ethr_sint32_t res = ethr_native_atomic32_inc_return(var);
- ETHR_MEMORY_BARRIER;
- return res;
+ return ethr_native_atomic32_inc_return(var);
}
static ETHR_INLINE void
@@ -154,14 +172,12 @@ ethr_native_atomic32_set_relb(ethr_native_atomic32_t *var, ethr_sint32_t val)
static ETHR_INLINE void
ethr_native_atomic32_dec_relb(ethr_native_atomic32_t *var)
{
- ETHR_MEMORY_BARRIER;
ethr_native_atomic32_dec(var);
}
static ETHR_INLINE ethr_sint32_t
ethr_native_atomic32_dec_return_relb(ethr_native_atomic32_t *var)
{
- ETHR_MEMORY_BARRIER;
return ethr_native_atomic32_dec_return(var);
}
@@ -178,7 +194,6 @@ ethr_native_atomic32_cmpxchg_relb(ethr_native_atomic32_t *var,
ethr_sint32_t new,
ethr_sint32_t exp)
{
- ETHR_MEMORY_BARRIER;
return ethr_native_atomic32_cmpxchg(var, new, exp);
}