path: root/erts/include/internal/sparc32/atomic.h
author     Rickard Green <[email protected]>  2011-05-13 14:27:41 +0200
committer  Rickard Green <[email protected]>  2011-05-13 14:27:41 +0200
commit     0c73cf3d305c4b033c6e1efa6ebd08796a1d8682 (patch)
tree       4aa3a5a40d8303f1f6f01cd90cee43857371db07 /erts/include/internal/sparc32/atomic.h
parent     926795501b71ebf9ca4b22927021fa551549f9b0 (diff)
parent     139fa05489a6ba3e4384e6f20ea3f943741449d5 (diff)
Merge branch 'rickard/barriers/OTP-9281' into dev
* rickard/barriers/OTP-9281:
  Silence warnings
  Fix build with hipe on amd64
  Reduce number of atomic ops
  Use 32-bit atomic for port snapshot
  Remove pointless erts_ports_alive variable
  Ensure quick break
  Ensure that all rehashing information are seen when done
  Ensure that stack updates are seen when stack is released
  Add needed barriers for write_concurrency tables
  Homogenize memory barriers on atomics
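A note on the "Homogenize memory barriers on atomics" item as it applies to this file: the membar instructions below were previously emitted without a "memory" clobber, so GCC was free to keep shared values cached in registers and to reorder surrounding loads and stores across the barrier; the bare instruction constrained only the CPU, not the compiler. A minimal illustration of the difference (a sketch for this page, not code from the patch):

    /* Without the clobber only the processor is constrained; the
     * compiler may still move or elide memory accesses around the asm. */
    __asm__ __volatile__("membar #StoreLoad|#StoreStore");

    /* With the clobber GCC must assume all memory may have changed:
     * cached values are reloaded and no load or store is reordered
     * across the barrier. */
    __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");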
Diffstat (limited to 'erts/include/internal/sparc32/atomic.h')
-rw-r--r--  erts/include/internal/sparc32/atomic.h | 29
1 file changed, 11 insertions(+), 18 deletions(-)
diff --git a/erts/include/internal/sparc32/atomic.h b/erts/include/internal/sparc32/atomic.h
index 00380dbf07..16182f8b01 100644
--- a/erts/include/internal/sparc32/atomic.h
+++ b/erts/include/internal/sparc32/atomic.h
@@ -95,7 +95,7 @@ ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
 {
     ETHR_AINT_T__ old, tmp;
 
-    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
     do {
         old = var->counter;
         tmp = old+incr;
@@ -105,7 +105,7 @@ ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
             : "r"(old), "r"(&var->counter), "0"(tmp)
             : "memory");
     } while (__builtin_expect(old != tmp, 0));
-    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
     return old+incr;
 }
 
@@ -144,7 +144,7 @@ ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
 {
     ETHR_AINT_T__ old, tmp;
 
-    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
     do {
         old = var->counter;
         tmp = old & mask;
@@ -154,7 +154,7 @@ ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
             : "r"(old), "r"(&var->counter), "0"(tmp)
             : "memory");
     } while (__builtin_expect(old != tmp, 0));
-    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
     return old;
 }
 
@@ -163,7 +163,7 @@ ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
 {
     ETHR_AINT_T__ old, tmp;
 
-    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
     do {
         old = var->counter;
         tmp = old | mask;
@@ -173,7 +173,7 @@ ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
             : "r"(old), "r"(&var->counter), "0"(tmp)
             : "memory");
     } while (__builtin_expect(old != tmp, 0));
-    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
     return old;
 }
 
@@ -182,7 +182,7 @@ ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
 {
     ETHR_AINT_T__ old, new;
 
-    __asm__ __volatile__("membar #LoadLoad|#StoreLoad");
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad" : : : "memory");
     do {
         old = var->counter;
         new = val;
@@ -192,20 +192,20 @@ ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
             : "r"(old), "r"(&var->counter), "0"(new)
             : "memory");
     } while (__builtin_expect(old != new, 0));
-    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
     return old;
 }
 
 static ETHR_INLINE ETHR_AINT_T__
 ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
 {
-    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n" : : : "memory");
     __asm__ __volatile__(
         ETHR_CAS__ " [%2], %1, %0"
         : "=&r"(new)
         : "r"(old), "r"(&var->counter), "0"(new)
         : "memory");
-    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
     return new;
 }
 
@@ -213,13 +213,11 @@ ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__
  * Atomic ops with at least specified barriers.
  */
 
-/* TODO: relax acquire barriers */
-
 static ETHR_INLINE ETHR_AINT_T__
 ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
 {
     ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(read)(var);
-    __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreLoad|#StoreStore" : : : "memory");
+    __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
     return res;
 }
 
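The read_acqb hunk above also resolves the removed TODO: the full four-way membar after the read is relaxed to #LoadLoad|#LoadStore, which is exactly acquire ordering; the load is ordered before every later load and store, and nothing stronger is required. A standalone sketch of the resulting pattern (hypothetical name and a plain volatile long instead of the ethread types):

    /* Acquire read on SPARC V9: a plain load followed by a membar that
     * keeps all later loads and stores from moving above it. */
    static inline long
    read_acquire_sketch(volatile long *p)
    {
        long v = *p;                  /* the atomic read */
        __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
        return v;
    }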
@@ -234,21 +232,18 @@ static ETHR_INLINE ETHR_AINT_T__
 ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
 {
     ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
-    __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
     return res;
 }
 
 static ETHR_INLINE void
 ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
 {
-    __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
     ETHR_NATMC_FUNC__(dec)(var);
 }
 
 static ETHR_INLINE ETHR_AINT_T__
 ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
 {
-    __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
     return ETHR_NATMC_FUNC__(dec_return)(var);
 }
 
@@ -256,14 +251,12 @@ static ETHR_INLINE ETHR_AINT_T__
 ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
 {
     ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
-    __asm__ __volatile__("membar #LoadLoad|#LoadStore" : : : "memory");
     return res;
 }
 
 static ETHR_INLINE ETHR_AINT_T__
 ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ new, ETHR_AINT_T__ old)
 {
-    __asm__ __volatile__("membar #LoadStore|#StoreStore" : : : "memory");
     return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
 }
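The deletions in the last two hunks work because the plain operations being wrapped (inc_return, dec, dec_return, cmpxchg) are already fully barriered in this file: each brackets its CAS with membar #LoadLoad|#StoreLoad before and membar #StoreLoad|#StoreStore after. The leading membar orders every earlier load and store before the CAS's load, and atomicity of the CAS carries that ordering to its store (release semantics); the trailing membar orders the CAS's store before every later load and store, which by the same atomicity argument also covers its load (acquire semantics). The extra membars in the *_acqb/*_relb wrappers were therefore redundant. A hedged sketch of that reasoning for the release case (illustrative name and 32-bit cas, not the ethread API):

    /* cmpxchg with release semantics needs no extra leading barrier:
     * the plain op's own membar already orders earlier loads and
     * stores before the atomic compare-and-swap. */
    static inline long
    cmpxchg_release_sketch(volatile long *p, long new, long old)
    {
        __asm__ __volatile__("membar #LoadLoad|#StoreLoad" : : : "memory");
        __asm__ __volatile__("cas [%2], %1, %0"   /* SPARC V9 CAS */
                             : "=&r"(new)
                             : "r"(old), "r"(p), "0"(new)
                             : "memory");
        __asm__ __volatile__("membar #StoreLoad|#StoreStore" : : : "memory");
        return new;  /* the value previously in *p */
    }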