Diffstat (limited to 'erts/include/internal/i386')
-rw-r--r--  erts/include/internal/i386/atomic.h  | 190
-rw-r--r--  erts/include/internal/i386/ethread.h |   7
2 files changed, 131 insertions(+), 66 deletions(-)
diff --git a/erts/include/internal/i386/atomic.h b/erts/include/internal/i386/atomic.h
index b3f4a1a60d..4e402f261a 100644
--- a/erts/include/internal/i386/atomic.h
+++ b/erts/include/internal/i386/atomic.h
@@ -23,18 +23,24 @@
*
* This code requires a 486 or newer processor.
*/
-#ifndef ETHREAD_I386_ATOMIC_H
-#define ETHREAD_I386_ATOMIC_H
-#if ETHR_SIZEOF_LONG != ETHR_SIZEOF_PTR
-# error "Incompatible size of 'long'"
+#undef ETHR_INCLUDE_ATOMIC_IMPL__
+#if !defined(ETHR_X86_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
+#define ETHR_X86_ATOMIC32_H__
+#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
+#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
+#elif !defined(ETHR_X86_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
+#define ETHR_X86_ATOMIC64_H__
+#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
+#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
-/* An atomic is an aligned long accessed via locked operations.
- */
-typedef struct {
- volatile long counter;
-} ethr_native_atomic_t;
+#ifdef ETHR_INCLUDE_ATOMIC_IMPL__
+
+#ifndef ETHR_X86_ATOMIC_COMMON__
+#define ETHR_X86_ATOMIC_COMMON__
+
+#define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory")
@@ -44,123 +50,161 @@ typedef struct {
#else
#define ETHR_MEMORY_BARRIER \
do { \
- volatile long x___ = 0; \
+ volatile ethr_sint32_t x___ = 0; \
__asm__ __volatile__("lock; incl %0" : "=m"(x___) : "m"(x___) : "memory"); \
} while (0)
#endif
-#define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1
-
-#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+#endif /* ETHR_X86_ATOMIC_COMMON__ */
-#ifdef __x86_64__
-#define LONG_SUFFIX "q"
+#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
+#define ETHR_HAVE_NATIVE_ATOMIC32 1
+#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
+#define ETHR_ATMC_T__ ethr_native_atomic32_t
+#define ETHR_AINT_T__ ethr_sint32_t
+#define ETHR_AINT_SUFFIX__ "l"
+#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
+#define ETHR_HAVE_NATIVE_ATOMIC64 1
+#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
+#define ETHR_ATMC_T__ ethr_native_atomic64_t
+#define ETHR_AINT_T__ ethr_sint64_t
+#define ETHR_AINT_SUFFIX__ "q"
#else
-#define LONG_SUFFIX "l"
+#error "Unsupported integer size"
#endif
+/* An atomic is an aligned ETHR_AINT_T__ accessed via locked operations.
+ */
+typedef struct {
+ volatile ETHR_AINT_T__ counter;
+} ETHR_ATMC_T__;
+
+#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
+
+static ETHR_INLINE ETHR_AINT_T__ *
+ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
+{
+ return (ETHR_AINT_T__ *) &var->counter;
+}
+
static ETHR_INLINE void
-ethr_native_atomic_init(ethr_native_atomic_t *var, long i)
+ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
var->counter = i;
}
-#define ethr_native_atomic_set(v, i) ethr_native_atomic_init((v), (i))
-static ETHR_INLINE long
-ethr_native_atomic_read(ethr_native_atomic_t *var)
+static ETHR_INLINE void
+ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
+{
+ var->counter = i;
+}
+
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
return var->counter;
}
static ETHR_INLINE void
-ethr_native_atomic_add(ethr_native_atomic_t *var, long incr)
+ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
__asm__ __volatile__(
- "lock; add" LONG_SUFFIX " %1, %0"
+ "lock; add" ETHR_AINT_SUFFIX__ " %1, %0"
: "=m"(var->counter)
: "ir"(incr), "m"(var->counter));
}
static ETHR_INLINE void
-ethr_native_atomic_inc(ethr_native_atomic_t *var)
+ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
{
__asm__ __volatile__(
- "lock; inc" LONG_SUFFIX " %0"
+ "lock; inc" ETHR_AINT_SUFFIX__ " %0"
: "=m"(var->counter)
: "m"(var->counter));
}
static ETHR_INLINE void
-ethr_native_atomic_dec(ethr_native_atomic_t *var)
+ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
{
__asm__ __volatile__(
- "lock; dec" LONG_SUFFIX " %0"
+ "lock; dec" ETHR_AINT_SUFFIX__ " %0"
: "=m"(var->counter)
: "m"(var->counter));
}
-static ETHR_INLINE long
-ethr_native_atomic_add_return(ethr_native_atomic_t *var, long incr)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
- long tmp;
+ ETHR_AINT_T__ tmp;
tmp = incr;
__asm__ __volatile__(
- "lock; xadd" LONG_SUFFIX " %0, %1" /* xadd didn't exist prior to the 486 */
+ "lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */
: "=r"(tmp)
: "m"(var->counter), "0"(tmp));
/* now tmp is the atomic's previous value */
return tmp + incr;
}
-#define ethr_native_atomic_inc_return(var) ethr_native_atomic_add_return((var), 1)
-#define ethr_native_atomic_dec_return(var) ethr_native_atomic_add_return((var), -1)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
+{
+ return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) 1);
+}
-static ETHR_INLINE long
-ethr_native_atomic_cmpxchg(ethr_native_atomic_t *var, long new, long old)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
+{
+ return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) -1);
+}
+
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
{
__asm__ __volatile__(
- "lock; cmpxchg" LONG_SUFFIX " %2, %3"
+ "lock; cmpxchg" ETHR_AINT_SUFFIX__ " %2, %3"
: "=a"(old), "=m"(var->counter)
: "r"(new), "m"(var->counter), "0"(old)
: "cc", "memory"); /* full memory clobber to make this a compiler barrier */
return old;
}
-static ETHR_INLINE long
-ethr_native_atomic_and_retold(ethr_native_atomic_t *var, long mask)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
- long tmp, old;
+ ETHR_AINT_T__ tmp, old;
tmp = var->counter;
do {
old = tmp;
- tmp = ethr_native_atomic_cmpxchg(var, tmp & mask, tmp);
+ tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp & mask, tmp);
} while (__builtin_expect(tmp != old, 0));
/* now tmp is the atomic's previous value */
return tmp;
}
-static ETHR_INLINE long
-ethr_native_atomic_or_retold(ethr_native_atomic_t *var, long mask)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
{
- long tmp, old;
+ ETHR_AINT_T__ tmp, old;
tmp = var->counter;
do {
old = tmp;
- tmp = ethr_native_atomic_cmpxchg(var, tmp | mask, tmp);
+ tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp | mask, tmp);
} while (__builtin_expect(tmp != old, 0));
/* now tmp is the atomic's previous value */
return tmp;
}
-static ETHR_INLINE long
-ethr_native_atomic_xchg(ethr_native_atomic_t *var, long val)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
{
- long tmp = val;
+ ETHR_AINT_T__ tmp = val;
__asm__ __volatile__(
- "xchg" LONG_SUFFIX " %0, %1"
+ "xchg" ETHR_AINT_SUFFIX__ " %0, %1"
: "=r"(tmp)
: "m"(var->counter), "0"(tmp));
/* now tmp is the atomic's previous value */
@@ -171,57 +215,73 @@ ethr_native_atomic_xchg(ethr_native_atomic_t *var, long val)
* Atomic ops with at least specified barriers.
*/
-static ETHR_INLINE long
-ethr_native_atomic_read_acqb(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
{
- long val;
+ ETHR_AINT_T__ val;
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
val = var->counter;
#else
- val = ethr_native_atomic_add_return(var, 0);
+ val = ETHR_NATMC_FUNC__(add_return)(var, 0);
#endif
__asm__ __volatile__("" : : : "memory");
return val;
}
static ETHR_INLINE void
-ethr_native_atomic_set_relb(ethr_native_atomic_t *var, long i)
+ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
__asm__ __volatile__("" : : : "memory");
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
var->counter = i;
#else
- (void) ethr_native_atomic_xchg(var, i);
+ (void) ETHR_NATMC_FUNC__(xchg)(var, i);
#endif
}
-static ETHR_INLINE long
-ethr_native_atomic_inc_return_acqb(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
{
- long res = ethr_native_atomic_inc_return(var);
+ ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
__asm__ __volatile__("" : : : "memory");
return res;
}
static ETHR_INLINE void
-ethr_native_atomic_dec_relb(ethr_native_atomic_t *var)
+ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
{
__asm__ __volatile__("" : : : "memory");
- ethr_native_atomic_dec(var);
+ ETHR_NATMC_FUNC__(dec)(var);
}
-static ETHR_INLINE long
-ethr_native_atomic_dec_return_relb(ethr_native_atomic_t *var)
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
{
__asm__ __volatile__("" : : : "memory");
- return ethr_native_atomic_dec_return(var);
+ return ETHR_NATMC_FUNC__(dec_return)(var);
}
-#define ethr_native_atomic_cmpxchg_acqb ethr_native_atomic_cmpxchg
-#define ethr_native_atomic_cmpxchg_relb ethr_native_atomic_cmpxchg
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
+{
+ return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+}
-#undef LONG_SUFFIX
+static ETHR_INLINE ETHR_AINT_T__
+ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
+ ETHR_AINT_T__ new,
+ ETHR_AINT_T__ old)
+{
+ return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
+}
#endif /* ETHR_TRY_INLINE_FUNCS */
-#endif /* ETHREAD_I386_ATOMIC_H */
+#undef ETHR_NATMC_FUNC__
+#undef ETHR_ATMC_T__
+#undef ETHR_AINT_T__
+#undef ETHR_AINT_SUFFIX__
+
+#endif /* ETHR_INCLUDE_ATOMIC_IMPL__ */
diff --git a/erts/include/internal/i386/ethread.h b/erts/include/internal/i386/ethread.h
index ed43e77279..b5a17caefb 100644
--- a/erts/include/internal/i386/ethread.h
+++ b/erts/include/internal/i386/ethread.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2005-2009. All Rights Reserved.
+ * Copyright Ericsson AB 2005-2010. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -24,7 +24,12 @@
#ifndef ETHREAD_I386_ETHREAD_H
#define ETHREAD_I386_ETHREAD_H
+#define ETHR_ATOMIC_WANT_32BIT_IMPL__
#include "atomic.h"
+#if ETHR_SIZEOF_PTR == 8
+# define ETHR_ATOMIC_WANT_64BIT_IMPL__
+# include "atomic.h"
+#endif
#include "spinlock.h"
#include "rwlock.h"