aboutsummaryrefslogtreecommitdiffstats
path: root/erts/include/internal/sparc32
diff options
context:
space:
mode:
authorErlang/OTP <[email protected]>2009-11-20 14:54:40 +0000
committerErlang/OTP <[email protected]>2009-11-20 14:54:40 +0000
commit84adefa331c4159d432d22840663c38f155cd4c1 (patch)
treebff9a9c66adda4df2106dfd0e5c053ab182a12bd /erts/include/internal/sparc32
downloadotp-84adefa331c4159d432d22840663c38f155cd4c1.tar.gz
otp-84adefa331c4159d432d22840663c38f155cd4c1.tar.bz2
otp-84adefa331c4159d432d22840663c38f155cd4c1.zip
The R13B03 release.OTP_R13B03
Diffstat (limited to 'erts/include/internal/sparc32')
-rw-r--r--erts/include/internal/sparc32/atomic.h173
-rw-r--r--erts/include/internal/sparc32/ethread.h34
-rw-r--r--erts/include/internal/sparc32/rwlock.h142
-rw-r--r--erts/include/internal/sparc32/spinlock.h81
4 files changed, 430 insertions, 0 deletions
diff --git a/erts/include/internal/sparc32/atomic.h b/erts/include/internal/sparc32/atomic.h
new file mode 100644
index 0000000000..d6fdc6b2a4
--- /dev/null
+++ b/erts/include/internal/sparc32/atomic.h
@@ -0,0 +1,173 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2005-2009. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Native ethread atomics on SPARC V9.
+ * Author: Mikael Pettersson.
+ */
+#ifndef ETHR_SPARC32_ATOMIC_H
+#define ETHR_SPARC32_ATOMIC_H
+
+/*
+ * Atomic counter cell.  "long" follows the native word size, so the
+ * counter is 64-bit under __arch64__ and 32-bit otherwise; the CASX
+ * macro below selects the matching cas/casx instruction width.
+ */
+typedef struct {
+    volatile long counter; /* volatile: force a real memory access per read */
+} ethr_native_atomic_t;
+
+#ifdef ETHR_TRY_INLINE_FUNCS
+
+/* Pick the compare-and-swap mnemonic matching the width of "long":
+ * 64-bit "casx" on the 64-bit ABI, 32-bit "cas" otherwise. */
+#if defined(__arch64__)
+#define CASX "casx"
+#else
+#define CASX "cas"
+#endif
+
+/* Initialize the counter to i.  Plain store, no barriers: must not be
+ * used concurrently with other operations on the same variable. */
+static ETHR_INLINE void
+ethr_native_atomic_init(ethr_native_atomic_t *var, long i)
+{
+    var->counter = i;
+}
+/* "set" is the same plain, unordered store as init. */
+#define ethr_native_atomic_set(v, i) ethr_native_atomic_init((v), (i))
+
+/* Relaxed read of the current value: one volatile load, no barriers. */
+static ETHR_INLINE long
+ethr_native_atomic_read(ethr_native_atomic_t *var)
+{
+    return var->counter;
+}
+
+/*
+ * Atomically add incr to *var and return the resulting value.
+ *
+ * cas/casx compares the word at [%2] with %1 (old) and, if equal,
+ * swaps it with %0; in either case %0 receives the value that was in
+ * memory.  So the loop exit test "old != tmp" means: retry whenever
+ * another CPU changed the counter between our load and the CAS.
+ *
+ * Fencing: the leading membar orders earlier loads (#LoadLoad) and
+ * earlier stores (#StoreLoad) before the load of the counter; the
+ * trailing membar orders the CAS's store before later loads
+ * (#StoreLoad) and stores (#StoreStore) -- full-barrier semantics.
+ */
+static ETHR_INLINE long
+ethr_native_atomic_add_return(ethr_native_atomic_t *var, long incr)
+{
+    long old, tmp;
+
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    do {
+	old = var->counter;
+	tmp = old+incr;
+	__asm__ __volatile__(
+	    CASX " [%2], %1, %0"
+	    : "=&r"(tmp)
+	    : "r"(old), "r"(&var->counter), "0"(tmp)
+	    : "memory");
+    } while (__builtin_expect(old != tmp, 0));
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    return old+incr;
+}
+
+/* Atomically add incr to *var; the new value is discarded.
+ * Inherits add_return's full-barrier semantics. */
+static ETHR_INLINE void
+ethr_native_atomic_add(ethr_native_atomic_t *var, long incr)
+{
+    (void)ethr_native_atomic_add_return(var, incr);
+}
+
+/* Atomically increment *var and return the new value. */
+static ETHR_INLINE long
+ethr_native_atomic_inc_return(ethr_native_atomic_t *var)
+{
+    return ethr_native_atomic_add_return(var, 1);
+}
+
+/* Atomically increment *var; result discarded. */
+static ETHR_INLINE void
+ethr_native_atomic_inc(ethr_native_atomic_t *var)
+{
+    (void)ethr_native_atomic_add_return(var, 1);
+}
+
+/* Atomically decrement *var and return the new value. */
+static ETHR_INLINE long
+ethr_native_atomic_dec_return(ethr_native_atomic_t *var)
+{
+    return ethr_native_atomic_add_return(var, -1);
+}
+
+/* Atomically decrement *var; result discarded. */
+static ETHR_INLINE void
+ethr_native_atomic_dec(ethr_native_atomic_t *var)
+{
+    (void)ethr_native_atomic_add_return(var, -1);
+}
+
+/*
+ * Atomically AND mask into *var and return the OLD value ("retold").
+ * Same CAS-retry pattern and full-barrier fencing as add_return: the
+ * CAS writes the value found in memory back into tmp, so old != tmp
+ * signals a lost race and forces a retry.
+ */
+static ETHR_INLINE long
+ethr_native_atomic_and_retold(ethr_native_atomic_t *var, long mask)
+{
+    long old, tmp;
+
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    do {
+	old = var->counter;
+	tmp = old & mask;
+	__asm__ __volatile__(
+	    CASX " [%2], %1, %0"
+	    : "=&r"(tmp)
+	    : "r"(old), "r"(&var->counter), "0"(tmp)
+	    : "memory");
+    } while (__builtin_expect(old != tmp, 0));
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    return old;
+}
+
+/*
+ * Atomically OR mask into *var and return the OLD value ("retold").
+ * Identical structure to and_retold; see add_return for the CAS-loop
+ * and membar rationale.
+ */
+static ETHR_INLINE long
+ethr_native_atomic_or_retold(ethr_native_atomic_t *var, long mask)
+{
+    long old, tmp;
+
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    do {
+	old = var->counter;
+	tmp = old | mask;
+	__asm__ __volatile__(
+	    CASX " [%2], %1, %0"
+	    : "=&r"(tmp)
+	    : "r"(old), "r"(&var->counter), "0"(tmp)
+	    : "memory");
+    } while (__builtin_expect(old != tmp, 0));
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    return old;
+}
+
+/*
+ * Atomically exchange *var with val and return the previous value.
+ * SPARC V9 has no plain swap for 64-bit, so this is a CAS loop: retry
+ * until the CAS succeeds with an unchanged "old" snapshot.  Fencing
+ * as in add_return (full barrier around the operation).
+ */
+static ETHR_INLINE long
+ethr_native_atomic_xchg(ethr_native_atomic_t *var, long val)
+{
+    long old, new;
+
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad");
+    do {
+	old = var->counter;
+	new = val;
+	__asm__ __volatile__(
+	    CASX " [%2], %1, %0"
+	    : "=&r"(new)
+	    : "r"(old), "r"(&var->counter), "0"(new)
+	    : "memory");
+    } while (__builtin_expect(old != new, 0));
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    return old;
+}
+
+/*
+ * Atomic compare-and-exchange: if *var == old, store new into it.
+ * Returns the value actually found in memory -- equal to old on
+ * success, the current (different) value on failure.  A single CAS,
+ * no retry loop, bracketed by the same entry/exit membars as above.
+ */
+static ETHR_INLINE long
+ethr_native_atomic_cmpxchg(ethr_native_atomic_t *var, long new, long old)
+{
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad\n");
+    __asm__ __volatile__(
+	CASX " [%2], %1, %0"
+	: "=&r"(new)
+	: "r"(old), "r"(&var->counter), "0"(new)
+	: "memory");
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    return new;
+}
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#endif /* ETHR_SPARC32_ATOMIC_H */
diff --git a/erts/include/internal/sparc32/ethread.h b/erts/include/internal/sparc32/ethread.h
new file mode 100644
index 0000000000..1d55399640
--- /dev/null
+++ b/erts/include/internal/sparc32/ethread.h
@@ -0,0 +1,34 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2005-2009. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Low-level ethread support on SPARC V9.
+ * Author: Mikael Pettersson.
+ */
+#ifndef ETHREAD_SPARC32_ETHREAD_H
+#define ETHREAD_SPARC32_ETHREAD_H
+
+#include "atomic.h"
+#include "spinlock.h"
+#include "rwlock.h"
+
+#define ETHR_HAVE_NATIVE_ATOMICS 1
+#define ETHR_HAVE_NATIVE_LOCKS 1
+
+#endif /* ETHREAD_SPARC32_ETHREAD_H */
diff --git a/erts/include/internal/sparc32/rwlock.h b/erts/include/internal/sparc32/rwlock.h
new file mode 100644
index 0000000000..12448e0b06
--- /dev/null
+++ b/erts/include/internal/sparc32/rwlock.h
@@ -0,0 +1,142 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2005-2009. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Native ethread rwlocks on SPARC V9.
+ * Author: Mikael Pettersson.
+ */
+#ifndef ETHREAD_SPARC32_RWLOCK_H
+#define ETHREAD_SPARC32_RWLOCK_H
+
+/* Unlocked if zero, read-locked if positive, write-locked if -1.
+ * A positive value is the count of readers currently holding the lock
+ * (read_trylock/read_unlock increment/decrement it by one). */
+typedef struct {
+    volatile int lock;
+} ethr_native_rwlock_t;
+
+#ifdef ETHR_TRY_INLINE_FUNCS
+
+/* Initialize to the unlocked state (0).  Plain store, no barriers:
+ * must not race with concurrent use of the lock. */
+static ETHR_INLINE void
+ethr_native_rwlock_init(ethr_native_rwlock_t *lock)
+{
+    lock->lock = 0;
+}
+
+/*
+ * Drop one reader: CAS-decrement the (positive) reader count.  The
+ * CAS writes the value found in memory into "new", so old != new
+ * means another CPU raced us and we retry.
+ *
+ * NOTE(review): the barrier here (#LoadLoad|#StoreLoad, before the
+ * loop) differs from write_unlock's #LoadStore|#StoreStore release
+ * barrier; presumably the CAS's own load/store pair supplies the
+ * remaining ordering -- confirm against the memory model.  Also,
+ * old/new are unsigned int while the lock field is int; harmless for
+ * the -1 arithmetic here, but inconsistent with read_trylock.
+ */
+static ETHR_INLINE void
+ethr_native_read_unlock(ethr_native_rwlock_t *lock)
+{
+    unsigned int old, new;
+
+    __asm__ __volatile__("membar #LoadLoad|#StoreLoad");
+    do {
+	old = lock->lock;
+	new = old-1;
+	__asm__ __volatile__(
+	    "cas [%2], %1, %0"
+	    : "=&r"(new)
+	    : "r"(old), "r"(&lock->lock), "0"(new)
+	    : "memory");
+    } while (__builtin_expect(old != new, 0));
+}
+
+/*
+ * Try to take a read lock once.  Returns 0 immediately if a writer
+ * holds the lock (negative value); otherwise CAS the reader count
+ * from old to old+1, retrying on contention, and return 1.  The
+ * trailing membar keeps the critical section's loads and stores after
+ * the locking store (acquire semantics).
+ */
+static ETHR_INLINE int
+ethr_native_read_trylock(ethr_native_rwlock_t *lock)
+{
+    int old, new;
+
+    do {
+	old = lock->lock;
+	if (__builtin_expect(old < 0, 0))
+	    return 0;
+	new = old+1;
+	__asm__ __volatile__(
+	    "cas [%2], %1, %0"
+	    : "=&r"(new)
+	    : "r"(old), "r"(&lock->lock), "0"(new)
+	    : "memory");
+    } while (__builtin_expect(old != new, 0));
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    return 1;
+}
+
+/*
+ * NOTE(review): despite the name, this returns true only while a
+ * WRITER holds the lock (negative value); a positive reader count
+ * yields 0.  That is the intended test for read_lock's spin-wait
+ * below (readers need not wait for other readers), but the name is
+ * misleading for any other caller.
+ */
+static ETHR_INLINE int
+ethr_native_read_is_locked(ethr_native_rwlock_t *lock)
+{
+    return lock->lock < 0;
+}
+
+/*
+ * Acquire a read lock, spinning until no writer holds it.  Classic
+ * test-and-test-and-set shape: after a failed trylock, poll with
+ * plain loads (cheap, no cache-line ping-pong from CAS), separated by
+ * membar #LoadLoad so successive polls are not collapsed/reordered,
+ * then retry the CAS-based trylock.
+ */
+static ETHR_INLINE void
+ethr_native_read_lock(ethr_native_rwlock_t *lock)
+{
+    for(;;) {
+	if (__builtin_expect(ethr_native_read_trylock(lock) != 0, 1))
+	    break;
+	do {
+	    __asm__ __volatile__("membar #LoadLoad");
+	} while (ethr_native_read_is_locked(lock));
+    }
+}
+
+/*
+ * Release the write lock.  The membar orders the critical section's
+ * loads (#LoadStore) and stores (#StoreStore) before the unlocking
+ * store of 0 -- release semantics; the store itself needs no CAS
+ * because only the single writer can reach this point.
+ */
+static ETHR_INLINE void
+ethr_native_write_unlock(ethr_native_rwlock_t *lock)
+{
+    __asm__ __volatile__("membar #LoadStore|#StoreStore");
+    lock->lock = 0;
+}
+
+/*
+ * Try to take the write lock once: CAS the lock word from 0 (free) to
+ * -1 (write-locked).  Returns 0 without a CAS if any reader or writer
+ * is present (old != 0), 1 on success.  The loop can only repeat when
+ * the CAS loses a race against a state that is momentarily 0 again.
+ * Trailing membar = acquire, as in read_trylock.
+ */
+static ETHR_INLINE int
+ethr_native_write_trylock(ethr_native_rwlock_t *lock)
+{
+    unsigned int old, new;
+
+    do {
+	old = lock->lock;
+	if (__builtin_expect(old != 0, 0))
+	    return 0;
+	new = -1;
+	__asm__ __volatile__(
+	    "cas [%2], %1, %0"
+	    : "=&r"(new)
+	    : "r"(old), "r"(&lock->lock), "0"(new)
+	    : "memory");
+    } while (__builtin_expect(old != new, 0));
+    __asm__ __volatile__("membar #StoreLoad|#StoreStore");
+    return 1;
+}
+
+/* True while ANY holder exists (readers or a writer): a writer must
+ * wait for the lock word to drain back to 0. */
+static ETHR_INLINE int
+ethr_native_write_is_locked(ethr_native_rwlock_t *lock)
+{
+    return lock->lock != 0;
+}
+
+/*
+ * Acquire the write lock: test-and-test-and-set spin, mirroring
+ * read_lock -- failed trylock, then read-only polling (with membar
+ * #LoadLoad between polls) until the word reads 0, then retry.
+ */
+static ETHR_INLINE void
+ethr_native_write_lock(ethr_native_rwlock_t *lock)
+{
+    for(;;) {
+	if (__builtin_expect(ethr_native_write_trylock(lock) != 0, 1))
+	    break;
+	do {
+	    __asm__ __volatile__("membar #LoadLoad");
+	} while (ethr_native_write_is_locked(lock));
+    }
+}
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#endif /* ETHREAD_SPARC32_RWLOCK_H */
diff --git a/erts/include/internal/sparc32/spinlock.h b/erts/include/internal/sparc32/spinlock.h
new file mode 100644
index 0000000000..b4fe48b714
--- /dev/null
+++ b/erts/include/internal/sparc32/spinlock.h
@@ -0,0 +1,81 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2005-2009. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+/*
+ * Native ethread spinlocks on SPARC V9.
+ * Author: Mikael Pettersson.
+ */
+#ifndef ETHR_SPARC32_SPINLOCK_H
+#define ETHR_SPARC32_SPINLOCK_H
+
+/* Locked with ldstub, so unlocked when 0 and locked when non-zero
+ * (ldstub stores 0xFF).  A single byte suffices since ldstub is a
+ * byte-wide atomic. */
+typedef struct {
+    volatile unsigned char lock;
+} ethr_native_spinlock_t;
+
+#ifdef ETHR_TRY_INLINE_FUNCS
+
+/* Initialize to unlocked (0).  Plain store, no barriers: must not
+ * race with concurrent use of the lock. */
+static ETHR_INLINE void
+ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
+{
+    lock->lock = 0;
+}
+
+/*
+ * Release the spinlock: order the critical section's loads
+ * (#LoadStore) and stores (#StoreStore) before the unlocking store of
+ * 0 -- release semantics; no atomic op needed since we own the lock.
+ */
+static ETHR_INLINE void
+ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
+{
+    __asm__ __volatile__("membar #LoadStore|#StoreStore");
+    lock->lock = 0;
+}
+
+/*
+ * Single acquisition attempt.  ldstub atomically loads the lock byte
+ * into prev and stores 0xFF into it; prev == 0 therefore means the
+ * lock was free and is now ours.  The membar in the same asm keeps
+ * the critical section's loads (#StoreLoad) and stores (#StoreStore)
+ * after the locking store -- acquire semantics.  Returns 1 on
+ * success, 0 if the lock was already held.
+ */
+static ETHR_INLINE int
+ethr_native_spin_trylock(ethr_native_spinlock_t *lock)
+{
+    unsigned int prev;
+
+    __asm__ __volatile__(
+	"ldstub [%1], %0\n\t"
+	"membar #StoreLoad|#StoreStore"
+	: "=r"(prev)
+	: "r"(&lock->lock)
+	: "memory");
+    return prev == 0;
+}
+
+/* True while some thread holds the lock (byte left non-zero by
+ * ldstub). */
+static ETHR_INLINE int
+ethr_native_spin_is_locked(ethr_native_spinlock_t *lock)
+{
+    return lock->lock != 0;
+}
+
+/*
+ * Acquire the spinlock: test-and-test-and-set.  After a failed
+ * ldstub attempt, poll with plain loads (avoids repeated exclusive
+ * cache-line acquisition), with membar #LoadLoad ordering successive
+ * polls, then retry the atomic attempt.
+ */
+static ETHR_INLINE void
+ethr_native_spin_lock(ethr_native_spinlock_t *lock)
+{
+    for(;;) {
+	if (__builtin_expect(ethr_native_spin_trylock(lock) != 0, 1))
+	    break;
+	do {
+	    __asm__ __volatile__("membar #LoadLoad");
+	} while (ethr_native_spin_is_locked(lock));
+    }
+}
+
+#endif /* ETHR_TRY_INLINE_FUNCS */
+
+#endif /* ETHR_SPARC32_SPINLOCK_H */