/*
 * %CopyrightBegin%
 *
 * Copyright Ericsson AB 2005-2011. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * %CopyrightEnd%
 */

/*
 * Native ethread spinlocks on PowerPC.
 * Author: Mikael Pettersson.
 *
 * Based on the examples in Appendix E of Motorola's
 * "Programming Environments Manual For 32-Bit Implementations
 * of the PowerPC Architecture".
 */
#ifndef ETHREAD_PPC_SPINLOCK_H
#define ETHREAD_PPC_SPINLOCK_H

#define ETHR_HAVE_NATIVE_SPINLOCKS 1
#define ETHR_NATIVE_SPINLOCK_IMPL "ethread"

/* Unlocked if zero, locked if non-zero. */
typedef struct {
    volatile unsigned int lock;
} ethr_native_spinlock_t;

#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)
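
/* Set the lock word to its initial, unlocked (zero) state. */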
static ETHR_INLINE void
ethr_native_spinlock_init(ethr_native_spinlock_t *lock)
{
    lock->lock = 0;
}
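
/*
 * Release the lock. The LoadStore|StoreStore barrier orders every load
 * and store made inside the critical section before the plain store
 * that clears the lock word, giving the store release semantics.
 */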
static ETHR_INLINE void
ethr_native_spin_unlock(ethr_native_spinlock_t *lock)
{
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    lock->lock = 0;
}
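
/*
 * Make one attempt to take the lock, using the PowerPC
 * load-and-reserve/store-conditional pair: lwarx reads the lock word
 * and sets a reservation, and stwcx. stores only if no other processor
 * has written the word since. Returns non-zero on success, zero if the
 * lock was already held.
 */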
static ETHR_INLINE int
ethr_native_spin_trylock(ethr_native_spinlock_t *lock)
{
    unsigned int prev;

    __asm__ __volatile__(
        "1:\t"
        "lwarx %0,0,%1\n\t"    /* read lock word to prev, set reservation */
        "cmpwi 0,%0,0\n\t"     /* already locked (non-zero)? */
        "bne- 2f\n\t"          /* bail out if so */
        "stwcx. %2,0,%1\n\t"   /* try to make the lock non-zero */
        "bne- 1b\n\t"          /* retry if the reservation was lost */
        "isync\n\t"            /* acquire barrier: no later access moves before the lock */
        "2:"
        : "=&r"(prev)
        : "r"(&lock->lock), "r"(1)
        : "cr0", "memory");
    return prev == 0;
}
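
/* Read-only test of the lock word, usable for spinning without writes. */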
static ETHR_INLINE int
ethr_native_spin_is_locked(ethr_native_spinlock_t *lock)
{
    return lock->lock != 0;
}
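
/*
 * Acquire the lock, spinning until it is free. This is a
 * test-and-test-and-set loop: after a failed trylock it waits on plain
 * loads via ethr_native_spin_is_locked(), so waiting processors do not
 * generate reservation/store traffic, and retries the lwarx/stwcx.
 * sequence only once the lock appears free.
 */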
static ETHR_INLINE void
ethr_native_spin_lock(ethr_native_spinlock_t *lock)
{
    for (;;) {
        if (__builtin_expect(ethr_native_spin_trylock(lock) != 0, 1))
            break;
        do {
            __asm__ __volatile__("" : : : "memory"); /* compiler barrier: force a fresh read */
        } while (ethr_native_spin_is_locked(lock));
    }
}
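
/*
 * Usage sketch (illustrative only; ethread callers normally go through
 * the generic ethr spinlock API rather than these native functions):
 *
 *     static ethr_native_spinlock_t lk;
 *
 *     ethr_native_spinlock_init(&lk);
 *     ethr_native_spin_lock(&lk);
 *     ... critical section ...
 *     ethr_native_spin_unlock(&lk);
 */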
#endif /* ETHR_TRY_INLINE_FUNCS */
#endif /* ETHREAD_PPC_SPINLOCK_H */