Diffstat (limited to 'arch/arm/include/asm/spinlock.h')
-rw-r--r--  arch/arm/include/asm/spinlock.h  48
1 files changed, 44 insertions, 4 deletions
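
Note: this patch appears to work around a lock-contention erratum (CONFIG_ARM_ERRATA_351422) by inserting a per-CPU back-off delay in front of each ldrex/strex loop, and it adds the IT (If-Then) instructions that Thumb-2 requires before conditional instructions such as strexeq, wfene and seveq. The delay is derived from the low bits of the MPIDR register, so each core backs off for a different number of cycles; the "1:" label inside spinlock_backoff_delay() also becomes the branch target for the "bne 1b" retry in the lock loops, which is why the label is dropped from the ldrex lines below and the back-off is re-run on every retry. A rough C-level sketch of the delay, for illustration only (read_mpidr() is a hypothetical helper standing in for the "mrc p15, 0, <Rd>, c0, c0, 5" read; the actual patch does all of this in inline assembly):

/*
 * Illustrative sketch of the CONFIG_ARM_ERRATA_351422 back-off delay --
 * not the code added by the patch itself.
 */
static inline unsigned int read_mpidr(void)	/* hypothetical helper */
{
	unsigned int mpidr;

	/* MPIDR: Multiprocessor Affinity Register, CP15 c0, c0, 5. */
	__asm__ __volatile__("mrc p15, 0, %0, c0, c0, 5" : "=r" (mpidr));
	return mpidr;
}

static inline void spinlock_backoff_delay_sketch(void)
{
	/* The low 4 bits of MPIDR identify the CPU within the cluster. */
	unsigned int cpu = read_mpidr() & 0xf;
	/* Scale by 256 so each CPU spins for a different length of time. */
	int delay = cpu << 8;

	/* Mirrors the "subs ... bpl" loop: runs delay + 1 times. */
	while (delay-- >= 0)
		barrier();	/* keep the compiler from removing the loop */
}
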
diff --git a/arch/arm/include/asm/spinlock.h b/arch/arm/include/asm/spinlock.h
index 2b41ebbfa7ff..cfc6f6dc98c4 100644
--- a/arch/arm/include/asm/spinlock.h
+++ b/arch/arm/include/asm/spinlock.h
@@ -23,15 +23,38 @@
#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
+#ifdef CONFIG_ARM_ERRATA_351422
+#define spinlock_backoff_delay() \
+{ \
+ unsigned int delay; \
+ __asm__ __volatile__( \
+ "1: mrc p15, 0, %0, c0, c0, 5\n" \
+ " and %0, %0, #0xf\n" \
+ " mov %0, %0, lsl #8\n" \
+ "2: subs %0, %0, #1\n" \
+ " bpl 2b\n" \
+ : "=&r" (delay) \
+ : \
+ : "cc" ); \
+}
+#else
+#define spinlock_backoff_delay() \
+ __asm__ __volatile__("1: \n");
+#endif
+
static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
unsigned long tmp;
+ spinlock_backoff_delay();
__asm__ __volatile__(
-"1: ldrex %0, [%1]\n"
+" ldrex %0, [%1]\n"
" teq %0, #0\n"
#ifdef CONFIG_CPU_32v6K
+" itee ne\n"
" wfene\n"
+#else
+" itt eq\n"
#endif
" strexeq %0, %2, [%1]\n"
" teqeq %0, #0\n"
@@ -47,9 +70,11 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
unsigned long tmp;
+ spinlock_backoff_delay();
__asm__ __volatile__(
" ldrex %0, [%1]\n"
" teq %0, #0\n"
+" it eq\n"
" strexeq %0, %2, [%1]"
: "=&r" (tmp)
: "r" (&lock->lock), "r" (1)
@@ -90,11 +115,15 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
{
unsigned long tmp;
+ spinlock_backoff_delay();
__asm__ __volatile__(
-"1: ldrex %0, [%1]\n"
+" ldrex %0, [%1]\n"
" teq %0, #0\n"
#ifdef CONFIG_CPU_32v6K
+" ite ne\n"
" wfene\n"
+#else
+" it eq\n"
#endif
" strexeq %0, %2, [%1]\n"
" teq %0, #0\n"
@@ -110,9 +139,11 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
{
unsigned long tmp;
+ spinlock_backoff_delay();
__asm__ __volatile__(
"1: ldrex %0, [%1]\n"
" teq %0, #0\n"
+" it eq\n"
" strexeq %0, %2, [%1]"
: "=&r" (tmp)
: "r" (&rw->lock), "r" (0x80000000)
@@ -160,9 +191,15 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
{
unsigned long tmp, tmp2;
+ spinlock_backoff_delay();
__asm__ __volatile__(
-"1: ldrex %0, [%2]\n"
+" ldrex %0, [%2]\n"
" adds %0, %0, #1\n"
+#ifdef CONFIG_CPU_32v6K
+" itet pl\n"
+#else
+" itt pl\n"
+#endif
" strexpl %1, %0, [%2]\n"
#ifdef CONFIG_CPU_32v6K
" wfemi\n"
@@ -182,14 +219,16 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
smp_mb();
+ spinlock_backoff_delay();
__asm__ __volatile__(
-"1: ldrex %0, [%2]\n"
+" ldrex %0, [%2]\n"
" sub %0, %0, #1\n"
" strex %1, %0, [%2]\n"
" teq %1, #0\n"
" bne 1b"
#ifdef CONFIG_CPU_32v6K
"\n cmp %0, #0\n"
+" itt eq\n"
" mcreq p15, 0, %0, c7, c10, 4\n"
" seveq"
#endif
@@ -205,6 +244,7 @@ static inline int __raw_read_trylock(raw_rwlock_t *rw)
__asm__ __volatile__(
"1: ldrex %0, [%2]\n"
" adds %0, %0, #1\n"
+" it pl\n"
" strexpl %1, %0, [%2]\n"
: "=&r" (tmp), "+r" (tmp2)
: "r" (&rw->lock)