author	H. Peter Anvin <hpa@zytor.com>	2010-07-27 23:29:52 -0700
committer	H. Peter Anvin <hpa@zytor.com>	2010-07-27 23:29:52 -0700
commit	69309a05907546fb686b251d4ab041c26afe1e1d (patch)
tree	ada2b711139dfec0ba6bd820e7840138af4d5bfa	/arch/x86/include/asm/cmpxchg_64.h
parent	d3608b5681d238605b7da6be62244e803e24c649 (diff)
x86, asm: Clean up and simplify set_64bit()
Clean up and simplify set_64bit(). This code is quite old (1.3.11) and contains a fair bit of auxiliary machinery that current versions of gcc handle just fine automatically. Worse, the auxiliary machinery can actually cause an unnecessary spill to memory.

Furthermore, the loading of the old value inside the loop in the 32-bit case is unnecessary: if the value doesn't match, the CMPXCHG8B instruction will already have loaded the "new previous" value for us.

Clean up the comment, too, and remove page references to obsolete versions of the Intel SDM.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
LKML-Reference: <tip-*@vger.kernel.org>
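For context, the CMPXCHG8B point above concerns the 32-bit set_64bit() (in cmpxchg_32.h, which is outside this diffstat), so the following is only a minimal sketch with illustrative names and constraints. CMPXCHG8B compares EDX:EAX against the 64-bit memory operand and, on a mismatch, writes the current memory value back into EDX:EAX; with a "+A" (EDX:EAX) operand, the register pair therefore already holds the fresh previous value on the next iteration, and no explicit reload of *ptr inside the loop is needed.

static inline void set_64bit_sketch(volatile unsigned long long *ptr,
				    unsigned long long value)
{
	unsigned int low  = value;		/* new value, low half  -> EBX */
	unsigned int high = value >> 32;	/* new value, high half -> ECX */
	unsigned long long prev = *ptr;		/* single load, before the loop */

	/* On failure, CMPXCHG8B itself refreshes prev (EDX:EAX) from memory. */
	asm volatile("\n1:\t"
		     "lock; cmpxchg8b %0\n\t"
		     "jnz 1b"
		     : "=m" (*ptr), "+A" (prev)
		     : "b" (low), "c" (high)
		     : "memory");
}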
Diffstat (limited to 'arch/x86/include/asm/cmpxchg_64.h')
-rw-r--r--  arch/x86/include/asm/cmpxchg_64.h | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index b92f147339f3..9596e7c61960 100644
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -5,13 +5,11 @@
 
 #define __xg(x) ((volatile long *)(x))
 
-static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
+static inline void set_64bit(volatile u64 *ptr, u64 val)
 {
 	*ptr = val;
 }
 
-#define _set_64bit set_64bit
-
 extern void __xchg_wrong_size(void);
 extern void __cmpxchg_wrong_size(void);
 
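On x86-64 itself, the simplified set_64bit() shown in this diff is just an aligned 64-bit store, which the architecture already guarantees to be atomic, so no LOCK-prefixed instruction is required. A hypothetical standalone caller, using <stdint.h> types in place of the kernel's u64:

#include <stdint.h>

/* Mirrors the simplified helper from the diff, outside the kernel. */
static inline void set_64bit(volatile uint64_t *ptr, uint64_t val)
{
	*ptr = val;	/* aligned 64-bit stores are atomic on x86-64 */
}

int main(void)
{
	volatile uint64_t slot = 0;
	set_64bit(&slot, 0xdeadbeefcafef00dULL);	/* arbitrary test value */
	return (slot == 0xdeadbeefcafef00dULL) ? 0 : 1;
}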