author: Joe Perches <joe@perches.com> 2008-03-23 01:01:47 -0700
committer: Ingo Molnar <mingo@elte.hu> 2008-04-17 17:41:22 +0200
commit: 346050952cac11b25a98c7e1743412b416827314 (patch)
tree: ee12dd7bcc25082b851626f4848bae224e3c0a23 /include/asm-x86/byteorder.h
parent: 86d8a08616ecbc510323bfca591816a5709c6e54 (diff)
include/asm-x86/byteorder.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/byteorder.h')
-rw-r--r--  include/asm-x86/byteorder.h  39
1 file changed, 24 insertions(+), 15 deletions(-)
diff --git a/include/asm-x86/byteorder.h b/include/asm-x86/byteorder.h
index fe2f2e5d51ba..e02ae2d89acf 100644
--- a/include/asm-x86/byteorder.h
+++ b/include/asm-x86/byteorder.h
@@ -8,50 +8,59 @@
#ifdef __i386__
-static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
+static inline __attribute_const__ __u32 ___arch__swab32(__u32 x)
{
#ifdef CONFIG_X86_BSWAP
- __asm__("bswap %0" : "=r" (x) : "0" (x));
+ asm("bswap %0" : "=r" (x) : "0" (x));
#else
- __asm__("xchgb %b0,%h0\n\t" /* swap lower bytes */
- "rorl $16,%0\n\t" /* swap words */
- "xchgb %b0,%h0" /* swap higher bytes */
- :"=q" (x)
- : "0" (x));
+ asm("xchgb %b0,%h0\n\t" /* swap lower bytes */
+ "rorl $16,%0\n\t" /* swap words */
+ "xchgb %b0,%h0" /* swap higher bytes */
+ : "=q" (x)
+ : "0" (x));
#endif
return x;
}
-static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 val)
+static inline __attribute_const__ __u64 ___arch__swab64(__u64 val)
{
union {
- struct { __u32 a,b; } s;
+ struct {
+ __u32 a;
+ __u32 b;
+ } s;
__u64 u;
} v;
v.u = val;
#ifdef CONFIG_X86_BSWAP
- __asm__("bswapl %0 ; bswapl %1 ; xchgl %0,%1"
+ asm("bswapl %0 ; bswapl %1 ; xchgl %0,%1"
: "=r" (v.s.a), "=r" (v.s.b)
: "0" (v.s.a), "1" (v.s.b));
#else
v.s.a = ___arch__swab32(v.s.a);
v.s.b = ___arch__swab32(v.s.b);
- __asm__("xchgl %0,%1" : "=r" (v.s.a), "=r" (v.s.b) : "0" (v.s.a), "1" (v.s.b));
+ asm("xchgl %0,%1"
+ : "=r" (v.s.a), "=r" (v.s.b)
+ : "0" (v.s.a), "1" (v.s.b));
#endif
return v.u;
}
#else /* __i386__ */
-static __inline__ __attribute_const__ __u64 ___arch__swab64(__u64 x)
+static inline __attribute_const__ __u64 ___arch__swab64(__u64 x)
{
- __asm__("bswapq %0" : "=r" (x) : "0" (x));
+ asm("bswapq %0"
+ : "=r" (x)
+ : "0" (x));
return x;
}
-static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 x)
+static inline __attribute_const__ __u32 ___arch__swab32(__u32 x)
{
- __asm__("bswapl %0" : "=r" (x) : "0" (x));
+ asm("bswapl %0"
+ : "=r" (x)
+ : "0" (x));
return x;
}
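
For reference, the patch only changes spelling and layout (__asm__ to asm, __inline__ to inline, constraint indentation); the byte-swap semantics are untouched. Below is a minimal user-space sketch, not part of the kernel header, showing the same 32-bit swab in plain C so the effect of the bswap instruction and the xchgb/rorl/xchgb fallback can be checked. The name swab32_c and the test harness are illustrative only.

#include <stdint.h>
#include <stdio.h>

/* Plain-C equivalent of the 32-bit byte swap performed by "bswap" and by
 * the i386 fallback sequence (swap bytes in the low half, rotate the word
 * by 16, swap bytes again). */
static inline uint32_t swab32_c(uint32_t x)
{
	/* Swap bytes within each 16-bit half... */
	x = ((x & 0x00ff00ffU) << 8) | ((x & 0xff00ff00U) >> 8);
	/* ...then exchange the two halves. */
	return (x << 16) | (x >> 16);
}

int main(void)
{
	/* Expected output: 78563412 */
	printf("%08x\n", swab32_c(0x12345678));
	return 0;
}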