/*
 * cmpxchg.h -- forked from asm/atomic.h with this copyright:
 *
 * Copyright 2010 Tilera Corporation. All Rights Reserved.
 *
 *   This program is free software; you can redistribute it and/or
 *   modify it under the terms of the GNU General Public License
 *   as published by the Free Software Foundation, version 2.
 *
 *   This program is distributed in the hope that it will be useful, but
 *   WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 *   NON INFRINGEMENT.  See the GNU General Public License for
 *   more details.
 *
 */

#ifndef _ASM_TILE_CMPXCHG_H
#define _ASM_TILE_CMPXCHG_H

#ifndef __ASSEMBLY__

#include <asm/barrier.h>

/* Nonexistent functions intended to cause compile errors. */
extern void __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");

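/*
 * Illustrative example (not part of this header): with a hypothetical
 * 2-byte target, the size checks in the macros below fall through to
 * the declarations above, and __compiletime_error() makes the build
 * fail with "Bad argument size for xchg" at the call site:
 *
 *	u16 flag;
 *	(void)xchg(&flag, 1);	becomes a compile-time error
 */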
#ifndef __tilegx__

/*
 * Note the _atomic_xxx() routines include a final mb(), and the
 * wrapper macros below issue smp_mb() before calling them, so each
 * operation is fully ordered.  The routines themselves are
 * implemented out of line.
 */
int _atomic_xchg(int *ptr, int n);
int _atomic_xchg_add(int *v, int i);
int _atomic_xchg_add_unless(int *v, int a, int u);
int _atomic_cmpxchg(int *ptr, int o, int n);
u64 _atomic64_xchg(u64 *v, u64 n);
u64 _atomic64_xchg_add(u64 *v, u64 i);
u64 _atomic64_xchg_add_unless(u64 *v, u64 a, u64 u);
u64 _atomic64_cmpxchg(u64 *v, u64 o, u64 n);

#define xchg(ptr, n)							\
	({								\
		if (sizeof(*(ptr)) != 4)				\
			__xchg_called_with_bad_pointer();		\
		smp_mb();						\
		(typeof(*(ptr)))_atomic_xchg((int *)(ptr), (int)(n));	\
	})

#define cmpxchg(ptr, o, n)						\
	({								\
		if (sizeof(*(ptr)) != 4)				\
			__cmpxchg_called_with_bad_pointer();		\
		smp_mb();						\
		(typeof(*(ptr)))_atomic_cmpxchg((int *)(ptr), (int)(o), (int)(n)); \
	})

#define xchg64(ptr, n)							\
	({								\
		if (sizeof(*(ptr)) != 8)				\
			__xchg_called_with_bad_pointer();		\
		smp_mb();						\
		(typeof(*(ptr)))_atomic64_xchg((u64 *)(ptr), (u64)(n));	\
	})

#define cmpxchg64(ptr, o, n)						\
	({								\
		if (sizeof(*(ptr)) != 8)				\
			__cmpxchg_called_with_bad_pointer();		\
		smp_mb();						\
		(typeof(*(ptr)))_atomic64_cmpxchg((u64 *)(ptr), (u64)(o), (u64)(n)); \
	})
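
/*
 * Usage sketch (illustrative only; "counter" is a hypothetical int):
 * the classic compare-and-swap retry loop, here incrementing only
 * while the value is nonzero.  cmpxchg() returns the old value, so
 * the loop retries whenever another CPU changed it in between:
 *
 *	int old, new;
 *	do {
 *		old = READ_ONCE(counter);
 *		if (old == 0)
 *			break;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */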

#else

#define xchg(ptr, n)							\
	({								\
		typeof(*(ptr)) __x;					\
		smp_mb();						\
		switch (sizeof(*(ptr))) {				\
		case 4:							\
			__x = (typeof(__x))(unsigned long)		\
				__insn_exch4((ptr), (u32)(unsigned long)(n)); \
			break;						\
		case 8:							\
			__x = (typeof(__x))			\
				__insn_exch((ptr), (unsigned long)(n));	\
			break;						\
		default:						\
			__xchg_called_with_bad_pointer();		\
			break;						\
		}							\
		smp_mb();						\
		__x;							\
	})

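/*
 * On tilegx, the cmpexch instruction takes its "expected" value from
 * the SPR_CMPEXCH_VALUE special-purpose register, which is why the
 * macro below writes "o" there with mtspr before issuing cmpexch
 * itself.
 */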
#define cmpxchg(ptr, o, n)						\
	({								\
		typeof(*(ptr)) __x;					\
		__insn_mtspr(SPR_CMPEXCH_VALUE, (unsigned long)(o));	\
		smp_mb();						\
		switch (sizeof(*(ptr))) {				\
		case 4:							\
			__x = (typeof(__x))(unsigned long)		\
				__insn_cmpexch4((ptr), (u32)(unsigned long)(n)); \
			break;						\
		case 8:							\
			__x = (typeof(__x))__insn_cmpexch((ptr), (u64)(n)); \
			break;						\
		default:						\
			__cmpxchg_called_with_bad_pointer();		\
			break;						\
		}							\
		smp_mb();						\
		__x;							\
	})

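/* The size-switched macros above already handle the 8-byte case. */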
#define xchg64 xchg
#define cmpxchg64 cmpxchg

#endif /* __tilegx__ */

#define tas(ptr) xchg((ptr), 1)
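
/*
 * Illustrative example (not part of this header): tas() returns the
 * previous value, so 0 means the caller set the flag first; "claimed"
 * is a hypothetical int:
 *
 *	static int claimed;
 *	if (tas(&claimed) == 0) {
 *		... one-time setup ...
 *	}
 */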

#endif /* __ASSEMBLY__ */

#endif /* _ASM_TILE_CMPXCHG_H */