#ifndef IA64_INTRINSIC_H
#define IA64_INTRINSIC_H

/*
 * Compiler-dependent Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 *
 */
extern long ia64_cmpxchg_called_with_bad_pointer (void);
extern void ia64_bad_param_for_getreg (void);
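/*
 * ia64_cmpxchg() dispatches on the operand size at compile time.  An
 * unsupported size falls through to ia64_cmpxchg_called_with_bad_pointer(),
 * which is intentionally left undefined so that misuse fails at link
 * time instead of silently at run time.
 */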
#define ia64_cmpxchg(sem,ptr,o,n,s) ({					\
	uint64_t _o, _r;						\
	switch(s) {							\
		case 1: _o = (uint8_t)(long)(o); break;			\
		case 2: _o = (uint16_t)(long)(o); break;		\
		case 4: _o = (uint32_t)(long)(o); break;		\
		case 8: _o = (uint64_t)(long)(o); break;		\
		default: break;						\
	}								\
	switch(s) {							\
		case 1:							\
		_r = ia64_cmpxchg1_##sem((uint8_t*)ptr,n,_o); break;	\
		case 2:							\
		_r = ia64_cmpxchg2_##sem((uint16_t*)ptr,n,_o); break;	\
		case 4:							\
		_r = ia64_cmpxchg4_##sem((uint32_t*)ptr,n,_o); break;	\
		case 8:							\
		_r = ia64_cmpxchg8_##sem((uint64_t*)ptr,n,_o); break;	\
		default:						\
		_r = ia64_cmpxchg_called_with_bad_pointer(); break;	\
	}								\
	(__typeof__(o)) _r;						\
})

#define cmpxchg_acq(ptr,o,n) ia64_cmpxchg(acq,ptr,o,n,sizeof(*ptr))
#define cmpxchg_rel(ptr,o,n) ia64_cmpxchg(rel,ptr,o,n,sizeof(*ptr))
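
/*
 * Illustrative use (a sketch only; 'counter' is a hypothetical
 * volatile uint32_t * and not part of this header): the usual
 * compare-and-swap retry loop built on cmpxchg_acq.
 *
 *	uint32_t old, seen;
 *	do {
 *		old  = *counter;			// snapshot the current value
 *		seen = cmpxchg_acq(counter, old, old + 1);
 *	} while (seen != old);				// retry if another CPU raced us
 */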

#ifdef __INTEL_COMPILER
void  __fc(uint64_t *addr);
void  __synci(void);
void __isrlz(void);
void __dsrlz(void);
uint64_t __getReg(const int whichReg);
uint64_t _InterlockedCompareExchange8_rel(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange8_acq(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_rel(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_acq(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_rel(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_acq(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange64_rel(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange64_acq(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);

#define ia64_cmpxchg1_rel	_InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq	_InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel	_InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq	_InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel	_InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq	_InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel	_InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq	_InterlockedCompareExchange64_acq

#define ia64_srlz_d		__dsrlz
#define ia64_srlz_i		__isrlz
#define __ia64_fc 		__fc
#define ia64_sync_i		__synci
#define __ia64_getreg		__getReg
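
/*
 * Note: with the Intel compiler, __getReg() is normally passed one of the
 * _IA64_REG_* selector constants from the compiler's own headers (e.g.
 * _IA64_REG_AR_ITC); a raw register number is not portable.
 */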
#else /* __INTEL_COMPILER */
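/*
 * GNU-compiler path: each primitive first loads the expected value into
 * the ar.ccv application register, then issues the matching cmpxchgN
 * instruction, which stores 'new' only if the target location still
 * equals ar.ccv, and always returns the value previously in memory.
 */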
#define ia64_cmpxchg1_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg1.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg1_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg1.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg2_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg2.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg2_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg2.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg4_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg4.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg4_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg4.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg8_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg8.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg8_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg8.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})
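
/*
 * Serialization and cache-flush primitives: srlz.i serializes the
 * instruction stream, srlz.d serializes data references, fc flushes the
 * cache line containing 'addr', and sync.i makes preceding fc operations
 * visible before a later srlz.i completes.
 */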

#define ia64_srlz_i()	asm volatile (";; srlz.i ;;" ::: "memory")
#define ia64_srlz_d()	asm volatile (";; srlz.d" ::: "memory")
#define __ia64_fc(addr)	asm volatile ("fc %0" :: "r"(addr) : "memory")
#define ia64_sync_i()	asm volatile (";; sync.i" ::: "memory")

#endif /* __INTEL_COMPILER */
#endif /* IA64_INTRINSIC_H */