#ifndef IA64_INTRINSIC_H
#define IA64_INTRINSIC_H

/*
 * Compiler-dependent Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 *
 */
#include <stdint.h>

extern long ia64_cmpxchg_called_with_bad_pointer (void);
extern void ia64_bad_param_for_getreg (void);
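
/*
 * ia64_cmpxchg(sem, ptr, o, n, s): size-dispatched compare-and-exchange
 * with the memory-ordering semantics named by 'sem' (acq or rel).  The
 * first switch zero-extends the expected value 'o' to 64 bits; the
 * second dispatches on the operand size 's' to the width-specific
 * primitive.  An unsupported size calls the deliberately undefined
 * function above, turning the mistake into a link-time error.
 */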
#define ia64_cmpxchg(sem,ptr,o,n,s) ({					\
	uint64_t _o, _r;						\
	switch(s) {							\
		case 1: _o = (uint8_t)(long)(o); break;			\
		case 2: _o = (uint16_t)(long)(o); break;		\
		case 4: _o = (uint32_t)(long)(o); break;		\
		case 8: _o = (uint64_t)(long)(o); break;		\
		default: break;						\
	}								\
	switch(s) {							\
		case 1:							\
		_r = ia64_cmpxchg1_##sem((uint8_t*)ptr,n,_o); break;	\
		case 2:							\
		_r = ia64_cmpxchg2_##sem((uint16_t*)ptr,n,_o); break;	\
		case 4:							\
		_r = ia64_cmpxchg4_##sem((uint32_t*)ptr,n,_o); break;	\
		case 8:							\
		_r = ia64_cmpxchg8_##sem((uint64_t*)ptr,n,_o); break;	\
		default:						\
		_r = ia64_cmpxchg_called_with_bad_pointer(); break;	\
	}								\
	(__typeof__(o)) _r;						\
})
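
/*
 * Convenience wrappers that infer the operand width from the pointee
 * type of 'ptr'.
 */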
#define cmpxchg_acq(ptr,o,n) ia64_cmpxchg(acq,ptr,o,n,sizeof(*(ptr)))
#define cmpxchg_rel(ptr,o,n) ia64_cmpxchg(rel,ptr,o,n,sizeof(*(ptr)))

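/*
 * Illustrative use (a sketch, not taken from the original sources):
 * claim a 32-bit lock word with acquire semantics and hand it back with
 * release semantics.  cmpxchg returns the value previously in memory,
 * so a result equal to the expected value means the exchange happened.
 *
 *	uint32_t lock = 0;
 *
 *	if (cmpxchg_acq(&lock, 0, 1) == 0) {
 *		... critical section ...
 *		(void) cmpxchg_rel(&lock, 1, 0);
 *	}
 */
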
#ifdef __INTEL_COMPILER
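/*
 * The Intel compiler provides these operations as built-in intrinsics;
 * declare them here and map the generic names onto them below.
 */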
void  __fc(uint64_t *addr);
void  __synci(void);
void __isrlz(void);
void __dsrlz(void);
uint64_t __getReg(const int whichReg);
uint64_t _InterlockedCompareExchange8_rel(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange8_acq(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_rel(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_acq(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_rel(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_acq(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange64_rel(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange64_acq(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);

#define ia64_cmpxchg1_rel	_InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq	_InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel	_InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq	_InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel	_InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq	_InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel	_InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq	_InterlockedCompareExchange64_acq

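/* Map the serialization, cache-flush and register-read helpers. */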
#define ia64_srlz_d		__dsrlz
#define ia64_srlz_i		__isrlz
#define __ia64_fc 		__fc
#define ia64_sync_i		__synci
#define __ia64_getreg		__getReg
#else /* __INTEL_COMPILER */
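/*
 * GCC path: open-code each primitive with inline assembly.  The
 * expected value is first loaded into the ar.ccv application register;
 * cmpxchgN.acq/.rel then stores 'new' into *ptr only if *ptr still
 * equals ar.ccv, and returns the value that was in memory either way.
 */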
#define ia64_cmpxchg1_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg1.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg1_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg1.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg2_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg2.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg2_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg2.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg4_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg4.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg4_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg4.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg8_acq(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg8.acq %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

#define ia64_cmpxchg8_rel(ptr, new, old)						\
({											\
	uint64_t ia64_intri_res;							\
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old));					\
	asm volatile ("cmpxchg8.rel %0=[%1],%2,ar.ccv":					\
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory");	\
	ia64_intri_res;									\
})

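/*
 * Serialization and cache-maintenance primitives: srlz.i/srlz.d
 * serialize the instruction/data streams, fc flushes a cache line, and
 * sync.i makes preceding flushes visible to the instruction fetch
 * stream.
 */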
#define ia64_srlz_i()	asm volatile (";; srlz.i ;;" ::: "memory")
#define ia64_srlz_d()	asm volatile (";; srlz.d" ::: "memory")
#define __ia64_fc(addr)	asm volatile ("fc %0" :: "r"(addr) : "memory")
#define ia64_sync_i()	asm volatile (";; sync.i" ::: "memory")

#endif /* __INTEL_COMPILER */
#endif /* IA64_INTRINSIC_H */