#ifndef _ASM_IA64_INTEL_INTRIN_H
#define _ASM_IA64_INTEL_INTRIN_H
/*
 * Intel Compiler Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 * Copyright (C) 2005,2006 Hongjiu Lu <hongjiu.lu@intel.com>
 *
 */
#include <ia64intrin.h>

#define ia64_barrier()		__memory_barrier()

#define ia64_stop()	/* Nothing: as of now a stop bit is generated
			 * for each intrinsic.
			 */
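/*
 * ia64_barrier() is only a compiler-level barrier: it keeps the compiler
 * from reordering or caching memory accesses across it but emits no
 * instruction; use mf (further down) when a hardware ordering fence is
 * needed.  ia64_stop() expands to nothing because the Intel compiler
 * already places a stop bit after each intrinsic.
 */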

#define ia64_native_getreg	__getReg
#define ia64_native_setreg	__setReg
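/*
 * __getReg()/__setReg() read and write individual registers named by the
 * _IA64_REG_* indices from <asm/ia64regs.h>.  Illustrative use, not part
 * of this header, e.g. reading the stack pointer:
 *
 *	unsigned long sp = ia64_native_getreg(_IA64_REG_SP);
 */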

#define ia64_hint		__hint
#define ia64_hint_pause		__hint_pause
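/*
 * ia64_hint(ia64_hint_pause) emits "hint @pause", the spin-wait hint that
 * cpu_relax() uses on ia64 (illustrative note only, no new API here).
 */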

#define ia64_mux1_brcst		_m64_mux1_brcst
#define ia64_mux1_mix		_m64_mux1_mix
#define ia64_mux1_shuf		_m64_mux1_shuf
#define ia64_mux1_alt		_m64_mux1_alt
#define ia64_mux1_rev		_m64_mux1_rev

#define ia64_mux1(x,v)		_m_to_int64(_m64_mux1(_m_from_int64(x), (v)))
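/*
 * mux1 permutes the eight bytes of a 64-bit value: @brcst broadcasts
 * byte 0, while @mix, @shuf, @alt and @rev perform the other fixed
 * permutations (@rev reverses byte order, i.e. a 64-bit byte swap).
 * Illustrative use, not part of this header:
 *
 *	__u64 swapped = ia64_mux1(x, ia64_mux1_rev);
 */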
#define ia64_popcnt		_m64_popcnt
#define ia64_getf_exp		__getf_exp
#define ia64_shrp		_m64_shrp
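/*
 * popcnt counts the set bits of a 64-bit value, getf.exp extracts the
 * sign/exponent field of a floating-point register, and shrp (shift
 * right pair) extracts 64 bits from the concatenation of two source
 * registers, which is what makes 64-bit rotates cheap on ia64.
 */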

#define ia64_tpa		__tpa
#define ia64_invala		__invala
#define ia64_invala_gr		__invala_gr
#define ia64_invala_fr		__invala_fr
#define ia64_nop		__nop
#define ia64_sum		__sum
#define ia64_native_ssm		__ssm
#define ia64_rum		__rum
#define ia64_native_rsm		__rsm
#define ia64_native_fc		__fc
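/*
 * tpa translates a virtual address to a physical one, invala invalidates
 * the ALAT (entirely, or a single general/floating-point register entry),
 * sum/rum set and reset user-mask bits, ssm/rsm set and reset PSR bits
 * (e.g. psr.i in the irq-restore helper at the bottom of this file), and
 * fc flushes the cache line containing its operand.
 */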

#define ia64_ldfs		__ldfs
#define ia64_ldfd		__ldfd
#define ia64_ldfe		__ldfe
#define ia64_ldf8		__ldf8
#define ia64_ldf_fill		__ldf_fill

#define ia64_stfs		__stfs
#define ia64_stfd		__stfd
#define ia64_stfe		__stfe
#define ia64_stf8		__stf8
#define ia64_stf_spill		__stf_spill
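/*
 * Floating-point loads and stores in single (fs), double (fd), extended
 * (fe) and 8-byte integer (f8) format; ldf.fill/stf.spill move the full
 * register image and are the primitives used when saving and restoring
 * the FP register file.
 */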

#define ia64_mf			__mf
#define ia64_mfa		__mfa

#define ia64_fetchadd4_acq	__fetchadd4_acq
#define ia64_fetchadd4_rel	__fetchadd4_rel
#define ia64_fetchadd8_acq	__fetchadd8_acq
#define ia64_fetchadd8_rel	__fetchadd8_rel
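/*
 * mf is the memory ordering fence; mf.a additionally orders against
 * acceptance of accesses to uncacheable pages.  fetchadd atomically adds
 * to a 4- or 8-byte location and returns the previous value; the
 * architecture only permits increments of +/-1, 4, 8 and 16.
 * Illustrative use, with the (pointer, increment) argument order used
 * elsewhere in the ia64 code:
 *
 *	old = ia64_fetchadd4_acq(&counter, 1);
 */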

#define ia64_xchg1		_InterlockedExchange8
#define ia64_xchg2		_InterlockedExchange16
#define ia64_xchg4		_InterlockedExchange
#define ia64_xchg8		_InterlockedExchange64

#define ia64_cmpxchg1_rel	_InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq	_InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel	_InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq	_InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel	_InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq	_InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel	_InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq	_InterlockedCompareExchange64_acq
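/*
 * The xchg/cmpxchg primitives map onto the Intel compiler's interlocked
 * intrinsics: xchgN atomically swaps a 1/2/4/8-byte location and returns
 * the previous value; cmpxchgN_acq/_rel store only if the location still
 * holds the expected value, with acquire or release ordering.  Kernel
 * code normally reaches these through the generic xchg()/cmpxchg()
 * wrappers rather than invoking them directly.
 */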

#define __ia64_set_dbr(index, val)	\
		__setIndReg(_IA64_REG_INDR_DBR, index, val)
#define ia64_set_ibr(index, val)	\
		__setIndReg(_IA64_REG_INDR_IBR, index, val)
#define ia64_set_pkr(index, val)	\
		__setIndReg(_IA64_REG_INDR_PKR, index, val)
#define ia64_set_pmc(index, val)	\
		__setIndReg(_IA64_REG_INDR_PMC, index, val)
#define ia64_set_pmd(index, val)	\
		__setIndReg(_IA64_REG_INDR_PMD, index, val)
#define ia64_native_set_rr(index, val)	\
		__setIndReg(_IA64_REG_INDR_RR, index, val)

#define ia64_native_get_cpuid(index)	\
		__getIndReg(_IA64_REG_INDR_CPUID, index)
#define __ia64_get_dbr(index)		__getIndReg(_IA64_REG_INDR_DBR, index)
#define ia64_get_ibr(index)		__getIndReg(_IA64_REG_INDR_IBR, index)
#define ia64_get_pkr(index)		__getIndReg(_IA64_REG_INDR_PKR, index)
#define ia64_get_pmc(index)		__getIndReg(_IA64_REG_INDR_PMC, index)
#define ia64_native_get_pmd(index)	__getIndReg(_IA64_REG_INDR_PMD, index)
#define ia64_native_get_rr(index)	__getIndReg(_IA64_REG_INDR_RR, index)
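/*
 * __setIndReg()/__getIndReg() access the indexed register files: data and
 * instruction breakpoint registers (dbr/ibr), protection keys (pkr),
 * performance monitor configuration and data registers (pmc/pmd), region
 * registers (rr) and the CPUID registers.  Illustrative use:
 *
 *	unsigned long pmd0 = ia64_native_get_pmd(0);
 */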

#define ia64_srlz_d		__dsrlz
#define ia64_srlz_i		__isrlz

#define ia64_dv_serialize_data()
#define ia64_dv_serialize_instruction()
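/*
 * srlz.d/srlz.i force data and instruction serialization so that earlier
 * writes to system state (PSR bits, control and region registers, ...)
 * take effect before later references; see the irq-restore helper below.
 * The ia64_dv_serialize_*() macros only emit assembler data-dependency
 * directives in the gcc version of this header and expand to nothing here.
 */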

#define ia64_st1_rel		__st1_rel
#define ia64_st2_rel		__st2_rel
#define ia64_st4_rel		__st4_rel
#define ia64_st8_rel		__st8_rel

/* FIXME: need st4.rel.nta intrinsic */
#define ia64_st4_rel_nta	__st4_rel

#define ia64_ld1_acq		__ld1_acq
#define ia64_ld2_acq		__ld2_acq
#define ia64_ld4_acq		__ld4_acq
#define ia64_ld8_acq		__ld8_acq
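/*
 * Explicitly ordered accesses: stN.rel has release semantics (prior
 * accesses become visible before the store), ldN.acq has acquire
 * semantics (the load is ordered before anything that follows).  Note
 * that ia64_st4_rel_nta falls back to a plain st4.rel and silently drops
 * the non-temporal hint (see the FIXME above).  Illustrative
 * publish/consume pattern, not part of this header:
 *
 *	producer:  ia64_st8_rel(&flag, 1);
 *	consumer:  while (!ia64_ld8_acq(&flag))
 *			cpu_relax();
 */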

#define ia64_sync_i		__synci
#define ia64_native_thash	__thash
#define ia64_native_ttag	__ttag
#define ia64_itcd		__itcd
#define ia64_itci		__itci
#define ia64_itrd		__itrd
#define ia64_itri		__itri
#define ia64_ptce		__ptce
#define ia64_ptcl		__ptcl
#define ia64_native_ptcg	__ptcg
#define ia64_native_ptcga	__ptcga
#define ia64_ptri		__ptri
#define ia64_ptrd		__ptrd
#define ia64_dep_mi		_m64_dep_mi
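/*
 * TLB and cache maintenance: sync.i makes instruction-cache modifications
 * observable (paired with fc and srlz.i when patching code), thash/ttag
 * compute the VHPT hash address and tag for a virtual address, itc/itr
 * insert data or instruction translations into the translation cache or
 * translation registers, and the ptc/ptr forms purge them (ptc.e entire,
 * ptc.l local, ptc.g/ptc.ga global).  dep_mi is the immediate form of the
 * dep (bit-field deposit) instruction.
 */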

/* Values for lfhint in __lfetch and __lfetch_fault */

#define ia64_lfhint_none	__lfhint_none
#define ia64_lfhint_nt1		__lfhint_nt1
#define ia64_lfhint_nt2		__lfhint_nt2
#define ia64_lfhint_nta		__lfhint_nta

#define ia64_lfetch		__lfetch
#define ia64_lfetch_excl	__lfetch_excl
#define ia64_lfetch_fault	__lfetch_fault
#define ia64_lfetch_fault_excl	__lfetch_fault_excl
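/*
 * lfetch prefetches the cache line containing its address; the _excl
 * forms request exclusive ownership (write intent) and the _fault forms
 * may raise faults instead of being dropped.  The lfhint values select
 * the temporal locality level (none, .nt1, .nt2, .nta).  Illustrative
 * use, matching prefetch() in <asm/processor.h>:
 *
 *	ia64_lfetch(ia64_lfhint_none, addr);
 */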

#define ia64_native_intrin_local_irq_restore(x)		\
do {							\
	if ((x) != 0) {					\
		ia64_native_ssm(IA64_PSR_I);		\
		ia64_srlz_d();				\
	} else {					\
		ia64_native_rsm(IA64_PSR_I);		\
	}						\
} while (0)
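/*
 * Backs local_irq_restore(): x is the saved psr.i state, so a non-zero
 * value re-enables interrupts (ssm psr.i followed by srlz.d so the new
 * mask takes effect) and zero masks them again with rsm psr.i.
 * IA64_PSR_I comes from <asm/kregs.h>.
 */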

#define __builtin_trap()	__break(0)

#endif /* _ASM_IA64_INTEL_INTRIN_H */