#ifndef _ASM_IA64_INTEL_INTRIN_H
#define _ASM_IA64_INTEL_INTRIN_H
/*
 * Intel Compiler Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 * Copyright (C) 2005,2006 Hongjiu Lu <hongjiu.lu@intel.com>
 *
 */
#include <ia64intrin.h>

#define ia64_barrier()          __memory_barrier()

#define ia64_stop()     /* Nothing: As of now stop bit is generated for each
                         * intrinsic
                         */

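/*
 * Register access: __getReg()/__setReg() read and write an individual
 * IA-64 register selected by an _IA64_REG_* index.  The ia64_native_*
 * names are the raw hardware variants; the generic ia64_* wrappers are
 * expected to be provided elsewhere.
 */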
#define ia64_native_getreg      __getReg
#define ia64_native_setreg      __setReg

#define ia64_hint               __hint
#define ia64_hint_pause         __hint_pause

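/*
 * mux1 permutes the eight bytes of a 64-bit value; the suffix picks the
 * permutation (broadcast, mix, shuffle, alternate, reverse).  The compiler
 * exposes it through the _m64_* helpers, so ia64_mux1() below converts the
 * operand to and from the __m64 type around the _m64_mux1() call.
 */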
#define ia64_mux1_brcst         _m64_mux1_brcst
#define ia64_mux1_mix           _m64_mux1_mix
#define ia64_mux1_shuf          _m64_mux1_shuf
#define ia64_mux1_alt           _m64_mux1_alt
#define ia64_mux1_rev           _m64_mux1_rev

#define ia64_mux1(x,v)          _m_to_int64(_m64_mux1(_m_from_int64(x), (v)))
#define ia64_popcnt             _m64_popcnt
#define ia64_getf_exp           __getf_exp
#define ia64_shrp               _m64_shrp

#define ia64_tpa                __tpa
#define ia64_invala             __invala
#define ia64_invala_gr          __invala_gr
#define ia64_invala_fr          __invala_fr
#define ia64_nop                __nop
#define ia64_sum                __sum
#define ia64_native_ssm         __ssm
#define ia64_rum                __rum
#define ia64_native_rsm         __rsm
#define ia64_native_fc          __fc

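/*
 * Floating-point register loads and stores: single, double, extended and
 * 8-byte integer forms, plus the fill/spill forms used to save and restore
 * the complete register contents.
 */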
#define ia64_ldfs               __ldfs
#define ia64_ldfd               __ldfd
#define ia64_ldfe               __ldfe
#define ia64_ldf8               __ldf8
#define ia64_ldf_fill           __ldf_fill

#define ia64_stfs               __stfs
#define ia64_stfd               __stfd
#define ia64_stfe               __stfe
#define ia64_stf8               __stf8
#define ia64_stf_spill          __stf_spill

#define ia64_mf                 __mf
#define ia64_mfa                __mfa

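/* Atomic fetch-and-add (fetchadd4/fetchadd8) with acquire or release ordering. */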
#define ia64_fetchadd4_acq      __fetchadd4_acq
#define ia64_fetchadd4_rel      __fetchadd4_rel
#define ia64_fetchadd8_acq      __fetchadd8_acq
#define ia64_fetchadd8_rel      __fetchadd8_rel

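/*
 * Atomic exchange and compare-and-exchange map onto the Intel compiler's
 * _Interlocked* intrinsics; the numeric suffix is the operand size in bytes
 * and _acq/_rel select acquire or release semantics.
 */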
#define ia64_xchg1              _InterlockedExchange8
#define ia64_xchg2              _InterlockedExchange16
#define ia64_xchg4              _InterlockedExchange
#define ia64_xchg8              _InterlockedExchange64

#define ia64_cmpxchg1_rel       _InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq       _InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel       _InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq       _InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel       _InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq       _InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel       _InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq       _InterlockedCompareExchange64_acq

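/*
 * Illustrative sketch only (not part of the original header): callers pass
 * the location, the new value and the expected old value, and get the
 * previous contents back, e.g.
 *
 *      old = ia64_cmpxchg4_acq(&word, new, expected);
 *
 * assuming the argument order of the GCC-side ia64_cmpxchg definitions that
 * these macros mirror.
 */

/*
 * Indexed (indirect) register files -- debug (DBR/IBR), protection key
 * (PKR), performance monitor (PMC/PMD), region (RR) and CPUID registers --
 * are accessed by index through __setIndReg()/__getIndReg().
 */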
#define __ia64_set_dbr(index, val)      \
                __setIndReg(_IA64_REG_INDR_DBR, index, val)
#define ia64_set_ibr(index, val)        \
                __setIndReg(_IA64_REG_INDR_IBR, index, val)
#define ia64_set_pkr(index, val)        \
                __setIndReg(_IA64_REG_INDR_PKR, index, val)
#define ia64_set_pmc(index, val)        \
                __setIndReg(_IA64_REG_INDR_PMC, index, val)
#define ia64_set_pmd(index, val)        \
                __setIndReg(_IA64_REG_INDR_PMD, index, val)
#define ia64_native_set_rr(index, val)  \
                __setIndReg(_IA64_REG_INDR_RR, index, val)

#define ia64_native_get_cpuid(index)    \
                __getIndReg(_IA64_REG_INDR_CPUID, index)
#define __ia64_get_dbr(index)           __getIndReg(_IA64_REG_INDR_DBR, index)
#define ia64_get_ibr(index)             __getIndReg(_IA64_REG_INDR_IBR, index)
#define ia64_get_pkr(index)             __getIndReg(_IA64_REG_INDR_PKR, index)
#define ia64_get_pmc(index)             __getIndReg(_IA64_REG_INDR_PMC, index)
#define ia64_native_get_pmd(index)      __getIndReg(_IA64_REG_INDR_PMD, index)
#define ia64_native_get_rr(index)       __getIndReg(_IA64_REG_INDR_RR, index)

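/*
 * srlz.d/srlz.i serialize the data and instruction streams so that earlier
 * system-register writes take effect before later references.
 */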
#define ia64_srlz_d             __dsrlz
#define ia64_srlz_i             __isrlz

#define ia64_dv_serialize_data()
#define ia64_dv_serialize_instruction()

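/*
 * Explicitly ordered memory accesses: the st.rel stores have release
 * semantics, the ld.acq loads have acquire semantics.
 */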
#define ia64_st1_rel            __st1_rel
#define ia64_st2_rel            __st2_rel
#define ia64_st4_rel            __st4_rel
#define ia64_st8_rel            __st8_rel

/* FIXME: need st4.rel.nta intrinsic */
#define ia64_st4_rel_nta        __st4_rel

#define ia64_ld1_acq            __ld1_acq
#define ia64_ld2_acq            __ld2_acq
#define ia64_ld4_acq            __ld4_acq
#define ia64_ld8_acq            __ld8_acq

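/*
 * Translation and cache maintenance: sync.i makes preceding cache flushes
 * visible to instruction fetch, thash/ttag compute the VHPT hash address and
 * tag for a virtual address, the itc/itr forms insert data or instruction
 * translations, and the ptc/ptr forms purge them.
 */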
#define ia64_sync_i             __synci
#define ia64_native_thash       __thash
#define ia64_native_ttag        __ttag
#define ia64_itcd               __itcd
#define ia64_itci               __itci
#define ia64_itrd               __itrd
#define ia64_itri               __itri
#define ia64_ptce               __ptce
#define ia64_ptcl               __ptcl
#define ia64_native_ptcg        __ptcg
#define ia64_native_ptcga       __ptcga
#define ia64_ptri               __ptri
#define ia64_ptrd               __ptrd
#define ia64_dep_mi             _m64_dep_mi

/* Values for lfhint in __lfetch and __lfetch_fault */

#define ia64_lfhint_none        __lfhint_none
#define ia64_lfhint_nt1         __lfhint_nt1
#define ia64_lfhint_nt2         __lfhint_nt2
#define ia64_lfhint_nta         __lfhint_nta

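/*
 * Line prefetch in plain, exclusive and faulting forms.  Illustrative use
 * only (assumed, not taken from this header): prefetching a cache line for
 * read with the default temporal-locality hint would look like
 * ia64_lfetch(ia64_lfhint_none, addr).
 */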
#define ia64_lfetch             __lfetch
#define ia64_lfetch_excl        __lfetch_excl
#define ia64_lfetch_fault       __lfetch_fault
#define ia64_lfetch_fault_excl  __lfetch_fault_excl

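/*
 * Restore the saved interrupt state in x: re-enable interrupts by setting
 * psr.i (followed by a data serialize) when x is non-zero, otherwise mask
 * them by clearing psr.i.
 */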
#define ia64_native_intrin_local_irq_restore(x)         \
do {                                                    \
        if ((x) != 0) {                                 \
                ia64_native_ssm(IA64_PSR_I);            \
                ia64_srlz_d();                          \
        } else {                                        \
                ia64_native_rsm(IA64_PSR_I);            \
        }                                               \
} while (0)

#define __builtin_trap()        __break(0);

#endif /* _ASM_IA64_INTEL_INTRIN_H */