#ifndef _ASM_X86_MSR_H
#define _ASM_X86_MSR_H

#include <asm/msr-index.h>

#ifndef __ASSEMBLY__
# include <linux/types.h>
#endif

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/asm.h>
#include <asm/errno.h>

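/*
 * Note (added for clarity): the .byte sequence below is the RDTSCP
 * opcode (0f 01 f9), spelled out by hand so the code also assembles
 * with binutils versions that do not know the mnemonic.
 */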
static inline unsigned long long native_read_tscp(unsigned int *aux)
{
	unsigned long low, high;
	asm volatile(".byte 0x0f,0x01,0xf9"
		     : "=a" (low), "=d" (high), "=c" (*aux));
	return low | ((u64)high << 32);
}

/*
 * Both i386 and x86_64 return the 64-bit value in edx:eax, but gcc's "A"
 * constraint has different meanings.  For i386, "A" means exactly
 * edx:eax, while for x86_64 it doesn't mean rdx:rax or edx:eax; instead,
 * it means rax *or* rdx, so it cannot be used here.
 */
#ifdef CONFIG_X86_64
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	((low) | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif

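/*
 * Illustrative expansion (added for clarity, not in the original
 * source): with the CONFIG_X86_64 definitions above, the body of
 * native_read_msr() below compiles as
 *
 *	unsigned low, high;
 *	asm volatile("rdmsr" : "=a" (low), "=d" (high) : "c" (msr));
 *	return (low) | ((u64)(high) << 32);
 *
 * whereas on i386 the whole 64-bit result comes back through the "A"
 * (edx:eax) constraint in a single variable.
 */
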
static inline unsigned long long native_read_msr(unsigned int msr)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
	return EAX_EDX_VAL(val, low, high);
}

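/*
 * Note (added for clarity) on the exception-handling variants below:
 * label 2: marks the potentially faulting instruction and 3: is the
 * out-of-line fixup that loads -EFAULT into the error operand;
 * _ASM_EXTABLE(2b, 3b) records the pair in the exception table, so a
 * #GP on a non-existent MSR resumes at 1: instead of oopsing.
 */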
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("2: rdmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
		     : "c" (msr), [fault] "i" (-EFAULT));
	return EAX_EDX_VAL(val, low, high);
}

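/*
 * Note (added for clarity, an editor's reading of the constant): the
 * "D" (0x9c5a203a) input below loads a vendor passcode into %edi;
 * certain AMD MSRs can only be read when this magic value is present
 * in %edi at the time of the rdmsr.
 */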
static inline unsigned long long native_read_msr_amd_safe(unsigned int msr,
						      int *err)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("2: rdmsr ; xor %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : "=r" (*err), EAX_EDX_RET(val, low, high)
		     : "c" (msr), "D" (0x9c5a203a), "i" (-EFAULT));
	return EAX_EDX_VAL(val, low, high);
}

static inline void native_write_msr(unsigned int msr,
				    unsigned low, unsigned high)
{
	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
}

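/*
 * Note (added for clarity) on the constraints in the safe write below:
 * the [err] "=a" output and the "0" (low) input share operand 0, so
 * 'low' goes in through %eax and the error code comes back out in the
 * same register after the fixup runs.
 */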
/* Can be uninlined because referenced by paravirt */
notrace static inline int native_write_msr_safe(unsigned int msr,
					unsigned low, unsigned high)
{
	int err;
	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : [err] "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       [fault] "i" (-EFAULT)
		     : "memory");
	return err;
}

extern unsigned long long native_read_tsc(void);

static __always_inline unsigned long long __native_read_tsc(void)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));

	return EAX_EDX_VAL(val, low, high);
}

static inline unsigned long long native_read_pmc(int counter)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
	return EAX_EDX_VAL(val, low, high);
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to model-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), which allows gcc to optimize better.  See the
 * example after wrmsr() below.
 */

#define rdmsr(msr, val1, val2)					\
do {								\
	u64 __val = native_read_msr((msr));			\
	(val1) = (u32)__val;					\
	(val2) = (u32)(__val >> 32);				\
} while (0)

static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
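
/*
 * Example (hypothetical caller, not part of this header): rdmsr()
 * writes straight into its l-value arguments, so no address-of is
 * needed:
 *
 *	u32 lo, hi;
 *	rdmsr(MSR_IA32_APICBASE, lo, hi);
 *	wrmsr(MSR_IA32_APICBASE, lo | 0x800, hi);  (set global enable bit)
 */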

#define rdmsrl(msr, val)			\
	((val) = native_read_msr((msr)))

#define wrmsrl(msr, val)						\
	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))

/* wrmsr with exception handling */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}

/* rdmsr with exception handling */
#define rdmsr_safe(msr, p1, p2)					\
({								\
	int __err;						\
	u64 __val = native_read_msr_safe((msr), &__err);	\
	(*p1) = (u32)__val;					\
	(*p2) = (u32)(__val >> 32);				\
	__err;							\
})
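
/*
 * Example (hypothetical caller): unlike rdmsr(), rdmsr_safe() takes
 * pointers and returns non-zero if the access faulted, so probing an
 * MSR that may not exist looks like:
 *
 *	u32 lo, hi;
 *	if (rdmsr_safe(MSR_IA32_PLATFORM_ID, &lo, &hi))
 *		return -ENODEV;
 */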

static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
{
	int err;

	*p = native_read_msr_safe(msr, &err);
	return err;
}
static inline int rdmsrl_amd_safe(unsigned msr, unsigned long long *p)
{
	int err;

	*p = native_read_msr_amd_safe(msr, &err);
	return err;
}

be7baf80 184#define rdtscl(low) \
205516c1 185 ((low) = (u32)__native_read_tsc())
be7baf80
TG
186
187#define rdtscll(val) \
205516c1 188 ((val) = __native_read_tsc())
be7baf80 189
abb0ade0
JP
190#define rdpmc(counter, low, high) \
191do { \
192 u64 _l = native_read_pmc((counter)); \
193 (low) = (u32)_l; \
194 (high) = (u32)(_l >> 32); \
195} while (0)
be7baf80 196
abb0ade0
JP
197#define rdtscp(low, high, aux) \
198do { \
199 unsigned long long _val = native_read_tscp(&(aux)); \
200 (low) = (u32)_val; \
201 (high) = (u32)(_val >> 32); \
202} while (0)
be7baf80 203
c210d249 204#define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
be7baf80 205
c210d249 206#endif /* !CONFIG_PARAVIRT */


#define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val),		\
					     (u32)((val) >> 32))

#define write_tsc(val1, val2) wrmsr(0x10, (val1), (val2))

#define write_rdtscp_aux(val) wrmsr(0xc0000103, (val), 0)
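
/*
 * For reference (added for clarity): 0x10 above is MSR_IA32_TSC and
 * 0xc0000103 is MSR_TSC_AUX, the per-CPU value returned in %ecx by
 * rdtscp.
 */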

#ifdef CONFIG_SMP
int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /* CONFIG_SMP */
static inline int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
	return 0;
}
static inline int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
	return 0;
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
				    u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif  /* CONFIG_SMP */
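
/*
 * Example (hypothetical caller): the *_on_cpu() helpers perform the
 * access on a specific CPU, which matters for per-core MSRs; on !SMP
 * builds they fall back to the local rdmsr/wrmsr above.
 *
 *	u32 lo, hi;
 *	if (rdmsr_safe_on_cpu(1, MSR_IA32_THERM_STATUS, &lo, &hi) == 0)
 *		(use lo/hi)
 */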
#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */


#endif /* _ASM_X86_MSR_H */