1#ifndef _ASM_X86_MSR_H
2#define _ASM_X86_MSR_H
3
4#include "msr-index.h"
5
6#ifndef __ASSEMBLY__
7
8#include <asm/asm.h>
9#include <asm/errno.h>
10#include <asm/cpumask.h>
11#include <uapi/asm/msr.h>
12
/*
 * An MSR value, accessible either as two 32-bit halves (l = bits 31:0,
 * h = bits 63:32) or as one 64-bit quantity (q); the anonymous union
 * overlays the two views on the same storage.
 */
struct msr {
	union {
		struct {
			u32 l;
			u32 h;
		};
		u64 q;
	};
};
22
/*
 * Parameter block for the per-CPU MSR helpers.
 * NOTE(review): field roles inferred from names — msr_no is the MSR
 * index, reg the value read/written, msrs an optional array, err the
 * result; confirm against the out-of-line implementation.
 */
struct msr_info {
	u32 msr_no;
	struct msr reg;
	struct msr *msrs;
	int err;
};
29
/*
 * Parameter block for the register-array MSR helpers
 * (rdmsr_safe_regs()/wrmsr_safe_regs()): regs points at the caller's
 * 8-entry register array, err receives the access status.
 */
struct msr_regs_info {
	u32 *regs;
	int err;
};
34
/*
 * Execute RDTSCP (emitted as raw bytes, 0f 01 f9, for the benefit of
 * old assemblers): returns the TSC from EDX:EAX as one 64-bit value and
 * stores the IA32_TSC_AUX value (delivered in ECX) through *aux.
 */
static inline unsigned long long native_read_tscp(unsigned int *aux)
{
	unsigned long low, high;
	asm volatile(".byte 0x0f,0x01,0xf9"
		     : "=a" (low), "=d" (high), "=c" (*aux));
	return low | ((u64)high << 32);
}
42
43
44
45
46
47
48
/*
 * Both i386 and x86-64 deliver 64-bit RDMSR/RDTSC/RDPMC results in the
 * EDX:EAX register pair, but the gcc constraints needed to describe
 * that differ:
 *  - on 64-bit, EDX:EAX are two independent 32-bit outputs ("a"/"d")
 *    that must be recombined by shifting (EAX_EDX_VAL);
 *  - on 32-bit, the "A" constraint names the EDX:EAX pair directly as
 *    one 64-bit operand.
 * These macros hide the difference from the asm wrappers below.
 */
#ifdef CONFIG_X86_64
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	((low) | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif
60
/*
 * Read MSR @msr with RDMSR (MSR index in ECX, result in EDX:EAX).
 * No fault handling: RDMSR on a non-existent MSR will #GP — use
 * native_read_msr_safe() when that is possible.
 */
static __always_inline unsigned long long native_read_msr(unsigned int msr)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
	return EAX_EDX_VAL(val, low, high);
}
68
/*
 * Fault-tolerant MSR read.  On success the xor clears *err and the
 * MSR value is returned.  If the RDMSR faults, the exception-table
 * entry (_ASM_EXTABLE) redirects execution to the fixup at label 3,
 * which stores -EIO in *err and resumes at label 1.
 * NOTE(review): on the fault path the returned value is whatever the
 * registers happened to hold — callers must check *err first.
 */
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("2: rdmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
		     : "c" (msr), [fault] "i" (-EIO));
	return EAX_EDX_VAL(val, low, high);
}
84
/*
 * Write the 64-bit value high:low to MSR @msr with WRMSR (index in
 * ECX, value in EDX:EAX).  The "memory" clobber keeps the compiler
 * from reordering memory accesses across the MSR write.  No fault
 * handling — see native_write_msr_safe().
 */
static __always_inline void native_write_msr(unsigned int msr,
					     unsigned low, unsigned high)
{
	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
}
90
91
/*
 * Fault-tolerant MSR write.  Returns 0 on success; if the WRMSR
 * faults, the exception-table fixup at label 3 loads -EIO into the
 * error register and resumes at label 1.  @err lives in EAX ("=a"),
 * with @low tied to the same register via the "0" constraint, so the
 * xor on the success path clears it in place.  Marked notrace —
 * presumably to keep tracing out of this low-level path; confirm
 * against the tracing infrastructure.
 */
notrace static inline int native_write_msr_safe(unsigned int msr,
				unsigned low, unsigned high)
{
	int err;
	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : [err] "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       [fault] "i" (-EIO)
		     : "memory");
	return err;
}
108
/*
 * Fault-safe MSR access passing the full register set in an 8-entry
 * array (implemented out of line); returns an error code instead of
 * faulting.  NOTE(review): array layout is defined by the out-of-line
 * implementation — confirm there before use.
 */
extern int rdmsr_safe_regs(u32 regs[8]);
extern int wrmsr_safe_regs(u32 regs[8]);
111
112
113
114
115
116
117
118
119
120
/*
 * rdtsc() - read the current time-stamp counter.
 *
 * Plain RDTSC gives no ordering guarantees: the CPU may execute it
 * out of order relative to surrounding instructions.  Use
 * rdtsc_ordered() when ordering against other code matters.
 */
static __always_inline unsigned long long rdtsc(void)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));

	return EAX_EDX_VAL(val, low, high);
}
129
130
131
132
133
/* Out-of-line TSC read; see rdtsc() above for the inline variant. */
extern unsigned long long native_read_tsc(void);
135
136
137
138
139
140
141
142
143
/*
 * rdtsc_ordered() - read the TSC with ordering guarantees.
 *
 * RDTSC itself is not serializing, so a plain rdtsc() may be executed
 * speculatively ahead of preceding instructions.  The barrier_nospec()
 * acts as a speculation barrier so the TSC read cannot be hoisted
 * above it.  NOTE(review): the exact fence (LFENCE/MFENCE selection)
 * depends on barrier_nospec()'s definition — confirm in barrier.h.
 */
static __always_inline unsigned long long rdtsc_ordered(void)
{
	barrier_nospec();
	return rdtsc();
}
160
161
/* Legacy interface: store the ordered TSC value into the lvalue @now. */
#define rdtscll(now) do { (now) = rdtsc_ordered(); } while (0)
163
/*
 * Read performance-monitoring counter @counter with RDPMC (counter
 * index in ECX, 64-bit result in EDX:EAX).
 */
static inline unsigned long long native_read_pmc(int counter)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
	return EAX_EDX_VAL(val, low, high);
}
171
172#ifdef CONFIG_PARAVIRT
173#include <asm/paravirt.h>
174#else
175#include <linux/errno.h>
176
177
178
179
180
181
/*
 * rdmsr(msr, low, high) - read MSR @msr, storing bits 31:0 in @low and
 * bits 63:32 in @high.  @msr is evaluated exactly once; @low and @high
 * must be assignable lvalues.
 */
#define rdmsr(msr, low, high)					\
do {								\
	u64 __val = native_read_msr((msr));			\
	(void)((low) = (u32)__val);				\
	(void)((high) = (u32)(__val >> 32));			\
} while (0)
188
/* Write the 64-bit value high:low to MSR @msr (non-paravirt path). */
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
193
/* rdmsrl(msr, val) - read MSR @msr as one 64-bit value into lvalue @val. */
#define rdmsrl(msr, val)			\
	((val) = native_read_msr((msr)))

/*
 * wrmsrl(msr, val) - write 64-bit @val to MSR @msr, split into the
 * low/high halves WRMSR expects.  Note: @val is expanded twice, so
 * avoid side effects in the argument.
 */
#define wrmsrl(msr, val)						\
	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
199
200
/*
 * Fault-tolerant wrmsr(): returns 0 on success, -EIO if the write
 * faulted (via native_write_msr_safe()).
 */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}
205
206
/*
 * rdmsr_safe(msr, low, high) - fault-tolerant rdmsr().  Unlike
 * rdmsr(), @low and @high are POINTERS to u32 here.  The statement
 * expression evaluates to 0 on success or -EIO if the read faulted;
 * check it before trusting the stored halves.
 */
#define rdmsr_safe(msr, low, high)				\
({								\
	int __err;						\
	u64 __val = native_read_msr_safe((msr), &__err);	\
	(*low) = (u32)__val;					\
	(*high) = (u32)(__val >> 32);				\
	__err;							\
})
215
/*
 * Fault-tolerant 64-bit MSR read: stores the value through @p and
 * returns 0 on success or -EIO if the read faulted.
 */
static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
{
	int ret = 0;

	*p = native_read_msr_safe(msr, &ret);

	return ret;
}
223
/*
 * rdpmc(counter, low, high) - read performance counter @counter into
 * the 32-bit lvalues @low (bits 31:0) and @high (bits 63:32).
 */
#define rdpmc(counter, low, high)			\
do {							\
	u64 _l = native_read_pmc((counter));		\
	(low)  = (u32)_l;				\
	(high) = (u32)(_l >> 32);			\
} while (0)
230
/*
 * rdpmcl(counter, val) - read performance counter @counter into the
 * 64-bit lvalue @val.  The @counter argument is parenthesized so
 * expressions containing lower-precedence operators expand safely
 * (standard macro-argument hygiene; the old expansion left it bare).
 */
#define rdpmcl(counter, val) ((val) = native_read_pmc((counter)))
232
233#endif
234
/*
 * wrmsrl_safe(msr, val) - fault-tolerant 64-bit MSR write; evaluates
 * to 0 on success or -EIO if the write faulted (via wrmsr_safe()).
 *
 * @val is captured once in a statement-expression temporary: the old
 * expansion expanded @val twice, double-evaluating any side effects
 * in the argument.
 */
#define wrmsrl_safe(msr, val)						\
({									\
	u64 __wsval = (u64)(val);					\
	wrmsr_safe((msr), (u32)__wsval, (u32)(__wsval >> 32));		\
})
237
/* Write the time-stamp counter (MSR_IA32_TSC) as two 32-bit halves. */
#define write_tsc(low, high) wrmsr(MSR_IA32_TSC, (low), (high))

/* Set this CPU's TSC_AUX value (the value RDTSCP returns in ECX). */
#define write_rdtscp_aux(val) wrmsr(MSR_TSC_AUX, (val), 0)
241
/*
 * Out-of-line helpers: allocate/free arrays of struct msr, and set or
 * clear a single bit in an MSR.  NOTE(review): return-value convention
 * of msr_set_bit()/msr_clear_bit() is defined by the implementation —
 * confirm there.
 */
struct msr *msrs_alloc(void);
void msrs_free(struct msr *msrs);
int msr_set_bit(u32 msr, u8 bit);
int msr_clear_bit(u32 msr, u8 bit);
246
#ifdef CONFIG_SMP
/*
 * Cross-CPU MSR access, implemented out of line: each helper performs
 * the read/write on the CPU named by @cpu (the *_on_cpus() variants
 * operate over a cpumask).  The *_safe_* variants report faults via
 * their return value instead of crashing on a bad MSR.
 */
int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsrl_on_cpu(unsigned int cpu, u32 msr_no, u64 *q);
int wrmsrl_on_cpu(unsigned int cpu, u32 msr_no, u64 q);
void rdmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs);
void wrmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsrl_safe_on_cpu(unsigned int cpu, u32 msr_no, u64 *q);
int wrmsrl_safe_on_cpu(unsigned int cpu, u32 msr_no, u64 q);
int rdmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8]);
int wrmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8]);
260#else
/*
 * UP (!CONFIG_SMP) fallbacks: with a single CPU, every *_on_cpu() call
 * can only target the current processor, so these wrappers ignore
 * @cpu, perform the access locally, and return 0 where the SMP
 * variants would return a cross-call status.
 */
static inline int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
	return 0;
}
static inline int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
	return 0;
}
static inline int rdmsrl_on_cpu(unsigned int cpu, u32 msr_no, u64 *q)
{
	rdmsrl(msr_no, *q);
	return 0;
}
static inline int wrmsrl_on_cpu(unsigned int cpu, u32 msr_no, u64 q)
{
	wrmsrl(msr_no, q);
	return 0;
}
/* Only entry 0 of @msrs is touched: there is exactly one CPU. */
static inline void rdmsr_on_cpus(const struct cpumask *m, u32 msr_no,
				struct msr *msrs)
{
	rdmsr_on_cpu(0, msr_no, &(msrs[0].l), &(msrs[0].h));
}
static inline void wrmsr_on_cpus(const struct cpumask *m, u32 msr_no,
				struct msr *msrs)
{
	wrmsr_on_cpu(0, msr_no, msrs[0].l, msrs[0].h);
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
				    u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
static inline int rdmsrl_safe_on_cpu(unsigned int cpu, u32 msr_no, u64 *q)
{
	return rdmsrl_safe(msr_no, q);
}
static inline int wrmsrl_safe_on_cpu(unsigned int cpu, u32 msr_no, u64 q)
{
	return wrmsrl_safe(msr_no, q);
}
static inline int rdmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8])
{
	return rdmsr_safe_regs(regs);
}
static inline int wrmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8])
{
	return wrmsr_safe_regs(regs);
}
316#endif
317#endif
318#endif
319