#ifndef _ASM_X86_MSR_H
#define _ASM_X86_MSR_H

#include <asm/msr-index.h>

#ifndef __ASSEMBLY__
# include <linux/types.h>
#endif

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/asm.h>
#include <asm/errno.h>

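/*
 * The .byte sequence below is the raw encoding of "rdtscp" (0f 01 f9),
 * spelled out so that assemblers too old to know the mnemonic still
 * accept it.  rdtscp returns the TSC in edx:eax and the IA32_TSC_AUX
 * value in ecx.
 */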
static inline unsigned long long native_read_tscp(unsigned int *aux)
{
        unsigned long low, high;
        asm volatile(".byte 0x0f,0x01,0xf9"
                     : "=a" (low), "=d" (high), "=c" (*aux));
        return low | ((u64)high << 32);
}

/*
 * Both i386 and x86_64 return the 64-bit value in edx:eax, but gcc's
 * "A" constraint means different things on each: on i386 it is exactly
 * edx:eax, while on x86_64 it does not mean rdx:rax or edx:eax but
 * rather rax *or* rdx.
 */
#ifdef CONFIG_X86_64
#define DECLARE_ARGS(val, low, high)    unsigned low, high
#define EAX_EDX_VAL(val, low, high)     ((low) | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)    "a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)     "=a" (low), "=d" (high)
#else
#define DECLARE_ARGS(val, low, high)    unsigned long long val
#define EAX_EDX_VAL(val, low, high)     (val)
#define EAX_EDX_ARGS(val, low, high)    "A" (val)
#define EAX_EDX_RET(val, low, high)     "=A" (val)
#endif
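
/*
 * For illustration only: with these macros, the 32-bit build of
 * native_read_msr() below expands to roughly
 *
 *      unsigned long long val;
 *      asm volatile("rdmsr" : "=A" (val) : "c" (msr));
 *      return val;
 *
 * while the 64-bit build reads edx:eax into two 32-bit variables and
 * recombines them with EAX_EDX_VAL().
 */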

static inline unsigned long long native_read_msr(unsigned int msr)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
        return EAX_EDX_VAL(val, low, high);
}

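/*
 * Exception-handling variant: the extable entry redirects a fault on
 * the rdmsr at label 2 (e.g. #GP on a non-existent MSR) to the fixup
 * at label 3, which stores -EFAULT in *err and resumes at label 1;
 * on success the leading xor clears *err.
 */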
static inline unsigned long long native_read_msr_safe(unsigned int msr,
                                                      int *err)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("2: rdmsr ; xor %[err],%[err]\n"
                     "1:\n\t"
                     ".section .fixup,\"ax\"\n\t"
                     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
                     ".previous\n\t"
                     _ASM_EXTABLE(2b, 3b)
                     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
                     : "c" (msr), [fault] "i" (-EFAULT));
        return EAX_EDX_VAL(val, low, high);
}

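/*
 * As native_read_msr_safe(), but with 0x9c5a203a loaded into %edi;
 * this appears to be an AMD-defined passcode that unlocks reading
 * certain otherwise-protected MSRs on AMD CPUs.
 */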
static inline unsigned long long native_read_msr_amd_safe(unsigned int msr,
                                                      int *err)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("2: rdmsr ; xor %0,%0\n"
                     "1:\n\t"
                     ".section .fixup,\"ax\"\n\t"
                     "3:  mov %3,%0 ; jmp 1b\n\t"
                     ".previous\n\t"
                     _ASM_EXTABLE(2b, 3b)
                     : "=r" (*err), EAX_EDX_RET(val, low, high)
                     : "c" (msr), "D" (0x9c5a203a), "i" (-EFAULT));
        return EAX_EDX_VAL(val, low, high);
}

static inline void native_write_msr(unsigned int msr,
                                    unsigned low, unsigned high)
{
        asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
}

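/*
 * Note the constraints below: err is returned in %eax ("=a") and the
 * low half of the MSR value is passed in through the same register
 * ("0"), so the xor clearing err runs only after wrmsr has consumed it.
 */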
static inline int native_write_msr_safe(unsigned int msr,
                                        unsigned low, unsigned high)
{
        int err;
        asm volatile("2: wrmsr ; xor %[err],%[err]\n"
                     "1:\n\t"
                     ".section .fixup,\"ax\"\n\t"
                     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
                     ".previous\n\t"
                     _ASM_EXTABLE(2b, 3b)
                     : [err] "=a" (err)
                     : "c" (msr), "0" (low), "d" (high),
                       [fault] "i" (-EFAULT)
                     : "memory");
        return err;
}

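/*
 * __native_read_tsc() is the always-inlined raw rdtsc; the out-of-line
 * native_read_tsc() wrapper is defined elsewhere, presumably so that
 * its address can be taken (e.g. by the paravirt function tables).
 */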
extern unsigned long long native_read_tsc(void);

static __always_inline unsigned long long __native_read_tsc(void)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));

        return EAX_EDX_VAL(val, low, high);
}

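/*
 * rdpmc reads the performance-monitoring counter selected by %ecx
 * into edx:eax, using the same EAX_EDX_* helpers as rdmsr above.
 */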
static inline unsigned long long native_read_pmc(int counter)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
        return EAX_EDX_VAL(val, low, high);
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), which allows gcc to optimize better.
 */
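
/*
 * Typical use (the MSR name is purely illustrative): a read-modify-write
 * of a 64-bit MSR works on the two 32-bit halves:
 *
 *      u32 lo, hi;
 *      rdmsr(MSR_IA32_MISC_ENABLE, lo, hi);
 *      lo |= SOME_ENABLE_BIT;              /* hypothetical bit */
 *      wrmsr(MSR_IA32_MISC_ENABLE, lo, hi);
 */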

#define rdmsr(msr, val1, val2)                                  \
do {                                                            \
        u64 __val = native_read_msr((msr));                     \
        (val1) = (u32)__val;                                    \
        (val2) = (u32)(__val >> 32);                            \
} while (0)

static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
        native_write_msr(msr, low, high);
}

#define rdmsrl(msr, val)                        \
        ((val) = native_read_msr((msr)))

#define wrmsrl(msr, val)                                                \
        native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))

/* wrmsr with exception handling */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
        return native_write_msr_safe(msr, low, high);
}

/* rdmsr with exception handling */
#define rdmsr_safe(msr, p1, p2)                                 \
({                                                              \
        int __err;                                              \
        u64 __val = native_read_msr_safe((msr), &__err);        \
        (*p1) = (u32)__val;                                     \
        (*p2) = (u32)(__val >> 32);                             \
        __err;                                                  \
})

static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
{
        int err;

        *p = native_read_msr_safe(msr, &err);
        return err;
}

static inline int rdmsrl_amd_safe(unsigned msr, unsigned long long *p)
{
        int err;

        *p = native_read_msr_amd_safe(msr, &err);
        return err;
}

#define rdtscl(low)                                             \
        ((low) = (u32)__native_read_tsc())

#define rdtscll(val)                                            \
        ((val) = __native_read_tsc())

#define rdpmc(counter, low, high)                       \
do {                                                    \
        u64 _l = native_read_pmc((counter));            \
        (low)  = (u32)_l;                               \
        (high) = (u32)(_l >> 32);                       \
} while (0)

#define rdtscp(low, high, aux)                                  \
do {                                                            \
        unsigned long long _val = native_read_tscp(&(aux));     \
        (low) = (u32)_val;                                      \
        (high) = (u32)(_val >> 32);                             \
} while (0)

#define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))

#endif  /* !CONFIG_PARAVIRT */

#define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val),         \
                                             (u32)((val) >> 32))

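/* 0x10 is the architectural TSC MSR (IA32_TIME_STAMP_COUNTER). */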
#define write_tsc(val1, val2) wrmsr(0x10, (val1), (val2))

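/* 0xc0000103 is MSR_TSC_AUX, the value rdtscp returns in %ecx. */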
#define write_rdtscp_aux(val) wrmsr(0xc0000103, (val), 0)

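/*
 * Cross-CPU accessors: on SMP these run the access on the target CPU
 * (implemented out of line); on UP they fall back to the local ops.
 */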
#ifdef CONFIG_SMP
int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /*  CONFIG_SMP  */
static inline int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
        rdmsr(msr_no, *l, *h);
        return 0;
}
static inline int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
        wrmsr(msr_no, l, h);
        return 0;
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
                                    u32 *l, u32 *h)
{
        return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
        return wrmsr_safe(msr_no, l, h);
}
#endif  /* CONFIG_SMP */
#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */

#endif /* _ASM_X86_MSR_H */