4 #include <asm/msr-index.h>
11 static inline unsigned long long native_read_msr(unsigned int msr)
13 unsigned long long val;
15 asm volatile("rdmsr" : "=A" (val) : "c" (msr));
19 static inline unsigned long long native_read_msr_safe(unsigned int msr,
22 unsigned long long val;
24 asm volatile("2: rdmsr ; xorl %0,%0\n"
26 ".section .fixup,\"ax\"\n\t"
27 "3: movl %3,%0 ; jmp 1b\n\t"
29 ".section __ex_table,\"a\"\n"
33 : "=r" (*err), "=A" (val)
34 : "c" (msr), "i" (-EFAULT));
39 static inline void native_write_msr(unsigned int msr, unsigned long long val)
41 asm volatile("wrmsr" : : "c" (msr), "A"(val));
44 static inline int native_write_msr_safe(unsigned int msr,
45 unsigned long long val)
48 asm volatile("2: wrmsr ; xorl %0,%0\n"
50 ".section .fixup,\"ax\"\n\t"
51 "3: movl %4,%0 ; jmp 1b\n\t"
53 ".section __ex_table,\"a\"\n"
58 : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)),
63 static inline unsigned long long native_read_tsc(void)
65 unsigned long long val;
66 asm volatile("rdtsc" : "=A" (val));
70 static inline unsigned long long native_read_pmc(void)
72 unsigned long long val;
73 asm volatile("rdpmc" : "=A" (val));
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

/*
 * Access to machine-specific registers (available on 586 and better only)
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), this allows gcc to optimize better
 */

/* Read MSR @msr; low 32 bits into @val1, high 32 bits into @val2. */
#define rdmsr(msr,val1,val2)						\
	do {								\
		unsigned long long __val = native_read_msr(msr);	\
		(val1) = (u32)__val;					\
		(val2) = (u32)(__val >> 32);				\
	} while (0)

/* Write MSR @msr from the @val2:@val1 (high:low) 32-bit halves. */
#define wrmsr(msr,val1,val2)						\
	native_write_msr(msr, ((unsigned long long)val2 << 32) | val1)

/* Read MSR @msr into a single 64-bit lvalue @val. */
#define rdmsrl(msr,val)							\
	do {								\
		(val) = native_read_msr(msr);				\
	} while (0)

/* Write the 64-bit @val to MSR @msr, via the two-halves wrmsr(). */
static inline void wrmsrl (unsigned long msr, unsigned long long val)
{
	unsigned long lo, hi;

	lo = (unsigned long) val;
	hi = val >> 32;
	wrmsr (msr, lo, hi);
}

/* wrmsr with exception handling */
#define wrmsr_safe(msr,val1,val2)					\
	(native_write_msr_safe(msr, ((unsigned long long)val2 << 32) | val1))

/* rdmsr with exception handling */
/* Statement expression: evaluates to 0 on success, -EFAULT on fault. */
#define rdmsr_safe(msr,p1,p2)						\
	({								\
		int __err;						\
		unsigned long long __val = native_read_msr_safe(msr, &__err);\
		(*p1) = __val;						\
		(*p2) = __val >> 32;					\
		__err;							\
	})

/* Read the TSC; low half into @low, high half into @high. */
#define rdtsc(low,high)							\
	do {								\
		u64 _l = native_read_tsc();				\
		(low) = (u32)_l;					\
		(high) = (u32)(_l >> 32);				\
	} while (0)

/* Read the TSC, keeping only the low 32 bits (implicit truncation). */
#define rdtscl(low)							\
	do {								\
		(low) = native_read_tsc();				\
	} while (0)

/* Read the full 64-bit TSC into @val. */
#define rdtscll(val) ((val) = native_read_tsc())

/* MSR 0x10 is the TSC itself (MSR_IA32_TSC). */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

/*
 * NOTE(review): @counter is ignored — native_read_pmc() does not load
 * ECX, so RDPMC reads whichever counter ECX happens to select.
 * Looks like a paravirt-conversion regression; confirm before fixing,
 * since the fix requires changing native_read_pmc()'s interface.
 */
#define rdpmc(counter,low,high)						\
	do {								\
		u64 _l = native_read_pmc();				\
		(low)  = (u32)_l;					\
		(high) = (u32)(_l >> 32);				\
	} while (0)

#endif	/* !CONFIG_PARAVIRT */
149 void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
150 void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
151 #else /* CONFIG_SMP */
152 static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
154 rdmsr(msr_no, *l, *h);
156 static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
160 #endif /* CONFIG_SMP */
163 #endif /* __ASM_MSR_H */