#ifndef X86_64_MSR_H
#define X86_64_MSR_H 1

#include <asm/msr-index.h>

#ifndef __ASSEMBLY__
#include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */
#define rdmsr(msr,val1,val2) \
       __asm__ __volatile__("rdmsr" \
			    : "=a" (val1), "=d" (val2) \
			    : "c" (msr))
#define rdmsrl(msr,val) do { unsigned long a__,b__; \
       __asm__ __volatile__("rdmsr" \
			    : "=a" (a__), "=d" (b__) \
			    : "c" (msr)); \
       val = a__ | (b__<<32); \
} while(0)
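/*
 * Example (illustrative sketch, not part of the original header):
 * reading the EFER MSR both as a lo/hi pair and as one 64-bit value.
 * MSR_EFER is assumed to come from <asm/msr-index.h>.
 *
 *	unsigned int lo, hi;
 *	unsigned long efer;
 *
 *	rdmsr(MSR_EFER, lo, hi);	// lo = bits 31:0, hi = bits 63:32
 *	rdmsrl(MSR_EFER, efer);		// efer = full 64-bit value
 */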
#define wrmsr(msr,val1,val2) \
     __asm__ __volatile__("wrmsr" \
			  : /* no outputs */ \
			  : "c" (msr), "a" (val1), "d" (val2))
#define wrmsrl(msr,val) wrmsr(msr,(__u32)((__u64)(val)),((__u64)(val))>>32)
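/*
 * Example (sketch): a 64-bit read-modify-write. Setting the SCE bit in
 * EFER here only shows the macro's shape; MSR_EFER and EFER_SCE are
 * assumed to come from <asm/msr-index.h>.
 *
 *	unsigned long efer;
 *	rdmsrl(MSR_EFER, efer);
 *	wrmsrl(MSR_EFER, efer | EFER_SCE);
 */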
/* wrmsr with exception handling */
#define wrmsr_safe(msr,a,b) ({ int ret__;			\
	asm volatile("2: wrmsr ; xorl %0,%0\n"			\
		     "1:\n\t"					\
		     ".section .fixup,\"ax\"\n\t"		\
		     "3:  movl %4,%0 ; jmp 1b\n\t"		\
		     ".previous\n\t"				\
		     ".section __ex_table,\"a\"\n"		\
		     "   .align 8\n\t"				\
		     "   .quad 2b,3b\n\t"			\
		     ".previous"				\
		     : "=a" (ret__)				\
		     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT)); \
	ret__; })

#define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))
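/*
 * Example (sketch): probing whether an MSR is writable. wrmsr_safe()
 * returns 0 on success and -EFAULT if the WRMSR faulted, so a bogus
 * MSR number can be caught instead of oopsing.
 *
 *	if (wrmsr_safe(msr, lo, hi) < 0)
 *		printk(KERN_WARNING "MSR %x not writable\n", msr);
 */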
#define rdmsr_safe(msr,a,b) \
	({ int ret__;						\
	  asm volatile ("1: rdmsr\n"				\
			"2:\n"					\
			".section .fixup,\"ax\"\n"		\
			"3: movl %4,%0\n"			\
			" jmp 2b\n"				\
			".previous\n"				\
			".section __ex_table,\"a\"\n"		\
			" .align 8\n"				\
			" .quad 1b,3b\n"			\
			".previous":"=&bDS" (ret__), "=a"(*(a)), "=d"(*(b)) \
			:"c"(msr), "i"(-EIO), "0"(0));		\
	  ret__; })
#define rdtsc(low,high) \
     __asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))

#define rdtscl(low) \
     __asm__ __volatile__ ("rdtsc" : "=a" (low) : : "edx")
/* The .byte sequence is the rdtscp opcode, for assemblers that lack the mnemonic. */
#define rdtscp(low,high,aux) \
     asm volatile (".byte 0x0f,0x01,0xf9" : "=a" (low), "=d" (high), "=c" (aux))
#define rdtscll(val) do { \
     unsigned int __a,__d; \
     asm volatile("rdtsc" : "=a" (__a), "=d" (__d)); \
     (val) = ((unsigned long)__a) | (((unsigned long)__d)<<32); \
} while(0)
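/*
 * Example (sketch): a crude cycle measurement with rdtscll(). Note
 * that plain rdtsc does not serialize, so very short intervals can be
 * distorted by out-of-order execution.
 *
 *	unsigned long t0, t1;
 *	rdtscll(t0);
 *	do_something();		// hypothetical work being timed
 *	rdtscll(t1);
 *	printk("took %lu cycles\n", t1 - t0);
 */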
#define rdtscpll(val, aux) do { \
     unsigned long __a, __d; \
     asm volatile (".byte 0x0f,0x01,0xf9" : "=a" (__a), "=d" (__d), "=c" (aux)); \
     (val) = (__d << 32) | __a; \
} while (0)
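/*
 * Example (sketch): rdtscpll() also returns IA32_TSC_AUX in 'aux',
 * which the kernel typically programs with CPU identification (see
 * write_rdtscp_aux() below), so the caller can tell which CPU the
 * timestamp came from.
 *
 *	unsigned long tsc;
 *	unsigned int aux;
 *	rdtscpll(tsc, aux);
 */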
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)	/* 0x10: IA32_TIME_STAMP_COUNTER */

#define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0)	/* 0xc0000103: IA32_TSC_AUX */
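/*
 * Example (sketch; the encoding shown mirrors what the x86_64 vsyscall
 * code does and should be treated as an assumption here): each CPU
 * tags itself so rdtscp can report where a timestamp was taken.
 *
 *	write_rdtscp_aux((node << 12) | cpu);
 */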
#define rdpmc(counter,low,high) \
     __asm__ __volatile__("rdpmc" \
			  : "=a" (low), "=d" (high) \
			  : "c" (counter))
static inline void cpuid(int op, unsigned int *eax, unsigned int *ebx,
			 unsigned int *ecx, unsigned int *edx)
{
	__asm__("cpuid"
		: "=a" (*eax),
		  "=b" (*ebx),
		  "=c" (*ecx),
		  "=d" (*edx)
		: "0" (op));
}
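/*
 * Example (sketch): leaf 0 returns the maximum supported leaf in eax
 * and the 12-byte vendor string in ebx/edx/ecx (in that order), e.g.
 * "GenuineIntel" or "AuthenticAMD".
 *
 *	unsigned int max_leaf, b, c, d;
 *	char vendor[13];
 *	cpuid(0, &max_leaf, &b, &c, &d);
 *	memcpy(vendor, &b, 4);
 *	memcpy(vendor + 4, &d, 4);
 *	memcpy(vendor + 8, &c, 4);
 *	vendor[12] = '\0';
 */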
/* Some CPUID calls want 'count' to be placed in ecx */
static inline void cpuid_count(int op, int count, int *eax, int *ebx, int *ecx,
			       int *edx)
{
	__asm__("cpuid"
		: "=a" (*eax),
		  "=b" (*ebx),
		  "=c" (*ecx),
		  "=d" (*edx)
		: "0" (op), "c" (count));
}
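/*
 * Example (sketch): Intel's deterministic cache parameters (leaf 4)
 * are enumerated by stepping the ecx subleaf until eax reports no more
 * cache levels (type field in bits 4:0 reads 0).
 *
 *	int a, b, c, d, i;
 *	for (i = 0; ; i++) {
 *		cpuid_count(4, i, &a, &b, &c, &d);
 *		if ((a & 0x1f) == 0)	// cache type 0: no more caches
 *			break;
 *	}
 */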
/*
 * CPUID functions returning a single datum
 */
static inline unsigned int cpuid_eax(unsigned int op)
{
	unsigned int eax;

	__asm__("cpuid"
		: "=a" (eax)
		: "0" (op)
		: "bx", "cx", "dx");
	return eax;
}
static inline unsigned int cpuid_ebx(unsigned int op)
{
	unsigned int eax, ebx;

	__asm__("cpuid"
		: "=a" (eax), "=b" (ebx)
		: "0" (op)
		: "cx", "dx");
	return ebx;
}
static inline unsigned int cpuid_ecx(unsigned int op)
{
	unsigned int eax, ecx;

	__asm__("cpuid"
		: "=a" (eax), "=c" (ecx)
		: "0" (op)
		: "bx", "dx");
	return ecx;
}
static inline unsigned int cpuid_edx(unsigned int op)
{
	unsigned int eax, edx;

	__asm__("cpuid"
		: "=a" (eax), "=d" (edx)
		: "0" (op)
		: "bx", "cx");
	return edx;
}
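/*
 * Example (sketch): testing a feature bit with a single-datum helper.
 * Bit 4 of leaf 1's edx is the TSC feature flag.
 *
 *	if (cpuid_edx(1) & (1 << 4))
 *		;	// CPU has a time stamp counter
 */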
#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
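/*
 * Example (sketch): the *_on_cpu() helpers perform the access on the
 * given CPU (a cross-call on SMP, a direct call otherwise), so MSR
 * state of a remote processor can be inspected safely. MSR_IA32_APICBASE
 * is assumed to come from <asm/msr-index.h>.
 *
 *	u32 lo, hi;
 *	rdmsr_on_cpu(1, MSR_IA32_APICBASE, &lo, &hi);
 */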
#else  /* CONFIG_SMP */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}
static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
				    u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
				    u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif  /* CONFIG_SMP */
#endif  /* __ASSEMBLY__ */
#endif  /* X86_64_MSR_H */