// Read a 64-bit MSR; RDMSR returns the value in EDX:EAX.
should_inline u64 cpu_rdmsr(u32 msr)
{
    u32 lo, hi;
    __asm__ volatile("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
    return ((u64) hi << 32) | lo;
}
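/*
 * Usage sketch (not part of this header): reading the architectural
 * IA32_APIC_BASE MSR (0x1B) to locate the local APIC. The MSR number and
 * low-bit layout are the standard Intel/AMD ones; the variable names are
 * illustrative only.
 *
 *     u64 apic_base = cpu_rdmsr(0x1B);
 *     ptr_t lapic_paddr = apic_base & ~0xFFFULL; // physical base, 4 KiB aligned
 */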
// Write a 64-bit MSR; WRMSR takes the value in EDX:EAX.
should_inline void cpu_wrmsr(u32 msr, u64 val)
{
    u32 lo = val & 0xFFFFFFFF;
    u32 hi = val >> 32;
    __asm__ volatile("wrmsr" : : "a"(lo), "d"(hi), "c"(msr));
}
// Halt the CPU until the next interrupt arrives.
should_inline void x86_cpu_halt(void)
{
    __asm__ volatile("hlt");
}
#define x86_cpuid(return_reg, leaf, subleaf)                                  \
    __extension__({                                                           \
        reg32_t a = 0, b = 0, c = 0, d = 0;                                   \
        __get_cpuid_count(leaf, subleaf, &a, &b, &c, &d);                     \
        return_reg;                                                           \
    })
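/*
 * Usage sketch, assuming the macro evaluates to the named output register
 * (a, b, c or d) as reconstructed above: CPUID leaf 0 reports the highest
 * supported standard leaf in EAX.
 *
 *     const u32 max_leaf = x86_cpuid(a, 0, 0);
 */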
#define x86_cpu_get_crx(x) __extension__({ reg_t crx; __asm__ volatile("mov %%cr" #x ", %0" : "=r"(crx)); crx; })
#define x86_cpu_set_crx(x, val) __asm__ volatile("mov %0, %%cr" #x : : "r"(val) : "memory")

#define x86_cpu_get_cr0() x86_cpu_get_crx(0)
#define x86_cpu_get_cr2() x86_cpu_get_crx(2)
#define x86_cpu_get_cr3() x86_cpu_get_crx(3)
#define x86_cpu_get_cr4() x86_cpu_get_crx(4)

#define x86_cpu_set_cr0(val) x86_cpu_set_crx(0, val)
#define x86_cpu_set_cr3(val) x86_cpu_set_crx(3, val)
#define x86_cpu_set_cr4(val) x86_cpu_set_crx(4, val)
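/*
 * Usage sketch: temporarily clearing CR0.WP (bit 16) around a supervisor
 * write to a read-only mapping. The bit position is architectural; the
 * surrounding code is illustrative only.
 *
 *     reg_t cr0 = x86_cpu_get_cr0();
 *     x86_cpu_set_cr0(cr0 & ~(1UL << 16)); // allow writes to read-only pages
 *     // ... perform the write ...
 *     x86_cpu_set_cr0(cr0);                // restore write protection
 */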
// Invalidate the TLB entry for a single virtual address.
should_inline void x86_cpu_invlpg(ptr_t addr)
{
    __asm__ volatile("invlpg (%0)" : : "r"(addr) : "memory");
}
// Flush all non-global TLB entries by reloading CR3.
should_inline void x86_cpu_invlpg_all(void)
{
    __asm__ volatile("mov %%cr3, %%rax; mov %%rax, %%cr3" : : : "rax", "memory");
}
void x86_cpu_initialise_caps(void);
void x86_cpu_setup_xsave_area(void);