1 | // SPDX-License-Identifier: GPL-3.0-or-later |
2 | |
3 | #pragma once |
4 | |
5 | #include <cpuid.h> |
6 | #include <mos/types.h> |
7 | |
8 | should_inline u64 cpu_rdmsr(u32 msr) |
9 | { |
10 | u32 lo, hi; |
11 | __asm__ volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (msr)); |
12 | return ((u64) hi << 32) | lo; |
13 | } |
14 | |
15 | should_inline void cpu_wrmsr(u32 msr, u64 val) |
16 | { |
17 | u32 lo = val & 0xFFFFFFFF; |
18 | u32 hi = val >> 32; |
19 | __asm__ volatile("wrmsr" : : "a" (lo), "d" (hi), "c" (msr)); |
20 | } |
21 | |
22 | [[noreturn]] should_inline void x86_cpu_halt(void) |
23 | { |
24 | while (true) |
25 | __asm__ volatile("hlt" ); |
26 | } |
27 | |
// Query CPUID for (leaf, subleaf) and evaluate to the register named by
// return_reg (one of a, b, c, d).  If the leaf is unsupported,
// __get_cpuid_count leaves the outputs untouched, so all registers read
// as 0 thanks to the explicit zero-initialisation.
// Fix: leaf/subleaf are now parenthesised in the expansion so comma- or
// operator-containing arguments cannot silently change the call.
#define x86_cpuid(return_reg, leaf, subleaf)                  \
    __extension__({                                           \
        reg32_t a = 0, b = 0, c = 0, d = 0;                   \
        __get_cpuid_count((leaf), (subleaf), &a, &b, &c, &d); \
        return_reg;                                           \
    })
34 | |
// clang-format off
// Read control register CRx; x must be a literal digit — it is pasted into the asm template via #x.
#define x86_cpu_get_crx(x) __extension__({ reg_t crx; __asm__ volatile("mov %%cr" #x ", %0" : "=r"(crx)); crx; })
// Write val into control register CRx; the "memory" clobber keeps the write ordered against surrounding memory accesses.
#define x86_cpu_set_crx(x, val) __asm__ volatile("mov %0, %%cr" #x : : "r"(val) : "memory")
// clang-format on
39 | |
// Named readers for the architectural control registers (CR1 does not exist).
#define x86_cpu_get_cr0() x86_cpu_get_crx(0)
#define x86_cpu_get_cr2() x86_cpu_get_crx(2)
#define x86_cpu_get_cr3() x86_cpu_get_crx(3)
#define x86_cpu_get_cr4() x86_cpu_get_crx(4)

// Named writers; CR2 is intentionally absent (it only reports page-fault addresses).
#define x86_cpu_set_cr0(val) x86_cpu_set_crx(0, val)
#define x86_cpu_set_cr3(val) x86_cpu_set_crx(3, val)
#define x86_cpu_set_cr4(val) x86_cpu_set_crx(4, val)
48 | |
// Invalidate the TLB entry for the page containing the given virtual address.
// The "memory" clobber prevents the compiler from reordering page-table
// updates past the invalidation.
should_inline void x86_cpu_invlpg(ptr_t addr)
{
    __asm__ volatile("invlpg (%0)" : : "r" (addr) : "memory" );
}
53 | |
// Flush the TLB by reloading CR3 (read it into RAX, write it back).
// NOTE(review): a CR3 reload does not evict entries marked global (PTE.G);
// if this kernel uses global pages, a full flush needs a CR4.PGE toggle
// instead — confirm against the paging setup.
should_inline void x86_cpu_invlpg_all(void)
{
    __asm__ volatile("mov %%cr3, %%rax; mov %%rax, %%cr3" : : : "rax" , "memory" );
}
58 | |
// Defined in the corresponding .c file; presumably probes CPUID feature
// capabilities and configures the XSAVE state area — confirm at the definitions.
void x86_cpu_initialise_caps(void);
void x86_cpu_setup_xsave_area(void);
61 | |