1 | #pragma once |
2 | |
3 | /* |
4 | * Most of this header is a giant workaround for MSVC to make atomics into a |
5 | * somewhat unified interface with how GCC and Clang handle them. |
6 | * |
7 | * We don't use the absolutely disgusting C11 stdatomic.h header because it is |
8 | * unable to operate on non _Atomic types, which enforce implicit sequential |
9 | * consistency and alter the behavior of the standard C binary/unary operators. |
10 | * |
11 | * The strictness of the atomic helpers defined here is assumed to be at least |
12 | * acquire for loads and release for stores. Cmpxchg uses the standard acq/rel |
13 | * for success, acq for failure, and is assumed to be strong. |
14 | */ |
15 | |
16 | #ifdef UACPI_OVERRIDE_ATOMIC |
17 | #include "uacpi_atomic.h" |
18 | #else |
19 | |
20 | #include <uacpi/platform/compiler.h> |
21 | |
22 | #ifdef _MSC_VER |
23 | |
24 | #include <intrin.h> |
25 | |
/*
 * Mimic __atomic_compare_exchange_n, which doesn't exist on MSVC.
 *
 * Generates uacpi_do_atomic_cmpxchg<width>(ptr, expected, desired):
 * returns 1 and stores 'desired' into *ptr if *ptr == *expected,
 * otherwise returns 0 and writes the observed value back into *expected
 * (same contract as the GCC/Clang builtin used in the #else branch below).
 *
 * NOTE: the _InterlockedCompareExchange family takes
 * (Destination, Exchange, Comparand) — the value stored on success is the
 * SECOND argument, the value compared against is the THIRD. Passing them
 * the other way around silently inverts the operation.
 */
#define UACPI_MAKE_MSVC_CMPXCHG(width, type, suffix) \
    static inline int uacpi_do_atomic_cmpxchg##width( \
        type volatile *ptr, type volatile *expected, type desired \
    ) \
    { \
        type current; \
 \
        current = _InterlockedCompareExchange##suffix(ptr, desired, *expected); \
        if (current != *expected) { \
            *expected = current; \
            return 0; \
        } \
        return 1; \
    }
41 | |
/*
 * Dispatch helper: casts the caller's pointers to the exact volatile type
 * the width-specific _Interlocked intrinsic expects, then invokes the
 * matching uacpi_do_atomic_cmpxchg<width> generated above.
 */
#define UACPI_MSVC_CMPXCHG_INVOKE(ptr, expected, desired, width, type) \
    uacpi_do_atomic_cmpxchg##width( \
        (type volatile*)ptr, (type volatile*)expected, desired \
    )
46 | |
/*
 * Atomic store via _InterlockedExchange<width>: a full-barrier swap whose
 * previous value is discarded. Stronger than the release semantics this
 * header promises, which is fine (the contract is "at least" release).
 */
#define UACPI_MSVC_ATOMIC_STORE(ptr, value, type, width) \
    _InterlockedExchange##width((type volatile*)(ptr), (type)(value))

/*
 * Atomic load via _InterlockedOr<width>(ptr, 0): OR-ing with zero leaves the
 * value unchanged but returns it with full-barrier semantics, satisfying the
 * "at least acquire" load contract. MSVC has no plain atomic-load intrinsic.
 */
#define UACPI_MSVC_ATOMIC_LOAD(ptr, type, width) \
    _InterlockedOr##width((type volatile*)(ptr), 0)
52 | |
/*
 * Instantiate the cmpxchg helpers for each width uacpi needs. The suffix
 * argument selects the intrinsic: the 32-bit variant is the unsuffixed
 * _InterlockedCompareExchange, hence the empty third argument.
 * No 8-bit cmpxchg is generated because no caller requires one.
 */
UACPI_MAKE_MSVC_CMPXCHG(64, __int64, 64)
UACPI_MAKE_MSVC_CMPXCHG(32, long,)
UACPI_MAKE_MSVC_CMPXCHG(16, short, 16)

/* Public cmpxchg interface (same signature shape as the GCC path below). */
#define uacpi_atomic_cmpxchg16(ptr, expected, desired) \
    UACPI_MSVC_CMPXCHG_INVOKE(ptr, expected, desired, 16, short)

#define uacpi_atomic_cmpxchg32(ptr, expected, desired) \
    UACPI_MSVC_CMPXCHG_INVOKE(ptr, expected, desired, 32, long)

#define uacpi_atomic_cmpxchg64(ptr, expected, desired) \
    UACPI_MSVC_CMPXCHG_INVOKE(ptr, expected, desired, 64, __int64)

/* Public loads; the unsuffixed intrinsic (empty width) is the 32-bit one. */
#define uacpi_atomic_load8(ptr) UACPI_MSVC_ATOMIC_LOAD(ptr, char, 8)
#define uacpi_atomic_load16(ptr) UACPI_MSVC_ATOMIC_LOAD(ptr, short, 16)
#define uacpi_atomic_load32(ptr) UACPI_MSVC_ATOMIC_LOAD(ptr, long,)
#define uacpi_atomic_load64(ptr) UACPI_MSVC_ATOMIC_LOAD(ptr, __int64, 64)

/* Public stores (return the previous value, which callers ignore). */
#define uacpi_atomic_store8(ptr, value) UACPI_MSVC_ATOMIC_STORE(ptr, value, char, 8)
#define uacpi_atomic_store16(ptr, value) UACPI_MSVC_ATOMIC_STORE(ptr, value, short, 16)
#define uacpi_atomic_store32(ptr, value) UACPI_MSVC_ATOMIC_STORE(ptr, value, long,)
#define uacpi_atomic_store64(ptr, value) UACPI_MSVC_ATOMIC_STORE(ptr, value, __int64, 64)
75 | #else |
76 | |
/*
 * GCC/Clang path: the __atomic builtins are type-generic, so one macro
 * covers every width. weak=0 requests a strong cmpxchg (no spurious
 * failure), matching the contract in the header comment; orderings are
 * acq_rel on success and acquire on failure.
 */
#define UACPI_DO_CMPXCHG(ptr, expected, desired) \
    __atomic_compare_exchange_n(ptr, expected, desired, 0, \
                                __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE)

#define uacpi_atomic_cmpxchg16(ptr, expected, desired) \
    UACPI_DO_CMPXCHG(ptr, expected, desired)
#define uacpi_atomic_cmpxchg32(ptr, expected, desired) \
    UACPI_DO_CMPXCHG(ptr, expected, desired)
#define uacpi_atomic_cmpxchg64(ptr, expected, desired) \
    UACPI_DO_CMPXCHG(ptr, expected, desired)

/* Loads are acquire, stores are release — exactly the documented minimum. */
#define uacpi_atomic_load8(ptr) __atomic_load_n(ptr, __ATOMIC_ACQUIRE)
#define uacpi_atomic_load16(ptr) __atomic_load_n(ptr, __ATOMIC_ACQUIRE)
#define uacpi_atomic_load32(ptr) __atomic_load_n(ptr, __ATOMIC_ACQUIRE)
#define uacpi_atomic_load64(ptr) __atomic_load_n(ptr, __ATOMIC_ACQUIRE)

#define uacpi_atomic_store8(ptr, value) __atomic_store_n(ptr, value, __ATOMIC_RELEASE)
#define uacpi_atomic_store16(ptr, value) __atomic_store_n(ptr, value, __ATOMIC_RELEASE)
#define uacpi_atomic_store32(ptr, value) __atomic_store_n(ptr, value, __ATOMIC_RELEASE)
#define uacpi_atomic_store64(ptr, value) __atomic_store_n(ptr, value, __ATOMIC_RELEASE)
97 | #endif |
98 | |
/*
 * Pointer-sized load/store: forward to the 32- or 64-bit helpers based on
 * UACPI_POINTER_SIZE (provided by uacpi/platform/compiler.h). Any non-4
 * value falls through to the 64-bit variants.
 */
#if UACPI_POINTER_SIZE == 4
#define uacpi_atomic_load_ptr(ptr_to_ptr) uacpi_atomic_load32(ptr_to_ptr)
#define uacpi_atomic_store_ptr(ptr_to_ptr, value) uacpi_atomic_store32(ptr_to_ptr, value)
#else
#define uacpi_atomic_load_ptr(ptr_to_ptr) uacpi_atomic_load64(ptr_to_ptr)
#define uacpi_atomic_store_ptr(ptr_to_ptr, value) uacpi_atomic_store64(ptr_to_ptr, value)
#endif
106 | |
107 | #endif |
108 | |