WPILibC++ 2027.0.0-alpha-3
atomic.h
Go to the documentation of this file.
1// Protocol Buffers - Google's data interchange format
2// Copyright 2023 Google LLC. All rights reserved.
3//
4// Use of this source code is governed by a BSD-style
5// license that can be found in the LICENSE file or at
6// https://developers.google.com/open-source/licenses/bsd
7
8#ifndef UPB_PORT_ATOMIC_H_
9#define UPB_PORT_ATOMIC_H_
10
11#include "upb/port/def.inc"
12
13#ifdef UPB_USE_C11_ATOMICS
14
15// IWYU pragma: begin_exports
16#include <stdatomic.h>
17#include <stdbool.h>
18// IWYU pragma: end_exports
19
20#define upb_Atomic_Init(addr, val) atomic_init(addr, val)
21#define upb_Atomic_Load(addr, order) atomic_load_explicit(addr, order)
22#define upb_Atomic_Store(addr, val, order) \
23 atomic_store_explicit(addr, val, order)
24#define upb_Atomic_Exchange(addr, val, order) \
25 atomic_exchange_explicit(addr, val, order)
26#define upb_Atomic_CompareExchangeStrong(addr, expected, desired, \
27 success_order, failure_order) \
28 atomic_compare_exchange_strong_explicit(addr, expected, desired, \
29 success_order, failure_order)
30#define upb_Atomic_CompareExchangeWeak(addr, expected, desired, success_order, \
31 failure_order) \
32 atomic_compare_exchange_weak_explicit(addr, expected, desired, \
33 success_order, failure_order)
34
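// Usage sketch (not part of upb): under C11 these wrappers map one-to-one
// onto <stdatomic.h>, with the caller spelling out the memory order. A
// hypothetical atomic reference count might be touched like this:
//
//   UPB_ATOMIC(uintptr_t) refs;                        // hypothetical field
//   upb_Atomic_Init(&refs, 1);
//   uintptr_t n = upb_Atomic_Load(&refs, memory_order_acquire);
//   upb_Atomic_Store(&refs, n + 1, memory_order_release);
//
// The `order` arguments are the standard memory_order_* constants and are
// forwarded unchanged to the *_explicit functions.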
35#elif defined(UPB_USE_MSC_ATOMICS)
36#include <intrin.h>
37#include <stdbool.h>
38#include <stdint.h>
39
40#define upb_Atomic_Init(addr, val) (*(addr) = val)
41
42#if defined(_WIN64)
43// MSVC, without C11 atomics, does not have any way in pure C to force
44// load-acquire store-release behavior, so we hack it with exchanges.
45#pragma intrinsic(_InterlockedExchange64)
46#define upb_Atomic_Store(addr, val, order) \
47 (void)_InterlockedExchange64((uint64_t volatile *)addr, (uint64_t)val)
48
49#pragma intrinsic(_InterlockedCompareExchange64)
50static uintptr_t upb_Atomic_LoadMsc(uint64_t volatile *addr) {
51 // Compare exchange with an unlikely value reduces the risk of a spurious
52 // (but harmless) store
53 return _InterlockedCompareExchange64(addr, 0xDEADC0DEBAADF00D,
54 0xDEADC0DEBAADF00D);
55}
56// If _Generic is available, use it to avoid emitting a "'uintptr_t' differs in
57// levels of indirection from 'void *'" or -Wint-conversion compiler warning.
58#if __STDC_VERSION__ >= 201112L
59#define upb_Atomic_Load(addr, order) \
60 _Generic(addr, \
61 UPB_ATOMIC(uintptr_t) *: upb_Atomic_LoadMsc( \
62 (uint64_t volatile *)(addr)), \
63 default: (void *)upb_Atomic_LoadMsc((uint64_t volatile *)(addr)))
64
65#define upb_Atomic_Exchange(addr, val, order) \
66 _Generic(addr, \
67 UPB_ATOMIC(uintptr_t) *: _InterlockedExchange64( \
68 (uint64_t volatile *)(addr), (uint64_t)val), \
69 default: (void *)_InterlockedExchange64((uint64_t volatile *)addr, \
70 (uint64_t)val))
71#else
72// Compare exchange with an unlikely value reduces the risk of a spurious
73// (but harmless) store
74#define upb_Atomic_Load(addr, order) \
75 (void *)upb_Atomic_LoadMsc((uint64_t volatile *)(addr))
76
77#define upb_Atomic_Exchange(addr, val, order) \
78 (void *)_InterlockedExchange64((uint64_t volatile *)addr, (uint64_t)val)
79#endif
80
81#pragma intrinsic(_InterlockedCompareExchange64)
82static bool upb_Atomic_CompareExchangeMscP(uint64_t volatile *addr,
83 uint64_t *expected,
84 uint64_t desired) {
85 uint64_t expect_val = *expected;
86 uint64_t actual_val =
87 _InterlockedCompareExchange64(addr, desired, expect_val);
88 if (expect_val != actual_val) {
89 *expected = actual_val;
90 return false;
91 }
92 return true;
93}
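// Usage sketch (not part of upb): like the C11 compare-exchange functions,
// the helper above writes the value it actually observed back into
// *expected on failure, so callers can use the usual retry loop:
//
//   uintptr_t old = upb_Atomic_Load(&obj->tagged_ptr, memory_order_acquire);
//   uintptr_t desired;
//   do {
//     desired = old | 1;  // hypothetical update derived from the old value
//   } while (!upb_Atomic_CompareExchangeWeak(&obj->tagged_ptr, &old, desired,
//                                            memory_order_release,
//                                            memory_order_acquire));
//
// Here `obj->tagged_ptr` stands in for some UPB_ATOMIC(uintptr_t) member;
// the success/failure order arguments are ignored by this MSVC fallback.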
94
95#define upb_Atomic_CompareExchangeStrong(addr, expected, desired, \
96 success_order, failure_order) \
97 upb_Atomic_CompareExchangeMscP((uint64_t volatile *)addr, \
98 (uint64_t *)expected, (uint64_t)desired)
99
100#define upb_Atomic_CompareExchangeWeak(addr, expected, desired, success_order, \
101 failure_order) \
102 upb_Atomic_CompareExchangeMscP((uint64_t volatile *)addr, \
103 (uint64_t *)expected, (uint64_t)desired)
104
105#else // 32 bit pointers
106#pragma intrinsic(_InterlockedExchange)
107#define upb_Atomic_Store(addr, val, order) \
108 (void)_InterlockedExchange((uint32_t volatile *)addr, (uint32_t)val)
109
110#pragma intrinsic(_InterlockedCompareExchange)
111static uintptr_t upb_Atomic_LoadMsc(uint32_t volatile *addr) {
112 // Compare exchange with an unlikely value reduces the risk of a spurious
113 // (but harmless) store
114 return _InterlockedCompareExchange(addr, 0xDEADC0DE, 0xDEADC0DE);
115}
116// If _Generic is available, use it to avoid emitting a "'uintptr_t' differs in
117// levels of indirection from 'void *'" or -Wint-conversion compiler warning.
118#if __STDC_VERSION__ >= 201112L
119#define upb_Atomic_Load(addr, order) \
120 _Generic(addr, \
121 UPB_ATOMIC(uintptr_t) *: upb_Atomic_LoadMsc( \
122 (uint32_t volatile *)(addr)), \
123 default: (void *)upb_Atomic_LoadMsc((uint32_t volatile *)(addr)))
124
125#define upb_Atomic_Exchange(addr, val, order) \
126 _Generic(addr, \
127 UPB_ATOMIC(uintptr_t) *: _InterlockedExchange( \
128 (uint32_t volatile *)(addr), (uint32_t)val), \
129      default: (void *)_InterlockedExchange((uint32_t volatile *)addr, \
130 (uint32_t)val))
131#else
132#define upb_Atomic_Load(addr, order) \
133 (void *)upb_Atomic_LoadMsc((uint32_t volatile *)(addr))
134
135#define upb_Atomic_Exchange(addr, val, order) \
136 (void *)_InterlockedExchange((uint32_t volatile *)addr, (uint32_t)val)
137#endif
138
139#pragma intrinsic(_InterlockedCompareExchange)
140static bool upb_Atomic_CompareExchangeMscP(uint32_t volatile *addr,
141 uint32_t *expected,
142 uint32_t desired) {
143 uint32_t expect_val = *expected;
144 uint32_t actual_val = _InterlockedCompareExchange(addr, desired, expect_val);
145 if (expect_val != actual_val) {
146 *expected = actual_val;
147 return false;
148 }
149 return true;
150}
151
152#define upb_Atomic_CompareExchangeStrong(addr, expected, desired, \
153 success_order, failure_order) \
154 upb_Atomic_CompareExchangeMscP((uint32_t volatile *)addr, \
155 (uint32_t *)expected, (uint32_t)desired)
156
157#define upb_Atomic_CompareExchangeWeak(addr, expected, desired, success_order, \
158 failure_order) \
159 upb_Atomic_CompareExchangeMscP((uint32_t volatile *)addr, \
160 (uint32_t *)expected, (uint32_t)desired)
161#endif
162
163#else // No atomics
164
165#if !defined(UPB_SUPPRESS_MISSING_ATOMICS)
166// NOLINTNEXTLINE
167#error Your compiler does not support atomic instructions, which UPB uses. If you do not use UPB on multiple threads, you can suppress this error by defining UPB_SUPPRESS_MISSING_ATOMICS.
168#endif
169
170#include <string.h>
171
172#define upb_Atomic_Init(addr, val) (*addr = val)
173#define upb_Atomic_Load(addr, order) (*addr)
174#define upb_Atomic_Store(addr, val, order) (*(addr) = val)
175
176UPB_INLINE void* _upb_NonAtomic_Exchange(void* addr, void* value) {
177 void* old;
178 memcpy(&old, addr, sizeof(value));
179 memcpy(addr, &value, sizeof(value));
180 return old;
181}
182
183#define upb_Atomic_Exchange(addr, val, order) _upb_NonAtomic_Exchange(addr, val)
184
185// `addr` and `expected` are logically double pointers.
186UPB_INLINE bool _upb_NonAtomic_CompareExchangeStrongP(void* addr,
187                                                      void* expected,
188                                                      void* desired) {
189 if (memcmp(addr, expected, sizeof(desired)) == 0) {
190 memcpy(addr, &desired, sizeof(desired));
191 return true;
192 } else {
193 memcpy(expected, addr, sizeof(desired));
194 return false;
195 }
196}
197
198#define upb_Atomic_CompareExchangeStrong(addr, expected, desired, \
199 success_order, failure_order) \
200 _upb_NonAtomic_CompareExchangeStrongP((void*)addr, (void*)expected, \
201 (void*)desired)
202#define upb_Atomic_CompareExchangeWeak(addr, expected, desired, success_order, \
203 failure_order) \
204 upb_Atomic_CompareExchangeStrong(addr, expected, desired, 0, 0)
205
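// Usage sketch (not part of upb): in this no-atomics fallback the macros
// become plain loads, stores and memcpy-based swaps, but they keep the same
// call shape, so code written against the atomic API compiles unchanged in
// single-threaded builds. For example, a hypothetical lazily initialized
// pointer field still reads the same way:
//
//   void* cached = upb_Atomic_Load(&obj->lazy, memory_order_acquire);
//   if (!cached) {
//     void* fresh = make_value();  // hypothetical constructor
//     if (!upb_Atomic_CompareExchangeStrong(&obj->lazy, &cached, fresh,
//                                           memory_order_release,
//                                           memory_order_acquire)) {
//       destroy_value(fresh);  // a value was already installed elsewhere
//     }
//   }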
206#endif
207
208#include "upb/port/undef.inc"
209
210#endif // UPB_PORT_ATOMIC_H_
UPB_INLINE void * _upb_NonAtomic_Exchange(void *addr, void *value)
Definition atomic.h:176
UPB_INLINE bool _upb_NonAtomic_CompareExchangeStrongP(void *addr, void *expected, void *desired)
Definition atomic.h:186
#define UPB_INLINE
Definition def.inc:144