about | summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Victor Stinner <vstinner@python.org>  2020-12-23 03:41:08 +0100
committer GitHub <noreply@github.com>           2020-12-23 03:41:08 +0100
commit  52a327c1cbb86c7f2f5c460645889b23615261bf (patch)
tree    df67ef901c5300349c073e8031c4d410ba016ca7 /Include
parent  Fix typos in sysmodule (GH-23883) (diff)
download  cpython-52a327c1cbb86c7f2f5c460645889b23615261bf.tar.gz
          cpython-52a327c1cbb86c7f2f5c460645889b23615261bf.tar.bz2
          cpython-52a327c1cbb86c7f2f5c460645889b23615261bf.zip
bpo-39465: Add pycore_atomic_funcs.h header (GH-20766)
Add pycore_atomic_funcs.h internal header file: similar to pycore_atomic.h but don't require to declare variables as atomic. Add _Py_atomic_size_get() and _Py_atomic_size_set() functions.
Diffstat (limited to 'Include')
-rw-r--r--  Include/internal/pycore_atomic.h        |  6
-rw-r--r--  Include/internal/pycore_atomic_funcs.h  | 94
2 files changed, 97 insertions, 3 deletions
diff --git a/Include/internal/pycore_atomic.h b/Include/internal/pycore_atomic.h
index 1d5c5621677..3d42e54464c 100644
--- a/Include/internal/pycore_atomic.h
+++ b/Include/internal/pycore_atomic.h
@@ -11,8 +11,8 @@ extern "C" {
#include "dynamic_annotations.h" /* _Py_ANNOTATE_MEMORY_ORDER */
#include "pyconfig.h"
-#if defined(HAVE_STD_ATOMIC)
-#include <stdatomic.h>
+#ifdef HAVE_STD_ATOMIC
+# include <stdatomic.h>
#endif
@@ -62,7 +62,7 @@ typedef struct _Py_atomic_int {
#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) \
atomic_load_explicit(&((ATOMIC_VAL)->_value), ORDER)
-/* Use builtin atomic operations in GCC >= 4.7 */
+// Use builtin atomic operations in GCC >= 4.7 and clang
#elif defined(HAVE_BUILTIN_ATOMIC)
typedef enum _Py_memory_order {
diff --git a/Include/internal/pycore_atomic_funcs.h b/Include/internal/pycore_atomic_funcs.h
new file mode 100644
index 00000000000..a708789cea7
--- /dev/null
+++ b/Include/internal/pycore_atomic_funcs.h
@@ -0,0 +1,94 @@
+/* Atomic functions: similar to pycore_atomic.h, but don't need
+ to declare variables as atomic.
+
+ Py_ssize_t type:
+
+ * value = _Py_atomic_size_get(&var)
+ * _Py_atomic_size_set(&var, value)
+
+ Use sequentially-consistent ordering (__ATOMIC_SEQ_CST memory order):
+ enforce total ordering with all other atomic functions.
+*/
+#ifndef Py_ATOMIC_FUNC_H
+#define Py_ATOMIC_FUNC_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#if defined(_MSC_VER)
+# include <intrin.h> // _InterlockedExchange()
+#endif
+
+
+// Use builtin atomic operations in GCC >= 4.7 and clang
+#ifdef HAVE_BUILTIN_ATOMIC
+
+// Atomically load *var with sequentially-consistent ordering,
+// using the GCC >= 4.7 / clang __atomic builtin.
+static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
+{
+ return __atomic_load_n(var, __ATOMIC_SEQ_CST);
+}
+
+// Atomically store value into *var with sequentially-consistent ordering,
+// using the GCC >= 4.7 / clang __atomic builtin.
+static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
+{
+ __atomic_store_n(var, value, __ATOMIC_SEQ_CST);
+}
+
+#elif defined(_MSC_VER)
+
+// MSVC variant: there is no plain interlocked "load" intrinsic, so read
+// *var with a CompareExchange(var, old, old) loop.  The exchange is a
+// no-op when it succeeds (new value == comparand == old); the loop exits
+// once the value read from the volatile pointer is confirmed unchanged,
+// which yields a full-barrier (sequentially-consistent) load.
+static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
+{
+#if SIZEOF_VOID_P == 8
+ Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
+ volatile __int64 *volatile_var = (volatile __int64 *)var;
+ __int64 old;
+ do {
+ old = *volatile_var;
+ } while(_InterlockedCompareExchange64(volatile_var, old, old) != old);
+#else
+ // 32-bit build: Py_ssize_t fits in a long, use the 32-bit intrinsic.
+ Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
+ volatile long *volatile_var = (volatile long *)var;
+ long old;
+ do {
+ old = *volatile_var;
+ } while(_InterlockedCompareExchange(volatile_var, old, old) != old);
+#endif
+ return old;
+}
+
+// MSVC variant: store value into *var with _InterlockedExchange(64),
+// a full-barrier atomic write; the previous value it returns is
+// intentionally discarded.
+static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
+{
+#if SIZEOF_VOID_P == 8
+ Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
+ volatile __int64 *volatile_var = (volatile __int64 *)var;
+ _InterlockedExchange64(volatile_var, value);
+#else
+ // 32-bit build: Py_ssize_t fits in a long, use the 32-bit intrinsic.
+ Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
+ volatile long *volatile_var = (volatile long *)var;
+ _InterlockedExchange(volatile_var, value);
+#endif
+}
+
+#else
+// Fallback implementation using volatile
+
+// Fallback: read through a volatile pointer.  NOTE(review): volatile only
+// prevents the compiler from caching the load; it provides no hardware
+// atomicity or ordering guarantee — best-effort on platforms without
+// builtin/interlocked atomics.
+static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
+{
+ volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
+ return *volatile_var;
+}
+
+// Fallback: write through a volatile pointer.  NOTE(review): volatile only
+// prevents the compiler from optimizing the store away; it provides no
+// hardware atomicity or ordering guarantee.
+static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
+{
+ volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
+ *volatile_var = value;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* Py_ATOMIC_FUNC_H */