path: root/ruby_atomic.h
author    Nobuyoshi Nakada <nobu@ruby-lang.org>    2025-11-22 00:54:58 +0900
committer Nobuyoshi Nakada <nobu@ruby-lang.org>    2025-11-22 01:20:04 +0900
commit    d3b6f835d565ec1590059773fc87589ddf8adc37 (patch)
tree      f43d1bcb7e079829c744fb37aa57fff94735da40 /ruby_atomic.h
parent    ffa105c27f943bf4170247137733ff7640cf24d0 (diff)
Fix stdatomic case in `rbimpl_atomic_u64_fetch_add`
On some platforms, 64-bit atomic operations need a dedicated helper library.
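For context, a minimal standalone sketch (not code from the Ruby tree) of what the new `HAVE_STDATOMIC_H` branch relies on: C11 `atomic_fetch_add_explicit` returns the value held *before* the addition, matching the other branches of `rbimpl_atomic_u64_fetch_add`. On targets without native 64-bit atomics the compiler may lower this to calls into an atomic helper library (e.g. `-latomic` with GCC), which is presumably the "dedicated helper library" the commit message refers to.

    /* Standalone sketch, not part of ruby_atomic.h. */
    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    static _Atomic uint64_t counter;

    static uint64_t
    fetch_add_u64(volatile _Atomic uint64_t *ptr, uint64_t val)
    {
        /* Returns the value before the addition, like rbimpl_atomic_u64_fetch_add. */
        return atomic_fetch_add_explicit(ptr, val, memory_order_seq_cst);
    }

    int
    main(void)
    {
        uint64_t old = fetch_add_u64(&counter, 42);
        printf("old=%llu new=%llu\n",
               (unsigned long long)old,
               (unsigned long long)atomic_load_explicit(&counter, memory_order_seq_cst));
        return 0;
    }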
Diffstat (limited to 'ruby_atomic.h')
-rw-r--r--  ruby_atomic.h | 7
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/ruby_atomic.h b/ruby_atomic.h
index c194f7ec3b..3a541d9208 100644
--- a/ruby_atomic.h
+++ b/ruby_atomic.h
@@ -2,6 +2,9 @@
#define INTERNAL_ATOMIC_H
#include "ruby/atomic.h"
+#ifdef HAVE_STDATOMIC_H
+# include <stdatomic.h>
+#endif
#define RUBY_ATOMIC_VALUE_LOAD(x) rbimpl_atomic_value_load(&(x), RBIMPL_ATOMIC_SEQ_CST)
@@ -76,9 +79,9 @@ rbimpl_atomic_u64_fetch_add(volatile rbimpl_atomic_uint64_t *ptr, uint64_t val)
return InterlockedExchangeAdd64((volatile LONG64 *)ptr, val);
#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
return atomic_add_64_nv(ptr, val) - val;
+#elif defined(HAVE_STDATOMIC_H)
+ return atomic_fetch_add_explicit((_Atomic uint64_t *)ptr, val, memory_order_seq_cst);
#else
- // TODO: stdatomic
-
// Fallback using mutex for platforms without 64-bit atomics
static rb_native_mutex_t lock = RB_NATIVE_MUTEX_INITIALIZER;
rb_native_mutex_lock(&lock);