/*
 * Fragment of what appears to be Ruby's public atomic-operations header
 * (RUBY_ATOMIC_* / rbimpl_atomic_* API).
 * NOTE(review): extraction-damaged copy — the original file's line numbers
 * are fused onto the first column of each line, and many intervening lines
 * are missing (the fused numbering jumps).  Kept byte-identical below;
 * TODO restore from a pristine checkout rather than editing this artifact.
 */
/* Configuration and compatibility includes. */
27#include "ruby/internal/config.h"
33#ifdef HAVE_SYS_TYPES_H
34# include <sys/types.h>
/* MSVC >= 13: _InterlockedOr is used as a compiler intrinsic (see rbimpl_atomic_or). */
37#if RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
38# pragma intrinsic(_InterlockedOr)
39#elif defined(__sun) && defined(HAVE_ATOMIC_H)
44#include "ruby/backward/2/limits.h"
49#include "ruby/internal/cast.h"
/* Feature detection: GCC-style atomics allow the type-generic macro forms. */
59#if defined(__DOXYGEN__) || defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
60# define RUBY_ATOMIC_GENERIC_MACRO 1
/* Backend selection: __atomic builtins, __sync builtins, or Solaris <atomic.h>. */
68#if defined(__DOXYGEN__)
70#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
72#elif defined(HAVE_GCC_SYNC_BUILTINS)
76#elif defined(__sun) && defined(HAVE_ATOMIC_H)
79# error No atomic operation found
/*
 * Public RUBY_ATOMIC_* macro API.  Each macro takes an lvalue and forwards
 * its address to the corresponding rbimpl_atomic_* implementation function
 * defined later in this header.  (Extraction-fused line-number prefixes
 * removed so the directives are valid again.)
 */

/** Atomically adds `val` to `var`; returns the value `var` held *before* the add. */
#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val))

/** Atomically subtracts `val` from `var`; returns the value held *before* the sub. */
#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val))

/** Atomically ORs `val` into `var`; no return value. */
#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val))

/** Atomically stores `val` into `var`, returning the previous value. */
#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val))

/** Atomic compare-and-swap: if `var == oldval` store `newval`; returns the
 *  value `var` held before the operation. */
#define RUBY_ATOMIC_CAS(var, oldval, newval) \
    rbimpl_atomic_cas(&(var), (oldval), (newval))

/** Atomic load of `var`. */
#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var))

/** Atomic store of `val` into `var`; previous value is discarded. */
#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_set(&(var), (val))

/** Atomically adds `val` to `var`; no return value. */
#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val))

/** Atomically subtracts `val` from `var`; no return value. */
#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val))

/** Atomically increments `var` by one. */
#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var))

/** Atomically decrements `var` by one. */
#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var))

/* size_t variants of the operations above. */
#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var))
#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var))
#define RUBY_ATOMIC_SIZE_EXCHANGE(var, val) \
    rbimpl_atomic_size_exchange(&(var), (val))
#define RUBY_ATOMIC_SIZE_CAS(var, oldval, newval) \
    rbimpl_atomic_size_cas(&(var), (oldval), (newval))
#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val))
#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val))

/* Pointer variants.  Arguments are fully parenthesised here: the original
 * expanded to `(void *)val` and `(void **)&var`, where an argument such as
 * `p + 1` would bind as `((void *)p) + 1` because a cast has higher
 * precedence than `+`. */
#define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \
    RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)(val)))
#define RUBY_ATOMIC_PTR_LOAD(var) \
    RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&(var)))
#define RUBY_ATOMIC_PTR_CAS(var, oldval, newval) \
    RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (oldval), (newval)))

/* VALUE variants. */
#define RUBY_ATOMIC_VALUE_EXCHANGE(var, val) \
    rbimpl_atomic_value_exchange(&(var), (val))
#define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval) \
    rbimpl_atomic_value_cas(&(var), (oldval), (newval))
/* --- fragments of rbimpl_atomic_fetch_add (returns the old value) and
 * rbimpl_atomic_add (no return) ---
 * NOTE(review): damaged extraction — fused line numbers, and the function
 * signatures, first #if branch, braces and #endif lines are missing.
 * Kept byte-identical.  Backends: GCC __atomic / __sync builtins,
 * Win32 InterlockedExchangeAdd, Solaris <atomic.h>. */
353#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
354 return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
356#elif defined(HAVE_GCC_SYNC_BUILTINS)
357 return __sync_fetch_and_add(ptr, val);
360 return InterlockedExchangeAdd(ptr, val);
362#elif defined(__sun) && defined(HAVE_ATOMIC_H)
/* atomic_add_int_nv returns the NEW value; subtracting val recovers
 * fetch-add (old-value) semantics. */
369 return atomic_add_int_nv(ptr, val) - val;
372# error Unsupported platform.
/* rbimpl_atomic_add: same backends, result discarded. */
384#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
390 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
392#elif defined(HAVE_GCC_SYNC_BUILTINS)
393 __sync_add_and_fetch(ptr, val);
401 InterlockedExchangeAdd(ptr, val);
403#elif defined(__sun) && defined(HAVE_ATOMIC_H)
406 atomic_add_int(ptr, val);
409# error Unsupported platform.
/* --- fragment of rbimpl_atomic_size_add(volatile size_t *ptr, size_t val) ---
 * NOTE(review): the signature was split across lines by the extraction and
 * return-type/attribute lines are missing; kept byte-identical. */
417rbimpl_atomic_size_add(volatile
size_t *ptr,
size_t val)
421#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
422 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
424#elif defined(HAVE_GCC_SYNC_BUILTINS)
425 __sync_add_and_fetch(ptr, val);
427#elif defined(_WIN32) && defined(_M_AMD64)
429 InterlockedExchangeAdd64(ptr, val);
431#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
434 atomic_add_long(ptr, val);
/* Fallback path: delegates to rbimpl_atomic_add via a pointer `tmp` whose
 * declaration (presumably a cast of ptr to rb_atomic_t*) is missing from
 * this chunk — TODO confirm against the pristine header. */
440 rbimpl_atomic_add(tmp, val);
/* --- fragments of rbimpl_atomic_inc and
 * rbimpl_atomic_size_inc(volatile size_t *ptr): increment by one. ---
 * NOTE(review): damaged extraction; signatures/braces/#endifs missing.
 * Both delegate to the corresponding *_add(ptr, 1) except on Win32 and
 * Solaris, which have dedicated increment primitives. */
453#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
454 rbimpl_atomic_add(ptr, 1);
457 InterlockedIncrement(ptr);
459#elif defined(__sun) && defined(HAVE_ATOMIC_H)
460 atomic_inc_uint(ptr);
463 rbimpl_atomic_add(ptr, 1);
472rbimpl_atomic_size_inc(volatile
size_t *ptr)
476#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
477 rbimpl_atomic_size_add(ptr, 1);
479#elif defined(_WIN32) && defined(_M_AMD64)
480 InterlockedIncrement64(ptr);
482#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
483 atomic_inc_ulong(ptr);
486 rbimpl_atomic_size_add(ptr, 1);
/* --- fragments of rbimpl_atomic_fetch_sub (returns the old value) and
 * rbimpl_atomic_sub (no return) ---
 * NOTE(review): damaged extraction; kept byte-identical.
 * Win32 and Solaris express subtraction as addition of a negated value;
 * `neg * val` avoids directly negating an unsigned operand. */
499#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
500 return __atomic_fetch_sub(ptr, val, __ATOMIC_SEQ_CST);
502#elif defined(HAVE_GCC_SYNC_BUILTINS)
503 return __sync_fetch_and_sub(ptr, val);
507 return InterlockedExchangeAdd(ptr, -val);
509#elif defined(__sun) && defined(HAVE_ATOMIC_H)
511 const signed neg = -1;
/* atomic_add_int_nv returns the NEW value; adding val back yields the
 * old value (fetch-sub semantics). */
513 return atomic_add_int_nv(ptr, neg * val) + val;
516# error Unsupported platform.
/* rbimpl_atomic_sub: same backends, result discarded. */
528#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
529 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
531#elif defined(HAVE_GCC_SYNC_BUILTINS)
532 __sync_sub_and_fetch(ptr, val);
535 InterlockedExchangeAdd(ptr, -val);
537#elif defined(__sun) && defined(HAVE_ATOMIC_H)
538 const signed neg = -1;
540 atomic_add_int(ptr, neg * val);
543# error Unsupported platform.
/* --- fragment of rbimpl_atomic_size_sub(volatile size_t *ptr, size_t val) ---
 * NOTE(review): damaged extraction; signature split, branches incomplete. */
551rbimpl_atomic_size_sub(volatile
size_t *ptr,
size_t val)
555#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
556 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
558#elif defined(HAVE_GCC_SYNC_BUILTINS)
559 __sync_sub_and_fetch(ptr, val);
561#elif defined(_WIN32) && defined(_M_AMD64)
/* Subtraction via adding -val; neg*val sidesteps negating an unsigned. */
562 const ssize_t neg = -1;
563 InterlockedExchangeAdd64(ptr, neg * val);
565#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
566 const signed neg = -1;
568 atomic_add_long(ptr, neg * val);
/* Fallback delegating to rbimpl_atomic_sub; `tmp` declaration missing
 * from this chunk — TODO confirm against the pristine header. */
574 rbimpl_atomic_sub(tmp, val);
/* --- fragments of rbimpl_atomic_dec and
 * rbimpl_atomic_size_dec(volatile size_t *ptr): decrement by one. ---
 * NOTE(review): damaged extraction; mirrors the *_inc pair above,
 * delegating to *_sub(ptr, 1) except on Win32/Solaris. */
587#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
588 rbimpl_atomic_sub(ptr, 1);
591 InterlockedDecrement(ptr);
593#elif defined(__sun) && defined(HAVE_ATOMIC_H)
594 atomic_dec_uint(ptr);
597 rbimpl_atomic_sub(ptr, 1);
606rbimpl_atomic_size_dec(volatile
size_t *ptr)
610#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
611 rbimpl_atomic_size_sub(ptr, 1);
613#elif defined(_WIN32) && defined(_M_AMD64)
614 InterlockedDecrement64(ptr);
616#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
617 atomic_dec_ulong(ptr);
620 rbimpl_atomic_size_sub(ptr, 1);
/* --- fragment of rbimpl_atomic_or (atomic bitwise OR, no return) ---
 * NOTE(review): damaged extraction.  Extra backends here: MSVC >= 13 uses
 * the _InterlockedOr intrinsic (see the #pragma intrinsic above); 32-bit
 * MSVC inline assembly (`lock or`) — the surrounding asm set-up lines are
 * missing from this chunk. */
633#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
634 __atomic_or_fetch(ptr, val, __ATOMIC_SEQ_CST);
636#elif defined(HAVE_GCC_SYNC_BUILTINS)
637 __sync_or_and_fetch(ptr, val);
639#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
640 _InterlockedOr(ptr, val);
642#elif defined(_WIN32) && defined(__GNUC__)
650#elif defined(_WIN32) && defined(_M_IX86)
653 __asm lock or [eax], ecx;
655#elif defined(__sun) && defined(HAVE_ATOMIC_H)
656 atomic_or_uint(ptr, val);
659# error Unsupported platform.
/* Pre-MSVC-13 wrapper: presumably redefines ATOMIC_OR in terms of the
 * function above — surrounding lines missing; TODO confirm. */
664#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
668 return rbimpl_atomic_or(var, val);
/* --- fragments of rbimpl_atomic_exchange (returns the previous value) and
 * rbimpl_atomic_size_exchange(volatile size_t *ptr, size_t val) ---
 * NOTE(review): damaged extraction; kept byte-identical. */
680#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
681 return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
682#elif defined(HAVE_GCC_SYNC_BUILTINS)
731rbimpl_atomic_ptr_exchange(
void *volatile *ptr, const
void *val)
735#elif defined(InterlockedExchangePointer)
737 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
738 PVOID pval = RBIMPL_CAST((PVOID)val);
739 return InterlockedExchangePointer(pptr, pval);
741#elif defined(__sun) && defined(HAVE_ATOMIC_H)
742 return atomic_swap_ptr(ptr, RBIMPL_CAST((
void *)val));
747 const size_t sval = RBIMPL_CAST((
size_t)val);
748 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
749 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
750 return RBIMPL_CAST((
void *)sret);
759rbimpl_atomic_value_exchange(volatile
VALUE *ptr,
VALUE val)
763 const size_t sval = RBIMPL_CAST((
size_t)val);
764 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
765 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
766 return RBIMPL_CAST((
VALUE)sret);
777#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
778 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
780 return rbimpl_atomic_fetch_add(ptr, 0);
792#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
793 __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
797 rbimpl_atomic_exchange(ptr, val);
810#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
811 __atomic_compare_exchange_n(
812 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
815#elif defined(HAVE_GCC_SYNC_BUILTINS)
816 return __sync_val_compare_and_swap(ptr, oldval, newval);
818#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
819 return InterlockedCompareExchange(ptr, newval, oldval);
822 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
823 PVOID pold = RBIMPL_CAST((PVOID)oldval);
824 PVOID pnew = RBIMPL_CAST((PVOID)newval);
825 PVOID pret = InterlockedCompareExchange(pptr, pnew, pold);
828#elif defined(__sun) && defined(HAVE_ATOMIC_H)
829 return atomic_cas_uint(ptr, oldval, newval);
832# error Unsupported platform.
837#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
841 return rbimpl_atomic_cas(var, oldval, newval);
849rbimpl_atomic_size_cas(volatile
size_t *ptr,
size_t oldval,
size_t newval)
853#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
854 __atomic_compare_exchange_n(
855 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
858#elif defined(HAVE_GCC_SYNC_BUILTINS)
859 return __sync_val_compare_and_swap(ptr, oldval, newval);
861#elif defined(_WIN32) && defined(_M_AMD64)
862 return InterlockedCompareExchange64(ptr, newval, oldval);
864#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
865 return atomic_cas_ulong(ptr, oldval, newval);
871 return rbimpl_atomic_cas(tmp, oldval, newval);
880rbimpl_atomic_ptr_cas(
void **ptr, const
void *oldval, const
void *newval)
884#elif defined(InterlockedExchangePointer)
887 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
888 PVOID pold = RBIMPL_CAST((PVOID)oldval);
889 PVOID pnew = RBIMPL_CAST((PVOID)newval);
890 return InterlockedCompareExchangePointer(pptr, pnew, pold);
892#elif defined(__sun) && defined(HAVE_ATOMIC_H)
893 void *pold = RBIMPL_CAST((
void *)oldval);
894 void *pnew = RBIMPL_CAST((
void *)newval);
895 return atomic_cas_ptr(ptr, pold, pnew);
901 const size_t snew = RBIMPL_CAST((
size_t)newval);
902 const size_t sold = RBIMPL_CAST((
size_t)oldval);
903 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
904 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
905 return RBIMPL_CAST((
void *)sret);
914rbimpl_atomic_ptr_load(
void **ptr)
918#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
919 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
922 return rbimpl_atomic_ptr_cas(ptr, val, val);
930rbimpl_atomic_value_cas(volatile
VALUE *ptr,
VALUE oldval,
VALUE newval)
934 const size_t snew = RBIMPL_CAST((
size_t)newval);
935 const size_t sold = RBIMPL_CAST((
size_t)oldval);
936 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
937 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
938 return RBIMPL_CAST((
VALUE)sret);
Defines RBIMPL_ATTR_ARTIFICIAL.
#define RBIMPL_ATTR_ARTIFICIAL()
Wraps (or simulates) __attribute__((artificial))
#define RBIMPL_ASSERT_OR_ASSUME(expr)
This is either RUBY_ASSERT or RBIMPL_ASSUME, depending on RUBY_DEBUG.
std::atomic< unsigned > rb_atomic_t
Type that is eligible for atomic operations.
Defines RBIMPL_COMPILER_SINCE.
Defines RBIMPL_STATIC_ASSERT.
#define RBIMPL_STATIC_ASSERT
Wraps (or simulates) static_assert
Defines RBIMPL_ATTR_NOALIAS.
#define RBIMPL_ATTR_NOALIAS()
Wraps (or simulates) __declspec((noalias))
Defines RBIMPL_ATTR_NONNULL.
#define RBIMPL_ATTR_NONNULL(list)
Wraps (or simulates) __attribute__((nonnull))
#define inline
Old Visual Studio versions do not support the inline keyword, so we need to define it to be __inline.
uintptr_t VALUE
Type that represents a Ruby object.