#ifndef RIPPLE_BASICS_SPINLOCK_H_INCLUDED
#define RIPPLE_BASICS_SPINLOCK_H_INCLUDED

#include <atomic>
#include <cassert>
#include <limits>
#include <type_traits>
#include <utility>

#ifndef __aarch64__
#include <immintrin.h>
#endif
inline void spin_pause() noexcept
{
#ifdef __aarch64__
    asm volatile("yield");
    static_assert(std::is_unsigned_v<T>);
    static_assert(
        std::is_same_v<decltype(std::declval<std::atomic<T>&>().fetch_or(T{})), T> &&
            std::is_same_v<decltype(std::declval<std::atomic<T>&>().fetch_and(T{})), T>,
        "std::atomic<T>::fetch_and(T) and std::atomic<T>::fetch_or(T) are "
        "required by packed_spinlock");
        // index must be in range: a zero mask_ would make this lock a no-op.
        assert(index >= 0 && mask_ != 0);
        // fetch_or returns the previous value: if our bit was clear before,
        // we have just acquired the lock.
        return (bits_.fetch_or(mask_, std::memory_order_acquire) & mask_) == 0;
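        // Worked example (illustrative values): with mask_ == 0b0100 and
        // prior bits 0b0011, fetch_or returns 0b0011, and 0b0011 & 0b0100 == 0,
        // so the lock was free and is now ours. Had bit 0b0100 already been
        // set, the OR would be a no-op on it and try_lock would report failure.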
            // Spin with relaxed, read-only loads while the lock is held:
            // this avoids read-modify-write traffic that could not possibly
            // acquire the lock, reducing cache-coherency churn.
            while ((bits_.load(std::memory_order_relaxed) & mask_) != 0)
                spin_pause();
        // Clear only our bit; neighboring locks packed into the same word
        // are untouched.
        bits_.fetch_and(~mask_, std::memory_order_release);
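// Usage sketch (illustrative only, not part of the original header): several
// packed_spinlock handles can share one atomic word, each guarding its own
// bit, so up to 64 locks fit in a single std::uint64_t.
#include <cstdint>

inline void
packed_spinlock_usage_example()
{
    static std::atomic<std::uint64_t> word{0};

    packed_spinlock lk(word, 5);  // CTAD: packed_spinlock<std::uint64_t>
    lk.lock();
    // ... critical section guarded by bit 5; other bits lock independently ...
    lk.unlock();
}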
    static_assert(std::is_unsigned_v<T>);
        T expected = 0;

        // Operands reconstructed: any nonzero value marks the lock as held.
        // A weak CAS suffices, since a spurious failure just looks like a
        // busy lock and callers retry.
        return lock_.compare_exchange_weak(
            expected,
            std::numeric_limits<T>::max(),
            std::memory_order_acquire,
            std::memory_order_relaxed);
            // Read-only spin while the lock is held, mirroring
            // packed_spinlock::lock().
            while (lock_.load(std::memory_order_relaxed) != 0)
                spin_pause();
        // A plain release store drops the lock; no read-modify-write is
        // needed because the holder is exclusive.
        lock_.store(0, std::memory_order_release);
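// Usage sketch (illustrative only, not part of the original header):
// spinlock models BasicLockable, so it composes with std::lock_guard. The
// caller owns the atomic that stores the lock state.
#include <cstdint>
#include <mutex>

inline void
spinlock_usage_example()
{
    static std::atomic<std::uint8_t> flag{0};

    spinlock lk(flag);  // CTAD deduces spinlock<std::uint8_t>
    {
        std::lock_guard guard(lk);  // lock() on entry, unlock() at scope exit
        // ... critical section ...
    }
}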