Date: Sat, 10 Jan 2026 03:40:06 +0000
On Sat, Jan 10, 2026 at 1:50 AM Frederick Virchanza Gotham wrote:
>
> I think I pretty much have "std::lockfree_2ptrs" finished:
>
> https://godbolt.org/z/ExTMf8sn8
It needs to be adjusted to use the "__sync" built-ins instead of
"__atomic" on the GNU g++ compiler, something like this:
https://godbolt.org/z/EvKqfGYWW
but going through the __sync built-ins, you needlessly use the
cmpxchg16b ('cx16') instruction even for simpler operations like
'load' and 'store'. I think maybe 'libatomic' needs to be patched to
expose more of the inline functions.
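To illustrate the difference (just a rough sketch, assuming x86-64
with -mcx16; the two function names below are made up for the example,
and the __atomic version needs to link against libatomic):
using u128 = unsigned __int128;
// __atomic: for 16 bytes, g++ emits a call into libatomic (__atomic_load_16)
u128 load_via_atomic(u128 const *const p)
{
return __atomic_load_n(p, __ATOMIC_SEQ_CST);
}
// __sync: the only 16-byte primitive is compare-and-swap, so even a
// plain load ends up as a 'lock cmpxchg16b'
u128 load_via_sync(u128 *const p)
{
return __sync_val_compare_and_swap(p, (u128)0, (u128)0);
}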
And here it is copy-pasted:
#include <climits> // CHAR_BIT might be 16
#include <cstddef> // size_t
#include <cstdint> // intptr_t, uint16_t, uint32_t, uint64_t, uintptr_t
#include <atomic> // memory_order
#include <bit> // bit_cast
#include <type_traits> // is_function, is_pointer, is_reference, is_same
template< typename P = void*, typename Q = std::uintptr_t >
class lockfree_2ptr final {
// Next two lines make sure that P and Q are either pointers or intptr_t or uintptr_t
static_assert( std::is_same_v<P,std::uintptr_t> || std::is_same_v<P,std::intptr_t> || std::is_pointer_v<P> );
static_assert( std::is_same_v<Q,std::uintptr_t> || std::is_same_v<Q,std::intptr_t> || std::is_pointer_v<Q> );
// It's possible for one or both pointers to be a pointer
// to a function, and in this scenario, we must ensure
// that a code pointer is compatible with a data pointer.
// The following two static_assert's ensure this:
static_assert( (!std::is_function_v< std::remove_pointer_t<P> > &&
!std::is_function_v< std::remove_pointer_t<Q> >)
|| sizeof(void*) == sizeof(void(*)(void)),
"Code pointers cannot be bigger or smaller than data pointers");
static_assert( (!std::is_function_v< std::remove_pointer_t<P> > &&
!std::is_function_v< std::remove_pointer_t<Q> >)
|| alignof(void*) >= alignof(void(*)(void)),
"Code pointers cannot be more strictly aligned than data pointers");
// Figure out what integer size we need to store two pointers
static consteval auto DetermineInt2Ptrs(void) noexcept
{
constexpr std::size_t bits_per_pointer = CHAR_BIT * sizeof(void*);
/**/ if constexpr ( 64u == bits_per_pointer ) return __uint128_t();
else if constexpr ( 32u == bits_per_pointer ) return std::uint64_t();
else if constexpr ( 16u == bits_per_pointer ) return std::uint32_t();
else if constexpr ( 8u == bits_per_pointer ) return std::uint16_t();
else static_assert(false, "Sorry cannot accommodate your Andromedan computer");
}
// Int2Ptrs is the integer type we use for storing the two pointers
typedef decltype(DetermineInt2Ptrs()) Int2Ptrs;
// 'n' is the member object containing the data.
// We keep it aligned to 2 * pointer_size for
// when we do exchanges
alignas(sizeof(Int2Ptrs)) Int2Ptrs n;
public:
// This is the Pair struct we use for
// getting and setting the data, easier
// to keep it aligned to 2 * pointer_size
// for when we have to do exchanges
struct alignas(sizeof(Int2Ptrs)) PtrPair {
P first;
Q second;
_GLIBCXX_ALWAYS_INLINE
constexpr PtrPair(P const p, Q const q) noexcept
: first(p), second(q) {}
_GLIBCXX_ALWAYS_INLINE
constexpr PtrPair(Int2Ptrs const arg) noexcept
{
*this = std::bit_cast<PtrPair>(arg);
}
_GLIBCXX_ALWAYS_INLINE
constexpr auto AsInt(void) const noexcept // returns by value
{
return std::bit_cast<Int2Ptrs>(*this);
}
};
static_assert( sizeof(PtrPair) == sizeof(Int2Ptrs),
"Integer type 'Int2Ptrs' must be exactly the same size as two pointers");
//static_assert( __atomic_always_lock_free(sizeof(Int2Ptrs), 0),
// "The target CPU cannot do lockfree access on two pointers");
public:
// Two constructors:
_GLIBCXX_ALWAYS_INLINE constexpr lockfree_2ptr(void) noexcept : n(0) {}
_GLIBCXX_ALWAYS_INLINE constexpr lockfree_2ptr(PtrPair const arg)
noexcept : n( arg.AsInt() ) {}
// Delete copy-constructor and assignment operator
lockfree_2ptr(lockfree_2ptr const &) = delete;
lockfree_2ptr &operator=(lockfree_2ptr const &) = delete;
_GLIBCXX_ALWAYS_INLINE
void store(PtrPair const arg, std::memory_order const m =
std::memory_order_seq_cst) noexcept
{
(void)m;
__glibcxx_assert(m != std::memory_order_acquire);
__glibcxx_assert(m != std::memory_order_acq_rel);
__glibcxx_assert(m != std::memory_order_consume);
alignas(sizeof(Int2Ptrs)) Int2Ptrs desired = arg.AsInt();
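// __sync has no plain 16-byte store, so emulate one here: read the
// current value with a no-op CAS, then CAS the new value in until we win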
for (; /* ever */ ;)
{
Int2Ptrs old = __sync_val_compare_and_swap(&n, (Int2Ptrs)0, (Int2Ptrs)0);
if ( __sync_bool_compare_and_swap(&n, old, desired) ) return;
}
}
_GLIBCXX_ALWAYS_INLINE
PtrPair load(std::memory_order const m = std::memory_order_seq_cst)
const noexcept
{
(void)m;
__glibcxx_assert(m != std::memory_order_release);
__glibcxx_assert(m != std::memory_order_acq_rel);
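// __sync has no plain 16-byte load either; a CAS whose expected and
// desired values are both zero returns the current value without
// changing it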
return __sync_val_compare_and_swap(const_cast<Int2Ptrs*>(&n),
(Int2Ptrs)0, (Int2Ptrs)0);
}
// Implicit conversion to PtrPair
_GLIBCXX_ALWAYS_INLINE operator PtrPair(void) const noexcept {
return load(); }
// Assignment
_GLIBCXX_ALWAYS_INLINE PtrPair operator=(PtrPair const arg) noexcept
{ store(arg); return arg; }
_GLIBCXX_ALWAYS_INLINE
PtrPair exchange(PtrPair const arg, std::memory_order const m =
std::memory_order_seq_cst) noexcept
{
(void)m;
alignas(sizeof(Int2Ptrs)) Int2Ptrs desired = arg.AsInt();
for (;;)
{
alignas(sizeof(Int2Ptrs)) Int2Ptrs old =
__sync_val_compare_and_swap(&n, (Int2Ptrs)0, (Int2Ptrs)0);
if ( __sync_bool_compare_and_swap(&n, old, desired) ) return old;
}
}
_GLIBCXX_ALWAYS_INLINE
bool compare_exchange_strong(PtrPair &expected, PtrPair const desired,
std::memory_order const m1,
std::memory_order const m2) noexcept
{
(void)m1;
__glibcxx_assert(__is_valid_cmpexch_failure_order(m2));
alignas(sizeof(Int2Ptrs)) Int2Ptrs x = expected.AsInt(); // don't make 'x' const
alignas(sizeof(Int2Ptrs)) Int2Ptrs old =
__sync_val_compare_and_swap(&n, x, desired.AsInt());
if ( old == x ) return true;
expected = old;
return false;
}
_GLIBCXX_ALWAYS_INLINE
bool compare_exchange_strong(PtrPair &expected, PtrPair const desired,
std::memory_order const m =
std::memory_order_seq_cst) noexcept
{
return compare_exchange_strong(expected, desired, m,
__cmpexch_failure_order(m));
}
_GLIBCXX_ALWAYS_INLINE
bool compare_exchange_weak(PtrPair &expected, PtrPair const desired,
std::memory_order const m1,
std::memory_order const m2) noexcept
{
(void)m1;
__glibcxx_assert(__is_valid_cmpexch_failure_order(m2));
alignas(sizeof(Int2Ptrs)) Int2Ptrs x = expected.AsInt(); // don't make 'x' const
alignas(sizeof(Int2Ptrs)) Int2Ptrs old =
__sync_val_compare_and_swap(&n, x, desired.AsInt());
if ( old == x ) return true;
expected = old;
return false;
}
_GLIBCXX_ALWAYS_INLINE
bool compare_exchange_weak(PtrPair &expected, PtrPair const desired,
std::memory_order const m =
std::memory_order_seq_cst) noexcept
{
return compare_exchange_weak(expected, desired, m,
__cmpexch_failure_order(m));
}
#if __glibcxx_atomic_wait
_GLIBCXX_ALWAYS_INLINE
void wait(PtrPair const old, std::memory_order const m =
std::memory_order_seq_cst) const noexcept
{
std::__atomic_wait_address_v(
&n,
old.AsInt(),
[m, this] { return __atomic_load_n(&n, int(m)); });
}
_GLIBCXX_ALWAYS_INLINE
void notify_one(void) noexcept
{
std::__atomic_notify_address(&n, false);
}
_GLIBCXX_ALWAYS_INLINE
void notify_all(void) noexcept
{
std::__atomic_notify_address(&n, true);
}
#endif
private:
// For lockfree containers, you can set the
// first datum and increment/decrement the second datum
template<bool up = true>
_GLIBCXX_ALWAYS_INLINE
Q set_and_IncOrDec(P const arg) noexcept
{
std::memory_order const m1 = std::memory_order_seq_cst,
m2 = __cmpexch_failure_order(m1);
alignas(sizeof(Int2Ptrs)) Int2Ptrs expected = load().AsInt();
for (; /* ever */; )
{
PtrPair desired = expected;
desired.first = arg;
if constexpr ( up ) ++desired.second; else --desired.second;
alignas(sizeof(Int2Ptrs)) Int2Ptrs old =
__sync_val_compare_and_swap(&n, expected, desired.AsInt());
if ( old == expected ) return desired.second;
expected = old;
}
}
public:
// For lockfree containers, you can set the
// first datum and increment the second datum
_GLIBCXX_ALWAYS_INLINE
Q set_and_increment(P const arg) noexcept
{
return set_and_IncOrDec<true >(arg);
}
// For lockfree containers, you can set the
// first datum and decrement the second datum
_GLIBCXX_ALWAYS_INLINE
Q set_and_decrement(P const arg) noexcept
{
return set_and_IncOrDec<false>(arg);
}
};
// ================================= Here comes the test code =================
#include <iostream>
#include <typeinfo>
struct chimeric_ptr {
lockfree_2ptr< void*, void*(*)(std::type_info const&) > ptrs;
template<typename T>
chimeric_ptr(T *const parg)
{
ptrs = { parg, +[](std::type_info const&){ return (void*)nullptr; } };
}
};
struct container {
lockfree_2ptr< void*, std::uintptr_t > ptrs;
void add( void *const arg )
{
ptrs.set_and_increment( arg );
}
void remove( void *const arg )
{
ptrs.set_and_decrement( arg );
}
};
struct something_strange {
lockfree_2ptr< void*, char* > ptrs;
void add( void *const arg )
{
ptrs.set_and_increment( arg );
}
void remove( void *const arg )
{
ptrs.set_and_decrement( arg );
}
};
int main(void)
{
chimeric_ptr var( &std::cout );
container myc;
myc.add( nullptr );
myc.remove( nullptr );
something_strange ss;
ss.add( nullptr );
ss.remove( nullptr );
lockfree_2ptr< void*, void* > vv;
//vv.set_and_increment(nullptr); -- won't work because you cannot increment a void*
}
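And for what it's worth, a rough usage sketch of the
pointer-plus-generation-counter pattern that set_and_increment and
compare_exchange_strong are aimed at (this assumes the class above is
in scope; 'Node' and 'push' are made up just for the illustration):
struct Node { Node *next; };
// Treiber-style push: the generation counter in 'second' guards the
// head pointer against the ABA problem
void push(lockfree_2ptr<Node*, std::uintptr_t> &head, Node *const node)
{
auto expected = head.load(); // PtrPair { top of stack, generation }
for (;;)
{
node->next = expected.first;
decltype(expected) desired(node, expected.second + 1u);
if ( head.compare_exchange_strong(expected, desired) ) return;
// CAS failed: 'expected' has been refreshed with the current pair, so retry
}
}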
Received on 2026-01-10 03:39:15
