Contributors: 12
Author Tokens Token Proportion Commits Commit Proportion
Peter Zijlstra 165 68.18% 1 6.67%
Uros Bizjak 25 10.33% 3 20.00%
Mark Rutland 18 7.44% 2 13.33%
Andi Kleen 16 6.61% 1 6.67%
Jeff Dike 6 2.48% 1 6.67%
H. Peter Anvin 3 1.24% 1 6.67%
Mathieu Desnoyers 2 0.83% 1 6.67%
Jeremy Fitzhardinge 2 0.83% 1 6.67%
Dmitriy Vyukov 2 0.83% 1 6.67%
Borislav Petkov 1 0.41% 1 6.67%
Christoph Lameter 1 0.41% 1 6.67%
Greg Kroah-Hartman 1 0.41% 1 6.67%
Total 242 15


/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

/*
 * On x86-64 a 64-bit cmpxchg is just the native word-size arch_cmpxchg();
 * this wrapper only adds a compile-time check that the pointed-to object
 * really is 8 bytes wide.  Evaluates to the value previously in *ptr.
 */
#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

/*
 * Non-LOCK'd (CPU-local) variant of arch_cmpxchg64(); same 8-byte
 * size check, forwards to arch_cmpxchg_local().
 */
#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
})

/*
 * try_cmpxchg form: @po points at the expected old value and is updated
 * with the observed value on failure (semantics provided by
 * arch_try_cmpxchg()).  Only adds the 8-byte size check here.
 */
#define arch_try_cmpxchg64(ptr, po, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg((ptr), (po), (n));				\
})

/*
 * CPU-local try_cmpxchg variant; 8-byte size check plus forward to
 * arch_try_cmpxchg_local().
 */
#define arch_try_cmpxchg64_local(ptr, po, n)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg_local((ptr), (po), (n));			\
})

/*
 * View a u128 as two u64 halves so the 128-bit value can be split
 * across the register pairs CMPXCHG16B uses (RDX:RAX / RCX:RBX).
 * On little-endian x86, .low overlays the least-significant 8 bytes.
 */
union __u128_halves {
	u128 full;
	struct {
		u64 low, high;
	};
};

/*
 * 128-bit compare-and-exchange via CMPXCHG16B: the CPU compares
 * RDX:RAX ("+a"/"+d", loaded from _old) with the 16-byte memory
 * operand and, if equal, stores RCX:RBX ("b"/"c", loaded from _new)
 * into it; either way RDX:RAX ends up holding the value that was in
 * memory.  @_lock is pasted in front of the mnemonic: LOCK_PREFIX for
 * the SMP-safe form, empty for the CPU-local form.  Evaluates to the
 * previous memory value (equal to _old iff the exchange happened).
 * The "memory" clobber orders this against surrounding accesses.
 */
#define __arch_cmpxchg128(_ptr, _old, _new, _lock)			\
({									\
	union __u128_halves o = { .full = (_old), },			\
			    n = { .full = (_new), };			\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})

/*
 * SMP-safe 128-bit cmpxchg: LOCK-prefixed CMPXCHG16B.  Returns the
 * value that was in *ptr before the operation.  The self-#define below
 * tells the generic cmpxchg layer this arch provides the primitive.
 */
static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);
}
#define arch_cmpxchg128 arch_cmpxchg128

/*
 * CPU-local 128-bit cmpxchg: the trailing empty macro argument makes
 * _lock expand to nothing, i.e. an un-LOCK'd CMPXCHG16B (not atomic
 * against other CPUs).  Returns the previous value of *ptr.
 */
static __always_inline u128 arch_cmpxchg128_local(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new,);
}
#define arch_cmpxchg128_local arch_cmpxchg128_local

/*
 * try_cmpxchg form of the 128-bit compare-and-exchange.  Success is
 * read straight from ZF via CC_SET(e)/CC_OUT(e) instead of comparing
 * values afterwards.  On failure the value actually found in memory
 * (left in RDX:RAX by CMPXCHG16B) is written back through @_oldp so
 * the caller can retry with an updated expectation.  Evaluates to
 * true on success, false on failure; likely()/unlikely() bias code
 * layout toward the uncontended success path.
 */
#define __arch_try_cmpxchg128(_ptr, _oldp, _new, _lock)			\
({									\
	union __u128_halves o = { .full = *(_oldp), },			\
			    n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

/*
 * SMP-safe 128-bit try_cmpxchg: returns true if *ptr matched *oldp and
 * was replaced by @new; otherwise updates *oldp with the observed value
 * and returns false.
 */
static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);
}
#define arch_try_cmpxchg128 arch_try_cmpxchg128

/*
 * CPU-local 128-bit try_cmpxchg: empty _lock argument -> no LOCK
 * prefix, so this is not atomic against other CPUs.  Same success /
 * failure semantics as arch_try_cmpxchg128().
 */
static __always_inline bool arch_try_cmpxchg128_local(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new,);
}
#define arch_try_cmpxchg128_local arch_try_cmpxchg128_local

/*
 * CMPXCHG16B support is advertised by the CX16 CPUID feature bit;
 * callers must check this before using the 128-bit primitives above.
 */
#define system_has_cmpxchg128()		boot_cpu_has(X86_FEATURE_CX16)

#endif /* _ASM_X86_CMPXCHG_64_H */