path: root/arch/alpha/include/asm/cmpxchg.h
author		Daniel Baumann <daniel.baumann@progress-linux.org>	2024-04-11 08:27:49 +0000
committer	Daniel Baumann <daniel.baumann@progress-linux.org>	2024-04-11 08:27:49 +0000
commit		ace9429bb58fd418f0c81d4c2835699bddf6bde6 (patch)
tree		b2d64bc10158fdd5497876388cd68142ca374ed3 /arch/alpha/include/asm/cmpxchg.h
parent		Initial commit. (diff)
download	linux-ace9429bb58fd418f0c81d4c2835699bddf6bde6.tar.xz
download	linux-ace9429bb58fd418f0c81d4c2835699bddf6bde6.zip
Adding upstream version 6.6.15. (upstream/6.6.15)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'arch/alpha/include/asm/cmpxchg.h')
-rw-r--r--	arch/alpha/include/asm/cmpxchg.h	76
1 file changed, 76 insertions, 0 deletions
diff --git a/arch/alpha/include/asm/cmpxchg.h b/arch/alpha/include/asm/cmpxchg.h
new file mode 100644
index 0000000000..91d4a4d925
--- /dev/null
+++ b/arch/alpha/include/asm/cmpxchg.h
@@ -0,0 +1,76 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ALPHA_CMPXCHG_H
+#define _ALPHA_CMPXCHG_H
+
+/*
+ * Atomic exchange routines.
+ */
+
+#define ____xchg(type, args...) __arch_xchg ## type ## _local(args)
+#define ____cmpxchg(type, args...) __cmpxchg ## type ## _local(args)
+#include <asm/xchg.h>
+
+#define xchg_local(ptr, x) \
+({ \
+ __typeof__(*(ptr)) _x_ = (x); \
+ (__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\
+ sizeof(*(ptr))); \
+})
+
+#define arch_cmpxchg_local(ptr, o, n) \
+({ \
+ __typeof__(*(ptr)) _o_ = (o); \
+ __typeof__(*(ptr)) _n_ = (n); \
+ (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
+ (unsigned long)_n_, \
+ sizeof(*(ptr))); \
+})
+
+#define arch_cmpxchg64_local(ptr, o, n) \
+({ \
+ BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
+ cmpxchg_local((ptr), (o), (n)); \
+})
+
+#undef ____xchg
+#undef ____cmpxchg
+#define ____xchg(type, args...) __arch_xchg ##type(args)
+#define ____cmpxchg(type, args...) __cmpxchg ##type(args)
+#include <asm/xchg.h>
+
+/*
+ * The leading and the trailing memory barriers guarantee that these
+ * operations are fully ordered.
+ */
+#define arch_xchg(ptr, x) \
+({ \
+ __typeof__(*(ptr)) __ret; \
+ __typeof__(*(ptr)) _x_ = (x); \
+ smp_mb(); \
+ __ret = (__typeof__(*(ptr))) \
+ __arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
+ smp_mb(); \
+ __ret; \
+})
+
+#define arch_cmpxchg(ptr, o, n) \
+({ \
+ __typeof__(*(ptr)) __ret; \
+ __typeof__(*(ptr)) _o_ = (o); \
+ __typeof__(*(ptr)) _n_ = (n); \
+ smp_mb(); \
+ __ret = (__typeof__(*(ptr))) __cmpxchg((ptr), \
+ (unsigned long)_o_, (unsigned long)_n_, sizeof(*(ptr)));\
+ smp_mb(); \
+ __ret; \
+})
+
+#define arch_cmpxchg64(ptr, o, n) \
+({ \
+ BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
+ arch_cmpxchg((ptr), (o), (n)); \
+})
+
+#undef ____cmpxchg
+
+#endif /* _ALPHA_CMPXCHG_H */
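
Usage note (not part of this commit): arch_cmpxchg() above is fully ordered
because of the smp_mb() calls on either side of the low-level operation, and
generic kernel code normally reaches it through the cmpxchg() wrapper pulled
in by <linux/atomic.h>. Below is a minimal sketch of the usual
compare-and-swap retry pattern, assuming kernel context; the names
demo_counter and demo_add are hypothetical.

/*
 * Illustrative sketch only -- not part of the diff above.
 * A compare-and-swap retry loop built on the generic cmpxchg()
 * wrapper, which on Alpha maps onto the arch_cmpxchg() defined here.
 */
#include <linux/atomic.h>
#include <linux/compiler.h>

static unsigned long demo_counter;	/* hypothetical shared counter */

static void demo_add(unsigned long delta)
{
	unsigned long old, new;

	do {
		old = READ_ONCE(demo_counter);
		new = old + delta;
		/* cmpxchg() returns the value it observed; retry if we raced */
	} while (cmpxchg(&demo_counter, old, new) != old);
}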