author    Richard Henderson <richard.henderson@linaro.org>    2023-05-18 08:22:51 -0700
committer Richard Henderson <richard.henderson@linaro.org>    2023-05-23 16:51:18 -0700
commit    412db3d5b54ff4577f02aad7918c85d1b9ee7ab3 (patch)
tree      034bee2a49a3cb869ee5a69fa2c90c19e5af7395
parent    0dd0c7fa2055d5f95413b510386753bb00d61202 (diff)
include/host: Split out atomic128-cas.h
Separates the aarch64-specific portion into its own file.

Reviewed-by: Alex Bennée <alex.bennee@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
-rw-r--r--  host/include/aarch64/host/atomic128-cas.h   43
-rw-r--r--  host/include/generic/host/atomic128-cas.h   43
-rw-r--r--  include/qemu/atomic128.h                     55
3 files changed, 87 insertions(+), 54 deletions(-)
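
For context, the interface being relocated is used by callers roughly as in the sketch below. This is illustrative only and not part of the patch: it assumes a QEMU translation unit with the usual include paths, and the helper name try_swap16() is made up for the example; atomic16_cmpxchg(), HAVE_CMPXCHG128 and int128_eq() are the existing QEMU symbols involved.

    /*
     * Illustrative sketch, not part of this commit: caller-side use of
     * atomic16_cmpxchg().  Assumes a normal QEMU build environment; the
     * helper name try_swap16() is hypothetical.
     */
    #include "qemu/osdep.h"
    #include "qemu/atomic128.h"

    static bool try_swap16(Int128 *mem, Int128 expected, Int128 desired)
    {
        if (HAVE_CMPXCHG128) {
            /*
             * atomic16_cmpxchg() returns the value observed at *mem;
             * the exchange succeeded iff that value equals 'expected'.
             */
            Int128 old = atomic16_cmpxchg(mem, expected, desired);
            return int128_eq(old, expected);
        }
        /* Hosts without a 128-bit CAS must take a slower fallback path. */
        return false;
    }

Whether the fast path exists is decided at compile time by whichever header variant is picked up, which is exactly what the split below arranges.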
diff --git a/host/include/aarch64/host/atomic128-cas.h b/host/include/aarch64/host/atomic128-cas.h
new file mode 100644
index 0000000000..80de58e06d
--- /dev/null
+++ b/host/include/aarch64/host/atomic128-cas.h
@@ -0,0 +1,43 @@
+/*
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ * Compare-and-swap for 128-bit atomic operations, AArch64 version.
+ *
+ * Copyright (C) 2018, 2023 Linaro, Ltd.
+ *
+ * See docs/devel/atomics.rst for discussion about the guarantees each
+ * atomic primitive is meant to provide.
+ */
+
+#ifndef AARCH64_ATOMIC128_CAS_H
+#define AARCH64_ATOMIC128_CAS_H
+
+/* Through gcc 10, aarch64 has no support for 128-bit atomics. */
+#if defined(CONFIG_ATOMIC128) || defined(CONFIG_CMPXCHG128)
+#include "host/include/generic/host/atomic128-cas.h"
+#else
+static inline Int128 atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new)
+{
+ uint64_t cmpl = int128_getlo(cmp), cmph = int128_gethi(cmp);
+ uint64_t newl = int128_getlo(new), newh = int128_gethi(new);
+ uint64_t oldl, oldh;
+ uint32_t tmp;
+
+ asm("0: ldaxp %[oldl], %[oldh], %[mem]\n\t"
+ "cmp %[oldl], %[cmpl]\n\t"
+ "ccmp %[oldh], %[cmph], #0, eq\n\t"
+ "b.ne 1f\n\t"
+ "stlxp %w[tmp], %[newl], %[newh], %[mem]\n\t"
+ "cbnz %w[tmp], 0b\n"
+ "1:"
+ : [mem] "+m"(*ptr), [tmp] "=&r"(tmp),
+ [oldl] "=&r"(oldl), [oldh] "=&r"(oldh)
+ : [cmpl] "r"(cmpl), [cmph] "r"(cmph),
+ [newl] "r"(newl), [newh] "r"(newh)
+ : "memory", "cc");
+
+ return int128_make128(oldl, oldh);
+}
+# define HAVE_CMPXCHG128 1
+#endif
+
+#endif /* AARCH64_ATOMIC128_CAS_H */
diff --git a/host/include/generic/host/atomic128-cas.h b/host/include/generic/host/atomic128-cas.h
new file mode 100644
index 0000000000..513622fe34
--- /dev/null
+++ b/host/include/generic/host/atomic128-cas.h
@@ -0,0 +1,43 @@
+/*
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ * Compare-and-swap for 128-bit atomic operations, generic version.
+ *
+ * Copyright (C) 2018, 2023 Linaro, Ltd.
+ *
+ * See docs/devel/atomics.rst for discussion about the guarantees each
+ * atomic primitive is meant to provide.
+ */
+
+#ifndef HOST_ATOMIC128_CAS_H
+#define HOST_ATOMIC128_CAS_H
+
+#if defined(CONFIG_ATOMIC128)
+static inline Int128 atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new)
+{
+ Int128Alias r, c, n;
+
+ c.s = cmp;
+ n.s = new;
+ r.i = qatomic_cmpxchg__nocheck((__int128_t *)ptr, c.i, n.i);
+ return r.s;
+}
+# define HAVE_CMPXCHG128 1
+#elif defined(CONFIG_CMPXCHG128)
+static inline Int128 atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new)
+{
+ Int128Alias r, c, n;
+
+ c.s = cmp;
+ n.s = new;
+ r.i = __sync_val_compare_and_swap_16((__int128_t *)ptr, c.i, n.i);
+ return r.s;
+}
+# define HAVE_CMPXCHG128 1
+#else
+/* Fallback definition that must be optimized away, or error. */
+Int128 QEMU_ERROR("unsupported atomic")
+ atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new);
+# define HAVE_CMPXCHG128 0
+#endif
+
+#endif /* HOST_ATOMIC128_CAS_H */
diff --git a/include/qemu/atomic128.h b/include/qemu/atomic128.h
index d0ba0b9c65..10a2322c44 100644
--- a/include/qemu/atomic128.h
+++ b/include/qemu/atomic128.h
@@ -41,60 +41,7 @@
* Therefore, special case each platform.
*/
-#if defined(CONFIG_ATOMIC128)
-static inline Int128 atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new)
-{
- Int128Alias r, c, n;
-
- c.s = cmp;
- n.s = new;
- r.i = qatomic_cmpxchg__nocheck((__int128_t *)ptr, c.i, n.i);
- return r.s;
-}
-# define HAVE_CMPXCHG128 1
-#elif defined(CONFIG_CMPXCHG128)
-static inline Int128 atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new)
-{
- Int128Alias r, c, n;
-
- c.s = cmp;
- n.s = new;
- r.i = __sync_val_compare_and_swap_16((__int128_t *)ptr, c.i, n.i);
- return r.s;
-}
-# define HAVE_CMPXCHG128 1
-#elif defined(__aarch64__)
-/* Through gcc 8, aarch64 has no support for 128-bit at all. */
-static inline Int128 atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new)
-{
- uint64_t cmpl = int128_getlo(cmp), cmph = int128_gethi(cmp);
- uint64_t newl = int128_getlo(new), newh = int128_gethi(new);
- uint64_t oldl, oldh;
- uint32_t tmp;
-
- asm("0: ldaxp %[oldl], %[oldh], %[mem]\n\t"
- "cmp %[oldl], %[cmpl]\n\t"
- "ccmp %[oldh], %[cmph], #0, eq\n\t"
- "b.ne 1f\n\t"
- "stlxp %w[tmp], %[newl], %[newh], %[mem]\n\t"
- "cbnz %w[tmp], 0b\n"
- "1:"
- : [mem] "+m"(*ptr), [tmp] "=&r"(tmp),
- [oldl] "=&r"(oldl), [oldh] "=&r"(oldh)
- : [cmpl] "r"(cmpl), [cmph] "r"(cmph),
- [newl] "r"(newl), [newh] "r"(newh)
- : "memory", "cc");
-
- return int128_make128(oldl, oldh);
-}
-# define HAVE_CMPXCHG128 1
-#else
-/* Fallback definition that must be optimized away, or error. */
-Int128 QEMU_ERROR("unsupported atomic")
- atomic16_cmpxchg(Int128 *ptr, Int128 cmp, Int128 new);
-# define HAVE_CMPXCHG128 0
-#endif /* Some definition for HAVE_CMPXCHG128 */
-
+#include "host/atomic128-cas.h"
#if defined(CONFIG_ATOMIC128)
static inline Int128 atomic16_read(Int128 *ptr)