[RFC PATCH v1 1/2] riscv/cmpxchg: Deduplicate cmpxchg() asm and macros
Guo Ren
guoren at kernel.org
Fri Apr 7 01:29:30 PDT 2023
On Thu, Apr 6, 2023 at 4:20 PM Leonardo Bras <leobras at redhat.com> wrote:
>
> In this header every cmpxchg define (_relaxed, _acquire, _release,
> vanilla) contains its own inline asm, both for 4-byte and 8-byte
> variables, for a total of 8 versions of mostly the same asm.
>
> This is usually bad, as it means any change may need to be made in up
> to 8 different places.
>
> Unify those versions by creating a new define with enough parameters
> to generate any of the previous 8 versions.
>
> Then unify the result under a more general define, and simplify the
> arch_cmpxchg* generation.
>
> (This did not cause any change in the generated asm.)
>
> Signed-off-by: Leonardo Bras <leobras at redhat.com>
> ---
> arch/riscv/include/asm/cmpxchg.h | 184 ++++++-------------------------
> 1 file changed, 36 insertions(+), 148 deletions(-)
>
> diff --git a/arch/riscv/include/asm/cmpxchg.h b/arch/riscv/include/asm/cmpxchg.h
> index 12debce235e52..f88fae357071c 100644
> --- a/arch/riscv/include/asm/cmpxchg.h
> +++ b/arch/riscv/include/asm/cmpxchg.h
> @@ -163,51 +163,23 @@
> * store NEW in MEM. Return the initial value in MEM. Success is
> * indicated by comparing RETURN with OLD.
> */
> -#define __cmpxchg_relaxed(ptr, old, new, size) \
> -({ \
> - __typeof__(ptr) __ptr = (ptr); \
> - __typeof__(*(ptr)) __old = (old); \
> - __typeof__(*(ptr)) __new = (new); \
> - __typeof__(*(ptr)) __ret; \
> - register unsigned int __rc; \
> - switch (size) { \
> - case 4: \
> - __asm__ __volatile__ ( \
> - "0: lr.w %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.w %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" ((long)__old), "rJ" (__new) \
> - : "memory"); \
> - break; \
> - case 8: \
> - __asm__ __volatile__ ( \
> - "0: lr.d %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.d %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" (__old), "rJ" (__new) \
> - : "memory"); \
> - break; \
> - default: \
> - BUILD_BUG(); \
> - } \
> - __ret; \
> -})
>
> -#define arch_cmpxchg_relaxed(ptr, o, n) \
> +#define ____cmpxchg(lr_sfx, sc_sfx, prepend, append, r, rc, p, co, o, n)\
> ({ \
> - __typeof__(*(ptr)) _o_ = (o); \
> - __typeof__(*(ptr)) _n_ = (n); \
> - (__typeof__(*(ptr))) __cmpxchg_relaxed((ptr), \
> - _o_, _n_, sizeof(*(ptr))); \
> + __asm__ __volatile__ ( \
> + prepend \
> + "0: lr" lr_sfx " %0, %2\n" \
> + " bne %0, %z3, 1f\n" \
> + " sc" sc_sfx " %1, %z4, %2\n" \
> + " bnez %1, 0b\n" \
> + append \
> + "1:\n" \
> + : "=&r" (r), "=&r" (rc), "+A" (*(p)) \
> + : "rJ" (co o), "rJ" (n) \
> + : "memory"); \
> })
>
> -#define __cmpxchg_acquire(ptr, old, new, size) \
> +#define ___cmpxchg(ptr, old, new, size, sc_sfx, prepend, append) \
> ({ \
> __typeof__(ptr) __ptr = (ptr); \
> __typeof__(*(ptr)) __old = (old); \
> @@ -216,28 +188,12 @@
> register unsigned int __rc; \
> switch (size) { \
> case 4: \
> - __asm__ __volatile__ ( \
> - "0: lr.w %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.w %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - RISCV_ACQUIRE_BARRIER \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" ((long)__old), "rJ" (__new) \
> - : "memory"); \
> + ____cmpxchg(".w", ".w" sc_sfx, prepend, append, \
> + __ret, __rc, __ptr, (long), __old, __new); \
> break; \
> case 8: \
> - __asm__ __volatile__ ( \
> - "0: lr.d %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.d %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - RISCV_ACQUIRE_BARRIER \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" (__old), "rJ" (__new) \
> - : "memory"); \
> + ____cmpxchg(".d", ".d" sc_sfx, prepend, append, \
> + __ret, __rc, __ptr, /**/, __old, __new); \
> break; \
> default: \
> BUILD_BUG(); \
> @@ -245,105 +201,37 @@
> __ret; \
> })
>
> -#define arch_cmpxchg_acquire(ptr, o, n) \
> +#define __cmpxchg_relaxed(ptr, old, new, size) \
> + ___cmpxchg(ptr, old, new, size, "", "", "")
> +
> +#define _arch_cmpxchg(order, ptr, o, n) \
> ({ \
> __typeof__(*(ptr)) _o_ = (o); \
> __typeof__(*(ptr)) _n_ = (n); \
> - (__typeof__(*(ptr))) __cmpxchg_acquire((ptr), \
> - _o_, _n_, sizeof(*(ptr))); \
> + (__typeof__(*(ptr))) __cmpxchg ## order((ptr), _o_, _n_, \
> + sizeof(*(ptr))); \
> })
>
> +#define arch_cmpxchg_relaxed(ptr, o, n) \
> + _arch_cmpxchg(_relaxed, ptr, o, n)
> +
> +#define __cmpxchg_acquire(ptr, old, new, size) \
> + ___cmpxchg(ptr, old, new, size, "", "", RISCV_ACQUIRE_BARRIER)
> +
> +#define arch_cmpxchg_acquire(ptr, o, n) \
> + _arch_cmpxchg(_acquire, ptr, o, n)
> +
> #define __cmpxchg_release(ptr, old, new, size) \
> -({ \
> - __typeof__(ptr) __ptr = (ptr); \
> - __typeof__(*(ptr)) __old = (old); \
> - __typeof__(*(ptr)) __new = (new); \
> - __typeof__(*(ptr)) __ret; \
> - register unsigned int __rc; \
> - switch (size) { \
> - case 4: \
> - __asm__ __volatile__ ( \
> - RISCV_RELEASE_BARRIER \
> - "0: lr.w %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.w %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" ((long)__old), "rJ" (__new) \
> - : "memory"); \
> - break; \
> - case 8: \
> - __asm__ __volatile__ ( \
> - RISCV_RELEASE_BARRIER \
> - "0: lr.d %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.d %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" (__old), "rJ" (__new) \
> - : "memory"); \
> - break; \
> - default: \
> - BUILD_BUG(); \
> - } \
> - __ret; \
> -})
> + ___cmpxchg(ptr, old, new, size, "", RISCV_RELEASE_BARRIER, "")
>
> #define arch_cmpxchg_release(ptr, o, n) \
> -({ \
> - __typeof__(*(ptr)) _o_ = (o); \
> - __typeof__(*(ptr)) _n_ = (n); \
> - (__typeof__(*(ptr))) __cmpxchg_release((ptr), \
> - _o_, _n_, sizeof(*(ptr))); \
> -})
> + _arch_cmpxchg(_release, ptr, o, n)
>
> #define __cmpxchg(ptr, old, new, size) \
> -({ \
> - __typeof__(ptr) __ptr = (ptr); \
> - __typeof__(*(ptr)) __old = (old); \
> - __typeof__(*(ptr)) __new = (new); \
> - __typeof__(*(ptr)) __ret; \
> - register unsigned int __rc; \
> - switch (size) { \
> - case 4: \
> - __asm__ __volatile__ ( \
> - "0: lr.w %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.w.rl %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - " fence rw, rw\n" \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" ((long)__old), "rJ" (__new) \
> - : "memory"); \
> - break; \
> - case 8: \
> - __asm__ __volatile__ ( \
> - "0: lr.d %0, %2\n" \
> - " bne %0, %z3, 1f\n" \
> - " sc.d.rl %1, %z4, %2\n" \
> - " bnez %1, 0b\n" \
> - " fence rw, rw\n" \
> - "1:\n" \
> - : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
> - : "rJ" (__old), "rJ" (__new) \
> - : "memory"); \
> - break; \
> - default: \
> - BUILD_BUG(); \
> - } \
> - __ret; \
> -})
> + ___cmpxchg(ptr, old, new, size, ".rl", "", " fence rw, rw\n")
>
> #define arch_cmpxchg(ptr, o, n) \
> -({ \
> - __typeof__(*(ptr)) _o_ = (o); \
> - __typeof__(*(ptr)) _n_ = (n); \
> - (__typeof__(*(ptr))) __cmpxchg((ptr), \
> - _o_, _n_, sizeof(*(ptr))); \
> -})
> + _arch_cmpxchg(, ptr, o, n)
>
> #define arch_cmpxchg_local(ptr, o, n) \
> (__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
> --
> 2.40.0
>
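To make the new layering concrete, this is roughly how one caller
expands under the patch (traced by hand from the macros above, not
compiler output; all names are the patch's own):

    arch_cmpxchg_acquire(ptr, o, n)
      -> _arch_cmpxchg(_acquire, ptr, o, n)
      -> (__typeof__(*(ptr))) __cmpxchg_acquire((ptr), _o_, _n_,
                                                sizeof(*(ptr)))
      -> ___cmpxchg(ptr, _o_, _n_, size, "", "", RISCV_ACQUIRE_BARRIER)
      -> /* size == 4 */
         ____cmpxchg(".w", ".w" "", "", RISCV_ACQUIRE_BARRIER,
                     __ret, __rc, __ptr, (long), __old, __new)

So the ordering variants differ only in the sc suffix and the
prepend/append strings, and the 4-byte/8-byte cases only in the lr/sc
width suffix and the (long) cast applied to the old value.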
One patch is much easier to review :)
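As a side note for readers less familiar with the LR/SC pattern, here
is a minimal userspace sketch of the same parameterization idea. It is
not the kernel header: MY_CMPXCHG and the my_cmpxchg* wrappers are
made-up names for illustration, the barriers are written out literally
instead of using RISCV_ACQUIRE_BARRIER, and it assumes a riscv64 gcc
toolchain with the A extension.

/* cmpxchg_sketch.c - build with riscv64-linux-gnu-gcc -O2 */
#include <stdint.h>
#include <stdio.h>

/*
 * One parameterized macro emits the LR/SC retry loop; the width suffix
 * and the prepend/append strings select the variant, mirroring the
 * patch's ____cmpxchg().
 */
#define MY_CMPXCHG(lr_sfx, sc_sfx, prepend, append, ptr, old, new)	\
({									\
	__typeof__(*(ptr)) __ret;					\
	unsigned int __rc;						\
	__asm__ __volatile__ (						\
		prepend							\
		"0:	lr" lr_sfx " %0, %2\n"				\
		"	bne  %0, %z3, 1f\n"				\
		"	sc" sc_sfx " %1, %z4, %2\n"			\
		"	bnez %1, 0b\n"					\
		append							\
		"1:\n"							\
		: "=&r" (__ret), "=&r" (__rc), "+A" (*(ptr))		\
		: "rJ" (old), "rJ" (new)				\
		: "memory");						\
	__ret;								\
})

/*
 * Each ordering/width variant is then just a different argument set.
 * The (long) cast on the expected value mirrors the patch's 4-byte
 * case: lr.w sign-extends the loaded word, so the comparison operand
 * is widened to long as well (signed 32-bit values here).
 */
#define my_cmpxchg32_relaxed(p, o, n) \
	MY_CMPXCHG(".w", ".w", "", "", p, (long)(o), n)
#define my_cmpxchg32_acquire(p, o, n) \
	MY_CMPXCHG(".w", ".w", "", "	fence r, rw\n", p, (long)(o), n)
#define my_cmpxchg64(p, o, n) \
	MY_CMPXCHG(".d", ".d.rl", "", "	fence rw, rw\n", p, o, n)

int main(void)
{
	int32_t v32 = 1;
	uint64_t v64 = 5;

	/* Returns the old value; the store happens only on a match. */
	printf("%d\n", my_cmpxchg32_acquire(&v32, 1, 2));	/* 1 */
	printf("%d\n", v32);					/* 2 */
	printf("%llu\n",
	       (unsigned long long)my_cmpxchg64(&v64, 6UL, 7UL)); /* 5 */
	printf("%llu\n", (unsigned long long)v64);		/* still 5 */
	return 0;
}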
Reviewed-by: Guo Ren <guoren at kernel.org>
--
Best Regards
Guo Ren