[PATCH] crypto: arm64/sha256 - fix build when CONFIG_PREEMPT_VOLUNTARY=y
From: Eric Biggers <ebiggers at kernel.org>
Date: Wed May 7 10:09:01 PDT 2025
From: Eric Biggers <ebiggers at google.com>
Fix the build of sha256-ce.S when CONFIG_PREEMPT_VOLUNTARY=y by passing
the correct label to the cond_yield macro. Also adjust the code so that
only one branch instruction is executed per loop iteration when
CONFIG_PREEMPT_VOLUNTARY=n.
Fixes: 6e36be511d28 ("crypto: arm64/sha256 - implement library instead of shash")
Reported-by: kernel test robot <lkp at intel.com>
Closes: https://lore.kernel.org/oe-kbuild-all/202505071811.yYpLUbav-lkp@intel.com/
Signed-off-by: Eric Biggers <ebiggers at google.com>
---
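In case it helps review, here is a C-level sketch of the control flow
after this change. It is illustrative only: sha256_rounds() and
should_yield() are stand-in names, not real kernel APIs, and the real
code is arm64 assembly using the cond_yield macro.

	do {
		sha256_rounds(state, data);	/* one 64-byte block */
		data += 64;
		nblocks--;			/* x2: blocks remaining */
		/*
		 * cond_yield 1f, x5, x6: expands to nothing when
		 * CONFIG_PREEMPT_VOLUNTARY=n, so the cbnz below is the
		 * only branch per iteration in that configuration.
		 */
		if (should_yield())
			break;
	} while (nblocks);			/* cbnz x2, 0b */
	/* 1: st1 {dgav.4s, dgbv.4s}, [x0]; mov x0, x2; ret */
	return nblocks;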
arch/arm64/lib/crypto/sha256-ce.S | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/arch/arm64/lib/crypto/sha256-ce.S b/arch/arm64/lib/crypto/sha256-ce.S
index a8461d6dad634..f3e21c6d87d2e 100644
--- a/arch/arm64/lib/crypto/sha256-ce.S
+++ b/arch/arm64/lib/crypto/sha256-ce.S
@@ -121,14 +121,15 @@ CPU_LE(	rev32		v19.16b, v19.16b	)
 	/* update state */
 	add		dgav.4s, dgav.4s, dg0v.4s
 	add		dgbv.4s, dgbv.4s, dg1v.4s
 
+	/* return early if voluntary preemption is needed */
+	cond_yield	1f, x5, x6
+
 	/* handled all input blocks? */
-	cbz		x2, 1f
-	cond_yield	3f, x5, x6
-	b		0b
+	cbnz		x2, 0b
 
 	/* store new state */
 1:	st1		{dgav.4s, dgbv.4s}, [x0]
 	mov		x0, x2
 	ret
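For context, the C glue code consumes the returned block count with the
usual chunking pattern, so a yield stores the partial state and lets
the caller resume. A sketch (the function name sha256_ce_transform()
is illustrative here; kernel_neon_begin()/kernel_neon_end() and
SHA256_BLOCK_SIZE are the real kernel API and constant):

	do {
		size_t rem;

		kernel_neon_begin();
		rem = sha256_ce_transform(state, data, nblocks);
		kernel_neon_end();
		data += (nblocks - rem) * SHA256_BLOCK_SIZE;
		nblocks = rem;
	} while (nblocks);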
base-commit: 20e9579f11b6cbdf0556d9cd85a0aa7653caf341
--
2.49.0