[PATCH 8/8] ARM: entry: Make asm coproc dispatch code NWFPE only

Ard Biesheuvel ardb at kernel.org
Tue May 9 05:29:38 PDT 2023


Now that we can dispatch all VFP- and iWMMXT-related undef exceptions
using undef hooks implemented in C code, we no longer need the asm entry
code that takes care of this unless we are using the NWFPE emulator
(CONFIG_FPE_NWFPE). As this means the code is now ARM-only, we can also
remove the Thumb2-specific decorations.

It also means that the non-standard, asm-only calling convention, where
returning via LR means failure and returning via R9 means success, is
now used only on legacy platforms that lack any kind of function return
prediction, avoiding the associated performance impact.
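For comparison, the C code path signals the same success/failure with an
ordinary return value instead of choosing between two return addresses.
A simplified sketch of that dispatch loop follows, loosely modelled on
call_undef_hook() in arch/arm/kernel/traps.c; the list head, locking and
signal delivery details are simplified and the names are illustrative:

  #include <linux/list.h>
  #include <linux/sched/signal.h>
  #include <asm/ptrace.h>
  #include <asm/traps.h>

  /* Illustrative list head; the real one is private to traps.c */
  static LIST_HEAD(example_undef_hooks);

  static void example_dispatch_undef(struct pt_regs *regs, unsigned int instr)
  {
          struct undef_hook *hook;

          list_for_each_entry(hook, &example_undef_hooks, node) {
                  if ((instr & hook->instr_mask) == hook->instr_val &&
                      (regs->ARM_cpsr & hook->cpsr_mask) == hook->cpsr_val &&
                      hook->fn(regs, instr) == 0)
                          return;         /* handled: resume the task */
          }

          force_sig(SIGILL);              /* nothing claimed the instruction */
  }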

Reviewed-by: Linus Walleij <linus.walleij at linaro.org>
Signed-off-by: Ard Biesheuvel <ardb at kernel.org>
---
 arch/arm/kernel/entry-armv.S | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index a3f82f5a9464fb50..2924b49bd93e2cc1 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -454,7 +454,9 @@ __und_usr:
 	tst	r5, #PSR_T_BIT			@ Thumb mode?
 	mov	r1, #2				@ set insn size to 2 for Thumb
 	bne	0f				@ handle as Thumb undef exception
+#ifdef CONFIG_FPE_NWFPE
 	bl	call_fpe			@ does not return on success
+#endif
 	mov	r1, #4				@ set insn size to 4 for ARM
 0:	mov	r0, sp
 	uaccess_disable ip
@@ -463,6 +465,7 @@ __und_usr:
  UNWIND(.fnend)
 ENDPROC(__und_usr)
 
+#ifdef CONFIG_FPE_NWFPE
 /*
  * The out of line fixup for the ldrt instruction below.
  */
@@ -490,7 +493,7 @@ ENDPROC(__und_usr)
  * IRQs enabled, FIQs enabled.
  */
 call_fpe:
-	badr	r9, ret_from_exception		@ r9  = "successful" return address
+	adr	r9, ret_from_exception		@ r9  = "successful" return address
 	mov	r2, r4
 	sub	r4, r4, #4			@ ARM instr at LR - 4
 USERL(	4b,	ldrt r0, [r4])
@@ -509,9 +512,7 @@ ARM_BE8(rev	r0, r0)				@ little endian instruction
 	movscs	r7, r5, lsr #(TIF_USING_IWMMXT + 1)
 	bcs	iwmmxt_task_enable
 #endif
- ARM(	add	pc, pc, r8, lsr #6	)
- THUMB(	lsr	r8, r8, #6		)
- THUMB(	add	pc, r8			)
+	add	pc, pc, r8, lsr #6
 	nop
 
 	ret.w	lr				@ CP#0
@@ -553,6 +554,7 @@ ENTRY(fp_enter)
 ENTRY(no_fp)
 	ret	lr
 ENDPROC(no_fp)
+#endif
 
 	.align	5
 __pabt_usr:
-- 
2.39.2