[PATCH] arm64/entry: deduplicate SW PAN entry/exit routines

Ard Biesheuvel ardb at kernel.org
Mon Jul 6 11:39:47 EDT 2020


On Mon, 6 Jul 2020 at 18:35, Ard Biesheuvel <ardb at kernel.org> wrote:
>
> Factor the 12 copies of the SW PAN entry and exit code into callable
> subroutines, and use alternatives patching to either emit a 'bl'
> instruction to call them, or a NOP if h/w PAN is found to be available
> at runtime.
>
> Signed-off-by: Ard Biesheuvel <ardb at kernel.org>
> ---
>  arch/arm64/kernel/entry.S | 96 +++++++++++++++++++--------------------
>  1 file changed, 48 insertions(+), 48 deletions(-)
>
> diff --git a/arch/arm64/kernel/entry.S b/arch/arm64/kernel/entry.S
> index 5304d193c79d..96a9fb2ed49f 100644
> --- a/arch/arm64/kernel/entry.S
> +++ b/arch/arm64/kernel/entry.S
> @@ -209,28 +209,9 @@ alternative_cb_end
>         add     x29, sp, #S_STACKFRAME
>
>  #ifdef CONFIG_ARM64_SW_TTBR0_PAN
> -       /*
> -        * Set the TTBR0 PAN bit in SPSR. When the exception is taken from
> -        * EL0, there is no need to check the state of TTBR0_EL1 since
> -        * accesses are always enabled.
> -        * Note that the meaning of this bit differs from the ARMv8.1 PAN
> -        * feature as all TTBR0_EL1 accesses are disabled, not just those to
> -        * user mappings.
> -        */
> -alternative_if ARM64_HAS_PAN
> -       b       1f                              // skip TTBR0 PAN
> +alternative_if_not ARM64_HAS_PAN
> +       bl      __swpan_entry_el\el
>  alternative_else_nop_endif
> -
> -       .if     \el != 0
> -       mrs     x21, ttbr0_el1
> -       tst     x21, #TTBR_ASID_MASK            // Check for the reserved ASID
> -       orr     x23, x23, #PSR_PAN_BIT          // Set the emulated PAN in the saved SPSR
> -       b.eq    1f                              // TTBR0 access already disabled
> -       and     x23, x23, #~PSR_PAN_BIT         // Clear the emulated PAN in the saved SPSR
> -       .endif
> -
> -       __uaccess_ttbr0_disable x21
> -1:
>  #endif
>
>         stp     x22, x23, [sp, #S_PC]
> @@ -284,34 +265,9 @@ alternative_else_nop_endif
>         .endif
>
>  #ifdef CONFIG_ARM64_SW_TTBR0_PAN
> -       /*
> -        * Restore access to TTBR0_EL1. If returning to EL0, no need for SPSR
> -        * PAN bit checking.
> -        */
> -alternative_if ARM64_HAS_PAN
> -       b       2f                              // skip TTBR0 PAN
> +alternative_if_not ARM64_HAS_PAN
> +       bl      __swpan_exit_el\el
>  alternative_else_nop_endif
> -
> -       .if     \el != 0
> -       tbnz    x22, #22, 1f                    // Skip re-enabling TTBR0 access if the PSR_PAN_BIT is set
> -       .endif
> -
> -       __uaccess_ttbr0_enable x0, x1
> -
> -       .if     \el == 0
> -       /*
> -        * Enable errata workarounds only if returning to user. The only
> -        * workaround currently required for TTBR0_EL1 changes are for the
> -        * Cavium erratum 27456 (broadcast TLBI instructions may cause I-cache
> -        * corruption).
> -        */
> -       bl      post_ttbr_update_workaround
> -       .endif
> -1:
> -       .if     \el != 0
> -       and     x22, x22, #~PSR_PAN_BIT         // ARMv8.0 CPUs do not understand this bit
> -       .endif
> -2:
>  #endif
>
>         .if     \el == 0
> @@ -391,6 +347,50 @@ alternative_insn eret, nop, ARM64_UNMAP_KERNEL_AT_EL0
>         sb
>         .endm
>
> +#ifdef CONFIG_ARM64_SW_TTBR0_PAN
> +       /*
> +        * Set the TTBR0 PAN bit in SPSR. When the exception is taken from
> +        * EL0, there is no need to check the state of TTBR0_EL1 since
> +        * accesses are always enabled.
> +        * Note that the meaning of this bit differs from the ARMv8.1 PAN
> +        * feature as all TTBR0_EL1 accesses are disabled, not just those to
> +        * user mappings.
> +        */
> +SYM_CODE_START_LOCAL(__swpan_entry_el1)
> +       mrs     x21, ttbr0_el1
> +       tst     x21, #TTBR_ASID_MASK            // Check for the reserved ASID
> +       orr     x23, x23, #PSR_PAN_BIT          // Set the emulated PAN in the saved SPSR
> +       b.eq    1f                              // TTBR0 access already disabled
> +       and     x23, x23, #~PSR_PAN_BIT         // Clear the emulated PAN in the saved SPSR
> +SYM_INNER_LABEL(__swpan_entry_el0, SYM_L_LOCAL)
> +       __uaccess_ttbr0_disable x21
> +1:     ret
> +SYM_CODE_END(__swpan_entry_el1)
> +
> +       /*
> +        * Restore access to TTBR0_EL1. If returning to EL0, no need for SPSR
> +        * PAN bit checking.
> +        */
> +SYM_CODE_START_LOCAL(__swpan_exit_el1)
> +       tbnz    x22, #22, 1f                    // Skip re-enabling TTBR0 access if the PSR_PAN_BIT is set
> +       __uaccess_ttbr0_enable x0, x1
> +1:     and     x22, x22, #~PSR_PAN_BIT         // ARMv8.0 CPUs do not understand this bit
> +       ret
> +SYM_CODE_END(__swpan_exit_el1)
> +
> +SYM_CODE_START_LOCAL(__swpan_exit_el0)
> +       __uaccess_ttbr0_enable x0, x1
> +       /*
> +        * Enable errata workarounds only if returning to user. The only
> +        * workaround currently required for TTBR0_EL1 changes are for the
> +        * Cavium erratum 27456 (broadcast TLBI instructions may cause I-cache
> +        * corruption).
> +        */
> +       bl      post_ttbr_update_workaround
> +       ret

Oops - this should be a tail call as lr is obviously clobbered at this point.
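
Something along these lines should work instead - untested sketch, same code
as above, but with the bl/ret pair replaced by a tail call:

SYM_CODE_START_LOCAL(__swpan_exit_el0)
        __uaccess_ttbr0_enable x0, x1
        /*
         * Enable errata workarounds only if returning to user. The only
         * workaround currently required for TTBR0_EL1 changes are for the
         * Cavium erratum 27456 (broadcast TLBI instructions may cause I-cache
         * corruption).
         */
        b       post_ttbr_update_workaround     // tail call: the workaround's
                                                // own ret returns straight to
                                                // the kernel_exit call site
SYM_CODE_END(__swpan_exit_el0)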

> +SYM_CODE_END(__swpan_exit_el0)
> +#endif
> +
>         .macro  irq_stack_entry
>         mov     x19, sp                 // preserve the original sp
>  #ifdef CONFIG_SHADOW_CALL_STACK
> --
> 2.20.1
>
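
As a side note, each kernel_entry/kernel_exit instantiation now carries only
the call site, and the alternatives framework replaces the 'bl' with a NOP on
CPUs where hardware PAN is detected. Purely as an illustration, the \el == 1
entry case expands to roughly:

alternative_if_not ARM64_HAS_PAN
        bl      __swpan_entry_el1               // patched to a NOP at boot if
                                                // the ARM64_HAS_PAN capability
                                                // is present
alternative_else_nop_endif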


