[boot-wrapper v3 2/4] aarch64: Enable Armv8-R EL2 boot

Luca Fancellu luca.fancellu at arm.com
Wed Jul 31 07:11:01 PDT 2024


When booting on Armv8-R, EL3 is not implemented and the boot-wrapper
is entered at EL2. The Linux kernel needs to be booted at EL1, the
highest exception level at which Armv8-R AArch64 supports the VMSA,
so initialise EL2 and start the kernel in the expected exception
level.
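Entry to the kernel reuses the existing jump_kernel exception-return
path; schematically (an illustrative sketch, not the literal boot.S
code; the entry-point register is hypothetical):

	mov	x4, #SPSR_KERNEL_EL1	// PSTATE for the kernel: EL1h, DAIF masked
	msr	spsr_el2, x4		// PSTATE to restore on exception return
	msr	elr_el2, x19		// x19: kernel entry point (illustrative)
	eret				// leave EL2, enter the kernel at EL1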

To select this behaviour, introduce the 'aarch64-r' value for the
--with-bw-arch configure parameter.
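
For example (together with any usual cross-compilation and kernel
options, which are unchanged):

	./configure --with-bw-arch=aarch64-r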

PSCI is not yet implemented for aarch64-r.

Signed-off-by: Luca Fancellu <luca.fancellu at arm.com>
---
Changes from v2:
 - Major rework, reason in the cover letter.
---
 Makefile.am                    |  4 +++
 arch/aarch64/boot.S            |  5 ++++
 arch/aarch64/include/asm/cpu.h | 24 +++++++++++++++
 arch/aarch64/init.c            | 55 ++++++++++++++++++++++++++++++++++
 configure.ac                   |  8 +++--
 5 files changed, 93 insertions(+), 3 deletions(-)

diff --git a/Makefile.am b/Makefile.am
index 6ebece25b230..bf97b989d5d7 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -25,6 +25,10 @@ DEFINES		+= $(if $(SYSREGS_BASE), -DSYSREGS_BASE=$(SYSREGS_BASE), )
 DEFINES		+= -DUART_BASE=$(UART_BASE)
 DEFINES		+= -DSTACK_SIZE=256
 
+if BOOTWRAPPER_64R
+DEFINES		+= -DBOOTWRAPPER_64R
+endif
+
 if KERNEL_32
 DEFINES		+= -DKERNEL_32
 PSCI_CPU_ON	:= 0x84000003
diff --git a/arch/aarch64/boot.S b/arch/aarch64/boot.S
index f8fc8082fbee..0b4f6824045a 100644
--- a/arch/aarch64/boot.S
+++ b/arch/aarch64/boot.S
@@ -100,7 +100,12 @@ ASM_FUNC(jump_kernel)
 	mov	x1, x21
 	mov	x2, x22
 	mov	x3, x23
+#if defined(BOOTWRAPPER_64R)
+	// On Armv8-R, Linux needs to be booted at EL1
+	mov	x4, #SPSR_KERNEL_EL1
+#else
 	mov	x4, #SPSR_KERNEL
+#endif
 
 	mrs	x5, CurrentEL
 	cmp	x5, #CURRENTEL_EL3
diff --git a/arch/aarch64/include/asm/cpu.h b/arch/aarch64/include/asm/cpu.h
index a5744e16e4b2..6686161c7c0b 100644
--- a/arch/aarch64/include/asm/cpu.h
+++ b/arch/aarch64/include/asm/cpu.h
@@ -32,10 +32,15 @@
 	(BIT(29) | BIT(28) | BIT(23) | BIT(22) | BIT(18) | BIT(16) |	\
 	 BIT(11) | BIT(5) | BIT(4))
 
+#define CPTR_EL2_NO_E2H_RES1					\
+	(BITS(13, 12) | BIT(9) | BITS(7, 0))
+
 #define SCTLR_EL2_RES1							\
 	(BIT(29) | BIT(28) | BIT(23) | BIT(22) | BIT(18) | BIT(16) |	\
 	 BIT(11) | BIT(5) | BIT(4))
 
+#define VSTCR_EL2_RES1							(BIT(31))
+
 #define SCTLR_EL1_RES1							\
 	(BIT(29) | BIT(28) | BIT(23) | BIT(22) | BIT(20) | BIT(11) |	\
 	 BIT(8) | BIT(7) | BIT(4))
@@ -64,7 +69,13 @@
 #define SCR_EL3_PIEN			BIT(45)
 #define SCR_EL3_D128En			BIT(47)
 
+#define VTCR_EL2_MSA			BIT(31)
+
 #define HCR_EL2_RES1			BIT(1)
+#define HCR_EL2_APK_NOTRAP		BIT(40)
+#define HCR_EL2_API_NOTRAP		BIT(41)
+#define HCR_EL2_FIEN_NOTRAP		BIT(47)
+#define HCR_EL2_ENSCXT_NOTRAP		BIT(53)
 
 #define ID_AA64DFR0_EL1_PMSVER		BITS(35, 32)
 #define ID_AA64DFR0_EL1_TRACEBUFFER	BITS(47, 44)
@@ -81,6 +92,8 @@
 #define ID_AA64ISAR2_EL1_GPA3		BITS(11, 8)
 #define ID_AA64ISAR2_EL1_APA3		BITS(15, 12)
 
+#define ID_AA64MMFR0_EL1_MSA		BITS(51, 48)
+#define ID_AA64MMFR0_EL1_MSA_frac	BITS(55, 52)
 #define ID_AA64MMFR0_EL1_FGT		BITS(59, 56)
 #define ID_AA64MMFR0_EL1_ECV		BITS(63, 60)
 
@@ -96,8 +109,12 @@
 
 #define ID_AA64PFR1_EL1_MTE		BITS(11, 8)
 #define ID_AA64PFR1_EL1_SME		BITS(27, 24)
+#define ID_AA64PFR1_EL1_CSV2_frac	BITS(35, 32)
 #define ID_AA64PFR1_EL1_THE		BITS(51, 48)
+
+#define ID_AA64PFR0_EL1_RAS		BITS(31, 28)
 #define ID_AA64PFR0_EL1_SVE		BITS(35, 32)
+#define ID_AA64PFR0_EL1_CSV2		BITS(59, 56)
 
 #define ID_AA64SMFR0_EL1		s3_0_c0_c4_5
 #define ID_AA64SMFR0_EL1_FA64		BIT(63)
@@ -106,7 +123,9 @@
  * Initial register values required for the boot-wrapper to run out-of-reset.
  */
 #define SCTLR_EL3_RESET		SCTLR_EL3_RES1
+#define CPTR_EL2_RESET		CPTR_EL2_NO_E2H_RES1
 #define SCTLR_EL2_RESET		SCTLR_EL2_RES1
+#define VSTCR_EL2_RESET		VSTCR_EL2_RES1
 #define SCTLR_EL1_RESET		SCTLR_EL1_RES1
 #define HCR_EL2_RESET		HCR_EL2_RES1
 
@@ -123,6 +142,7 @@
 #define SPSR_I			(1 << 7)	/* IRQ masked */
 #define SPSR_F			(1 << 6)	/* FIQ masked */
 #define SPSR_T			(1 << 5)	/* Thumb */
+#define SPSR_EL1H		(5 << 0)	/* EL1 Handler mode */
 #define SPSR_EL2H		(9 << 0)	/* EL2 Handler mode */
 #define SPSR_HYP		(0x1a << 0)	/* M[3:0] = hyp, M[4] = AArch32 */
 
@@ -135,6 +155,9 @@
 #define ICC_CTLR_EL3		S3_6_C12_C12_4
 #define ICC_PMR_EL1		S3_0_C4_C6_0
 
+#define VSTCR_EL2		s3_4_c2_c6_2
+#define VSCTLR_EL2		s3_4_c2_c0_0
+
 #define ZCR_EL3			s3_6_c1_c2_0
 #define ZCR_EL3_LEN_MAX		0xf
 
@@ -162,6 +185,7 @@
 #else
 #define SCTLR_EL1_KERNEL	SCTLR_EL1_RES1
 #define SPSR_KERNEL		(SPSR_A | SPSR_D | SPSR_I | SPSR_F | SPSR_EL2H)
+#define SPSR_KERNEL_EL1		(SPSR_A | SPSR_D | SPSR_I | SPSR_F | SPSR_EL1H)
 #endif
 
 #ifndef __ASSEMBLY__
diff --git a/arch/aarch64/init.c b/arch/aarch64/init.c
index 81fe33ab80ac..b0abde11ebd6 100644
--- a/arch/aarch64/init.c
+++ b/arch/aarch64/init.c
@@ -54,6 +54,18 @@ static inline bool cpu_has_permission_indirection(void)
 	return mrs(ID_AA64MMFR3_EL1) & mask;
 }
 
+static bool cpu_has_scxt(void)
+{
+	unsigned long csv2 = mrs_field(ID_AA64PFR0_EL1, CSV2);
+
+	if (csv2 >= 2)
+		return true;
+	if (csv2 < 1)
+		return false;
+
+	return mrs_field(ID_AA64PFR1_EL1, CSV2_frac) >= 2;
+}
+
 static void cpu_init_el3(void)
 {
 	unsigned long scr = SCR_EL3_RES1 | SCR_EL3_NS | SCR_EL3_HCE;
@@ -157,6 +169,43 @@ static void cpu_init_el3(void)
 	}
 }
 
+void cpu_init_el2_armv8r(void)
+{
+	unsigned long hcr = mrs(hcr_el2);
+
+	/* On Armv8-R MSA == 0xF; MSA_frac >= 2 means VMSA supported at EL1 */
+	if (mrs_field(ID_AA64MMFR0_EL1, MSA) != 0xF ||
+	    mrs_field(ID_AA64MMFR0_EL1, MSA_frac) < 2)
+	{
+		print_string("Initialization failed: Armv8-R not detected or VMSA not supported at EL1.\r\n");
+		while (1) {}
+		unreachable();
+	}
+
+	msr(vpidr_el2, mrs(midr_el1));
+	msr(vmpidr_el2, mrs(mpidr_el1));
+
+	msr(VSCTLR_EL2, 0);
+	msr(VSTCR_EL2, VSTCR_EL2_RESET);
+	msr(vtcr_el2, VTCR_EL2_MSA);
+
+	msr(cntvoff_el2, 0);
+	msr(cptr_el2, CPTR_EL2_RESET);
+	msr(mdcr_el2, 0);
+
+	if (cpu_has_scxt())
+		hcr |= HCR_EL2_ENSCXT_NOTRAP;
+
+	if (mrs_field(ID_AA64PFR0_EL1, RAS) >= 2)
+		hcr |= HCR_EL2_FIEN_NOTRAP;
+
+	if (cpu_has_pauth())
+		hcr |= HCR_EL2_APK_NOTRAP | HCR_EL2_API_NOTRAP;
+
+	msr(hcr_el2, hcr);
+	isb();
+}
+
 #ifdef PSCI
 extern char psci_vectors[];
 
@@ -181,6 +230,12 @@ void cpu_init_arch(unsigned int cpu)
 		gic_secure_init();
 	}
 
+#if defined(BOOTWRAPPER_64R)
+	if (mrs(CurrentEL) == CURRENTEL_EL2) {
+		cpu_init_el2_armv8r();
+	}
+#endif
+
 	cpu_init_psci_arch(cpu);
 
 	msr(CNTFRQ_EL0, COUNTER_FREQ);
diff --git a/configure.ac b/configure.ac
index bba42fa1dba8..88dbf9ba4f08 100644
--- a/configure.ac
+++ b/configure.ac
@@ -9,14 +9,16 @@ AC_INIT([boot-wrapper], [v0.2])
 
 AM_INIT_AUTOMAKE([foreign])
 
-# Allow a user to pass --with-bw-arch={aarch64-a,aarch32-a}
+# Allow a user to pass --with-bw-arch={aarch64-a,aarch32-a,aarch64-r}
 AC_ARG_WITH([bw-arch],
-	AS_HELP_STRING([--with-bw-arch], [aarch64-a selects AArch64-A (default), aarch32-a selects AArch32-A]),
+	AS_HELP_STRING([--with-bw-arch], [aarch64-a selects AArch64-A (default), aarch32-a selects AArch32-A, aarch64-r selects AArch64-R]),
 	[case "${withval}" in
 		no|yes|aarch64-a) USE_ARCH=aarch64-a ;;
 		aarch32-a) USE_ARCH=aarch32-a ;;
-		*) AC_MSG_ERROR([Bad value "${withval}" for --with-bw-arch. Use "aarch64-a" or "aarch32-a"]) ;;
+		aarch64-r) USE_ARCH=aarch64-r ;;
+		*) AC_MSG_ERROR([Bad value "${withval}" for --with-bw-arch. Use "aarch64-a", "aarch32-a" or "aarch64-r"]) ;;
 	esac])
+AM_CONDITIONAL([BOOTWRAPPER_64R], [test "x$USE_ARCH" = "xaarch64-r"])
 
 AS_IF([test "x$USE_ARCH" = "xaarch32-a"],
 	[BOOTWRAPPER_ES=32], [BOOTWRAPPER_ES=64]
-- 
2.34.1



