--- a/Makefile.am
+++ b/Makefile.am
@@ -39,6 +39,10 @@ DEFINES += $(if $(SYSREGS_BASE), -DSYSREGS_BASE=$(SYSREGS_BASE), )
DEFINES += -DUART_BASE=$(UART_BASE)
DEFINES += -DSTACK_SIZE=256
+if BOOTWRAPPER_64R
+DEFINES += -DBOOTWRAPPER_64R
+endif
+
if KERNEL_32
DEFINES += -DKERNEL_32
PSCI_CPU_ON := 0x84000003
--- a/arch/aarch64/boot.S
+++ b/arch/aarch64/boot.S
@@ -100,7 +100,12 @@ ASM_FUNC(jump_kernel)
mov x1, x21
mov x2, x22
mov x3, x23
+#if defined(BOOTWRAPPER_64R)
+	// On Armv8-R, Linux needs to be booted at EL1
+ mov x4, #SPSR_KERNEL_EL1
+#else
mov x4, #SPSR_KERNEL
+#endif
/*
* If bit 0 of the kernel address is set, we're entering in AArch32
--- a/arch/aarch64/include/asm/cpu.h
+++ b/arch/aarch64/include/asm/cpu.h
@@ -32,10 +32,15 @@
(BIT(29) | BIT(28) | BIT(23) | BIT(22) | BIT(18) | BIT(16) | \
BIT(11) | BIT(5) | BIT(4))
+#define CPTR_EL2_NO_E2H_RES1 \
+ (BITS(13,12) | BIT(9) | BITS(7,0))
+
#define SCTLR_EL2_RES1 \
(BIT(29) | BIT(28) | BIT(23) | BIT(22) | BIT(18) | BIT(16) | \
BIT(11) | BIT(5) | BIT(4))
+#define VSTCR_EL2_RES1 (BIT(31))
+
#define SCTLR_EL1_RES1 \
(BIT(29) | BIT(28) | BIT(23) | BIT(22) | BIT(20) | BIT(11) | \
BIT(8) | BIT(7) | BIT(4))
@@ -64,7 +69,13 @@
#define SCR_EL3_PIEN BIT(45)
#define SCR_EL3_D128En BIT(47)
+#define VTCR_EL2_MSA BIT(31)
+
#define HCR_EL2_RES1 BIT(1)
+#define HCR_EL2_APK BIT(40)
+#define HCR_EL2_API BIT(41)
+#define HCR_EL2_FIEN BIT(47)
+#define HCR_EL2_EnSCXT BIT(53)
#define ID_AA64DFR0_EL1_PMSVER BITS(35, 32)
#define ID_AA64DFR0_EL1_TRACEBUFFER BITS(47, 44)
@@ -81,6 +92,8 @@
#define ID_AA64ISAR2_EL1_GPA3 BITS(11, 8)
#define ID_AA64ISAR2_EL1_APA3 BITS(15, 12)
+#define ID_AA64MMFR0_EL1_MSA BITS(51, 48)
+#define ID_AA64MMFR0_EL1_MSA_frac BITS(55, 52)
#define ID_AA64MMFR0_EL1_FGT BITS(59, 56)
#define ID_AA64MMFR0_EL1_ECV BITS(63, 60)
@@ -96,8 +109,12 @@
#define ID_AA64PFR1_EL1_MTE BITS(11, 8)
#define ID_AA64PFR1_EL1_SME BITS(27, 24)
+#define ID_AA64PFR1_EL1_CSV2_frac BITS(35, 32)
#define ID_AA64PFR1_EL1_THE BITS(51, 48)
+
+#define ID_AA64PFR0_EL1_RAS BITS(31, 28)
#define ID_AA64PFR0_EL1_SVE BITS(35, 32)
+#define ID_AA64PFR0_EL1_CSV2 BITS(59, 56)
#define ID_AA64SMFR0_EL1 s3_0_c0_c4_5
#define ID_AA64SMFR0_EL1_FA64 BIT(63)
@@ -106,7 +123,9 @@
* Initial register values required for the boot-wrapper to run out-of-reset.
*/
#define SCTLR_EL3_RESET SCTLR_EL3_RES1
+#define CPTR_EL2_RESET CPTR_EL2_NO_E2H_RES1
#define SCTLR_EL2_RESET SCTLR_EL2_RES1
+#define VSTCR_EL2_RESET VSTCR_EL2_RES1
#define SCTLR_EL1_RESET SCTLR_EL1_RES1
#define HCR_EL2_RESET HCR_EL2_RES1
@@ -123,6 +142,7 @@
#define SPSR_I (1 << 7) /* IRQ masked */
#define SPSR_F (1 << 6) /* FIQ masked */
#define SPSR_T (1 << 5) /* Thumb */
+#define SPSR_EL1H (5 << 0) /* EL1 Handler mode */
#define SPSR_EL2H (9 << 0) /* EL2 Handler mode */
#define SPSR_HYP (0x1a << 0) /* M[3:0] = hyp, M[4] = AArch32 */
@@ -135,6 +155,9 @@
#define ICC_CTLR_EL3 S3_6_C12_C12_4
#define ICC_PMR_EL1 S3_0_C4_C6_0
+#define VSTCR_EL2 s3_4_c2_c6_2
+#define VSCTLR_EL2 s3_4_c2_c0_0
+
#define ZCR_EL3 s3_6_c1_c2_0
#define ZCR_EL3_LEN_MAX 0xf
@@ -162,6 +185,7 @@
#else
#define SCTLR_EL1_KERNEL SCTLR_EL1_RES1
#define SPSR_KERNEL (SPSR_A | SPSR_D | SPSR_I | SPSR_F | SPSR_EL2H)
+#define SPSR_KERNEL_EL1 (SPSR_A | SPSR_D | SPSR_I | SPSR_F | SPSR_EL1H)
#endif
#ifndef __ASSEMBLY__
--- a/arch/aarch64/init.c
+++ b/arch/aarch64/init.c
@@ -54,6 +54,27 @@ static inline bool cpu_has_permission_indirection(void)
return mrs(ID_AA64MMFR3_EL1) & mask;
}
+static bool cpu_has_scxt(void)
+{
+ unsigned long csv2 = mrs_field(ID_AA64PFR0_EL1, CSV2);
+
+ if (csv2 >= 2)
+ return true;
+ if (csv2 < 1)
+ return false;
+
+ return mrs_field(ID_AA64PFR1_EL1, CSV2_frac) >= 2;
+}
+
+static inline bool bootwrapper_is_r_class(void)
+{
+#ifdef BOOTWRAPPER_64R
+ return true;
+#else
+ return false;
+#endif
+}
+
static void cpu_init_el3(void)
{
unsigned long scr = SCR_EL3_RES1 | SCR_EL3_NS | SCR_EL3_HCE;
@@ -157,6 +178,47 @@ static void cpu_init_el3(void)
}
}
+void cpu_init_el2_armv8r(void)
+{
+ unsigned long hcr = mrs(hcr_el2);
+
+	/* On Armv8-R, ID_AA64MMFR0_EL1.MSA (bits [51:48]) is 0xF */
+ if (mrs_field(ID_AA64MMFR0_EL1, MSA) != 0xF) {
+ print_string("ID_AA64MMFR0_EL1.MSA != 0xF, not R-class\r\n");
+ while (1)
+ wfe();
+ }
+
+ if (mrs_field(ID_AA64MMFR0_EL1, MSA_frac) < 2) {
+ print_string("ID_AA64MMFR0_EL1.MSA_frac < 2, EL1&0 VMSA not supported\r\n");
+ while (1)
+ wfe();
+ }
+
+ msr(vpidr_el2, mrs(midr_el1));
+ msr(vmpidr_el2, mrs(mpidr_el1));
+
+ msr(VSCTLR_EL2, 0);
+ msr(VSTCR_EL2, VSTCR_EL2_RESET);
+ msr(vtcr_el2, VTCR_EL2_MSA);
+
+ msr(cntvoff_el2, 0);
+ msr(cptr_el2, CPTR_EL2_RESET);
+ msr(mdcr_el2, 0);
+
+ if (cpu_has_scxt())
+ hcr |= HCR_EL2_EnSCXT;
+
+ if (mrs_field(ID_AA64PFR0_EL1, RAS) >= 2)
+ hcr |= HCR_EL2_FIEN;
+
+ if (cpu_has_pauth())
+ hcr |= HCR_EL2_APK | HCR_EL2_API;
+
+ msr(hcr_el2, hcr);
+ isb();
+}
+
#ifdef PSCI
extern char psci_vectors[];
@@ -176,11 +238,15 @@ static void cpu_init_psci_arch(unsigned int cpu) { }
void cpu_init_arch(unsigned int cpu)
{
- if (mrs(CurrentEL) == CURRENTEL_EL3) {
+ if (!bootwrapper_is_r_class() && mrs(CurrentEL) == CURRENTEL_EL3) {
cpu_init_el3();
gic_secure_init();
}
+ if (bootwrapper_is_r_class() && mrs(CurrentEL) == CURRENTEL_EL2) {
+ cpu_init_el2_armv8r();
+ }
+
cpu_init_psci_arch(cpu);
msr(CNTFRQ_EL0, COUNTER_FREQ);
--- a/configure.ac
+++ b/configure.ac
@@ -9,14 +9,16 @@ AC_INIT([boot-wrapper], [v0.2])
AM_INIT_AUTOMAKE([foreign])
-# Allow a user to pass --with-bw-arch={aarch64-a,aarch32-a}
+# Allow a user to pass --with-bw-arch={aarch64-a,aarch32-a,aarch64-r}
AC_ARG_WITH([bw-arch],
- AS_HELP_STRING([--with-bw-arch], [specify boot-wrapper architecture: aarch64-a (default) or aarch32-a]),
+ AS_HELP_STRING([--with-bw-arch], [specify boot-wrapper architecture: aarch64-a (default), aarch32-a or aarch64-r]),
[case "${withval}" in
no|yes|aarch64-a) USE_ARCH=aarch64-a ;;
aarch32-a) USE_ARCH=aarch32-a ;;
- *) AC_MSG_ERROR([Bad value "${withval}" for --with-bw-arch. Use "aarch64-a" or "aarch32-a"]) ;;
+ aarch64-r) USE_ARCH=aarch64-r ;;
+ *) AC_MSG_ERROR([Bad value "${withval}" for --with-bw-arch. Use "aarch64-a", "aarch32-a" or "aarch64-r"]) ;;
esac])
+AM_CONDITIONAL([BOOTWRAPPER_64R], [test "x$USE_ARCH" = "xaarch64-r"])
AS_IF([test "x$USE_ARCH" = "xaarch32-a"],
[BOOTWRAPPER_ES=32], [BOOTWRAPPER_ES=64]
When booting on Armv8-R, EL3 is not implemented and the Linux kernel
needs to be booted at EL1, so initialise EL2 and start the kernel at
the expected exception level. To select this behaviour, introduce the
'aarch64-r' value for the --with-bw-arch configure option.

PSCI is not yet implemented for aarch64-r.

Signed-off-by: Luca Fancellu <luca.fancellu@arm.com>
---
v4 changes:
 - Removed the _NOTRAP suffix from the HCR_EL2_* defines
 - Split the Armv8-R check on ID_AA64MMFR0_EL1 into separate MSA and
   MSA_frac checks
 - Reworked the cpu_init_arch code so it no longer needs #ifdef
---
 Makefile.am                    |  4 ++
 arch/aarch64/boot.S            |  5 +++
 arch/aarch64/include/asm/cpu.h | 24 ++++++++++++
 arch/aarch64/init.c            | 68 +++++++++++++++++++++++++++++++++-
 configure.ac                   |  8 ++--
 5 files changed, 105 insertions(+), 4 deletions(-)
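For completeness, a minimal sketch of how the new option is selected at
configure time. Only --with-bw-arch=aarch64-r comes from this patch; the
toolchain prefix and the kernel/DTB paths below are placeholders, and the
remaining options are the usual boot-wrapper configure arguments (check
./configure --help on your tree):

  # Illustrative invocation: only --with-bw-arch=aarch64-r is added by
  # this patch; --host and the paths are placeholders for an Armv8-R setup.
  autoreconf -i
  ./configure \
      --host=aarch64-none-elf \
      --with-bw-arch=aarch64-r \
      --with-kernel-dir=/path/to/linux \
      --with-dtb=/path/to/board.dtb
  make

Since PSCI is not yet implemented for aarch64-r, this configuration should
not be combined with a PSCI-enabled build.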