diff mbox series

[BOOT-WRAPPER,07/11] aarch32: Always enter kernel via exception return

Message ID 20240729161501.1806271-8-mark.rutland@arm.com (mailing list archive)
State New, archived
Headers show
Series Cleanup initialization | expand

Commit Message

Mark Rutland July 29, 2024, 4:14 p.m. UTC
When the boot-wrapper is entered at Secure PL1 it will enter the kernel
via an exception return, ERET, and when entered at Hyp it will branch to
the kernel directly. This is an artifact of the way the boot-wrapper was
originally written in assembly, and it would be preferable to always
enter the kernel via an exception return so that PSTATE is always
initialized to a known-good value.

Rework jump_kernel() to always enter the kernel via ERET, matching the
style of the AArch64 version of jump_kernel().

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Akos Denke <akos.denke@arm.com>
Cc: Andre Przywara <andre.przywara@arm.com>
Cc: Luca Fancellu <luca.fancellu@arm.com>
Cc: Marc Zyngier <maz@kernel.org>
---
 arch/aarch32/boot.S | 44 +++++++++++++++++++++++---------------------
 1 file changed, 23 insertions(+), 21 deletions(-)
diff mbox series

Patch

diff --git a/arch/aarch32/boot.S b/arch/aarch32/boot.S
index a6f0751..11fd7aa 100644
--- a/arch/aarch32/boot.S
+++ b/arch/aarch32/boot.S
@@ -76,10 +76,6 @@  reset_at_hyp:
 
 	bl	setup_stack
 
-	mov	r0, #1
-	ldr	r1, =flag_no_el3
-	str	r0, [r1]
-
 	bl	cpu_init_bootwrapper
 
 	bl	cpu_init_arch
@@ -96,9 +92,10 @@  err_invalid_id:
 	 * r1-r3, sp[0]: kernel arguments
 	 */
 ASM_FUNC(jump_kernel)
-	sub	sp, #4				@ Ignore fourth argument
-	push	{r0 - r3}
-	mov	r5, sp
+	mov	r4, r0
+	mov	r5, r1
+	mov	r6, r2
+	mov	r7, r3
 
 	ldr	r0, =HSCTLR_KERNEL
 	mcr	p15, 4, r0, c1, c0, 0		@ HSCTLR
@@ -111,19 +108,24 @@  ASM_FUNC(jump_kernel)
 	bl	find_logical_id
 	bl	setup_stack
 
-	ldr	lr, [r5], #4
-	ldm	r5, {r0 - r2}
-
-	ldr	r4, =flag_no_el3
-	ldr	r4, [r4]
-	cmp	r4, #1
-	bxeq	lr				@ no EL3
+	mov	r0, r5
+	mov	r1, r6
+	mov	r2, r7
+	ldr	r3, =SPSR_KERNEL
+
+	mrs	r5, cpsr
+	and	r5, #PSR_MODE_MASK
+	cmp	r5, #PSR_MON
+	beq	eret_at_mon
+	cmp	r5, #PSR_HYP
+	beq	eret_at_hyp
+	b	.
 
-	ldr	r4, =SPSR_KERNEL
-	msr	spsr_cxf, r4
+eret_at_mon:
+	mov	lr, r4
+	msr	spsr_cxf, r3
 	movs	pc, lr
-
-	.section .data
-	.align 2
-flag_no_el3:
-	.long 0
+eret_at_hyp:
+	msr	elr_hyp, r4
+	msr	spsr_cxf, r3
+	eret