@@ -23,6 +23,9 @@
.arch_extension sve
+#define ID_AA64SMFR0_EL1_SMEver_SHIFT 56
+#define ID_AA64SMFR0_EL1_SMEver_WIDTH 4
+
/*
* LDR (vector to ZA array):
* LDR ZA[\nw, #\offset], [X\nxbase, #\offset, MUL VL]
@@ -45,6 +48,26 @@
| ((\offset) & 7)
.endm
+/*
+ * LDR (ZT0)
+ *
+ * LDR ZT0, nx
+ */
+.macro _ldr_zt nx
+ .inst 0xe11f8000 \
+ | (((\nx) & 0x1f) << 5)
+.endm
+
+/*
+ * STR (ZT0)
+ *
+ * STR ZT0, nx
+ */
+.macro _str_zt nx
+ .inst 0xe13f8000 \
+ | (((\nx) & 0x1f) << 5)
+.endm
+
.globl do_syscall
do_syscall:
// Store callee saved registers x19-x29 (80 bytes) plus x0 and x1
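For reference, the _ldr_zt and _str_zt macros above hand-assemble the SME2
LDR (ZT0)/STR (ZT0) instructions via .inst, presumably so the test still
builds with assemblers that do not yet understand the ZT0 mnemonics; only the
base register field in bits [9:5] varies. The C sketch below is illustrative
and not part of the patch (the helper names are made up here); it reproduces
the same encodings so they can be cross-checked against a disassembler.

/*
 * Illustrative only: mirrors the .inst values used by _ldr_zt/_str_zt.
 * The helper names are invented for this sketch.
 */
#include <stdint.h>
#include <stdio.h>

#define LDR_ZT0_OPC	0xe11f8000u	/* LDR ZT0, [Xn] */
#define STR_ZT0_OPC	0xe13f8000u	/* STR ZT0, [Xn] */

/* The base register number is encoded in bits [9:5] of the instruction */
static uint32_t encode_ldr_zt0(unsigned int xn)
{
	return LDR_ZT0_OPC | ((xn & 0x1f) << 5);
}

static uint32_t encode_str_zt0(unsigned int xn)
{
	return STR_ZT0_OPC | ((xn & 0x1f) << 5);
}

int main(void)
{
	/* do_syscall uses x2 as the base register, as in the macros above */
	printf("ldr zt0, [x2] -> 0x%08x\n", encode_ldr_zt0(2));
	printf("str zt0, [x2] -> 0x%08x\n", encode_str_zt0(2));

	return 0;
}

Assembling "ldr zt0, [x2]" with a ZT0-aware toolchain and comparing the
emitted word against encode_ldr_zt0(2) is a quick way to validate the macros.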
@@ -64,7 +87,7 @@ do_syscall:
msr S3_3_C4_C2_2, x2
1:
- // Load ZA if it's enabled - uses x12 as scratch due to SME LDR
+ // Load ZA and ZT0 if enabled - uses x12 as scratch due to SME LDR
tbz x2, #SVCR_ZA_SHIFT, 1f
mov w12, #0
ldr x2, =za_in
@@ -73,6 +96,15 @@ do_syscall:
add x12, x12, #1
cmp x1, x12
bne 2b
+
+ // ZT0
+ mrs x2, S3_0_C0_C4_5 // ID_AA64SMFR0_EL1
+ ubfx x2, x2, #ID_AA64SMFR0_EL1_SMEver_SHIFT, \
+ #ID_AA64SMFR0_EL1_SMEver_WIDTH
+ cbz x2, 1f
+ adrp x2, zt_in
+ add x2, x2, :lo12:zt_in
+ _ldr_zt 2
1:
// Load GPRs x8-x28, and save our SP/FP for later comparison
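The ID_AA64SMFR0_EL1.SMEver field (bits [59:56], per the SHIFT/WIDTH defines
added earlier) is non-zero only when SME2 or later is implemented, so the
ubfx/cbz pair skips the ZT0 load on plain SME systems. The mrs is usable at
EL0 because the kernel traps and emulates EL0 reads of the ID registers,
which is what the assembly already relies on when it reads S3_0_C0_C4_5.
Below is a hedged C equivalent of the same check, illustrative rather than
part of the test (the C side of the test uses HWCAP2_SME2 instead):

/*
 * Illustrative sketch of the SMEver check done with mrs/ubfx above.
 * Assumes a kernel that emulates EL0 reads of ID_AA64SMFR0_EL1, which
 * the assembly in this test already depends on.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ID_AA64SMFR0_EL1_SMEver_SHIFT	56
#define ID_AA64SMFR0_EL1_SMEver_WIDTH	4

static bool have_zt0(void)
{
	uint64_t smfr0, smever;

	/* S3_0_C0_C4_5 is the system register encoding of ID_AA64SMFR0_EL1 */
	asm("mrs %0, S3_0_C0_C4_5" : "=r" (smfr0));

	smever = (smfr0 >> ID_AA64SMFR0_EL1_SMEver_SHIFT) &
		 ((1ULL << ID_AA64SMFR0_EL1_SMEver_WIDTH) - 1);

	/* SMEver == 0 is original SME; any non-zero value implies SME2/ZT0 */
	return smever != 0;
}

int main(void)
{
	printf("ZT0 %savailable\n", have_zt0() ? "" : "not ");

	return 0;
}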
@@ -235,6 +267,15 @@ do_syscall:
add x12, x12, #1
cmp x1, x12
bne 2b
+
+ // ZT0
+ mrs x2, S3_0_C0_C4_5 // ID_AA64SMFR0_EL1
+ ubfx x2, x2, #ID_AA64SMFR0_EL1_SMEver_SHIFT, \
+ #ID_AA64SMFR0_EL1_SMEver_WIDTH
+ cbz x2, 1f
+ adrp x2, zt_out
+ add x2, x2, :lo12:zt_out
+ _str_zt 2
1:
// Save the SVE state if we have some
diff --git a/tools/testing/selftests/arm64/abi/syscall-abi.c b/tools/testing/selftests/arm64/abi/syscall-abi.c
--- a/tools/testing/selftests/arm64/abi/syscall-abi.c
+++ b/tools/testing/selftests/arm64/abi/syscall-abi.c
@@ -311,6 +311,35 @@ static int check_za(struct syscall_cfg *cfg, int sve_vl, int sme_vl,
return errors;
}
+uint8_t zt_in[ZT_SIG_REG_BYTES] __attribute__((aligned(16)));
+uint8_t zt_out[ZT_SIG_REG_BYTES] __attribute__((aligned(16)));
+
+static void setup_zt(struct syscall_cfg *cfg, int sve_vl, int sme_vl,
+ uint64_t svcr)
+{
+ fill_random(zt_in, sizeof(zt_in));
+ memset(zt_out, 0, sizeof(zt_out));
+}
+
+static int check_zt(struct syscall_cfg *cfg, int sve_vl, int sme_vl,
+ uint64_t svcr)
+{
+ int errors = 0;
+
+ if (!(getauxval(AT_HWCAP2) & HWCAP2_SME2))
+ return 0;
+
+ if (!(svcr & SVCR_ZA_MASK))
+ return 0;
+
+ if (memcmp(zt_in, zt_out, sizeof(zt_in)) != 0) {
+ ksft_print_msg("SME VL %d ZT does not match\n", sme_vl);
+ errors++;
+ }
+
+ return errors;
+}
+
typedef void (*setup_fn)(struct syscall_cfg *cfg, int sve_vl, int sme_vl,
uint64_t svcr);
typedef int (*check_fn)(struct syscall_cfg *cfg, int sve_vl, int sme_vl,
@@ -334,6 +363,7 @@ static struct {
{ setup_ffr, check_ffr },
{ setup_svcr, check_svcr },
{ setup_za, check_za },
+ { setup_zt, check_zt },
};
static bool do_test(struct syscall_cfg *cfg, int sve_vl, int sme_vl,
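setup_zt() and check_zt() slot into the existing table of setup/check pairs
above: every setup_fn programs known values before the syscall and every
check_fn verifies them afterwards. The sketch below shows that driver pattern
in a minimal, self-contained form; every name in it (demo_cfg, demo_regsets,
demo_do_test) is a stand-in, and the selftest's real do_test() additionally
deals with the vector lengths and SVCR value passed in:

/*
 * Self-contained sketch of the setup/check driver pattern used by the
 * table of handlers above. All names here are stand-ins for illustration.
 */
#include <stdint.h>
#include <stdio.h>

struct demo_cfg {
	const char *name;	/* stand-in for struct syscall_cfg */
};

typedef void (*setup_fn)(struct demo_cfg *cfg, int sve_vl, int sme_vl,
			 uint64_t svcr);
typedef int (*check_fn)(struct demo_cfg *cfg, int sve_vl, int sme_vl,
			uint64_t svcr);

static void demo_setup(struct demo_cfg *cfg, int sve_vl, int sme_vl,
		       uint64_t svcr)
{
	/* A real setup_fn fills its register image with random data */
}

static int demo_check(struct demo_cfg *cfg, int sve_vl, int sme_vl,
		      uint64_t svcr)
{
	/* A real check_fn compares the saved output and counts mismatches */
	return 0;
}

static const struct {
	setup_fn setup;
	check_fn check;
} demo_regsets[] = {
	{ demo_setup, demo_check },
};

#define DEMO_NUM_REGSETS \
	(sizeof(demo_regsets) / sizeof(demo_regsets[0]))

static int demo_do_test(struct demo_cfg *cfg, int sve_vl, int sme_vl,
			uint64_t svcr)
{
	unsigned int i;
	int errors = 0;

	/* Program every register set before entering the kernel */
	for (i = 0; i < DEMO_NUM_REGSETS; i++)
		demo_regsets[i].setup(cfg, sve_vl, sme_vl, svcr);

	/* The real test issues the syscall via do_syscall() here */

	/* Then confirm each register set survived the syscall */
	for (i = 0; i < DEMO_NUM_REGSETS; i++)
		errors += demo_regsets[i].check(cfg, sve_vl, sme_vl, svcr);

	return errors;
}

int main(void)
{
	struct demo_cfg cfg = { "getpid" };

	return demo_do_test(&cfg, 0, 0, 0) ? 1 : 0;
}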
@@ -474,6 +504,7 @@ int main(void)
{
int i;
int tests = 1; /* FPSIMD */
+ int sme_ver;
srandom(getpid());
@@ -482,10 +513,15 @@ int main(void)
tests += (sve_count_vls() * sme_count_vls()) * 3;
ksft_set_plan(ARRAY_SIZE(syscalls) * tests);
+ if (getauxval(AT_HWCAP2) & HWCAP2_SME2)
+ sme_ver = 2;
+ else
+ sme_ver = 1;
+
if (getauxval(AT_HWCAP2) & HWCAP2_SME_FA64)
- ksft_print_msg("SME with FA64\n");
+ ksft_print_msg("SME%d with FA64\n", sme_ver);
else if (getauxval(AT_HWCAP2) & HWCAP2_SME)
- ksft_print_msg("SME without FA64\n");
+ ksft_print_msg("SME%d without FA64\n", sme_ver);
for (i = 0; i < ARRAY_SIZE(syscalls); i++)
test_one_syscall(&syscalls[i]);
Verify that ZT0 is preserved over syscalls when it is present and
PSTATE.ZA is set.

Signed-off-by: Mark Brown <broonie@kernel.org>
---
 .../testing/selftests/arm64/abi/syscall-abi-asm.S | 43 +++++++++++++++++++++-
 tools/testing/selftests/arm64/abi/syscall-abi.c   | 40 +++++++++++++++++++-
 2 files changed, 80 insertions(+), 3 deletions(-)