@@ -64,7 +64,9 @@ static inline u64 set_la_non_canonical(u64 src, u64 mask)
#define X86_CR3_PCID_MASK GENMASK(11, 0)
#define X86_CR3_LAM_U57_BIT (61)
+#define X86_CR3_LAM_U57 BIT_ULL(X86_CR3_LAM_U57_BIT)
#define X86_CR3_LAM_U48_BIT (62)
+#define X86_CR3_LAM_U48 BIT_ULL(X86_CR3_LAM_U48_BIT)
#define X86_CR4_VME_BIT (0)
#define X86_CR4_VME BIT(X86_CR4_VME_BIT)
@@ -18,6 +18,9 @@
#include "vm.h"
#include "asm/io.h"
#include "ioram.h"
+#include "usermode.h"
+
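+/* CR3 bits that enable LAM for user-mode linear addresses. */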
+#define CR3_LAM_BITS_MASK (X86_CR3_LAM_U48 | X86_CR3_LAM_U57)
#define FLAGS_LAM_ACTIVE BIT_ULL(0)
#define FLAGS_LA57 BIT_ULL(1)
@@ -38,6 +41,16 @@ static inline bool lam_sup_active(void)
return !!(read_cr4() & X86_CR4_LAM_SUP);
}
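+/*
+ * LAM_U48 is in effect only when CR3.LAM_U48 is set and CR3.LAM_U57 is
+ * clear; LAM_U57 takes precedence when both bits are set.
+ */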
+static inline bool lam_u48_active(void)
+{
+ return (read_cr3() & CR3_LAM_BITS_MASK) == X86_CR3_LAM_U48;
+}
+
+static inline bool lam_u57_active(void)
+{
+ return !!(read_cr3() & X86_CR3_LAM_U57);
+}
+
static void cr4_set_lam_sup(void *data)
{
unsigned long cr4;
@@ -83,6 +96,7 @@ static void do_mov(void *mem)
static u64 test_ptr(u64 arg1, u64 arg2, u64 arg3, u64 arg4)
{
bool lam_active = !!(arg1 & FLAGS_LAM_ACTIVE);
+ bool la_57 = !!(arg1 & FLAGS_LA57);
u64 lam_mask = arg2;
u64 *ptr = (u64 *)arg3;
bool is_mmio = !!arg4;
@@ -96,6 +110,17 @@ static u64 test_ptr(u64 arg1, u64 arg2, u64 arg3, u64 arg4)
report(fault != lam_active,"Test tagged addr (%s)",
is_mmio ? "MMIO" : "Memory");
+	/*
+	 * This test case is performed only when LAM_U57 is active and 4-level
+	 * paging is in use.  In that case, a user pointer with any of bits
+	 * 56:47 set is not LAM canonical, and accessing it triggers a #GP.
+	 */
+ if (lam_active && (lam_mask == LAM57_MASK) && !la_57) {
+ ptr = (u64 *)set_la_non_canonical((u64)ptr, LAM48_MASK);
+ fault = test_for_exception(GP_VECTOR, do_mov, ptr);
+ report(fault, "Test non-LAM-canonical addr (%s)",
+ is_mmio ? "MMIO" : "Memory");
+ }
+
return 0;
}
@@ -220,6 +245,56 @@ static void test_lam_sup(bool has_lam, bool fep_available)
test_invlpg(lam_mask, vaddr, true);
}
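+/*
+ * Enable LAM_U48 or LAM_U57 (selected by lam_mask) in CR3, then run the
+ * tagged-pointer tests in user mode against normal memory and MMIO.
+ */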
+static void test_lam_user_mode(bool has_lam, u64 lam_mask, u64 mem, u64 mmio)
+{
+ unsigned r;
+ bool raised_vector;
+ unsigned long cr3 = read_cr3() & ~CR3_LAM_BITS_MASK;
+ u64 flags = 0;
+
+ if (is_la57())
+ flags |= FLAGS_LA57;
+
+ if (has_lam) {
+ if (lam_mask == LAM48_MASK) {
+ r = write_cr3_safe(cr3 | X86_CR3_LAM_U48);
+ report((r == 0) && lam_u48_active(), "Set LAM_U48");
+ } else {
+ r = write_cr3_safe(cr3 | X86_CR3_LAM_U57);
+ report((r == 0) && lam_u57_active(), "Set LAM_U57");
+ }
+ }
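+	/* Key the expected behavior off the live CR3 state, not off has_lam. */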
+ if (lam_u48_active() || lam_u57_active())
+ flags |= FLAGS_LAM_ACTIVE;
+
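+	/* Access the tagged pointer from user mode: normal memory, then MMIO. */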
+ run_in_user((usermode_func)test_ptr, GP_VECTOR, flags, lam_mask, mem,
+ false, &raised_vector);
+ run_in_user((usermode_func)test_ptr, GP_VECTOR, flags, lam_mask, mmio,
+ true, &raised_vector);
+}
+
+static void test_lam_user(bool has_lam)
+{
+ phys_addr_t paddr;
+
+	/*
+	 * The physical address of AREA_NORMAL is within 36 bits, so with the
+	 * identity mapping the linear address is treated by LAM as a user-mode
+	 * address, and the bits that LAM48 or LAM57 would mask as metadata are
+	 * all zero.
+	 */
+ paddr = virt_to_phys(alloc_pages_flags(0, AREA_NORMAL));
+	_Static_assert((AREA_NORMAL_PFN & GENMASK(63, 47)) == 0UL,
+		       "Identity mapping range check");
+
+	/*
+	 * Physical memory and MMIO have already been identity mapped in
+	 * setup_mmu().
+	 */
+ test_lam_user_mode(has_lam, LAM48_MASK, paddr, IORAM_BASE_PHYS);
+ test_lam_user_mode(has_lam, LAM57_MASK, paddr, IORAM_BASE_PHYS);
+}
+
int main(int ac, char **av)
{
bool has_lam;
@@ -238,6 +313,7 @@ int main(int ac, char **av)
"use kvm.force_emulation_prefix=1 to enable\n");
test_lam_sup(has_lam, fep_available);
+ test_lam_user(has_lam);
return report_summary();
}