@@ -59,6 +59,9 @@
(type *)((char *)mptr__ - offsetof(type, member)); \
})
+#define AC_(n,t) (n##t)
+#define _AC(n,t) AC_(n,t)
+
#define hweight32 __builtin_popcount
#define hweight64 __builtin_popcountll
@@ -48,6 +48,7 @@ $(call as-option-add,CFLAGS,CC,"clwb (%r
$(call as-option-add,CFLAGS,CC,".equ \"x\"$$(comma)1",-DHAVE_AS_QUOTED_SYM)
$(call as-option-add,CFLAGS,CC,"invpcid (%rax)$$(comma)%rax",-DHAVE_AS_INVPCID)
$(call as-option-add,CFLAGS,CC,"movdiri %rax$$(comma)(%rax)",-DHAVE_AS_MOVDIR)
+$(call as-option-add,CFLAGS,CC,"enqcmd (%rax)$$(comma)%rax",-DHAVE_AS_ENQCMD)
# GAS's idea of true is -1. Clang's idea is 1
$(call as-option-add,CFLAGS,CC,\
@@ -855,6 +855,7 @@ struct x86_emulate_state {
} rmw;
enum {
blk_NONE,
+ blk_enqcmd,
blk_movdir,
} blk;
uint8_t modrm, modrm_mod, modrm_reg, modrm_rm;
@@ -901,6 +902,7 @@ typedef union {
uint64_t __attribute__ ((aligned(16))) xmm[2];
uint64_t __attribute__ ((aligned(32))) ymm[4];
uint64_t __attribute__ ((aligned(64))) zmm[8];
+ uint32_t data32[16];
} mmval_t;
/*
@@ -1922,6 +1924,7 @@ amd_like(const struct x86_emulate_ctxt *
#define vcpu_has_rdpid() (ctxt->cpuid->feat.rdpid)
#define vcpu_has_movdiri() (ctxt->cpuid->feat.movdiri)
#define vcpu_has_movdir64b() (ctxt->cpuid->feat.movdir64b)
+#define vcpu_has_enqcmd() (ctxt->cpuid->feat.enqcmd)
#define vcpu_has_avx512_4vnniw() (ctxt->cpuid->feat.avx512_4vnniw)
#define vcpu_has_avx512_4fmaps() (ctxt->cpuid->feat.avx512_4fmaps)
#define vcpu_has_avx512_bf16() (ctxt->cpuid->feat.avx512_bf16)
@@ -10200,6 +10203,36 @@ x86_emulate(
state->simd_size = simd_none;
break;
+ case X86EMUL_OPC_F2(0x0f38, 0xf8): /* enqcmd r,m512 */
+ case X86EMUL_OPC_F3(0x0f38, 0xf8): /* enqcmds r,m512 */
+ host_and_vcpu_must_have(enqcmd);
+ generate_exception_if(ea.type != OP_MEM, EXC_UD);
+ generate_exception_if(vex.pfx != vex_f2 && !mode_ring0(), EXC_GP, 0); /* ENQCMDS (F3) is ring-0 only */
+ src.val = truncate_ea(*dst.reg);
+ generate_exception_if(!is_aligned(x86_seg_es, src.val, 64, ctxt, ops),
+ EXC_GP, 0);
+ fail_if(!ops->blk);
+ BUILD_BUG_ON(sizeof(*mmvalp) < 64);
+ if ( (rc = ops->read(ea.mem.seg, ea.mem.off, mmvalp, 64,
+ ctxt)) != X86EMUL_OKAY )
+ goto done;
+ if ( vex.pfx == vex_f2 ) /* enqcmd */
+ {
+ fail_if(!ops->read_msr);
+ if ( (rc = ops->read_msr(MSR_PASID, &msr_val,
+ ctxt)) != X86EMUL_OKAY )
+ goto done;
+ generate_exception_if(!(msr_val & PASID_VALID), EXC_GP, 0);
+ mmvalp->data32[0] = MASK_EXTR(msr_val, PASID_PASID_MASK);
+ }
+ mmvalp->data32[0] &= ~0x7ff00000; /* clear bits 30:20, reserved between PASID (19:0) and PRIV (31) */
+ state->blk = blk_enqcmd;
+ if ( (rc = ops->blk(x86_seg_es, src.val, mmvalp, 64, &_regs.eflags,
+ state, ctxt)) != X86EMUL_OKAY )
+ goto done;
+ state->simd_size = simd_none;
+ break;
+
case X86EMUL_OPC(0x0f38, 0xf9): /* movdiri mem,r */
host_and_vcpu_must_have(movdiri);
generate_exception_if(dst.type != OP_MEM, EXC_UD);
@@ -11480,11 +11513,36 @@ int x86_emul_blk(
{
switch ( state->blk )
{
+ bool zf;
+
/*
* Throughout this switch(), memory clobbers are used to compensate
* that other operands may not properly express the (full) memory
* ranges covered.
*/
+ case blk_enqcmd:
+ ASSERT(bytes == 64);
+ if ( ((unsigned long)ptr & 0x3f) )
+ {
+ ASSERT_UNREACHABLE();
+ return X86EMUL_UNHANDLEABLE;
+ }
+ *eflags &= ~EFLAGS_MASK;
+#ifdef HAVE_AS_ENQCMD
+ asm ( "enqcmds (%[src]), %[dst]" ASM_FLAG_OUT(, "; setz %[zf]")
+ : [zf] ASM_FLAG_OUT("=@ccz", "=qm") (zf)
+ : [src] "r" (data), [dst] "r" (ptr) : "memory" );
+#else
+ /* Hand-assembled "enqcmds (%rsi), %rdi" for assemblers without ENQCMD support. */
+ asm ( ".byte 0xf3, 0x0f, 0x38, 0xf8, 0x3e"
+ ASM_FLAG_OUT(, "; setz %[zf]")
+ : [zf] ASM_FLAG_OUT("=@ccz", "=qm") (zf)
+ : "S" (data), "D" (ptr) : "memory" );
+#endif
+ if ( zf )
+ *eflags |= X86_EFLAGS_ZF;
+ break;
+
case blk_movdir:
switch ( bytes )
{
@@ -11801,6 +11859,8 @@ x86_insn_is_mem_write(const struct x86_e
switch ( ctxt->opcode )
{
case X86EMUL_OPC_66(0x0f38, 0xf8): /* MOVDIR64B */
+ case X86EMUL_OPC_F2(0x0f38, 0xf8): /* ENQCMD */
+ case X86EMUL_OPC_F3(0x0f38, 0xf8): /* ENQCMDS */
return true;
}
return false;
@@ -120,6 +120,7 @@
#define cpu_has_rdpid boot_cpu_has(X86_FEATURE_RDPID)
#define cpu_has_movdiri boot_cpu_has(X86_FEATURE_MOVDIRI)
#define cpu_has_movdir64b boot_cpu_has(X86_FEATURE_MOVDIR64B)
+#define cpu_has_enqcmd boot_cpu_has(X86_FEATURE_ENQCMD)
/* CPUID level 0x80000007.edx */
#define cpu_has_itsc boot_cpu_has(X86_FEATURE_ITSC)
@@ -74,6 +74,10 @@
#define MSR_PL3_SSP 0x000006a7
#define MSR_INTERRUPT_SSP_TABLE 0x000006a8
+#define MSR_PASID 0x00000d93
+#define PASID_PASID_MASK 0x000fffff
+#define PASID_VALID (_AC(1, ULL) << 31)
+
/*
* Legacy MSR constants in need of cleanup. No new MSRs below this comment.
*/
@@ -242,6 +242,7 @@ XEN_CPUFEATURE(RDPID, 6*32+22) /
XEN_CPUFEATURE(CLDEMOTE, 6*32+25) /*A CLDEMOTE instruction */
XEN_CPUFEATURE(MOVDIRI, 6*32+27) /*A MOVDIRI instruction */
XEN_CPUFEATURE(MOVDIR64B, 6*32+28) /*A MOVDIR64B instruction */
+XEN_CPUFEATURE(ENQCMD, 6*32+29) /* ENQCMD{,S} instructions */
/* AMD-defined CPU features, CPUID level 0x80000007.edx, word 7 */
XEN_CPUFEATURE(ITSC, 7*32+ 8) /* Invariant TSC */