@@ -223,7 +223,7 @@ LABEL_LOCAL(.Lrestore_rcx_iret_exit_to_g
/* No special register assumptions. */
iret_exit_to_guest:
andl $~(X86_EFLAGS_IOPL | X86_EFLAGS_VM), EFRAME_eflags(%rsp)
- orl $X86_EFLAGS_IF, EFRAME_eflags(%rsp)
+ orb $X86_EFLAGS_IF >> 8, EFRAME_eflags + 1(%rsp)
addq $8,%rsp
.Lft0: iretq
_ASM_PRE_EXTABLE(.Lft0, handle_exception)
@@ -346,7 +346,7 @@ LABEL(sysenter_eflags_saved, 0)
GET_STACK_END(bx)
/* PUSHF above has saved EFLAGS.IF clear (the caller had it set). */
- orl $X86_EFLAGS_IF, UREGS_eflags(%rsp)
+ orb $X86_EFLAGS_IF >> 8, UREGS_eflags + 1(%rsp)
mov STACK_CPUINFO_FIELD(xen_cr3)(%rbx), %rcx
test %rcx, %rcx
jz .Lsyse_cr3_okay
@@ -361,11 +361,11 @@ LABEL(sysenter_eflags_saved, 0)
cmpb $0,VCPU_sysenter_disables_events(%rbx)
movq VCPU_sysenter_addr(%rbx),%rax
setne %cl
- testl $X86_EFLAGS_NT,UREGS_eflags(%rsp)
+ testb $X86_EFLAGS_NT >> 8, UREGS_eflags + 1(%rsp)
leaq VCPU_trap_bounce(%rbx),%rdx
UNLIKELY_START(nz, sysenter_nt_set)
pushfq
- andl $~X86_EFLAGS_NT,(%rsp)
+ andb $~(X86_EFLAGS_NT >> 8), 1(%rsp)
popfq
UNLIKELY_END(sysenter_nt_set)
testq %rax,%rax
Much like what was recently done for setting the entry vector, and along
the lines of what we already had in handle_exception_saved, avoid 32-bit
immediates where 8-bit ones will do. This reduces .text.entry size by 16
bytes in my non-CET reference build, while in my CET reference build the
section size doesn't change (there and in .text only padding space
increases). Inspired by other long->byte conversion work.

Signed-off-by: Jan Beulich <jbeulich@suse.com>
---
Numbers above are biased by me also having the straight-line-speculation
change in the tree, thus every JMP is followed by an INT3. Without that,
.text.entry size would also shrink by 16 bytes in the CET build.
---
v2: Drop switch_to_kernel change.
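
Illustrative sketch (not part of the patch): assuming X86_EFLAGS_IF == 0x200
and a hypothetical displacement of 8 from %rsp, the flag bit lives entirely in
the second byte of the field, so the dword form needs a 4-byte immediate while
the byte form operating on that second byte needs only a 1-byte one:

	orl	$0x200, 8(%rsp)		/* 81 /1 id: opcode+ModRM+SIB+disp8+imm32 = 8 bytes */
	orb	$0x2, 8+1(%rsp)		/* 80 /1 ib: opcode+ModRM+SIB+disp8+imm8  = 5 bytes */

The same shrinking of the immediate applies to the TEST and AND conversions;
the exact per-site saving depends on whether the byte form has to add a
displacement byte the dword form didn't need.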