Message ID | 56EA72F602000078000DD933@prv-mh.provo.novell.com (mailing list archive) |
---|---|
State | New, archived |
On 17/03/16 08:03, Jan Beulich wrote:
> Alternatives patching code picks the most suitable NOPs for the
> running system, so simply use it to replace the pre-populated ones.
>
> Use an arbitrary, always available feature to key off from, but
> hide this behind the new X86_FEATURE_ALWAYS.
>
> Signed-off-by: Jan Beulich <jbeulich@suse.com>
> ---
> v3: Re-base.
> v2: Introduce and use X86_FEATURE_ALWAYS.
>
> --- a/xen/arch/x86/x86_64/compat/entry.S
> +++ b/xen/arch/x86/x86_64/compat/entry.S
> @@ -175,12 +175,7 @@ compat_bad_hypercall:
>  ENTRY(compat_restore_all_guest)
>          ASSERT_INTERRUPTS_DISABLED
>  .Lcr4_orig:
> -        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
> -        ASM_NOP2 /* jpe .Lcr4_alt_end */
> -        ASM_NOP8 /* mov CPUINFO_cr4...(%rsp), %rax */
> -        ASM_NOP6 /* and $..., %rax */
> -        ASM_NOP8 /* mov %rax, CPUINFO_cr4...(%rsp) */
> -        ASM_NOP3 /* mov %rax, %cr4 */
> +        .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90
>  .Lcr4_orig_end:
>          .pushsection .altinstr_replacement, "ax"
>  .Lcr4_alt:

This hunk should live in patch 2.

Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
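[Editor's note: for context on the directive under discussion, `.skip length, fill` emits `length` copies of the byte `fill` (here 0x90, the single-byte NOP). The expression sizes the placeholder so that the patch site ends up exactly as long as the replacement sequence in `.altinstr_replacement`; the `(. - .Lcr4_orig)` term subtracts any bytes already emitted at the site, so the arithmetic would stay correct even if real instructions preceded the padding. A minimal standalone sketch of the idiom, with hypothetical labels and replacement code, not the Xen source:]

```
        /* Patch site: pad with single-byte NOPs (0x90) until the site
         * is exactly as long as the replacement below. */
.Lorig:
        .skip (.Lrepl_end - .Lrepl) - (. - .Lorig), 0x90
.Lorig_end:

        /* Replacement sequence, kept out of the main text section;
         * the boot-time patcher copies it over .Lorig when enabled. */
        .pushsection .altinstr_replacement, "ax"
.Lrepl:
        testb $3, 8(%rsp)         /* hypothetical replacement code */
        jz    1f
        mov   %rax, %cr4
1:
.Lrepl_end:
        .popsection
```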
>>> On 13.05.16 at 17:57, <andrew.cooper3@citrix.com> wrote:
> On 17/03/16 08:03, Jan Beulich wrote:
>> Alternatives patching code picks the most suitable NOPs for the
>> running system, so simply use it to replace the pre-populated ones.
>>
>> Use an arbitrary, always available feature to key off from, but
>> hide this behind the new X86_FEATURE_ALWAYS.
>>
>> Signed-off-by: Jan Beulich <jbeulich@suse.com>
>> ---
>> v3: Re-base.
>> v2: Introduce and use X86_FEATURE_ALWAYS.
>>
>> --- a/xen/arch/x86/x86_64/compat/entry.S
>> +++ b/xen/arch/x86/x86_64/compat/entry.S
>> @@ -175,12 +175,7 @@ compat_bad_hypercall:
>>  ENTRY(compat_restore_all_guest)
>>          ASSERT_INTERRUPTS_DISABLED
>>  .Lcr4_orig:
>> -        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
>> -        ASM_NOP2 /* jpe .Lcr4_alt_end */
>> -        ASM_NOP8 /* mov CPUINFO_cr4...(%rsp), %rax */
>> -        ASM_NOP6 /* and $..., %rax */
>> -        ASM_NOP8 /* mov %rax, CPUINFO_cr4...(%rsp) */
>> -        ASM_NOP3 /* mov %rax, %cr4 */
>> +        .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90
>>  .Lcr4_orig_end:
>>          .pushsection .altinstr_replacement, "ax"
>>  .Lcr4_alt:
>
> This hunk should live in patch 2.

No. In patch 2 we want to leverage multi-byte NOPs. Here, knowing
they're going to be replaced anyway, we are fine with using the
simpler .fill (producing many single byte ones).

> Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>

Does this stand nevertheless?

Jan
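[Editor's note: the distinction Jan draws is between a run of one-byte NOPs (what `.skip`/`.fill` with 0x90 produce), each decoded as a separate instruction, and the long-form NOPs patch 2 wants, which cover the same bytes in far fewer instructions. That only matters for code that stays in place; this placeholder is rewritten at boot regardless. An illustrative comparison using the standard recommended x86 NOP encodings, not Xen's ASM_NOPn macros; 35 bytes is the size of the replaced run (8+2+8+6+8+3):]

```
        /* The 35-byte placeholder as .skip emits it: 35 separate
         * one-byte NOP (0x90) instructions. */
        .skip 35, 0x90

        /* The same 35 bytes as the kind of sequence the alternatives
         * patcher writes instead: four 8-byte NOPs plus one 3-byte NOP,
         * i.e. 5 instructions to decode rather than 35. */
        .byte 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00 /* nopl 0(%rax,%rax,1) */
        .byte 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00
        .byte 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00
        .byte 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00
        .byte 0x0f, 0x1f, 0x00                                /* nopl (%rax) */
```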
On 13/05/16 17:06, Jan Beulich wrote:
>>>> On 13.05.16 at 17:57, <andrew.cooper3@citrix.com> wrote:
>> On 17/03/16 08:03, Jan Beulich wrote:
>>> Alternatives patching code picks the most suitable NOPs for the
>>> running system, so simply use it to replace the pre-populated ones.
>>>
>>> Use an arbitrary, always available feature to key off from, but
>>> hide this behind the new X86_FEATURE_ALWAYS.
>>>
>>> Signed-off-by: Jan Beulich <jbeulich@suse.com>
>>> ---
>>> v3: Re-base.
>>> v2: Introduce and use X86_FEATURE_ALWAYS.
>>>
>>> --- a/xen/arch/x86/x86_64/compat/entry.S
>>> +++ b/xen/arch/x86/x86_64/compat/entry.S
>>> @@ -175,12 +175,7 @@ compat_bad_hypercall:
>>>  ENTRY(compat_restore_all_guest)
>>>          ASSERT_INTERRUPTS_DISABLED
>>>  .Lcr4_orig:
>>> -        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
>>> -        ASM_NOP2 /* jpe .Lcr4_alt_end */
>>> -        ASM_NOP8 /* mov CPUINFO_cr4...(%rsp), %rax */
>>> -        ASM_NOP6 /* and $..., %rax */
>>> -        ASM_NOP8 /* mov %rax, CPUINFO_cr4...(%rsp) */
>>> -        ASM_NOP3 /* mov %rax, %cr4 */
>>> +        .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90
>>>  .Lcr4_orig_end:
>>>          .pushsection .altinstr_replacement, "ax"
>>>  .Lcr4_alt:
>> This hunk should live in patch 2.
> No. In patch 2 we want to leverage multi-byte NOPs. Here, knowing
> they're going to be replaced anyway, we are fine with using the
> simpler .fill (producing many single byte ones).
>
>> Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
> Does this stand nevertheless?

Yes.

~Andrew
```diff
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -175,12 +175,7 @@ compat_bad_hypercall:
 ENTRY(compat_restore_all_guest)
         ASSERT_INTERRUPTS_DISABLED
 .Lcr4_orig:
-        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
-        ASM_NOP2 /* jpe .Lcr4_alt_end */
-        ASM_NOP8 /* mov CPUINFO_cr4...(%rsp), %rax */
-        ASM_NOP6 /* and $..., %rax */
-        ASM_NOP8 /* mov %rax, CPUINFO_cr4...(%rsp) */
-        ASM_NOP3 /* mov %rax, %cr4 */
+        .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90
 .Lcr4_orig_end:
         .pushsection .altinstr_replacement, "ax"
 .Lcr4_alt:
@@ -192,6 +187,7 @@ ENTRY(compat_restore_all_guest)
         mov   %rax, %cr4
 .Lcr4_alt_end:
         .section .altinstructions, "a"
+        altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, 12, 0
         altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, \
                              (.Lcr4_orig_end - .Lcr4_orig), \
                              (.Lcr4_alt_end - .Lcr4_alt)
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -204,6 +204,7 @@ void ret_from_intr(void);
 662:      __ASM_##op;                                                \
           .popsection;                                               \
           .pushsection .altinstructions, "a";                        \
+          altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0; \
           altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3;   \
           .popsection
 
@@ -215,6 +216,7 @@ void ret_from_intr(void);
           .pushsection .altinstr_replacement, "ax";                  \
 668:      call cr4_pv32_restore;                                     \
           .section .altinstructions, "a";                            \
+          altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
           altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5;   \
           altinstruction_entry 667b, 668b, X86_FEATURE_SMAP, 5, 5;   \
           .popsection
--- a/xen/include/asm-x86/cpufeature.h
+++ b/xen/include/asm-x86/cpufeature.h
@@ -162,6 +162,9 @@
 #define cpufeat_bit(idx) ((idx) % 32)
 #define cpufeat_mask(idx) (_AC(1, U) << cpufeat_bit(idx))
 
+/* An alias of a feature we know is always going to be present. */
+#define X86_FEATURE_ALWAYS X86_FEATURE_LM
+
 #if !defined(__ASSEMBLY__) && !defined(X86_FEATURES_ONLY)
 #include <xen/bitops.h>
```
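[Editor's note: each new `altinstruction_entry` line pairs a patch site with itself under `X86_FEATURE_ALWAYS`, with a replacement length of 0. Because the feature is by definition always set, the patcher "applies" the entry on every system: it copies zero replacement bytes and then back-fills the full original length with the NOPs best suited to the running CPU, as the commit message describes. The later SMEP/SMAP entries can still overwrite the site when those features are present. A condensed sketch of the pattern, with hypothetical labels and call target, assuming the `altinstruction_entry orig, repl, feature, orig_len, repl_len` operand order seen above:]

```
        /* A 5-byte patch site, pre-filled with single-byte NOPs. */
667:    .skip 5, 0x90

        .pushsection .altinstr_replacement, "ax"
668:    call  pv32_fixup              /* hypothetical replacement, 5 bytes */
        .popsection

        .pushsection .altinstructions, "a"
        /* Applied on every system: copy 0 replacement bytes, then
         * rewrite all 5 placeholder bytes with optimal NOPs. */
        altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0
        /* Applied afterwards when SMEP is available, replacing the
         * freshly written NOPs with the real call. */
        altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5
        .popsection
```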