@@ -39,7 +39,13 @@
.macro altinstruction_entry orig alt feature orig_len alt_len pad_len
.long \orig - .
.long \alt - .
+ .iflt \feature
+ .word ~(\feature)
+ .byte 1
+ .else
.word \feature
+ .byte 0
+ .endif
.byte \orig_len
.byte \alt_len
.byte \pad_len
@@ -59,6 +59,8 @@ struct alt_instr {
s32 instr_offset; /* original instruction */
s32 repl_offset; /* offset to replacement instruction */
u16 cpuid; /* cpuid bit set for replacement */
+ u8 flag; /* flag byte */
+#define ALTINSTR_FLAG_INV 0x01
u8 instrlen; /* length of original instruction */
u8 replacementlen; /* length of new instruction */
u8 padlen; /* length of build-time padding */
@@ -145,7 +147,13 @@ static inline int alternatives_text_reserved(void *start, void *end)
#define ALTINSTR_ENTRY(feature, num) \
" .long 661b - .\n" /* label */ \
" .long " b_replacement(num)"f - .\n" /* new instruction */ \
+ " .iflt " __stringify(feature) "\n" /* inverted? */ \
+ " .word ~(" __stringify(feature) ")\n" /* feature bit */ \
+ " .byte " __stringify(ALTINSTR_FLAG_INV) "\n" /* flag byte */ \
+ " .else\n" \
" .word " __stringify(feature) "\n" /* feature bit */ \
+ " .byte 0\n" /* flag byte */ \
+ " .endif\n" \
" .byte " alt_total_slen "\n" /* source len */ \
" .byte " alt_rlen(num) "\n" /* replacement len */ \
" .byte " alt_pad_len "\n" /* pad len */
@@ -184,6 +184,7 @@ static __always_inline bool _static_cpu_has(u16 bit)
" .long 1b - .\n" /* src offset */
" .long 4f - .\n" /* repl offset */
" .word %P[always]\n" /* always replace */
+ " .byte 0\n" /* flag byte */
" .byte 3b - 1b\n" /* src len */
" .byte 5f - 4f\n" /* repl len */
" .byte 3b - 2b\n" /* pad len */
@@ -196,6 +197,7 @@ static __always_inline bool _static_cpu_has(u16 bit)
" .long 1b - .\n" /* src offset */
" .long 0\n" /* no replacement */
" .word %P[feature]\n" /* feature bit */
+ " .byte 0\n" /* flag byte */
" .byte 3b - 1b\n" /* src len */
" .byte 0\n" /* repl len */
" .byte 0\n" /* pad len */
@@ -393,14 +393,15 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
replacement = (u8 *)&a->repl_offset + a->repl_offset;
BUG_ON(a->instrlen > sizeof(insn_buff));
BUG_ON(a->cpuid >= (NCAPINTS + NBUGINTS) * 32);
- if (!boot_cpu_has(a->cpuid)) {
+ if (!boot_cpu_has(a->cpuid) == !(a->flag & ALTINSTR_FLAG_INV)) {
if (a->padlen > 1)
optimize_nops(a, instr);
continue;
}
- DPRINTK("feat: %d*32+%d, old: (%pS (%px) len: %d), repl: (%px, len: %d), pad: %d",
+ DPRINTK("feat: %s%d*32+%d, old: (%pS (%px) len: %d), repl: (%px, len: %d), pad: %d",
+ (a->flag & ALTINSTR_FLAG_INV) ? "~" : "",
a->cpuid >> 5,
a->cpuid & 0x1f,
instr, instr, a->instrlen,
@@ -10,11 +10,11 @@
#define JUMP_ORIG_OFFSET 0
#define JUMP_NEW_OFFSET 4
-#define ALT_ENTRY_SIZE 13
+#define ALT_ENTRY_SIZE 14
#define ALT_ORIG_OFFSET 0
#define ALT_NEW_OFFSET 4
#define ALT_FEATURE_OFFSET 8
-#define ALT_ORIG_LEN_OFFSET 10
-#define ALT_NEW_LEN_OFFSET 11
+#define ALT_ORIG_LEN_OFFSET 11
+#define ALT_NEW_LEN_OFFSET 12
#endif /* _X86_ARCH_SPECIAL_H */
Add support for alternative patching for the case a feature is not
present on the current cpu. For this purpose add a flag byte to
struct alt_instr adding the information that the inverted feature
should be used.

For users of ALTERNATIVE() and friends an inverted feature is
specified by negating it, e.g.:

  ALTERNATIVE(old, new, ~feature)

This requires adapting the objtool information for struct alt_instr.

Signed-off-by: Juergen Gross <jgross@suse.com>
---
V5:
- split off from next patch
- reworked to use flag byte (Boris Petkov)
---
 arch/x86/include/asm/alternative-asm.h        | 6 ++++++
 arch/x86/include/asm/alternative.h            | 8 ++++++++
 arch/x86/include/asm/cpufeature.h             | 2 ++
 arch/x86/kernel/alternative.c                 | 5 +++--
 tools/objtool/arch/x86/include/arch/special.h | 6 +++---
 5 files changed, 22 insertions(+), 5 deletions(-)