@@ -353,11 +353,12 @@ void xrstor(struct vcpu *v, uint64_t mask)
{
switch ( __builtin_expect(ptr->fpu_sse.x[FPU_WORD_SIZE_OFFSET], 8) )
{
+ BUILD_BUG_ON(sizeof(faults) != 4); /* Clang doesn't support %z in asm. */
#define XRSTOR(pfx) \
alternative_io("1: .byte " pfx "0x0f,0xae,0x2f\n" \
"3:\n" \
" .section .fixup,\"ax\"\n" \
- "2: inc%z[faults] %[faults]\n" \
+ "2: incl %[faults]\n" \
" jmp 3b\n" \
" .previous\n" \
_ASM_EXTABLE(1b, 2b), \
@@ -2,7 +2,8 @@
#define __ASM_SPINLOCK_H
#define _raw_read_unlock(l) \
- asm volatile ( "lock; dec%z0 %0" : "+m" ((l)->lock) :: "memory" )
+ BUILD_BUG_ON(sizeof((l)->lock) != 4); /* Clang doesn't support %z in asm. */ \
+ asm volatile ( "lock; decl %0" : "+m" ((l)->lock) :: "memory" )
/*
* On x86 the only reordering is of reads with older writes. In the
Clang doesn't support the %z modifier.  Replace both uses with an explicit
l suffix, and cover the changes with BUILD_BUG_ON()s.

Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
---
CC: Jan Beulich <JBeulich@suse.com>
---
 xen/arch/x86/xstate.c          | 3 ++-
 xen/include/asm-x86/spinlock.h | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)