@@ -374,45 +374,45 @@ ATOMIC_OPS()
 #undef ATOMIC_OPS
 #undef ATOMIC_OP
 
-static __always_inline int arch_atomic_sub_if_positive(atomic_t *v, int offset)
+static __always_inline int arch_atomic_dec_if_positive(atomic_t *v)
 {
 	int prev, rc;
 
 	__asm__ __volatile__ (
 		"0:	lr.w      %[p],  %[c]\n"
-		"	sub       %[rc], %[p], %[o]\n"
+		"	addi      %[rc], %[p], -1\n"
 		"	bltz      %[rc], 1f\n"
 		"	sc.w.aqrl %[rc], %[rc], %[c]\n"
 		"	bnez      %[rc], 0b\n"
 		"1:\n"
 		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
-		: [o]"r" (offset)
+		:
 		: "memory");
-	return prev - offset;
+	return prev - 1;
 }
-#define arch_atomic_dec_if_positive(v) arch_atomic_sub_if_positive(v, 1)
+#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
 
 #ifndef CONFIG_GENERIC_ATOMIC64
-static __always_inline s64 arch_atomic64_sub_if_positive(atomic64_t *v, s64 offset)
+static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
 {
 	s64 prev;
 	long rc;
 
 	__asm__ __volatile__ (
 		"0:	lr.d      %[p],  %[c]\n"
-		"	sub       %[rc], %[p], %[o]\n"
+		"	addi      %[rc], %[p], -1\n"
 		"	bltz      %[rc], 1f\n"
 		"	sc.d.aqrl %[rc], %[rc], %[c]\n"
 		"	bnez      %[rc], 0b\n"
 		"1:\n"
 		: [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
-		: [o]"r" (offset)
+		:
 		: "memory");
-	return prev - offset;
+	return prev - 1;
 }
-#define arch_atomic64_dec_if_positive(v) arch_atomic64_sub_if_positive(v, 1)
+#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
 
 #endif
 
 #endif /* _ASM_RISCV_ATOMIC_H */
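For context, here is a portable sketch of the semantics these LR/SC loops implement, written as a C11 compare-exchange loop. This is an illustration only, not the kernel code: the local dec_if_positive() helper and the memory orderings are this sketch's own choices (the .aqrl annotation on the SC gives the kernel version full ordering on a successful store).

/*
 * Illustration only: dec_if_positive() semantics as a C11 CAS loop.
 * Decrements *v unless the result would be negative, and returns the
 * decremented value; a negative return means *v was already <= 0 and
 * was left unchanged -- the same contract as the LR/SC sequences above.
 */
#include <stdatomic.h>
#include <stdio.h>

static int dec_if_positive(atomic_int *v)
{
	int old = atomic_load_explicit(v, memory_order_relaxed);

	do {
		if (old <= 0)
			return old - 1;	/* would go negative: no store */
	} while (!atomic_compare_exchange_weak_explicit(v, &old, old - 1,
							memory_order_acq_rel,
							memory_order_relaxed));
	return old - 1;
}

int main(void)
{
	atomic_int refs = 2;

	printf("%d\n", dec_if_positive(&refs));	/* 1 */
	printf("%d\n", dec_if_positive(&refs));	/* 0: took the last ref */
	printf("%d\n", dec_if_positive(&refs));	/* -1: counter stays at 0 */
	return 0;
}

On RISC-V the LR/SC pair expresses the same retry loop directly: sc.w.aqrl writes a nonzero value to %[rc] if the reservation taken by lr.w was lost, and bnez then restarts the sequence, just as a failed compare-exchange restarts the C loop. Hardwiring the decrement as "addi ..., -1" lets the patch drop the %[o] input operand entirely, since dec_if_positive was the only user of the old sub_if_positive helpers.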