@@ -361,9 +361,20 @@ static inline void refcount_dec(refcount_t *r)
extern __must_check bool refcount_dec_if_one(refcount_t *r);
extern __must_check bool refcount_dec_not_one(refcount_t *r);
-extern __must_check bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock);
-extern __must_check bool refcount_dec_and_lock(refcount_t *r, spinlock_t *lock);
-extern __must_check bool refcount_dec_and_lock_irqsave(refcount_t *r,
- spinlock_t *lock,
- unsigned long *flags);
+extern __must_check bool raw_refcount_dec_and_mutex_lock(refcount_t *r,
+ struct mutex *lock);
+#define refcount_dec_and_mutex_lock(r, lock) \
+ ((bool)(__cond_lock(lock, raw_refcount_dec_and_mutex_lock(r, lock))))
+
+extern __must_check bool raw_refcount_dec_and_lock(refcount_t *r,
+ spinlock_t *lock);
+#define refcount_dec_and_lock(r, lock) \
+ ((bool)(__cond_lock(lock, raw_refcount_dec_and_lock(r, lock))))
+
+extern __must_check bool raw_refcount_dec_and_lock_irqsave(refcount_t *r,
+ spinlock_t *lock,
+ unsigned long *flags);
+#define refcount_dec_and_lock_irqsave(r, lock, flags) \
+ ((bool)(__cond_lock(lock, raw_refcount_dec_and_lock_irqsave(r, lock, flags))))
+
#endif /* _LINUX_REFCOUNT_H */
@@ -110,7 +110,7 @@ EXPORT_SYMBOL(refcount_dec_not_one);
* Return: true and hold mutex if able to decrement refcount to 0, false
* otherwise
*/
-bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock)
+bool raw_refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock)
{
if (refcount_dec_not_one(r))
return false;
@@ -123,11 +123,11 @@ bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock)
return true;
}
-EXPORT_SYMBOL(refcount_dec_and_mutex_lock);
+EXPORT_SYMBOL(raw_refcount_dec_and_mutex_lock);
/**
- * refcount_dec_and_lock - return holding spinlock if able to decrement
- * refcount to 0
+ * raw_refcount_dec_and_lock - return holding spinlock if able to decrement
+ * refcount to 0
* @r: the refcount
* @lock: the spinlock to be locked
*
@@ -141,7 +141,7 @@ EXPORT_SYMBOL(refcount_dec_and_mutex_lock);
* Return: true and hold spinlock if able to decrement refcount to 0, false
* otherwise
*/
-bool refcount_dec_and_lock(refcount_t *r, spinlock_t *lock)
+bool raw_refcount_dec_and_lock(refcount_t *r, spinlock_t *lock)
{
if (refcount_dec_not_one(r))
return false;
@@ -154,11 +154,12 @@ bool refcount_dec_and_lock(refcount_t *r, spinlock_t *lock)
return true;
}
-EXPORT_SYMBOL(refcount_dec_and_lock);
+EXPORT_SYMBOL(raw_refcount_dec_and_lock);
/**
- * refcount_dec_and_lock_irqsave - return holding spinlock with disabled
- * interrupts if able to decrement refcount to 0
+ * raw_refcount_dec_and_lock_irqsave - return holding spinlock with disabled
+ * interrupts if able to decrement
+ * refcount to 0
* @r: the refcount
* @lock: the spinlock to be locked
 * @flags: saved IRQ-flags if the lock is acquired
@@ -169,8 +170,8 @@ EXPORT_SYMBOL(refcount_dec_and_lock);
* Return: true and hold spinlock if able to decrement refcount to 0, false
* otherwise
*/
-bool refcount_dec_and_lock_irqsave(refcount_t *r, spinlock_t *lock,
- unsigned long *flags)
+bool raw_refcount_dec_and_lock_irqsave(refcount_t *r, spinlock_t *lock,
+ unsigned long *flags)
{
if (refcount_dec_not_one(r))
return false;
@@ -183,4 +184,4 @@ bool refcount_dec_and_lock_irqsave(refcount_t *r, spinlock_t *lock,
return true;
}
-EXPORT_SYMBOL(refcount_dec_and_lock_irqsave);
+EXPORT_SYMBOL(raw_refcount_dec_and_lock_irqsave);
This patch adds the __cond_lock() macro to the refcount conditional lock
API. Currently sparse cannot detect the conditional lock handling of the
refcount_dec_and_lock() functions and prints a context imbalance warning
such as:

  warning: context imbalance in 'put_rsb' - unexpected unlock

With this patch applied, and with the refcount_dec_and_lock() call placed
inside the if condition that decides whether to unlock, the warning
disappears.

The patch follows a naming scheme similar to raw_spin_trylock(): the
implementations gain a "raw_" prefix, and a macro replaces each original
function, using __cond_lock() to signal that the acquire depends on the
return value of the wrapped function. A cast to bool seems to be necessary
because __cond_lock() returns a non-boolean scalar type. The __must_check
annotation was tested and still works with this patch applied.

Signed-off-by: Alexander Aring <aahringo@redhat.com>
---
 include/linux/refcount.h | 21 ++++++++++++++++-----
 lib/refcount.c           | 23 ++++++++++++-----------
 2 files changed, 28 insertions(+), 16 deletions(-)
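
Editor's note (not part of the patch): to illustrate why the bool cast is
needed, the __cond_lock() annotation is defined roughly as follows in the
kernel's compiler annotation headers for sparse (__CHECKER__) builds; the
exact spelling and location may differ by kernel version:

	#ifdef __CHECKER__
	# define __cond_lock(x, c)	((c) ? ({ __acquire(x); 1; }) : 0)
	#else
	# define __cond_lock(x, c)	(c)
	#endif

	/*
	 * Under sparse, refcount_dec_and_lock(r, lock) therefore expands
	 * to roughly:
	 *
	 *	(bool)(raw_refcount_dec_and_lock(r, lock) ?
	 *		({ __acquire(lock); 1; }) : 0)
	 *
	 * The lock context is acquired only on the true branch, and the
	 * conditional expression yields a plain int, hence the cast back
	 * to bool.
	 */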
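
Editor's note (not part of the patch): a minimal caller sketch of the
pattern sparse can now follow without a context imbalance warning. The
struct, field, and function names below are made up for illustration
only:

	#include <linux/refcount.h>
	#include <linux/slab.h>
	#include <linux/spinlock.h>

	struct foo {
		refcount_t refs;
		spinlock_t lock;
	};

	static void foo_put(struct foo *f)
	{
		/*
		 * The unlock happens only on the branch where the macro
		 * returned true; with __cond_lock() in place sparse can
		 * see that the lock is held exactly on that branch.
		 */
		if (refcount_dec_and_lock(&f->refs, &f->lock)) {
			/* last reference: tear down under the lock */
			spin_unlock(&f->lock);
			kfree(f);
		}
	}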