--- a/arch/x86/include/asm/rwsem.h
+++ b/arch/x86/include/asm/rwsem.h
@@ -99,21 +99,36 @@ static inline int __down_read_trylock(struct rw_semaphore *sem)
 /*
  * lock for writing
  */
+#define ____down_write(sem, slow_path)			\
+({							\
+	long tmp;					\
+	struct rw_semaphore* ret = sem;			\
+	asm volatile("# beginning down_write\n\t"	\
+		     LOCK_PREFIX "  xadd      %1,(%2)\n\t"	\
+		     /* adds 0xffff0001, returns the old value */ \
+		     "  test " __ASM_SEL(%w1,%k1) "," __ASM_SEL(%w1,%k1) "\n\t" \
+		     /* was the active mask 0 before? */\
+		     "  jz        1f\n"			\
+		     "  call " slow_path "\n"		\
+		     "1:\n"				\
+		     "# ending down_write"		\
+		     : "+m" (sem->count), "=d" (tmp), "+a" (ret)	\
+		     : "a" (sem), "1" (RWSEM_ACTIVE_WRITE_BIAS) \
+		     : "memory", "cc");			\
+	ret;						\
+})
+
 static inline void __down_write(struct rw_semaphore *sem)
 {
-	long tmp;
-	asm volatile("# beginning down_write\n\t"
-		     LOCK_PREFIX "  xadd      %1,(%2)\n\t"
-		     /* adds 0xffff0001, returns the old value */
-		     "  test " __ASM_SEL(%w1,%k1) "," __ASM_SEL(%w1,%k1) "\n\t"
-		     /* was the active mask 0 before? */
-		     "  jz        1f\n"
-		     "  call call_rwsem_down_write_failed\n"
-		     "1:\n"
-		     "# ending down_write"
-		     : "+m" (sem->count), "=d" (tmp)
-		     : "a" (sem), "1" (RWSEM_ACTIVE_WRITE_BIAS)
-		     : "memory", "cc");
+	____down_write(sem, "call_rwsem_down_write_failed");
+}
+
+static inline int __down_write_killable(struct rw_semaphore *sem)
+{
+	if (IS_ERR(____down_write(sem, "call_rwsem_down_write_failed_killable")))
+		return -EINTR;
+
+	return 0;
 }
 
 /*
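What the new macro does, in plain C: the LOCK XADD atomically adds RWSEM_ACTIVE_WRITE_BIAS to sem->count and leaves the old value in tmp; the test/jz pair then skips the slow-path call only if the active mask (the low half of the old count, %w1 on 32-bit, %k1 on 64-bit) was zero. The slow path's return value travels back through the "+a" (ret) output, which is what lets __down_write_killable() apply IS_ERR() and turn ERR_PTR(-EINTR) from rwsem_down_write_failed_killable() into -EINTR. Below is a minimal user-space model of that control flow, assuming an LP64 target and the 64-bit count layout (waiting bias -2^32, active bias 1); all model_* names are illustrative, not kernel code:

#include <errno.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Bias layout assumed from the 64-bit kernel config: active lockers
 * count in the low 32 bits, waiter bias in the high bits. */
#define MODEL_ACTIVE_BIAS	1L
#define MODEL_WAITING_BIAS	(-0x100000000L)
#define MODEL_WRITE_BIAS	(MODEL_WAITING_BIAS + MODEL_ACTIVE_BIAS)

struct model_rwsem {
	_Atomic long count;	/* 0 == unlocked, like sem->count */
};

/* Stand-in for rwsem_down_write_failed_killable(): returns NULL where
 * the kernel would return ERR_PTR(-EINTR). */
static struct model_rwsem *model_slow_path(struct model_rwsem *sem,
					   bool fatal_signal)
{
	return fatal_signal ? NULL : sem;
}

static int model_down_write_killable(struct model_rwsem *sem,
				     bool fatal_signal)
{
	/* The LOCK XADD: atomically add the write bias, keep the old count. */
	long old = atomic_fetch_add(&sem->count, MODEL_WRITE_BIAS);

	/* "was the active mask 0 before?": like test %k1,%k1 / jz 1f, look
	 * only at the low 32 bits and skip the slow path when they are 0. */
	if ((int)old != 0) {
		/* The IS_ERR() check from __down_write_killable() above. */
		if (!model_slow_path(sem, fatal_signal))
			return -EINTR;
	}
	return 0;
}

int main(void)
{
	struct model_rwsem sem = { .count = 0 };

	/* Uncontended: fast path only, prints 0. */
	printf("%d\n", model_down_write_killable(&sem, false));
	/* Contended and killed while sleeping: prints -4 (-EINTR). */
	printf("%d\n", model_down_write_killable(&sem, true));
	return 0;
}

Built with any C11 compiler, the first call succeeds on the fast path and the second takes the slow path and fails with -EINTR, mirroring the two exits of __down_write_killable().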
--- a/arch/x86/lib/rwsem.S
+++ b/arch/x86/lib/rwsem.S
@@ -101,6 +101,14 @@ ENTRY(call_rwsem_down_write_failed)
 	ret
 ENDPROC(call_rwsem_down_write_failed)
 
+ENTRY(call_rwsem_down_write_failed_killable)
+	save_common_regs
+	movq %rax,%rdi
+	call rwsem_down_write_failed_killable
+	restore_common_regs
+	ret
+ENDPROC(call_rwsem_down_write_failed_killable)
+
 ENTRY(call_rwsem_wake)
 	/* do nothing if still outstanding active readers */
 	__ASM_HALF_SIZE(dec) %__ASM_HALF_REG(dx)
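The new stub follows the pattern of its non-killable sibling: save_common_regs preserves the caller-saved registers that the inline asm's constraints did not declare clobbered, movq %rax,%rdi copies the semaphore pointer from %rax (where the "a" input pinned it) into the first-argument register of the x86-64 SysV calling convention (rwsem.S defines movq as a no-op macro on 32-bit, where the argument already sits in %eax), and the slow path's return value rides back to the macro's ret variable in %rax, which restore_common_regs leaves untouched. Callers are expected to reach all of this through the generic down_write_killable() wrapper built on this primitive; a hypothetical usage sketch (my_dev and my_dev_set_state are made-up names, not from this patch):

#include <linux/errno.h>
#include <linux/rwsem.h>

struct my_dev {
	struct rw_semaphore sem;	/* initialized elsewhere with init_rwsem() */
	int state;
};

static int my_dev_set_state(struct my_dev *dev, int new_state)
{
	/* Sleeps like down_write(), but a fatal signal (e.g. SIGKILL) wakes
	 * the task and the acquire fails instead of blocking forever. */
	if (down_write_killable(&dev->sem))
		return -EINTR;

	dev->state = new_state;		/* writer-exclusive section */

	up_write(&dev->sem);
	return 0;
}

Unlike plain down_write(), the acquire can now fail, so every caller of the killable variant has to handle the -EINTR branch.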