
[09/37] MIPS: atomic: Fix whitespace in ATOMIC_OP macros

Message ID 20190930230806.2940505-10-paul.burton@mips.com (mailing list archive)
State Superseded
Series MIPS: barriers & atomics cleanups

Commit Message

Paul Burton Sept. 30, 2019, 11:08 p.m. UTC
We define macros in asm/atomic.h in which each line ends with space
characters before the backslash that continues the macro on the next
line. Remove the space characters, leaving tabs as the only whitespace,
in conformity with the kernel coding convention.
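
For reference, one way to locate lines that end with spaces before the
continuation backslash (a hypothetical invocation, assuming GNU grep
built with PCRE support; not part of the patch itself):

  # list lines ending in one or more spaces followed by a backslash
  grep -nP ' +\\$' arch/mips/include/asm/atomic.h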

Signed-off-by: Paul Burton <paul.burton@mips.com>
---

 arch/mips/include/asm/atomic.h | 184 ++++++++++++++++-----------------
 1 file changed, 92 insertions(+), 92 deletions(-)
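
Since the change is intended to be whitespace-only, it should disappear
entirely when whitespace is ignored. A quick sanity check, assuming the
commit is applied locally (git's -w flag ignores all whitespace
differences):

  # an empty diff body confirms that nothing but whitespace changed
  git show -w <commit>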

Patch

diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 7578c807ef98..2d2a8a74c51b 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -42,102 +42,102 @@ 
  */
 #define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
 
-#define ATOMIC_OP(op, c_op, asm_op)					      \
-static __inline__ void atomic_##op(int i, atomic_t * v)			      \
-{									      \
-	if (kernel_uses_llsc) {						      \
-		int temp;						      \
-									      \
-		loongson_llsc_mb();					      \
-		__asm__ __volatile__(					      \
-		"	.set	push					\n"   \
-		"	.set	"MIPS_ISA_LEVEL"			\n"   \
-		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
-		"	" #asm_op " %0, %2				\n"   \
-		"	sc	%0, %1					\n"   \
-		"\t" __SC_BEQZ "%0, 1b					\n"   \
-		"	.set	pop					\n"   \
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
-		: "Ir" (i) : __LLSC_CLOBBER);				      \
-	} else {							      \
-		unsigned long flags;					      \
-									      \
-		raw_local_irq_save(flags);				      \
-		v->counter c_op i;					      \
-		raw_local_irq_restore(flags);				      \
-	}								      \
+#define ATOMIC_OP(op, c_op, asm_op)					\
+static __inline__ void atomic_##op(int i, atomic_t * v)			\
+{									\
+	if (kernel_uses_llsc) {						\
+		int temp;						\
+									\
+		loongson_llsc_mb();					\
+		__asm__ __volatile__(					\
+		"	.set	push				\n"	\
+		"	.set	"MIPS_ISA_LEVEL"		\n"	\
+		"1:	ll	%0, %1	# atomic_" #op "	\n"	\
+		"	" #asm_op " %0, %2			\n"	\
+		"	sc	%0, %1				\n"	\
+		"\t" __SC_BEQZ "%0, 1b				\n"	\
+		"	.set	pop				\n"	\
+		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
+		: "Ir" (i) : __LLSC_CLOBBER);				\
+	} else {							\
+		unsigned long flags;					\
+									\
+		raw_local_irq_save(flags);				\
+		v->counter c_op i;					\
+		raw_local_irq_restore(flags);				\
+	}								\
 }
 
-#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
-static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)	      \
-{									      \
-	int result;							      \
-									      \
-	if (kernel_uses_llsc) {						      \
-		int temp;						      \
-									      \
-		loongson_llsc_mb();					      \
-		__asm__ __volatile__(					      \
-		"	.set	push					\n"   \
-		"	.set	"MIPS_ISA_LEVEL"			\n"   \
-		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
-		"	" #asm_op " %0, %1, %3				\n"   \
-		"	sc	%0, %2					\n"   \
-		"\t" __SC_BEQZ "%0, 1b					\n"   \
-		"	" #asm_op " %0, %1, %3				\n"   \
-		"	.set	pop					\n"   \
-		: "=&r" (result), "=&r" (temp),				      \
-		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
-		: "Ir" (i) : __LLSC_CLOBBER);				      \
-	} else {							      \
-		unsigned long flags;					      \
-									      \
-		raw_local_irq_save(flags);				      \
-		result = v->counter;					      \
-		result c_op i;						      \
-		v->counter = result;					      \
-		raw_local_irq_restore(flags);				      \
-	}								      \
-									      \
-	return result;							      \
+#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
+static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)	\
+{									\
+	int result;							\
+									\
+	if (kernel_uses_llsc) {						\
+		int temp;						\
+									\
+		loongson_llsc_mb();					\
+		__asm__ __volatile__(					\
+		"	.set	push				\n"	\
+		"	.set	"MIPS_ISA_LEVEL"		\n"	\
+		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	\
+		"	" #asm_op " %0, %1, %3			\n"	\
+		"	sc	%0, %2				\n"	\
+		"\t" __SC_BEQZ "%0, 1b				\n"	\
+		"	" #asm_op " %0, %1, %3			\n"	\
+		"	.set	pop				\n"	\
+		: "=&r" (result), "=&r" (temp),				\
+		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
+		: "Ir" (i) : __LLSC_CLOBBER);				\
+	} else {							\
+		unsigned long flags;					\
+									\
+		raw_local_irq_save(flags);				\
+		result = v->counter;					\
+		result c_op i;						\
+		v->counter = result;					\
+		raw_local_irq_restore(flags);				\
+	}								\
+									\
+	return result;							\
 }
 
-#define ATOMIC_FETCH_OP(op, c_op, asm_op)				      \
-static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	      \
-{									      \
-	int result;							      \
-									      \
-	if (kernel_uses_llsc) {						      \
-		int temp;						      \
-									      \
-		loongson_llsc_mb();					      \
-		__asm__ __volatile__(					      \
-		"	.set	push					\n"   \
-		"	.set	"MIPS_ISA_LEVEL"			\n"   \
-		"1:	ll	%1, %2		# atomic_fetch_" #op "	\n"   \
-		"	" #asm_op " %0, %1, %3				\n"   \
-		"	sc	%0, %2					\n"   \
-		"\t" __SC_BEQZ "%0, 1b					\n"   \
-		"	.set	pop					\n"   \
-		"	move	%0, %1					\n"   \
-		: "=&r" (result), "=&r" (temp),				      \
-		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
-		: "Ir" (i) : __LLSC_CLOBBER);				      \
-	} else {							      \
-		unsigned long flags;					      \
-									      \
-		raw_local_irq_save(flags);				      \
-		result = v->counter;					      \
-		v->counter c_op i;					      \
-		raw_local_irq_restore(flags);				      \
-	}								      \
-									      \
-	return result;							      \
+#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
+static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	\
+{									\
+	int result;							\
+									\
+	if (kernel_uses_llsc) {						\
+		int temp;						\
+									\
+		loongson_llsc_mb();					\
+		__asm__ __volatile__(					\
+		"	.set	push				\n"	\
+		"	.set	"MIPS_ISA_LEVEL"		\n"	\
+		"1:	ll	%1, %2	# atomic_fetch_" #op "	\n"	\
+		"	" #asm_op " %0, %1, %3			\n"	\
+		"	sc	%0, %2				\n"	\
+		"\t" __SC_BEQZ "%0, 1b				\n"	\
+		"	.set	pop				\n"	\
+		"	move	%0, %1				\n"	\
+		: "=&r" (result), "=&r" (temp),				\
+		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
+		: "Ir" (i) : __LLSC_CLOBBER);				\
+	} else {							\
+		unsigned long flags;					\
+									\
+		raw_local_irq_save(flags);				\
+		result = v->counter;					\
+		v->counter c_op i;					\
+		raw_local_irq_restore(flags);				\
+	}								\
+									\
+	return result;							\
 }
 
-#define ATOMIC_OPS(op, c_op, asm_op)					      \
-	ATOMIC_OP(op, c_op, asm_op)					      \
-	ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
+#define ATOMIC_OPS(op, c_op, asm_op)					\
+	ATOMIC_OP(op, c_op, asm_op)					\
+	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
 	ATOMIC_FETCH_OP(op, c_op, asm_op)
 
 ATOMIC_OPS(add, +=, addu)
@@ -149,8 +149,8 @@  ATOMIC_OPS(sub, -=, subu)
 #define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
 
 #undef ATOMIC_OPS
-#define ATOMIC_OPS(op, c_op, asm_op)					      \
-	ATOMIC_OP(op, c_op, asm_op)					      \
+#define ATOMIC_OPS(op, c_op, asm_op)					\
+	ATOMIC_OP(op, c_op, asm_op)					\
 	ATOMIC_FETCH_OP(op, c_op, asm_op)
 
 ATOMIC_OPS(and, &=, and)