
MIPS: Loongson: remove unnecessary loongson_llsc_mb()

Message ID 1568000558-11823-1-git-send-email-chenhc@lemote.com (mailing list archive)
State Not Applicable
Series MIPS: Loongson: remove unnecessary loongson_llsc_mb()

Commit Message

Huacai Chen Sept. 9, 2019, 3:42 a.m. UTC
Commit 1c6c1ca318585f1 ("mips/atomic: Fix loongson_llsc_mb() wreckage")
fixed the description of Loongson-3's llsc bug and tried to add all
missing loongson_llsc_mb(). That is good work, but it adds some
unnecessary memory barriers:

loongson_llsc_mb() addresses a Loongson-specific problem. smp_llsc_mb(),
smp_mb__before_llsc(), loongson_llsc_mb() and the other memory barriers
are essentially the same thing on Loongson-3, so we don't need to add
loongson_llsc_mb() where there is already an smp_mb__before_llsc() or
another type of memory barrier.
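
For context, here is roughly what these barriers expand to on a
Loongson-3 kernel (approximate shape only; the real definitions live in
arch/mips/include/asm/barrier.h and depend on
CONFIG_CPU_LOONGSON3_WORKAROUNDS and CONFIG_WEAK_REORDERING_BEYOND_LLSC):

/* Approximate shape only -- see arch/mips/include/asm/barrier.h. */
#ifdef CONFIG_CPU_LOONGSON3_WORKAROUNDS
#define loongson_llsc_mb()	__asm__ __volatile__("sync" : : : "memory")
#else
#define loongson_llsc_mb()	do { } while (0)
#endif

/*
 * With CONFIG_WEAK_REORDERING_BEYOND_LLSC (selected for Loongson-3),
 * the generic LL/SC barriers also emit a SYNC, so any one of them
 * already covers what loongson_llsc_mb() would add:
 */
#define smp_llsc_mb()		__asm__ __volatile__("sync" : : : "memory")
#define smp_mb__before_llsc()	smp_llsc_mb()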

So most of the loongson_llsc_mb() calls in Peter's patch are superfluous
and can be removed, except the one in test_and_set_bit_lock().
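
The one that stays is needed because test_and_set_bit_lock() is an
acquire-style operation: nothing like smp_mb__before_llsc() runs before
its LL, and the only other barrier, smp_llsc_mb(), comes after the SC.
A simplified sketch of that shape (illustrative only, not the kernel's
exact code; the function name here is made up):

/*
 * Illustrative sketch only -- not the kernel's exact code. The real
 * function is test_and_set_bit_lock() in arch/mips/include/asm/bitops.h;
 * this just shows where the barriers sit around the LL/SC loop.
 */
static inline int sketch_test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
	unsigned long bit = 1UL << (nr & SZLONG_MASK);
	unsigned long temp, orig;

	loongson_llsc_mb();	/* kept: no earlier barrier orders the LL */
	do {
		__asm__ __volatile__(
		"	" __LL	"%0, %1		# sketch	\n"
		"	or	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		: "=&r" (orig), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (temp)
		: "r" (bit)
		: __LLSC_CLOBBER);
	} while (unlikely(!temp));	/* SC failed, retry */

	smp_llsc_mb();		/* acquire barrier, after the SC */
	return (orig & bit) != 0;
}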

mips_atomic_set() is not used on Loongson-3, and even if it were used in
some cases, the user-to-kernel context switch probably has the same
effect as a memory barrier. In any case, adding a memory barrier in
mips_atomic_set() is harmless, so I keep that one.

For cmpxchg.h: the 32-bit version of cmpxchg64() doesn't need to be
considered because Loongson64 only supports 64-bit kernels; cmpxchg()
needs memory barriers and already has them; and cmpxchg_local() doesn't
need memory barriers because only the local CPU can write the variable,
just as with all other local_t ops. So I remove all of the
loongson_llsc_mb() calls that Peter's patch added to cmpxchg.h.
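
For reference, the existing wrappers are roughly shaped like this
(approximate, condensed from arch/mips/include/asm/cmpxchg.h): cmpxchg()
already brackets the LL/SC with smp_mb__before_llsc()/smp_llsc_mb(),
while cmpxchg_local() deliberately does not:

/* Approximate shape only -- see arch/mips/include/asm/cmpxchg.h. */
#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr), (unsigned long)(old),			\
			  (unsigned long)(new), sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();	/* full barrier before the LL */	\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();		/* full barrier after the SC  */	\
	__res;								\
})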

To summarize:
I keep Peter's comments, and I keep the necessary loongson_llsc_mb()
calls in test_and_set_bit_lock() and mips_atomic_set() from Peter's
patch, but all superfluous memory barriers are removed.

Cc: Huang Pei <huangpei@loongson.cn>
Cc: Peter Zijlstra <peterz@infradead.org>
Signed-off-by: Huacai Chen <chenhc@lemote.com>
---
 arch/mips/include/asm/atomic.h  | 5 ++---
 arch/mips/include/asm/bitops.h  | 4 ----
 arch/mips/include/asm/cmpxchg.h | 5 -----
 3 files changed, 2 insertions(+), 12 deletions(-)

Comments

Huacai Chen Sept. 22, 2019, 2:21 a.m. UTC | #1
Ping?


Patch

diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index bb8658cc..4f6e538 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -193,7 +193,6 @@  static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 	if (kernel_uses_llsc) {
 		int temp;
 
-		loongson_llsc_mb();
 		__asm__ __volatile__(
 		"	.set	push					\n"
 		"	.set	"MIPS_ISA_LEVEL"			\n"
@@ -201,12 +200,12 @@  static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	.set	pop					\n"
 		"	subu	%0, %1, %3				\n"
 		"	move	%1, %0					\n"
-		"	bltz	%0, 2f					\n"
+		"	bltz	%0, 1f					\n"
 		"	.set	push					\n"
 		"	.set	"MIPS_ISA_LEVEL"			\n"
 		"	sc	%1, %2					\n"
 		"\t" __scbeqz "	%1, 1b					\n"
-		"2:							\n"
+		"1:							\n"
 		"	.set	pop					\n"
 		: "=&r" (result), "=&r" (temp),
 		  "+" GCC_OFF_SMALL_ASM() (v->counter)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 985d6a0..5016b96 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -257,7 +257,6 @@  static inline int test_and_set_bit(unsigned long nr,
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
-		loongson_llsc_mb();
 		do {
 			__asm__ __volatile__(
 			"	.set	push				\n"
@@ -374,7 +373,6 @@  static inline int test_and_clear_bit(unsigned long nr,
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
-		loongson_llsc_mb();
 		do {
 			__asm__ __volatile__(
 			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
@@ -390,7 +388,6 @@  static inline int test_and_clear_bit(unsigned long nr,
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
-		loongson_llsc_mb();
 		do {
 			__asm__ __volatile__(
 			"	.set	push				\n"
@@ -450,7 +447,6 @@  static inline int test_and_change_bit(unsigned long nr,
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
-		loongson_llsc_mb();
 		do {
 			__asm__ __volatile__(
 			"	.set	push				\n"
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 79bf34e..dd39bae 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -46,7 +46,6 @@  extern unsigned long __xchg_called_with_bad_pointer(void)
 	__typeof(*(m)) __ret;						\
 									\
 	if (kernel_uses_llsc) {						\
-		loongson_llsc_mb();					\
 		__asm__ __volatile__(					\
 		"	.set	push				\n"	\
 		"	.set	noat				\n"	\
@@ -118,7 +117,6 @@  static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
 	__typeof(*(m)) __ret;						\
 									\
 	if (kernel_uses_llsc) {						\
-		loongson_llsc_mb();					\
 		__asm__ __volatile__(					\
 		"	.set	push				\n"	\
 		"	.set	noat				\n"	\
@@ -136,7 +134,6 @@  static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
 		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
 		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
 		: __LLSC_CLOBBER);					\
-		loongson_llsc_mb();					\
 	} else {							\
 		unsigned long __flags;					\
 									\
@@ -232,7 +229,6 @@  static inline unsigned long __cmpxchg64(volatile void *ptr,
 	 */
 	local_irq_save(flags);
 
-	loongson_llsc_mb();
 	asm volatile(
 	"	.set	push				\n"
 	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
@@ -278,7 +274,6 @@  static inline unsigned long __cmpxchg64(volatile void *ptr,
 	  "r" (old),
 	  "r" (new)
 	: "memory");
-	loongson_llsc_mb();
 
 	local_irq_restore(flags);
 	return ret;