From patchwork Thu Apr 9 13:46:56 2015 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Patchwork-Submitter: Russell King - ARM Linux X-Patchwork-Id: 6187891 Return-Path: X-Original-To: patchwork-linux-arm@patchwork.kernel.org Delivered-To: patchwork-parsemail@patchwork1.web.kernel.org Received: from mail.kernel.org (mail.kernel.org [198.145.29.136]) by patchwork1.web.kernel.org (Postfix) with ESMTP id CF0D79F2EC for ; Thu, 9 Apr 2015 13:49:52 +0000 (UTC) Received: from mail.kernel.org (localhost [127.0.0.1]) by mail.kernel.org (Postfix) with ESMTP id AB652203B0 for ; Thu, 9 Apr 2015 13:49:51 +0000 (UTC) Received: from bombadil.infradead.org (bombadil.infradead.org [198.137.202.9]) (using TLSv1.2 with cipher DHE-RSA-AES256-GCM-SHA384 (256/256 bits)) (No client certificate requested) by mail.kernel.org (Postfix) with ESMTPS id 7AAF920375 for ; Thu, 9 Apr 2015 13:49:50 +0000 (UTC) Received: from localhost ([127.0.0.1] helo=bombadil.infradead.org) by bombadil.infradead.org with esmtp (Exim 4.80.1 #2 (Red Hat Linux)) id 1YgCnt-0001P7-9S; Thu, 09 Apr 2015 13:47:33 +0000 Received: from pandora.arm.linux.org.uk ([2001:4d48:ad52:3201:214:fdff:fe10:1be6]) by bombadil.infradead.org with esmtps (Exim 4.80.1 #2 (Red Hat Linux)) id 1YgCnp-0001LT-7s for linux-arm-kernel@lists.infradead.org; Thu, 09 Apr 2015 13:47:30 +0000 DKIM-Signature: v=1; a=rsa-sha256; q=dns/txt; c=relaxed/relaxed; d=arm.linux.org.uk; s=pandora-2014; h=Sender:In-Reply-To:Content-Type:MIME-Version:References:Message-ID:Subject:Cc:To:From:Date; bh=I0cxPnIA5sZ0T9vUppyvQTDkXmlF52g3NhphEYHw8EA=; b=Gr/gpxq64+YIjDDqR/pr/ocL3abfH4aGS7VEFSvaBLPFnSXxvcGi2S+54sdCzGWCCjkwHQQ/RO7Hr3nQNfAwU0GmiW1CflR2wF+7LK5jOmWZ1vYV85nYELYWRKbKqZpXRMPuIpqm7A/ovAvuYd7IBaRPLWuwrDapo3EWFep7r6Y=; Received: from n2100.arm.linux.org.uk ([2002:4e20:1eda:1:214:fdff:fe10:4f86]:54795) by pandora.arm.linux.org.uk with esmtpsa (TLSv1:DHE-RSA-AES256-SHA:256) (Exim 4.82_1-5b7a7c0-XX) (envelope-from ) id 
1YgCnM-0003ig-Kz; Thu, 09 Apr 2015 14:47:00 +0100 Received: from linux by n2100.arm.linux.org.uk with local (Exim 4.76) (envelope-from ) id 1YgCnJ-0000bw-Cn; Thu, 09 Apr 2015 14:46:57 +0100 Date: Thu, 9 Apr 2015 14:46:56 +0100 From: Russell King - ARM Linux To: Arnd Bergmann Subject: Re: [PATCH 4/7] ARM: cache-v7: optimise branches in v7_flush_cache_louis Message-ID: <20150409134656.GX12732@n2100.arm.linux.org.uk> References: <20150403100848.GZ24899@n2100.arm.linux.org.uk> <2570741.Vc3c6Ovx3m@wuerfel> <20150409082116.GV12732@n2100.arm.linux.org.uk> <2927556.fYRDzU3Hag@wuerfel> MIME-Version: 1.0 Content-Disposition: inline In-Reply-To: <2927556.fYRDzU3Hag@wuerfel> User-Agent: Mutt/1.5.23 (2014-03-12) X-CRM114-Version: 20100106-BlameMichelson ( TRE 0.8.0 (BSD) ) MR-646709E3 X-CRM114-CacheID: sfid-20150409_064729_844204_02AE1B9C X-CRM114-Status: GOOD ( 24.75 ) X-Spam-Score: -0.1 (/) Cc: Catalin Marinas , Will Deacon , linux-arm-kernel@lists.infradead.org X-BeenThere: linux-arm-kernel@lists.infradead.org X-Mailman-Version: 2.1.18-1 Precedence: list List-Id: List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , Sender: "linux-arm-kernel" Errors-To: linux-arm-kernel-bounces+patchwork-linux-arm=patchwork.kernel.org@lists.infradead.org X-Spam-Status: No, score=-4.1 required=5.0 tests=BAYES_00,DKIM_SIGNED, RCVD_IN_DNSWL_MED, T_DKIM_INVALID, T_RP_MATCHES_RCVD, UNPARSEABLE_RELAY autolearn=ham version=3.3.1 X-Spam-Checker-Version: SpamAssassin 3.3.1 (2010-03-16) on mail.kernel.org X-Virus-Scanned: ClamAV using ClamSMTP On Thu, Apr 09, 2015 at 12:29:12PM +0200, Arnd Bergmann wrote: > On Thursday 09 April 2015 09:21:16 Russell King - ARM Linux wrote: > > On Thu, Apr 09, 2015 at 10:13:06AM +0200, Arnd Bergmann wrote: > > > > > > With this in linux-next, I get a build failure on randconfig kernels with > > > THUMB2_KERNEL enabled: > > > > > > arch/arm/mm/cache-v7.S: Assembler messages: > > > arch/arm/mm/cache-v7.S:99: Error: ALT_UP() content must assemble to 
exactly 4 bytes > > > > > > Any idea for a method that will work with all combinations of SMP/UP > > > and ARM/THUMB? The best I could come up with was to add an extra 'mov r0,r0', > > > but that gets rather ugly as you then have to do it only for THUMB2. > > > > How about we make ALT_UP() add the additional padding? Something like > > this maybe? > > > > diff --git a/arch/arm/include/asm/assembler.h b/arch/arm/include/asm/assembler.h > > index f67fd3afebdf..79f421796aab 100644 > > --- a/arch/arm/include/asm/assembler.h > > +++ b/arch/arm/include/asm/assembler.h > > @@ -237,6 +237,9 @@ > > .pushsection ".alt.smp.init", "a" ;\ > > .long 9998b ;\ > > 9997: instr ;\ > > + .if . - 9997b == 2 ;\ > > + nop ;\ > > + .endif > > .if . - 9997b != 4 ;\ > > .error "ALT_UP() content must assemble to exactly 4 bytes";\ > > .endif ;\ > > > > This looks like a good solution, and works fine after adding the > missing ';\' characters behind the .endif. > > I don't expect any problems but I'm doing some more randconfig builds > now with this patch, and if you don't hear back today, feel free to add > > Acked-by: Arnd Bergmann Thanks. I'm also intending to merge this too - we could go a bit further, but I don't want to at the moment... 8<==== From: Russell King Subject: [PATCH] ARM: remove uses of ALT_UP(W(...)) Since we expand a thumb ALT_UP() instruction to be 32-bit, we no longer need to manually code this. Remove instances of ALT_UP(W()). 
Signed-off-by: Russell King --- arch/arm/include/asm/assembler.h | 4 ---- arch/arm/lib/bitops.h | 4 ++-- arch/arm/mm/cache-v7.S | 10 +++++----- arch/arm/mm/tlb-v7.S | 2 +- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/arch/arm/include/asm/assembler.h b/arch/arm/include/asm/assembler.h index 186270b3e194..ed823dcc8296 100644 --- a/arch/arm/include/asm/assembler.h +++ b/arch/arm/include/asm/assembler.h @@ -283,11 +283,7 @@ #else #error Incompatible SMP platform #endif - .ifeqs "\mode","arm" ALT_UP(nop) - .else - ALT_UP(W(nop)) - .endif #endif .endm diff --git a/arch/arm/lib/bitops.h b/arch/arm/lib/bitops.h index 7d807cfd8ef5..0f2055fe21af 100644 --- a/arch/arm/lib/bitops.h +++ b/arch/arm/lib/bitops.h @@ -14,7 +14,7 @@ UNWIND( .fnstart ) #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP) .arch_extension mp ALT_SMP(W(pldw) [r1]) - ALT_UP(W(nop)) + ALT_UP(nop) #endif mov r3, r2, lsl r3 1: ldrex r2, [r1] @@ -41,7 +41,7 @@ UNWIND( .fnstart ) #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP) .arch_extension mp ALT_SMP(W(pldw) [r1]) - ALT_UP(W(nop)) + ALT_UP(nop) #endif 1: ldrex r2, [r1] ands r0, r2, r3 @ save old value of bit diff --git a/arch/arm/mm/cache-v7.S b/arch/arm/mm/cache-v7.S index 51d6a336bac2..21f084bdf60a 100644 --- a/arch/arm/mm/cache-v7.S +++ b/arch/arm/mm/cache-v7.S @@ -273,7 +273,7 @@ ENTRY(v7_coherent_user_range) bic r12, r0, r3 #ifdef CONFIG_ARM_ERRATA_764369 ALT_SMP(W(dsb)) - ALT_UP(W(nop)) +ALT_UP( nop) #endif 1: USER( mcr p15, 0, r12, c7, c11, 1 ) @ clean D line to the point of unification @@ -326,7 +326,7 @@ ENTRY(v7_flush_kern_dcache_area) bic r0, r0, r3 #ifdef CONFIG_ARM_ERRATA_764369 ALT_SMP(W(dsb)) - ALT_UP(W(nop)) +ALT_UP( nop) #endif 1: mcr p15, 0, r0, c7, c14, 1 @ clean & invalidate D line / unified line @@ -354,7 +354,7 @@ v7_dma_inv_range: bic r0, r0, r3 #ifdef CONFIG_ARM_ERRATA_764369 ALT_SMP(W(dsb)) - ALT_UP(W(nop)) +ALT_UP( nop) #endif mcrne p15, 0, r0, c7, c14, 1 @ clean & invalidate D / U line @@ -381,7 +381,7 @@ 
v7_dma_clean_range: bic r0, r0, r3 #ifdef CONFIG_ARM_ERRATA_764369 ALT_SMP(W(dsb)) - ALT_UP(W(nop)) +ALT_UP( nop) #endif 1: mcr p15, 0, r0, c7, c10, 1 @ clean D / U line @@ -403,7 +403,7 @@ ENTRY(v7_dma_flush_range) bic r0, r0, r3 #ifdef CONFIG_ARM_ERRATA_764369 ALT_SMP(W(dsb)) - ALT_UP(W(nop)) +ALT_UP( nop) #endif 1: mcr p15, 0, r0, c7, c14, 1 @ clean & invalidate D / U line diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S index e5101a3bc57c..58261e0415c2 100644 --- a/arch/arm/mm/tlb-v7.S +++ b/arch/arm/mm/tlb-v7.S @@ -41,7 +41,7 @@ ENTRY(v7wbi_flush_user_tlb_range) asid r3, r3 @ mask ASID #ifdef CONFIG_ARM_ERRATA_720789 ALT_SMP(W(mov) r3, #0 ) - ALT_UP(W(nop) ) +ALT_UP( nop) #endif orr r0, r3, r0, lsl #PAGE_SHIFT @ Create initial MVA mov r1, r1, lsl #PAGE_SHIFT