ARM: 8035/1: Disable preemption in crunch_task_enable()
Author: Catalin Marinas <catalin.marinas@arm.com>
Tue, 22 Apr 2014 15:14:28 +0000 (16:14 +0100)
Committer: Russell King <rmk+kernel@arm.linux.org.uk>
Fri, 25 Apr 2014 11:06:37 +0000 (12:06 +0100)
This patch is in preparation for calling the crunch_task_enable()
function with interrupts enabled.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Cc: Hartley Sweeten <hsweeten@visionengravers.com>
Cc: Ryan Mallon <rmallon@gmail.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
arch/arm/mach-ep93xx/crunch-bits.S

index 0ec9bb48fab9cd9b4ea20ab32ac9694d8d0d9881..eaa5e34729d333d22f5db2cf4db930f35d30dcbd 100644 (file)
@@ -16,6 +16,7 @@
 #include <asm/ptrace.h>
 #include <asm/thread_info.h>
 #include <asm/asm-offsets.h>
+#include <asm/assembler.h>
 #include <mach/ep93xx-regs.h>
 
 /*
  * called from prefetch exception handler with interrupts disabled
  */
 ENTRY(crunch_task_enable)
+       inc_preempt_count r10, r3
+
        ldr     r8, =(EP93XX_APB_VIRT_BASE + 0x00130000)        @ syscon addr
 
        ldr     r1, [r8, #0x80]
        tst     r1, #0x00800000                 @ access to crunch enabled?
-       movne   pc, lr                          @ if so no business here
+       bne     2f                              @ if so no business here
        mov     r3, #0xaa                       @ unlock syscon swlock
        str     r3, [r8, #0xc0]
        orr     r1, r1, #0x00800000             @ enable access to crunch
@@ -142,7 +145,7 @@ crunch_save:
 
        teq             r0, #0                          @ anything to load?
        cfldr64eq       mvdx0, [r1, #CRUNCH_MVDX0]      @ mvdx0 was clobbered
-       moveq           pc, lr
+       beq             1f
 
 crunch_load:
        cfldr64         mvdx0, [r0, #CRUNCH_DSPSC]      @ load status word
@@ -190,6 +193,11 @@ crunch_load:
        cfldr64         mvdx14, [r0, #CRUNCH_MVDX14]
        cfldr64         mvdx15, [r0, #CRUNCH_MVDX15]
 
+1:
+#ifdef CONFIG_PREEMPT_COUNT
+       get_thread_info r10
+#endif
+2:     dec_preempt_count r10, r3
        mov     pc, lr
 
 /*