Diffstat (limited to 'arch/arm/include/asm/percpu.h')
-rw-r--r--   arch/arm/include/asm/percpu.h   35
1 file changed, 31 insertions, 4 deletions
diff --git a/arch/arm/include/asm/percpu.h b/arch/arm/include/asm/percpu.h
index e2fcb3cfd3de..7545c87c251f 100644
--- a/arch/arm/include/asm/percpu.h
+++ b/arch/arm/include/asm/percpu.h
@@ -5,20 +5,27 @@
 #ifndef _ASM_ARM_PERCPU_H_
 #define _ASM_ARM_PERCPU_H_
 
+#include <asm/insn.h>
+
 register unsigned long current_stack_pointer asm ("sp");
 
 /*
  * Same as asm-generic/percpu.h, except that we store the per cpu offset
  * in the TPIDRPRW. TPIDRPRW only exists on V6K and V7
  */
-#if defined(CONFIG_SMP) && !defined(CONFIG_CPU_V6)
+#ifdef CONFIG_SMP
 static inline void set_my_cpu_offset(unsigned long off)
 {
+	extern unsigned int smp_on_up;
+
+	if (IS_ENABLED(CONFIG_CPU_V6) && !smp_on_up)
+		return;
+
 	/* Set TPIDRPRW */
 	asm volatile("mcr p15, 0, %0, c13, c0, 4" : : "r" (off) : "memory");
 }
 
-static inline unsigned long __my_cpu_offset(void)
+static __always_inline unsigned long __my_cpu_offset(void)
 {
 	unsigned long off;
 
@@ -27,8 +34,28 @@ static inline unsigned long __my_cpu_offset(void)
 	 * We want to allow caching the value, so avoid using volatile and
 	 * instead use a fake stack read to hazard against barrier().
 	 */
-	asm("mrc p15, 0, %0, c13, c0, 4" : "=r" (off)
-		: "Q" (*(const unsigned long *)current_stack_pointer));
+	asm("0:	mrc p15, 0, %0, c13, c0, 4			\n\t"
+#ifdef CONFIG_CPU_V6
+	    "1:							\n\t"
+	    "	.subsection 1					\n\t"
+#if defined(CONFIG_ARM_HAS_GROUP_RELOCS) && \
+    !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS))
+	    "2: " LOAD_SYM_ARMV6(%0, __per_cpu_offset) "	\n\t"
+	    "	b	1b					\n\t"
+#else
+	    "2:	ldr	%0, 3f					\n\t"
+	    "	ldr	%0, [%0]				\n\t"
+	    "	b	1b					\n\t"
+	    "3:	.long	__per_cpu_offset			\n\t"
+#endif
+	    "	.previous					\n\t"
+	    "	.pushsection	\".alt.smp.init\", \"a\"	\n\t"
+	    "	.long	0b - .					\n\t"
+	    "	b	. + (2b - 0b)				\n\t"
+	    "	.popsection					\n\t"
+#endif
+	    : "=r" (off)
+	    : "Q" (*(const unsigned long *)current_stack_pointer));
 
 	return off;
 }
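
For readers outside the kernel tree, the following is a small, self-contained user-space C sketch (not kernel code) of the behaviour this patch is after: on an SMP kernel that can also boot on a uniprocessor ARMv6 core (SMP_ON_UP), the per-CPU offset comes from TPIDRPRW when the register exists, and from the first entry of the __per_cpu_offset[] table otherwise. The names __per_cpu_offset, smp_on_up and set_my_cpu_offset come from the patch; the tpidrprw variable, NR_CPUS value and main() harness are hypothetical stand-ins for the coprocessor register and boot code, and the runtime if (smp_on_up) check stands in for the boot-time instruction patching that the real ".alt.smp.init" records drive.

/*
 * Illustrative sketch only: models the offset selection added by the patch,
 * not the actual mcr/mrc instructions or runtime code patching.
 */
#include <stdio.h>

#define NR_CPUS 4

/* In the kernel, __per_cpu_offset[] is filled in during per-CPU setup. */
static unsigned long __per_cpu_offset[NR_CPUS] = { 0x1000, 0x2000, 0x3000, 0x4000 };

/* Non-zero when the kernel found itself on a real multiprocessor at boot. */
static unsigned int smp_on_up;

/* Hypothetical stand-in for the per-CPU TPIDRPRW register. */
static unsigned long tpidrprw;

static void set_my_cpu_offset(unsigned long off)
{
	/*
	 * Mirrors the early return the patch adds for a CONFIG_CPU_V6 kernel
	 * running on a uniprocessor, where TPIDRPRW does not exist.
	 */
	if (!smp_on_up)
		return;
	tpidrprw = off;		/* stands in for: mcr p15, 0, off, c13, c0, 4 */
}

static unsigned long my_cpu_offset(void)
{
	if (smp_on_up)
		return tpidrprw;		/* fast path: read TPIDRPRW */
	return __per_cpu_offset[0];		/* UP fallback: only CPU 0 exists */
}

int main(void)
{
	/* Uniprocessor boot: the fallback table load is used. */
	set_my_cpu_offset(__per_cpu_offset[0]);
	printf("UP  offset: %#lx\n", my_cpu_offset());

	/* SMP boot, e.g. on CPU 2: the (simulated) register holds the offset. */
	smp_on_up = 1;
	set_my_cpu_offset(__per_cpu_offset[2]);
	printf("SMP offset: %#lx\n", my_cpu_offset());
	return 0;
}

In the real header the choice is made once at boot rather than per call: the ".alt.smp.init" entry records the location of the mrc at label 0, and when the kernel finds itself on a uniprocessor it patches that instruction into the branch to label 2, the out-of-line sequence in subsection 1 that loads __per_cpu_offset[0] (via LOAD_SYM_ARMV6 or the literal at label 3) and jumps back to label 1.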