Back to home page

OSCL-LXR

 
 

    


0001 /* SPDX-License-Identifier: GPL-2.0-only */
0002 /*
0003  *  linux/arch/arm/kernel/head-nommu.S
0004  *
0005  *  Copyright (C) 1994-2002 Russell King
0006  *  Copyright (C) 2003-2006 Hyok S. Choi
0007  *
0008  *  Common kernel startup code (non-paged MM)
0009  */
0010 #include <linux/linkage.h>
0011 #include <linux/init.h>
0012 #include <linux/errno.h>
0013 
0014 #include <asm/assembler.h>
0015 #include <asm/ptrace.h>
0016 #include <asm/asm-offsets.h>
0017 #include <asm/memory.h>
0018 #include <asm/cp15.h>
0019 #include <asm/thread_info.h>
0020 #include <asm/v7m.h>
0021 #include <asm/mpu.h>
0022 #include <asm/page.h>
0023 
0024 /*
0025  * Kernel startup entry point.
0026  * ---------------------------
0027  *
0028  * This is normally called from the decompressor code.  The requirements
0029  * are: MMU = off, D-cache = off, I-cache = dont care, r0 = 0,
0030  * r1 = machine nr.
0031  *
0032  * See linux/arch/arm/tools/mach-types for the complete list of machine
0033  * numbers for r1.
0034  *
0035  */
0036 
	__HEAD

#ifdef CONFIG_CPU_THUMBONLY
	.thumb
ENTRY(stext)
#else
	.arm
ENTRY(stext)

 THUMB(	badr	r9, 1f		)	@ Kernel is always entered in ARM.
 THUMB(	bx	r9		)	@ If this is a Thumb-2 kernel,
 THUMB(	.thumb			)	@ switch to Thumb now.
 THUMB(1:			)
#endif

#ifdef CONFIG_ARM_VIRT_EXT
	bl	__hyp_stub_install
#endif
	@ ensure svc mode and all interrupts masked
	safe_svcmode_maskall r9
						@ and irqs disabled
	@ Load the CPU ID into r9: from the CP15 ID register, from the
	@ v7-M SCB CPUID register, or (no CP15 at all) from the
	@ build-time configured processor ID.
#if defined(CONFIG_CPU_CP15)
	mrc	p15, 0, r9, c0, c0		@ get processor id
#elif defined(CONFIG_CPU_V7M)
	ldr	r9, =BASEADDR_V7M_SCB
	ldr	r9, [r9, V7M_SCB_CPUID]
#else
	ldr	r9, =CONFIG_PROCESSOR_ID
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor (r5=0)?
	beq	__error_p			@ yes, error 'p'

#ifdef CONFIG_ARM_MPU
	bl	__setup_mpu			@ program the initial MPU regions
#endif

	@ Call the per-CPU init function: PROCINFO_INITFUNC is stored as an
	@ offset relative to the procinfo base (r10), so add r10 to make it
	@ an absolute address.  badr gives a PIC-safe return address in lr.
	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	ldr	lr, =__mmap_switched		@ __after_proc_init returns there
	b	__after_proc_init
ENDPROC(stext)
0081 
#ifdef CONFIG_SMP
	.text
ENTRY(secondary_startup)
	/*
	 * Common entry point for secondary CPUs.
	 *
	 * Ensure that we're in SVC mode, and IRQs are disabled.  Lookup
	 * the processor type - there is no need to check the machine type
	 * as it has already been validated by the primary processor.
	 */
#ifdef CONFIG_ARM_VIRT_EXT
	bl	__hyp_stub_install_secondary
#endif
	safe_svcmode_maskall r9

#ifndef CONFIG_CPU_CP15
	ldr	r9, =CONFIG_PROCESSOR_ID
#else
	mrc	p15, 0, r9, c0, c0		@ get processor id
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor?
	beq	__error_p			@ yes, error 'p'

	ldr	r7, __secondary_data		@ r7 = &secondary_data

#ifdef CONFIG_ARM_MPU
	bl      __secondary_setup_mpu		@ Initialize the MPU
#endif

	@ Call the per-CPU init function (r10-relative offset made absolute),
	@ returning to 1: via a PIC-safe lr.
	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	bl	__after_proc_init
	ldr	r7, __secondary_data		@ reload r7
	@ NOTE(review): offsets #12/#16 are assumed to be the stack and task
	@ fields of struct secondary_data - confirm against its definition.
	ldr	sp, [r7, #12]			@ set up the stack pointer
	ldr	r0, [r7, #16]			@ set up task pointer
	mov	fp, #0				@ terminate any backtrace here
	b	secondary_start_kernel
ENDPROC(secondary_startup)

	/* Literal holding the address of the C-side secondary_data struct */
	.type	__secondary_data, %object
__secondary_data:
	.long	secondary_data
#endif /* CONFIG_SMP */
0128 
/*
 * Set the Control Register and Read the process ID.
 *
 * Entered with r0 = control register value returned by the per-CPU init
 * function and lr = final destination (__mmap_switched for the boot CPU).
 * On v7-M, r10 is forwarded to __mmap_switched in r0 (exc_ret).
 */
	.text
__after_proc_init:
@ v7-M has no CP15: keep the memory-mapped SCB base in r12 instead.
M_CLASS(movw	r12, #:lower16:BASEADDR_V7M_SCB)
M_CLASS(movt	r12, #:upper16:BASEADDR_V7M_SCB)
#ifdef CONFIG_ARM_MPU
M_CLASS(ldr	r3, [r12, 0x50])	@ NOTE(review): 0x50 presumably the SCB MMFR0 alias - confirm
AR_CLASS(mrc	p15, 0, r3, c0, c1, 4)		@ Read ID_MMFR0
	and	r3, r3, #(MMFR0_PMSA)		@ PMSA field
	@ The Z flag produced here is consumed by the ...eq-predicated
	@ instructions below (including the MPU-enable code further down):
	@ Z is set for PMSAv7 (taken beq) and for PMSAv8, and clear when
	@ no supported PMSA was detected.
	teq	r3, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	1f
	teq	r3, #(MMFR0_PMSAv8)		@ PMSA v8
	/*
	 * Memory region attributes for PMSAv8:
	 *
	 *   n = AttrIndx[2:0]
	 *                      n       MAIR
	 *   DEVICE_nGnRnE      000     00000000
	 *   NORMAL             001     11111111
	 */
	ldreq	r3, =PMSAv8_MAIR(0x00, PMSAv8_RGN_DEVICE_nGnRnE) | \
		     PMSAv8_MAIR(0xff, PMSAv8_RGN_NORMAL)
AR_CLASS(mcreq	p15, 0, r3, c10, c2, 0)		@ MAIR 0
M_CLASS(streq	r3, [r12, #PMSAv8_MAIR0])
	moveq	r3, #0
AR_CLASS(mcreq	p15, 0, r3, c10, c2, 1)		@ MAIR 1
M_CLASS(streq	r3, [r12, #PMSAv8_MAIR1])

1:
#endif
#ifdef CONFIG_CPU_CP15
	/*
	 * CP15 system control register value returned in r0 from
	 * the CPU init function.
	 */

#ifdef CONFIG_ARM_MPU
	@ Predicated on the Z flag from the PMSA probe above: only enable
	@ the MPU when a supported PMSA implementation was detected.
	biceq	r0, r0, #CR_BR			@ Disable the 'default mem-map'
	orreq	r0, r0, #CR_M			@ Set SCTRL.M (MPU on)
#endif
#if defined(CONFIG_ALIGNMENT_TRAP) && __LINUX_ARM_ARCH__ < 6
	orr	r0, r0, #CR_A			@ pre-v6: trap unaligned accesses
#else
	bic	r0, r0, #CR_A
#endif
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #CR_C
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #CR_Z
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #CR_I
#endif
	mcr	p15, 0, r0, c1, c0, 0		@ write control reg
	instr_sync
#elif defined (CONFIG_CPU_V7M)
#ifdef CONFIG_ARM_MPU
	@ Enable the MPU (again predicated on the PMSA probe) with
	@ PRIVDEFENA cleared, so only the programmed regions are mapped.
	ldreq	r3, [r12, MPU_CTRL]
	biceq	r3, #MPU_CTRL_PRIVDEFENA
	orreq	r3, #MPU_CTRL_ENABLE
	streq	r3, [r12, MPU_CTRL]
	isb
#endif
	/* For V7M systems we want to modify the CCR similarly to the SCTLR */
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_DC
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_BP
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_IC
#endif
	str	r0, [r12, V7M_SCB_CCR]
	/* Pass exc_ret to __mmap_switched */
	mov	r0, r10
#endif /* CONFIG_CPU_CP15 elif CONFIG_CPU_V7M */
	ret	lr
ENDPROC(__after_proc_init)
	.ltorg
0212 
#ifdef CONFIG_ARM_MPU


#ifndef CONFIG_CPU_V7M
/*
 * AR-class (CP15) variants: regions are programmed through the c6
 * coprocessor registers.  The trailing macro argument is unused so
 * callers can pass the v7-M SCB base unconditionally.
 */
/* Set which MPU region should be programmed */
.macro set_region_nr tmp, rgnr, unused
	mov	\tmp, \rgnr			@ Use static region numbers
	mcr	p15, 0, \tmp, c6, c2, 0		@ Write RGNR
.endm

/* Setup a single MPU region, either D or I side (D-side for unified) */
/* \bar/\acr/\sr: region base, access control, size+enable values */
.macro setup_region bar, acr, sr, side = PMSAv7_DATA_SIDE, unused
	mcr	p15, 0, \bar, c6, c1, (0 + \side)	@ I/DRBAR
	mcr	p15, 0, \acr, c6, c1, (4 + \side)	@ I/DRACR
	mcr	p15, 0, \sr, c6, c1, (2 + \side)	@ I/DRSR
.endm
#else
/*
 * M-class variants: regions are programmed through the memory-mapped
 * MPU registers at \base; the D/I side argument is ignored.
 */
.macro set_region_nr tmp, rgnr, base
	mov	\tmp, \rgnr
	str     \tmp, [\base, #PMSAv7_RNR]
.endm

/* RASR takes \acr in its upper halfword and \sr in the lower one */
.macro setup_region bar, acr, sr, unused, base
	lsl     \acr, \acr, #16
	orr     \acr, \acr, \sr
	str     \bar, [\base, #PMSAv7_RBAR]
	str     \acr, [\base, #PMSAv7_RASR]
.endm

#endif
/*
 * Setup the MPU and initial MPU Regions. We create the following regions:
 * Region 0: Use this for probing the MPU details, so leave disabled.
 * Region 1: Background region - covers the whole of RAM as strongly ordered
 * Region 2: Normal, Shared, cacheable for RAM. From PHYS_OFFSET, size from r6
 * Region 3: Normal, shared, inaccessible from PL0 to protect the vectors page
 *
 * r6: Value to be written to DRSR (and IRSR if required) for PMSAv7_RAM_REGION
*/
	__HEAD

ENTRY(__setup_mpu)

	/* Probe for v7 PMSA compliance */
@ On M-class keep the SCB base in r12; the helpers below rely on it.
M_CLASS(movw	r12, #:lower16:BASEADDR_V7M_SCB)
M_CLASS(movt	r12, #:upper16:BASEADDR_V7M_SCB)

AR_CLASS(mrc	p15, 0, r0, c0, c1, 4)		@ Read ID_MMFR0
M_CLASS(ldr	r0, [r12, 0x50])	@ NOTE(review): 0x50 presumably the SCB MMFR0 alias - confirm
	and	r0, r0, #(MMFR0_PMSA)		@ PMSA field
	@ Dispatch on the detected PMSA version; the helpers return
	@ directly to our caller via the unchanged lr.
	teq	r0, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	__setup_pmsa_v7
	teq	r0, #(MMFR0_PMSAv8)		@ PMSA v8
	beq	__setup_pmsa_v8

	ret	lr				@ no supported PMSA: do nothing
ENDPROC(__setup_mpu)
0270 
/*
 * Program the initial PMSAv7 regions: RAM region covering the kernel,
 * a 4GB strongly-ordered XN background region, and (XIP only) a ROM
 * region.  Clobbers r0, r5, r6; returns via lr.  On M-class, r12 must
 * hold the SCB base (set up by __setup_mpu).
 */
ENTRY(__setup_pmsa_v7)
	/* Calculate the size of a region covering just the kernel */
	ldr	r5, =PLAT_PHYS_OFFSET		@ Region start: PHYS_OFFSET
	ldr	r6, =(_end)			@ Cover whole kernel
	sub	r6, r6, r5			@ Minimum size of region to map
	clz	r6, r6				@ Region size must be 2^N...
	rsb	r6, r6, #31			@ ...so round up region size
	lsl	r6, r6, #PMSAv7_RSR_SZ		@ Put size in right field
	orr	r6, r6, #(1 << PMSAv7_RSR_EN)	@ Set region enabled bit

	/* Determine whether the D/I-side memory map is unified. We set the
	 * flags here and continue to use them for the rest of this function.
	 * (The setup_region/set_region_nr macros contain no flag-setting
	 * instructions, so the tst result survives them.) */
AR_CLASS(mrc	p15, 0, r0, c0, c0, 4)		@ MPUIR
M_CLASS(ldr	r0, [r12, #MPU_TYPE])
	ands	r5, r0, #MPUIR_DREGION_SZMASK	@ 0 size d region => No MPU
	bxeq	lr
	tst	r0, #MPUIR_nU			@ MPUIR_nU = 0 for unified

	/* Setup second region first to free up r6 */
	set_region_nr r0, #PMSAv7_RAM_REGION, r12
	isb
	/* Full access from PL0, PL1, shared for CONFIG_SMP, cacheable */
	ldr	r0, =PLAT_PHYS_OFFSET		@ RAM starts at PHYS_OFFSET
	ldr	r5, =(PMSAv7_AP_PL1RW_PL0RW | PMSAv7_RGN_NORMAL)

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ PHYS_OFFSET, shared, enabled
	beq	1f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ PHYS_OFFSET, shared, enabled
1:	isb

	/* First/background region */
	set_region_nr r0, #PMSAv7_BG_REGION, r12
	isb
	/* Execute Never,  strongly ordered, inaccessible to PL0, rw PL1  */
	mov	r0, #0				@ BG region starts at 0x0
	ldr	r5, =(PMSAv7_ACR_XN | PMSAv7_RGN_STRONGLY_ORDERED | PMSAv7_AP_PL1RW_PL0NA)
	mov	r6, #PMSAv7_RSR_ALL_MEM		@ 4GB region, enabled

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ 0x0, BG region, enabled
	beq	2f					@ Memory-map not unified
	/* Fixed: comma separator was missing before r12 (gas happened to
	 * accept the whitespace-separated macro argument, but every other
	 * invocation in this file uses a comma). */
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ 0x0, BG region, enabled
2:	isb

#ifdef CONFIG_XIP_KERNEL
	set_region_nr r0, #PMSAv7_ROM_REGION, r12
	isb

	/* Read-only from PL1, no PL0 access, normal memory for the ROM */
	ldr	r5, =(PMSAv7_AP_PL1RO_PL0NA | PMSAv7_RGN_NORMAL)

	ldr	r0, =CONFIG_XIP_PHYS_ADDR	@ ROM start
	ldr	r6, =(_exiprom)			@ ROM end
	sub	r6, r6, r0			@ Minimum size of region to map
	clz	r6, r6				@ Region size must be 2^N...
	rsb	r6, r6, #31			@ ...so round up region size
	lsl	r6, r6, #PMSAv7_RSR_SZ		@ Put size in right field
	orr	r6, r6, #(1 << PMSAv7_RSR_EN)	@ Set region enabled bit

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ XIP_PHYS_ADDR, shared, enabled
	beq	3f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ XIP_PHYS_ADDR, shared, enabled
3:	isb
#endif
	ret	lr
ENDPROC(__setup_pmsa_v7)
0335 
/*
 * Program the initial PMSAv8 regions.  Regions 0/1 map the ROM (XIP
 * only) and the kernel image as normal memory; regions 2-4 fill the
 * gaps around the image with device-type, execute-never background
 * mappings.  Clobbers r0, r5, r6; on M-class r12 must hold the SCB
 * base (set up by __setup_mpu).
 */
ENTRY(__setup_pmsa_v8)
	@ Select region 0 (AR-class: PRSEL; M-class: RNR) so the
	@ PRBAR0/PRLAR0 aliases below address it.
	mov	r0, #0
AR_CLASS(mcr	p15, 0, r0, c6, c2, 1)		@ PRSEL
M_CLASS(str	r0, [r12, #PMSAv8_RNR])
	isb

#ifdef CONFIG_XIP_KERNEL
	/* Region 0: [XIP_PHYS_ADDR, _exiprom), normal, shared, PL1 RW */
	ldr	r5, =CONFIG_XIP_PHYS_ADDR	@ ROM start
	ldr	r6, =(_exiprom)			@ ROM end
	sub	r6, r6, #1			@ limit is an inclusive address
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_NORMAL) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c8, 0)			@ PRBAR0
AR_CLASS(mcr	p15, 0, r6, c6, c8, 1)			@ PRLAR0
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(0)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(0)])
#endif

	/* Region 1: [KERNEL_START, KERNEL_END), normal, shared, PL1 RW */
	ldr	r5, =KERNEL_START
	ldr	r6, =KERNEL_END
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_NORMAL) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c8, 4)			@ PRBAR1
AR_CLASS(mcr	p15, 0, r6, c6, c8, 5)			@ PRLAR1
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(1)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(1)])

	/* Setup Background: 0x0 - min(KERNEL_START, XIP_PHYS_ADDR) */
#ifdef CONFIG_XIP_KERNEL
	ldr	r6, =KERNEL_START
	ldr	r5, =CONFIG_XIP_PHYS_ADDR
	cmp	r6, r5
	movcs	r6, r5			@ r6 = min(KERNEL_START, XIP_PHYS_ADDR)
#else
	ldr	r6, =KERNEL_START
#endif
	cmp	r6, #0			@ no gap below the image?
	beq	1f			@ then skip region 2 entirely

	/* Region 2: device-type, XN background below the image */
	mov	r5, #0
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c9, 0)			@ PRBAR2
AR_CLASS(mcr	p15, 0, r6, c6, c9, 1)			@ PRLAR2
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(2)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(2)])

1:
	/* Setup Background: max(KERNEL_END, _exiprom) - 0xffffffff */
#ifdef CONFIG_XIP_KERNEL
	ldr	r5, =KERNEL_END
	ldr	r6, =(_exiprom)
	cmp	r5, r6
	movcc	r5, r6			@ r5 = max(KERNEL_END, _exiprom)
#else
	ldr	r5, =KERNEL_END
#endif
	mov	r6, #0xffffffff
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c9, 4)			@ PRBAR3
AR_CLASS(mcr	p15, 0, r6, c6, c9, 5)			@ PRLAR3
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(3)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(3)])

#ifdef CONFIG_XIP_KERNEL
	/* Setup Background: min(_exiprom, KERNEL_END) - max(KERNEL_START, XIP_PHYS_ADDR) */
	ldr	r5, =(_exiprom)
	ldr	r6, =KERNEL_END
	cmp	r5, r6
	movcs	r5, r6			@ r5 = min(_exiprom, KERNEL_END)

	ldr	r6, =KERNEL_START
	ldr	r0, =CONFIG_XIP_PHYS_ADDR
	cmp	r6, r0
	movcc	r6, r0			@ r6 = max(KERNEL_START, XIP_PHYS_ADDR)

	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

#ifdef CONFIG_CPU_V7M
	/* There is no alias for n == 4, so select region 4 via RNR and
	 * use the n == 0 alias slot to program it. */
	mov	r0, #4
	str	r0, [r12, #PMSAv8_RNR]			@ PRSEL
	isb

	str	r5, [r12, #PMSAv8_RBAR_A(0)]
	str	r6, [r12, #PMSAv8_RLAR_A(0)]
#else
	mcr	p15, 0, r5, c6, c10, 0			@ PRBAR4
	mcr	p15, 0, r6, c6, c10, 1			@ PRLAR4
#endif
#endif
	ret	lr
ENDPROC(__setup_pmsa_v8)
0448 
#ifdef CONFIG_SMP
/*
 * Secondary-CPU MPU bring-up: copy the region configuration recorded
 * by the boot CPU.
 *
 * r6: pointer at mpu_rgn_info
 * r7: pointer at secondary_data (set up by secondary_startup)
 */

	.text
ENTRY(__secondary_setup_mpu)
	/* Use MPU region info supplied by __cpu_up */
	ldr	r6, [r7]			@ get secondary_data.mpu_rgn_info

	/* Probe for v7 PMSA compliance */
	mrc	p15, 0, r0, c0, c1, 4		@ Read ID_MMFR0
	and	r0, r0, #(MMFR0_PMSA)		@ PMSA field
	@ Dispatch on PMSA version; helpers return to our caller via lr.
	teq	r0, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	__secondary_setup_pmsa_v7
	teq	r0, #(MMFR0_PMSAv8)		@ PMSA v8
	beq	__secondary_setup_pmsa_v8
	b	__error_p			@ unlike the boot CPU, no PMSA is fatal here
ENDPROC(__secondary_setup_mpu)
0468 
/*
 * Replay the boot CPU's PMSAv7 region settings on a secondary CPU,
 * iterating the recorded regions from index used-1 down to 0.
 *
 * r6: pointer at mpu_rgn_info
 *
 * Only built for AR-class SMP, so the CP15 variants of the region
 * macros are used and their trailing base argument may be omitted.
 */
ENTRY(__secondary_setup_pmsa_v7)
	/* Determine whether the D/I-side memory map is unified. We set the
	 * flags here and continue to use them for the rest of this function */
	mrc	p15, 0, r0, c0, c0, 4		@ MPUIR
	ands	r5, r0, #MPUIR_DREGION_SZMASK	@ 0 size d region => No MPU
	beq	__error_p

	@ r4 = number of recorded regions; r3 = one past the last entry
	@ (entries are MPU_RNG_SIZE bytes, starting at MPU_RNG_INFO_RNGS).
	ldr	r4, [r6, #MPU_RNG_INFO_USED]
	mov	r5, #MPU_RNG_SIZE
	add	r3, r6, #MPU_RNG_INFO_RNGS
	mla	r3, r4, r5, r3

1:
	@ Loop invariant: r3 points one past entry r4.  The tst result
	@ (unified vs split map) is consumed by the beq after the D-side
	@ setup_region below.
	tst	r0, #MPUIR_nU			@ MPUIR_nU = 0 for unified
	sub	r3, r3, #MPU_RNG_SIZE
	sub	r4, r4, #1

	set_region_nr r0, r4
	isb

	@ Load the recorded base/size/access values for this region.
	ldr	r0, [r3, #MPU_RGN_DRBAR]
	ldr	r6, [r3, #MPU_RGN_DRSR]
	ldr	r5, [r3, #MPU_RGN_DRACR]

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE
	beq	2f
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE
2:	isb

	@ r0 was used as scratch above, so re-read MPUIR for the tst at
	@ the top of the next iteration.
	mrc	p15, 0, r0, c0, c0, 4		@ Reevaluate the MPUIR
	cmp	r4, #0
	bgt	1b

	ret	lr
ENDPROC(__secondary_setup_pmsa_v7)
0507 
/*
 * Replay the boot CPU's PMSAv8 region settings on a secondary CPU,
 * iterating the recorded PRBAR/PRLAR pairs from the highest index
 * down to 0.
 *
 * r6: pointer at mpu_rgn_info
 */
ENTRY(__secondary_setup_pmsa_v8)
	ldr	r4, [r6, #MPU_RNG_INFO_USED]
#ifndef CONFIG_XIP_KERNEL
	@ NOTE(review): one extra slot beyond 'used' is programmed in the
	@ non-XIP case - presumably the trailing background entry; confirm
	@ against how mpu_rgn_info is filled in.
	add	r4, r4, #1
#endif
	@ r3 = one past the last entry (MPU_RNG_SIZE bytes per entry).
	mov	r5, #MPU_RNG_SIZE
	add	r3, r6, #MPU_RNG_INFO_RNGS
	mla	r3, r4, r5, r3

1:
	@ Loop invariant: r3 points one past entry r4.
	sub	r3, r3, #MPU_RNG_SIZE
	sub	r4, r4, #1

	mcr	p15, 0, r4, c6, c2, 1		@ PRSEL
	isb

	ldr	r5, [r3, #MPU_RGN_PRBAR]
	ldr	r6, [r3, #MPU_RGN_PRLAR]

	mcr	p15, 0, r5, c6, c3, 0		@ PRBAR
	mcr	p15, 0, r6, c6, c3, 1		@ PRLAR

	cmp	r4, #0
	bgt	1b

	ret	lr
ENDPROC(__secondary_setup_pmsa_v8)
0535 #endif /* CONFIG_SMP */
0536 #endif /* CONFIG_ARM_MPU */
0537 #include "head-common.S"