/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/mm/proc-v7.S
 *
 *  Copyright (C) 2001 Deep Blue Solutions Ltd.
 *
 *  This is the "shell" of the ARMv7 processor support.
 */
#include <linux/arm-smccc.h>
#include <linux/init.h>
#include <linux/linkage.h>
#include <linux/pgtable.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/memory.h>

#include "proc-macros.S"

#ifdef CONFIG_ARM_LPAE
#include "proc-v7-3level.S"
#else
#include "proc-v7-2level.S"
#endif

ENTRY(cpu_v7_proc_init)
    ret lr
ENDPROC(cpu_v7_proc_init)

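/*
 *  cpu_v7_proc_fin()
 *
 *  Disable the caches before the processor is shut down or reset:
 *  clear the I bit (bit 12) and the C and A bits (bits 2 and 1) in
 *  the SCTLR.
 */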
ENTRY(cpu_v7_proc_fin)
    mrc p15, 0, r0, c1, c0, 0       @ ctrl register
    bic r0, r0, #0x1000         @ ...i............
    bic r0, r0, #0x0006         @ .............ca.
    mcr p15, 0, r0, c1, c0, 0       @ disable caches
    ret lr
ENDPROC(cpu_v7_proc_fin)

/*
 *  cpu_v7_reset(loc, hyp)
 *
 *  Perform a soft reset of the system.  Put the CPU into the
 *  same state as it would be if it had been reset, and branch
 *  to what would be the reset vector.
 *
 *  - loc   - location to jump to for soft reset
 *  - hyp   - indicate if restart occurs in HYP mode
 *
 *  This code must be executed using a flat identity mapping with
 *      caches disabled.
 */
    .align  5
    .pushsection    .idmap.text, "ax"
ENTRY(cpu_v7_reset)
    mrc p15, 0, r2, c1, c0, 0       @ ctrl register
    bic r2, r2, #0x1            @ ...............m
 THUMB( bic r2, r2, #1 << 30 )      @ SCTLR.TE (Thumb exceptions)
    mcr p15, 0, r2, c1, c0, 0       @ disable MMU
    isb
#ifdef CONFIG_ARM_VIRT_EXT
    teq r1, #0
    bne __hyp_soft_restart
#endif
    bx  r0
ENDPROC(cpu_v7_reset)
    .popsection

/*
 *  cpu_v7_do_idle()
 *
 *  Idle the processor (eg, wait for interrupt).
 *
 *  IRQs are already disabled.
 */
ENTRY(cpu_v7_do_idle)
    dsb                 @ WFI may enter a low-power mode
    wfi
    ret lr
ENDPROC(cpu_v7_do_idle)

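/*
 *  cpu_v7_dcache_clean_area(addr, size)
 *
 *  Clean the D-cache lines covering the given region so that the
 *  hardware page table walker sees up-to-date page tables.
 *
 *  - addr  - r0, virtual start address of region
 *  - size  - r1, size of region in bytes
 *
 *  With the multiprocessing extensions the walker snoops the L1 data
 *  cache, so ALT_SMP patches the routine into a plain return.
 */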
ENTRY(cpu_v7_dcache_clean_area)
    ALT_SMP(W(nop))         @ MP extensions imply L1 PTW
    ALT_UP_B(1f)
    ret lr
1:  dcache_line_size r2, r3
2:  mcr p15, 0, r0, c7, c10, 1      @ clean D entry
    add r0, r0, r2
    subs    r1, r1, r2
    bhi 2b
    dsb ishst
    ret lr
ENDPROC(cpu_v7_dcache_clean_area)

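/*
 * Hardened switch_mm variants (Spectre v2, CVE-2017-5715): invalidate the
 * branch predictor before falling through to cpu_v7_switch_mm, either via
 * the ARM_SMCCC_ARCH_WORKAROUND_1 firmware call (SMC or HVC) or directly
 * with ICIALLU/BPIALL.
 */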
#ifdef CONFIG_ARM_PSCI
    .arch_extension sec
ENTRY(cpu_v7_smc_switch_mm)
    stmfd   sp!, {r0 - r3}
    movw    r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
    movt    r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
    smc #0
    ldmfd   sp!, {r0 - r3}
    b   cpu_v7_switch_mm
ENDPROC(cpu_v7_smc_switch_mm)
    .arch_extension virt
ENTRY(cpu_v7_hvc_switch_mm)
    stmfd   sp!, {r0 - r3}
    movw    r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
    movt    r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
    hvc #0
    ldmfd   sp!, {r0 - r3}
    b   cpu_v7_switch_mm
ENDPROC(cpu_v7_hvc_switch_mm)
#endif
ENTRY(cpu_v7_iciallu_switch_mm)
    mov r3, #0
    mcr p15, 0, r3, c7, c5, 0       @ ICIALLU
    b   cpu_v7_switch_mm
ENDPROC(cpu_v7_iciallu_switch_mm)
ENTRY(cpu_v7_bpiall_switch_mm)
    mov r3, #0
    mcr p15, 0, r3, c7, c5, 6       @ flush BTAC/BTB
    b   cpu_v7_switch_mm
ENDPROC(cpu_v7_bpiall_switch_mm)

    string  cpu_v7_name, "ARMv7 Processor"
    .align

/* Suspend/resume support: derived from arch/arm/mach-s5pv210/sleep.S */
.globl  cpu_v7_suspend_size
.equ    cpu_v7_suspend_size, 4 * 9
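@ 9 words: the FCSE/PID and user r/o thread ID stored by the first stmia in
@ cpu_v7_do_suspend, plus the seven registers (r5 - r11) stored by the second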
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_v7_do_suspend)
    stmfd   sp!, {r4 - r11, lr}
    mrc p15, 0, r4, c13, c0, 0  @ FCSE/PID
    mrc p15, 0, r5, c13, c0, 3  @ User r/o thread ID
    stmia   r0!, {r4 - r5}
#ifdef CONFIG_MMU
    mrc p15, 0, r6, c3, c0, 0   @ Domain ID
#ifdef CONFIG_ARM_LPAE
    mrrc    p15, 1, r5, r7, c2  @ TTB 1
#else
    mrc p15, 0, r7, c2, c0, 1   @ TTB 1
#endif
    mrc p15, 0, r11, c2, c0, 2  @ TTB control register
#endif
    mrc p15, 0, r8, c1, c0, 0   @ Control register
    mrc p15, 0, r9, c1, c0, 1   @ Auxiliary control register
    mrc p15, 0, r10, c1, c0, 2  @ Co-processor access control
    stmia   r0, {r5 - r11}
    ldmfd   sp!, {r4 - r11, pc}
ENDPROC(cpu_v7_do_suspend)

ENTRY(cpu_v7_do_resume)
    mov ip, #0
    mcr p15, 0, ip, c7, c5, 0   @ invalidate I cache
    mcr p15, 0, ip, c13, c0, 1  @ set reserved context ID
    ldmia   r0!, {r4 - r5}
    mcr p15, 0, r4, c13, c0, 0  @ FCSE/PID
    mcr p15, 0, r5, c13, c0, 3  @ User r/o thread ID
    ldmia   r0, {r5 - r11}
#ifdef CONFIG_MMU
    mcr p15, 0, ip, c8, c7, 0   @ invalidate TLBs
    mcr p15, 0, r6, c3, c0, 0   @ Domain ID
#ifdef CONFIG_ARM_LPAE
    mcrr    p15, 0, r1, ip, c2  @ TTB 0
    mcrr    p15, 1, r5, r7, c2  @ TTB 1
#else
    ALT_SMP(orr r1, r1, #TTB_FLAGS_SMP)
    ALT_UP(orr  r1, r1, #TTB_FLAGS_UP)
    mcr p15, 0, r1, c2, c0, 0   @ TTB 0
    mcr p15, 0, r7, c2, c0, 1   @ TTB 1
#endif
    mcr p15, 0, r11, c2, c0, 2  @ TTB control register
    ldr r4, =PRRR       @ PRRR
    ldr r5, =NMRR       @ NMRR
    mcr p15, 0, r4, c10, c2, 0  @ write PRRR
    mcr p15, 0, r5, c10, c2, 1  @ write NMRR
#endif  /* CONFIG_MMU */
    mrc p15, 0, r4, c1, c0, 1   @ Read Auxiliary control register
    teq r4, r9          @ Is it already set?
    mcrne   p15, 0, r9, c1, c0, 1   @ No, so write it
    mcr p15, 0, r10, c1, c0, 2  @ Co-processor access control
    isb
    dsb
    mov r0, r8          @ control register
    b   cpu_resume_mmu
ENDPROC(cpu_v7_do_resume)
#endif

.globl  cpu_ca9mp_suspend_size
.equ    cpu_ca9mp_suspend_size, cpu_v7_suspend_size + 4 * 2
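@ two extra words for the Cortex-A9 diagnostic and power control registers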
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_ca9mp_do_suspend)
    stmfd   sp!, {r4 - r5}
    mrc p15, 0, r4, c15, c0, 1      @ Diagnostic register
    mrc p15, 0, r5, c15, c0, 0      @ Power register
    stmia   r0!, {r4 - r5}
    ldmfd   sp!, {r4 - r5}
    b   cpu_v7_do_suspend
ENDPROC(cpu_ca9mp_do_suspend)

ENTRY(cpu_ca9mp_do_resume)
    ldmia   r0!, {r4 - r5}
    mrc p15, 0, r10, c15, c0, 1     @ Read Diagnostic register
    teq r4, r10             @ Already restored?
    mcrne   p15, 0, r4, c15, c0, 1      @ No, so restore it
    mrc p15, 0, r10, c15, c0, 0     @ Read Power register
    teq r5, r10             @ Already restored?
    mcrne   p15, 0, r5, c15, c0, 0      @ No, so restore it
    b   cpu_v7_do_resume
ENDPROC(cpu_ca9mp_do_resume)
#endif

#ifdef CONFIG_CPU_PJ4B
    globl_equ   cpu_pj4b_switch_mm,     cpu_v7_switch_mm
    globl_equ   cpu_pj4b_set_pte_ext,   cpu_v7_set_pte_ext
    globl_equ   cpu_pj4b_proc_init, cpu_v7_proc_init
    globl_equ   cpu_pj4b_proc_fin,  cpu_v7_proc_fin
    globl_equ   cpu_pj4b_reset,     cpu_v7_reset
#ifdef CONFIG_PJ4B_ERRATA_4742
ENTRY(cpu_pj4b_do_idle)
    dsb                 @ WFI may enter a low-power mode
    wfi
    dsb                 @ barrier
    ret lr
ENDPROC(cpu_pj4b_do_idle)
#else
    globl_equ   cpu_pj4b_do_idle,   cpu_v7_do_idle
#endif
    globl_equ   cpu_pj4b_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_pj4b_do_suspend)
    stmfd   sp!, {r6 - r10}
    mrc p15, 1, r6, c15, c1, 0  @ save CP15 - extra features
    mrc p15, 1, r7, c15, c2, 0  @ save CP15 - Aux Func Modes Ctrl 0
    mrc p15, 1, r8, c15, c1, 2  @ save CP15 - Aux Debug Modes Ctrl 2
    mrc p15, 1, r9, c15, c1, 1  @ save CP15 - Aux Debug Modes Ctrl 1
    mrc p15, 0, r10, c9, c14, 0  @ save CP15 - PMC
    stmia   r0!, {r6 - r10}
    ldmfd   sp!, {r6 - r10}
    b cpu_v7_do_suspend
ENDPROC(cpu_pj4b_do_suspend)

ENTRY(cpu_pj4b_do_resume)
    ldmia   r0!, {r6 - r10}
    mcr p15, 1, r6, c15, c1, 0  @ restore CP15 - extra features
    mcr p15, 1, r7, c15, c2, 0  @ restore CP15 - Aux Func Modes Ctrl 0
    mcr p15, 1, r8, c15, c1, 2  @ restore CP15 - Aux Debug Modes Ctrl 2
    mcr p15, 1, r9, c15, c1, 1  @ restore CP15 - Aux Debug Modes Ctrl 1
    mcr p15, 0, r10, c9, c14, 0  @ restore CP15 - PMC
    b cpu_v7_do_resume
ENDPROC(cpu_pj4b_do_resume)
#endif
.globl  cpu_pj4b_suspend_size
.equ    cpu_pj4b_suspend_size, cpu_v7_suspend_size + 4 * 5
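@ five extra words for the PJ4B CP15 registers saved in cpu_pj4b_do_suspend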

#endif

    @
    @ Invoke the v7_invalidate_l1() function, which adheres to the AAPCS
    @ rules, and so it may corrupt registers that we need to preserve.
    @
    .macro  do_invalidate_l1
    mov r6, r1
    mov r7, r2
    mov r10, lr
    bl  v7_invalidate_l1        @ corrupts {r0-r3, ip, lr}
    mov r1, r6
    mov r2, r7
    mov lr, r10
    .endm

/*
 *  __v7_setup
 *
 *  Initialise TLB, Caches, and MMU state ready to switch the MMU
 *  on.  Return in r0 the new CP15 C1 control register setting.
 *
 *  r1, r2, r4, r5, r9, r13 must be preserved - r13 is not a stack
 *  r4: TTBR0 (low word)
 *  r5: TTBR0 (high word if LPAE)
 *  r8: TTBR1
 *  r9: Main ID register
 *
 *  This should be able to cover all ARMv7 cores.
 *
 *  It is assumed that:
 *  - cache type register is implemented
 */
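/*
 * Per-core setup entry points below are referenced from the proc_info
 * records at the end of this file.  The A5/A9/R7/R8 group additionally
 * sets bit 0 of the auxiliary control register so that cache/TLB
 * maintenance operations are broadcast to the other cores.
 */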
__v7_ca5mp_setup:
__v7_ca9mp_setup:
__v7_cr7mp_setup:
__v7_cr8mp_setup:
    do_invalidate_l1
    mov r10, #(1 << 0)          @ Cache/TLB ops broadcasting
    b   1f
__v7_ca7mp_setup:
__v7_ca12mp_setup:
__v7_ca15mp_setup:
__v7_b15mp_setup:
__v7_ca17mp_setup:
    do_invalidate_l1
    mov r10, #0
1:
#ifdef CONFIG_SMP
    orr r10, r10, #(1 << 6)     @ Enable SMP/nAMP mode
    ALT_SMP(mrc p15, 0, r0, c1, c0, 1)
    ALT_UP(mov  r0, r10)        @ fake it for UP
    orr r10, r10, r0            @ Set required bits
    teq r10, r0             @ Were they already set?
    mcrne   p15, 0, r10, c1, c0, 1      @ No, update register
#endif
    b   __v7_setup_cont

/*
 * Errata:
 *  r0, r10 available for use
 *  r1, r2, r4, r5, r9, r13: must be preserved
 *  r3: contains MIDR rX number in bits 23-20
 *  r6: contains MIDR rXpY as 8-bit XY number
 *  r9: MIDR
 */
__ca8_errata:
#if defined(CONFIG_ARM_ERRATA_430973) && !defined(CONFIG_ARCH_MULTIPLATFORM)
    teq r3, #0x00100000         @ only present in r1p*
    mrceq   p15, 0, r0, c1, c0, 1       @ read aux control register
    orreq   r0, r0, #(1 << 6)       @ set IBE to 1
    mcreq   p15, 0, r0, c1, c0, 1       @ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_458693
    teq r6, #0x20           @ only present in r2p0
    mrceq   p15, 0, r0, c1, c0, 1       @ read aux control register
    orreq   r0, r0, #(1 << 5)       @ set L1NEON to 1
    orreq   r0, r0, #(1 << 9)       @ set PLDNOP to 1
    mcreq   p15, 0, r0, c1, c0, 1       @ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_460075
    teq r6, #0x20           @ only present in r2p0
    mrceq   p15, 1, r0, c9, c0, 2       @ read L2 cache aux ctrl register
    tsteq   r0, #1 << 22
    orreq   r0, r0, #(1 << 22)      @ set the Write Allocate disable bit
    mcreq   p15, 1, r0, c9, c0, 2       @ write the L2 cache aux ctrl register
#endif
    b   __errata_finish

__ca9_errata:
#ifdef CONFIG_ARM_ERRATA_742230
    cmp r6, #0x22           @ only present up to r2p2
    mrcle   p15, 0, r0, c15, c0, 1      @ read diagnostic register
    orrle   r0, r0, #1 << 4         @ set bit #4
    mcrle   p15, 0, r0, c15, c0, 1      @ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_742231
    teq r6, #0x20           @ present in r2p0
    teqne   r6, #0x21           @ present in r2p1
    teqne   r6, #0x22           @ present in r2p2
    mrceq   p15, 0, r0, c15, c0, 1      @ read diagnostic register
    orreq   r0, r0, #1 << 12        @ set bit #12
    orreq   r0, r0, #1 << 22        @ set bit #22
    mcreq   p15, 0, r0, c15, c0, 1      @ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_743622
    teq r3, #0x00200000         @ only present in r2p*
    mrceq   p15, 0, r0, c15, c0, 1      @ read diagnostic register
    orreq   r0, r0, #1 << 6         @ set bit #6
    mcreq   p15, 0, r0, c15, c0, 1      @ write diagnostic register
#endif
#if defined(CONFIG_ARM_ERRATA_751472) && defined(CONFIG_SMP)
    ALT_SMP(cmp r6, #0x30)          @ present prior to r3p0
    ALT_UP_B(1f)
    mrclt   p15, 0, r0, c15, c0, 1      @ read diagnostic register
    orrlt   r0, r0, #1 << 11        @ set bit #11
    mcrlt   p15, 0, r0, c15, c0, 1      @ write diagnostic register
1:
#endif
    b   __errata_finish

__ca15_errata:
#ifdef CONFIG_ARM_ERRATA_773022
    cmp r6, #0x4            @ only present up to r0p4
    mrcle   p15, 0, r0, c1, c0, 1       @ read aux control register
    orrle   r0, r0, #1 << 1         @ disable loop buffer
    mcrle   p15, 0, r0, c1, c0, 1       @ write aux control register
#endif
    b   __errata_finish

__ca12_errata:
#ifdef CONFIG_ARM_ERRATA_818325_852422
    mrc p15, 0, r10, c15, c0, 1     @ read diagnostic register
    orr r10, r10, #1 << 12      @ set bit #12
    mcr p15, 0, r10, c15, c0, 1     @ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_821420
    mrc p15, 0, r10, c15, c0, 2     @ read internal feature reg
    orr r10, r10, #1 << 1       @ set bit #1
    mcr p15, 0, r10, c15, c0, 2     @ write internal feature reg
#endif
#ifdef CONFIG_ARM_ERRATA_825619
    mrc p15, 0, r10, c15, c0, 1     @ read diagnostic register
    orr r10, r10, #1 << 24      @ set bit #24
    mcr p15, 0, r10, c15, c0, 1     @ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_857271
    mrc p15, 0, r10, c15, c0, 1     @ read diagnostic register
    orr r10, r10, #3 << 10      @ set bits #10 and #11
    mcr p15, 0, r10, c15, c0, 1     @ write diagnostic register
#endif
    b   __errata_finish

__ca17_errata:
#ifdef CONFIG_ARM_ERRATA_852421
    cmp r6, #0x12           @ only present up to r1p2
    mrcle   p15, 0, r10, c15, c0, 1     @ read diagnostic register
    orrle   r10, r10, #1 << 24      @ set bit #24
    mcrle   p15, 0, r10, c15, c0, 1     @ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_852423
    cmp r6, #0x12           @ only present up to r1p2
    mrcle   p15, 0, r10, c15, c0, 1     @ read diagnostic register
    orrle   r10, r10, #1 << 12      @ set bit #12
    mcrle   p15, 0, r10, c15, c0, 1     @ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_857272
    mrc p15, 0, r10, c15, c0, 1     @ read diagnostic register
    orr r10, r10, #3 << 10      @ set bits #10 and #11
    mcr p15, 0, r10, c15, c0, 1     @ write diagnostic register
#endif
    b   __errata_finish

__v7_pj4b_setup:
#ifdef CONFIG_CPU_PJ4B

/* Auxiliary Debug Modes Control 1 Register */
#define PJ4B_STATIC_BP (1 << 2) /* Enable Static BP */
#define PJ4B_INTER_PARITY (1 << 8) /* Disable Internal Parity Handling */
#define PJ4B_CLEAN_LINE (1 << 16) /* Disable data transfer for clean line */

/* Auxiliary Debug Modes Control 2 Register */
#define PJ4B_FAST_LDR (1 << 23) /* Disable fast LDR */
#define PJ4B_SNOOP_DATA (1 << 25) /* Do not interleave write and snoop data */
#define PJ4B_CWF (1 << 27) /* Disable Critical Word First feature */
#define PJ4B_OUTSDNG_NC (1 << 29) /* Disable outstanding non cacheable rqst */
#define PJ4B_L1_REP_RR (1 << 30) /* L1 replacement - Strict round robin */
#define PJ4B_AUX_DBG_CTRL2 (PJ4B_SNOOP_DATA | PJ4B_CWF |\
                PJ4B_OUTSDNG_NC | PJ4B_L1_REP_RR)

/* Auxiliary Functional Modes Control Register 0 */
#define PJ4B_SMP_CFB (1 << 1) /* Set SMP mode. Join the coherency fabric */
#define PJ4B_L1_PAR_CHK (1 << 2) /* Support L1 parity checking */
#define PJ4B_BROADCAST_CACHE (1 << 8) /* Broadcast Cache and TLB maintenance */

/* Auxiliary Debug Modes Control 0 Register */
#define PJ4B_WFI_WFE (1 << 22) /* WFI/WFE - serve the DVM and back to idle */

    /* Auxiliary Debug Modes Control 1 Register */
    mrc p15, 1, r0, c15, c1, 1
    orr     r0, r0, #PJ4B_CLEAN_LINE
    orr     r0, r0, #PJ4B_INTER_PARITY
    bic r0, r0, #PJ4B_STATIC_BP
    mcr p15, 1, r0, c15, c1, 1

    /* Auxiliary Debug Modes Control 2 Register */
    mrc p15, 1, r0, c15, c1, 2
    bic r0, r0, #PJ4B_FAST_LDR
    orr r0, r0, #PJ4B_AUX_DBG_CTRL2
    mcr p15, 1, r0, c15, c1, 2

    /* Auxiliary Functional Modes Control Register 0 */
    mrc p15, 1, r0, c15, c2, 0
#ifdef CONFIG_SMP
    orr r0, r0, #PJ4B_SMP_CFB
#endif
    orr r0, r0, #PJ4B_L1_PAR_CHK
    orr r0, r0, #PJ4B_BROADCAST_CACHE
    mcr p15, 1, r0, c15, c2, 0

    /* Auxiliary Debug Modes Control 0 Register */
    mrc p15, 1, r0, c15, c1, 0
    orr r0, r0, #PJ4B_WFI_WFE
    mcr p15, 1, r0, c15, c1, 0

#endif /* CONFIG_CPU_PJ4B */

__v7_setup:
    do_invalidate_l1

__v7_setup_cont:
    and r0, r9, #0xff000000     @ ARM?
    teq r0, #0x41000000
    bne __errata_finish
    and r3, r9, #0x00f00000     @ variant
    and r6, r9, #0x0000000f     @ revision
    orr r6, r6, r3, lsr #20-4       @ combine variant and revision
    ubfx    r0, r9, #4, #12         @ primary part number
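    @ r0 now holds the primary part number (MIDR[15:4]), r3 the variant
    @ field in bits 23-20 and r6 the combined rXpY revision as an 8-bit
    @ value (e.g. r2p1 = 0x21), as expected by the errata handlers above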

    /* Cortex-A8 Errata */
    ldr r10, =0x00000c08        @ Cortex-A8 primary part number
    teq r0, r10
    beq __ca8_errata

    /* Cortex-A9 Errata */
    ldr r10, =0x00000c09        @ Cortex-A9 primary part number
    teq r0, r10
    beq __ca9_errata

    /* Cortex-A12 Errata */
    ldr r10, =0x00000c0d        @ Cortex-A12 primary part number
    teq r0, r10
    beq __ca12_errata

    /* Cortex-A17 Errata */
    ldr r10, =0x00000c0e        @ Cortex-A17 primary part number
    teq r0, r10
    beq __ca17_errata

    /* Cortex-A15 Errata */
    ldr r10, =0x00000c0f        @ Cortex-A15 primary part number
    teq r0, r10
    beq __ca15_errata

__errata_finish:
    mov r10, #0
    mcr p15, 0, r10, c7, c5, 0      @ I+BTB cache invalidate
#ifdef CONFIG_MMU
    mcr p15, 0, r10, c8, c7, 0      @ invalidate I + D TLBs
    v7_ttb_setup r10, r4, r5, r8, r3    @ TTBCR, TTBRx setup
    ldr r3, =PRRR           @ PRRR
    ldr r6, =NMRR           @ NMRR
    mcr p15, 0, r3, c10, c2, 0      @ write PRRR
    mcr p15, 0, r6, c10, c2, 1      @ write NMRR
#endif
    dsb                 @ Complete invalidations
#ifndef CONFIG_ARM_THUMBEE
    mrc p15, 0, r0, c0, c1, 0       @ read ID_PFR0 for ThumbEE
    and r0, r0, #(0xf << 12)        @ ThumbEE enabled field
    teq r0, #(1 << 12)          @ check if ThumbEE is present
    bne 1f
    mov r3, #0
    mcr p14, 6, r3, c1, c0, 0       @ Initialize TEEHBR to 0
    mrc p14, 6, r0, c0, c0, 0       @ load TEECR
    orr r0, r0, #1          @ set the 1st bit in order to
    mcr p14, 6, r0, c0, c0, 0       @ stop userspace TEEHBR access
1:
#endif
    adr r3, v7_crval
    ldmia   r3, {r3, r6}
 ARM_BE8(orr    r6, r6, #1 << 25)       @ big-endian page tables
#ifdef CONFIG_SWP_EMULATE
    orr     r3, r3, #(1 << 10)              @ set SW bit in "clear"
    bic     r6, r6, #(1 << 10)              @ clear it in "mmuset"
#endif
    mrc p15, 0, r0, c1, c0, 0       @ read control register
    bic r0, r0, r3          @ clear the "clear" bits
    orr r0, r0, r6          @ set the "mmuset" bits
 THUMB( orr r0, r0, #1 << 30    )   @ Thumb exceptions
    ret lr              @ return to head.S:__ret
ENDPROC(__v7_setup)

    __INITDATA

    .weak cpu_v7_bugs_init

    @ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
    define_processor_functions v7, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
    @ generic v7 bpiall on context switch
    globl_equ   cpu_v7_bpiall_proc_init,    cpu_v7_proc_init
    globl_equ   cpu_v7_bpiall_proc_fin,     cpu_v7_proc_fin
    globl_equ   cpu_v7_bpiall_reset,        cpu_v7_reset
    globl_equ   cpu_v7_bpiall_do_idle,      cpu_v7_do_idle
    globl_equ   cpu_v7_bpiall_dcache_clean_area, cpu_v7_dcache_clean_area
    globl_equ   cpu_v7_bpiall_set_pte_ext,  cpu_v7_set_pte_ext
    globl_equ   cpu_v7_bpiall_suspend_size, cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
    globl_equ   cpu_v7_bpiall_do_suspend,   cpu_v7_do_suspend
    globl_equ   cpu_v7_bpiall_do_resume,    cpu_v7_do_resume
#endif
    define_processor_functions v7_bpiall, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_bpiall_processor_functions
#else
#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_processor_functions
#endif

#ifndef CONFIG_ARM_LPAE
    @ Cortex-A8 - always needs bpiall switch_mm implementation
    globl_equ   cpu_ca8_proc_init,  cpu_v7_proc_init
    globl_equ   cpu_ca8_proc_fin,   cpu_v7_proc_fin
    globl_equ   cpu_ca8_reset,      cpu_v7_reset
    globl_equ   cpu_ca8_do_idle,    cpu_v7_do_idle
    globl_equ   cpu_ca8_dcache_clean_area, cpu_v7_dcache_clean_area
    globl_equ   cpu_ca8_set_pte_ext,    cpu_v7_set_pte_ext
    globl_equ   cpu_ca8_switch_mm,  cpu_v7_bpiall_switch_mm
    globl_equ   cpu_ca8_suspend_size,   cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
    globl_equ   cpu_ca8_do_suspend, cpu_v7_do_suspend
    globl_equ   cpu_ca8_do_resume,  cpu_v7_do_resume
#endif
    define_processor_functions ca8, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca8_ibe

    @ Cortex-A9 - needs more registers preserved across suspend/resume
    @ and bpiall switch_mm for hardening
    globl_equ   cpu_ca9mp_proc_init,    cpu_v7_proc_init
    globl_equ   cpu_ca9mp_proc_fin, cpu_v7_proc_fin
    globl_equ   cpu_ca9mp_reset,    cpu_v7_reset
    globl_equ   cpu_ca9mp_do_idle,  cpu_v7_do_idle
    globl_equ   cpu_ca9mp_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
    globl_equ   cpu_ca9mp_switch_mm,    cpu_v7_bpiall_switch_mm
#else
    globl_equ   cpu_ca9mp_switch_mm,    cpu_v7_switch_mm
#endif
    globl_equ   cpu_ca9mp_set_pte_ext,  cpu_v7_set_pte_ext
    define_processor_functions ca9mp, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
#endif

    @ Cortex-A15 - needs iciallu switch_mm for hardening
    globl_equ   cpu_ca15_proc_init, cpu_v7_proc_init
    globl_equ   cpu_ca15_proc_fin,  cpu_v7_proc_fin
    globl_equ   cpu_ca15_reset,     cpu_v7_reset
    globl_equ   cpu_ca15_do_idle,   cpu_v7_do_idle
    globl_equ   cpu_ca15_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
    globl_equ   cpu_ca15_switch_mm, cpu_v7_iciallu_switch_mm
#else
    globl_equ   cpu_ca15_switch_mm, cpu_v7_switch_mm
#endif
    globl_equ   cpu_ca15_set_pte_ext,   cpu_v7_set_pte_ext
    globl_equ   cpu_ca15_suspend_size,  cpu_v7_suspend_size
    globl_equ   cpu_ca15_do_suspend,    cpu_v7_do_suspend
    globl_equ   cpu_ca15_do_resume, cpu_v7_do_resume
    define_processor_functions ca15, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca15_ibe
#ifdef CONFIG_CPU_PJ4B
    define_processor_functions pj4b, dabort=v7_early_abort, pabort=v7_pabort, suspend=1
#endif

    .section ".rodata"

    string  cpu_arch_name, "armv7"
    string  cpu_elf_name, "v7"
    .align

    .section ".proc.info.init", "a"

    /*
     * Standard v7 proc info content
     */
.macro __v7_proc name, initfunc, mm_mmuflags = 0, io_mmuflags = 0, hwcaps = 0, proc_fns = v7_processor_functions, cache_fns = v7_cache_fns
    ALT_SMP(.long   PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
            PMD_SECT_AF | PMD_FLAGS_SMP | \mm_mmuflags)
    ALT_UP(.long    PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
            PMD_SECT_AF | PMD_FLAGS_UP | \mm_mmuflags)
    .long   PMD_TYPE_SECT | PMD_SECT_AP_WRITE | \
        PMD_SECT_AP_READ | PMD_SECT_AF | \io_mmuflags
    initfn  \initfunc, \name
    .long   cpu_arch_name
    .long   cpu_elf_name
    .long   HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | HWCAP_FAST_MULT | \
        HWCAP_EDSP | HWCAP_TLS | \hwcaps
    .long   cpu_v7_name
    .long   \proc_fns
    .long   v7wbi_tlb_fns
    .long   v6_user_fns
    .long   \cache_fns
.endm

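/*
 * Each proc_info record below starts with two words: the required MIDR
 * value and the mask applied to the CPU's MIDR when matching, e.g.
 * 0x410fc090/0xff0ffff0 matches any variant and revision of the
 * Cortex-A9 (implementer 0x41, part number 0xc09).
 */
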
#ifndef CONFIG_ARM_LPAE
    /*
     * ARM Ltd. Cortex A5 processor.
     */
    .type   __v7_ca5mp_proc_info, #object
__v7_ca5mp_proc_info:
    .long   0x410fc050
    .long   0xff0ffff0
    __v7_proc __v7_ca5mp_proc_info, __v7_ca5mp_setup
    .size   __v7_ca5mp_proc_info, . - __v7_ca5mp_proc_info

    /*
     * ARM Ltd. Cortex A9 processor.
     */
    .type   __v7_ca9mp_proc_info, #object
__v7_ca9mp_proc_info:
    .long   0x410fc090
    .long   0xff0ffff0
    __v7_proc __v7_ca9mp_proc_info, __v7_ca9mp_setup, proc_fns = ca9mp_processor_functions
    .size   __v7_ca9mp_proc_info, . - __v7_ca9mp_proc_info

    /*
     * ARM Ltd. Cortex A8 processor.
     */
    .type   __v7_ca8_proc_info, #object
__v7_ca8_proc_info:
    .long   0x410fc080
    .long   0xff0ffff0
    __v7_proc __v7_ca8_proc_info, __v7_setup, proc_fns = ca8_processor_functions
    .size   __v7_ca8_proc_info, . - __v7_ca8_proc_info

#endif  /* CONFIG_ARM_LPAE */

    /*
     * Marvell PJ4B processor.
     */
#ifdef CONFIG_CPU_PJ4B
    .type   __v7_pj4b_proc_info, #object
__v7_pj4b_proc_info:
    .long   0x560f5800
    .long   0xff0fff00
    __v7_proc __v7_pj4b_proc_info, __v7_pj4b_setup, proc_fns = pj4b_processor_functions
    .size   __v7_pj4b_proc_info, . - __v7_pj4b_proc_info
#endif

    /*
     * ARM Ltd. Cortex R7 processor.
     */
    .type   __v7_cr7mp_proc_info, #object
__v7_cr7mp_proc_info:
    .long   0x410fc170
    .long   0xff0ffff0
    __v7_proc __v7_cr7mp_proc_info, __v7_cr7mp_setup
    .size   __v7_cr7mp_proc_info, . - __v7_cr7mp_proc_info

    /*
     * ARM Ltd. Cortex R8 processor.
     */
    .type   __v7_cr8mp_proc_info, #object
__v7_cr8mp_proc_info:
    .long   0x410fc180
    .long   0xff0ffff0
    __v7_proc __v7_cr8mp_proc_info, __v7_cr8mp_setup
    .size   __v7_cr8mp_proc_info, . - __v7_cr8mp_proc_info

    /*
     * ARM Ltd. Cortex A7 processor.
     */
    .type   __v7_ca7mp_proc_info, #object
__v7_ca7mp_proc_info:
    .long   0x410fc070
    .long   0xff0ffff0
    __v7_proc __v7_ca7mp_proc_info, __v7_ca7mp_setup
    .size   __v7_ca7mp_proc_info, . - __v7_ca7mp_proc_info

    /*
     * ARM Ltd. Cortex A12 processor.
     */
    .type   __v7_ca12mp_proc_info, #object
__v7_ca12mp_proc_info:
    .long   0x410fc0d0
    .long   0xff0ffff0
    __v7_proc __v7_ca12mp_proc_info, __v7_ca12mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
    .size   __v7_ca12mp_proc_info, . - __v7_ca12mp_proc_info

    /*
     * ARM Ltd. Cortex A15 processor.
     */
    .type   __v7_ca15mp_proc_info, #object
__v7_ca15mp_proc_info:
    .long   0x410fc0f0
    .long   0xff0ffff0
    __v7_proc __v7_ca15mp_proc_info, __v7_ca15mp_setup, proc_fns = ca15_processor_functions
    .size   __v7_ca15mp_proc_info, . - __v7_ca15mp_proc_info

    /*
     * Broadcom Corporation Brahma-B15 processor.
     */
    .type   __v7_b15mp_proc_info, #object
__v7_b15mp_proc_info:
    .long   0x420f00f0
    .long   0xff0ffff0
    __v7_proc __v7_b15mp_proc_info, __v7_b15mp_setup, proc_fns = ca15_processor_functions, cache_fns = b15_cache_fns
    .size   __v7_b15mp_proc_info, . - __v7_b15mp_proc_info

    /*
     * ARM Ltd. Cortex A17 processor.
     */
    .type   __v7_ca17mp_proc_info, #object
__v7_ca17mp_proc_info:
    .long   0x410fc0e0
    .long   0xff0ffff0
    __v7_proc __v7_ca17mp_proc_info, __v7_ca17mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
    .size   __v7_ca17mp_proc_info, . - __v7_ca17mp_proc_info

    /* ARM Ltd. Cortex A73 processor */
    .type   __v7_ca73_proc_info, #object
__v7_ca73_proc_info:
    .long   0x410fd090
    .long   0xff0ffff0
    __v7_proc __v7_ca73_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
    .size   __v7_ca73_proc_info, . - __v7_ca73_proc_info

    /* ARM Ltd. Cortex A75 processor */
    .type   __v7_ca75_proc_info, #object
__v7_ca75_proc_info:
    .long   0x410fd0a0
    .long   0xff0ffff0
    __v7_proc __v7_ca75_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
    .size   __v7_ca75_proc_info, . - __v7_ca75_proc_info

    /*
     * Qualcomm Inc. Krait processors.
     */
    .type   __krait_proc_info, #object
__krait_proc_info:
    .long   0x510f0400      @ Required ID value
    .long   0xff0ffc00      @ Mask for ID
    /*
     * Some Krait processors don't indicate support for SDIV and UDIV
     * instructions in the ARM instruction set, even though they actually
     * do support them. They also don't indicate support for fused multiply
     * instructions even though they actually do support them.
     */
    __v7_proc __krait_proc_info, __v7_setup, hwcaps = HWCAP_IDIV | HWCAP_VFPv4
    .size   __krait_proc_info, . - __krait_proc_info

    /*
     * Match any ARMv7 processor core.
     */
    .type   __v7_proc_info, #object
__v7_proc_info:
    .long   0x000f0000      @ Required ID value
    .long   0x000f0000      @ Mask for ID
    __v7_proc __v7_proc_info, __v7_setup
    .size   __v7_proc_info, . - __v7_proc_info