0001
0002
0003
0004
0005
0006
0007
0008
0009
0010 #include <linux/linkage.h>
0011 #include <linux/init.h>
0012 #include <asm/assembler.h>
0013 #include <asm/errno.h>
0014 #include <asm/unwind.h>
0015 #include <asm/hardware/cache-b15-rac.h>
0016
0017 #include "proc-macros.S"
0018
#ifdef CONFIG_CPU_ICACHE_MISMATCH_WORKAROUND
@ Global I-cache line size (in bytes) used by v7_coherent_user_range when
@ the I-cache line-size mismatch workaround is enabled.  Defaults to 64;
@ presumably overwritten during boot with the smallest line size found
@ across all CPUs — TODO confirm against the setup code that writes it.
.globl icache_size
.data
.align 2
icache_size:
.long 64
.text
#endif
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
/*
 * v7_invalidate_l1()
 *
 * Invalidate (without cleaning) the entire L1 data cache by set/way
 * (DCISW, c7/c6/2).  Reads the cache geometry from CCSIDR after selecting
 * the L1 data cache in CSSELR, then walks every way and every set.
 *
 * Registers: clobbers r0-r3, ip; returns via lr.  No stack usage.
 */
ENTRY(v7_invalidate_l1)
mov r0, #0
mcr p15, 2, r0, c0, c0, 0 @ select L1 data cache in CSSELR
isb @ ensure CSSELR write is seen by the CCSIDR read below
mrc p15, 1, r0, c0, c0, 0 @ read cache geometry from CCSIDR

movw r3, #0x3ff
and r3, r3, r0, lsr #3 @ 'Associativity' in CCSIDR[12:3]
clz r1, r3 @ WayShift
mov r2, #1
mov r3, r3, lsl r1 @ NumWays-1 shifted into bits [31:...]
movs r1, r2, lsl r1 @ #1 shifted left by same amount
moveq r1, #1 @ r1 needs value > 0 even if only 1 way

and r2, r0, #0x7 @ line size encoding (log2(line)-4)
add r2, r2, #4 @ SetShift

1: movw ip, #0x7fff
and r0, ip, r0, lsr #13 @ 'NumSets' in CCSIDR[27:13]

2: mov ip, r0, lsl r2 @ NumSet << SetShift
orr ip, ip, r3 @ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
mcr p15, 0, ip, c7, c6, 2 @ DCISW: invalidate this set/way
subs r0, r0, #1 @ Set--
bpl 2b
subs r3, r3, r1 @ Way--
bcc 3f
mrc p15, 1, r0, c0, c0, 0 @ re-read cache geometry from CCSIDR
b 1b
3: dsb st @ complete all invalidations before returning
isb
ret lr
ENDPROC(v7_invalidate_l1)
0072
0073
0074
0075
0076
0077
0078
0079
0080
/*
 * v7_flush_icache_all()
 *
 * Invalidate the whole I-cache: ICIALLUIS (inner shareable) on SMP,
 * ICIALLU plus branch predictor on UP.  Clobbers r0 only.
 */
ENTRY(v7_flush_icache_all)
mov r0, #0
ALT_SMP(mcr p15, 0, r0, c7, c1, 0) @ invalidate I-cache inner shareable
ALT_UP(mcr p15, 0, r0, c7, c5, 0) @ I+BTB cache invalidate
ret lr
ENDPROC(v7_flush_icache_all)
0087
0088
0089
0090
0091
0092
0093
0094
0095
/*
 * v7_flush_dcache_louis()
 *
 * Clean & invalidate the data cache up to the Level of Unification
 * (LoUIS on SMP, LoUU on UP), read from CLIDR.  If that level is zero
 * there is nothing to do, except on Cortex-A9 r0pX with erratum 643719,
 * where LoUIS is forced to 1 so that level 0 is still flushed.
 *
 * Tail-branches into start_flush_levels (in v7_flush_dcache_all below),
 * so it clobbers the same registers: r0-r6, r9, r10 (r9 only under
 * CONFIG_PREEMPTION); returns via lr.
 */
ENTRY(v7_flush_dcache_louis)
dmb @ ensure ordering with previous memory accesses
mrc p15, 1, r0, c0, c0, 1 @ read clidr, r0 = clidr
ALT_SMP(mov r3, r0, lsr #20) @ move LoUIS into position
ALT_UP( mov r3, r0, lsr #26) @ move LoUU into position
ands r3, r3, #7 << 1 @ extract LoU*2 field from clidr
bne start_flush_levels @ LoU != 0, start flushing
#ifdef CONFIG_ARM_ERRATA_643719
ALT_SMP(mrc p15, 0, r2, c0, c0, 0) @ read main ID register
ALT_UP( ret lr) @ LoUU is zero, so nothing to do
movw r1, #:lower16:(0x410fc090 >> 4) @ ID of ARM Cortex A9 r0p?
movt r1, #:upper16:(0x410fc090 >> 4)
teq r1, r2, lsr #4 @ test for errata affected core and if so...
moveq r3, #1 << 1 @ fix LoUIS value
beq start_flush_levels @ start flushing cache levels
#endif
ret lr
ENDPROC(v7_flush_dcache_louis)
0114
0115
0116
0117
0118
0119
0120
0121
0122
0123
/*
 * v7_flush_dcache_all()
 *
 * Clean & invalidate the whole data cache hierarchy by set/way
 * (DCCISW, c7/c14/2), walking every cache level up to the Level of
 * Coherency from CLIDR.  Skips levels that have no data cache
 * (cache-type field < 2).
 *
 * start_flush_levels / flush_levels are also entered from
 * v7_flush_dcache_louis above with r0 = CLIDR and r3 = 2 * limit level.
 *
 * Registers: clobbers r0-r6, r9, r10 (r9 additionally used to save the
 * IRQ state under CONFIG_PREEMPTION); returns via lr.  Callers are
 * expected to preserve r4-r6, r9-r10 (see v7_flush_kern_cache_all).
 */
ENTRY(v7_flush_dcache_all)
dmb @ ensure ordering with previous memory accesses
mrc p15, 1, r0, c0, c0, 1 @ read clidr
mov r3, r0, lsr #23 @ move LoC into position
ands r3, r3, #7 << 1 @ extract LoC*2 from clidr
beq finished @ if loc is 0, then no need to clean
start_flush_levels:
mov r10, #0 @ start clean at cache level 0
flush_levels:
add r2, r10, r10, lsr #1 @ work out 3x current cache level
mov r1, r0, lsr r2 @ extract cache type bits from clidr
and r1, r1, #7 @ mask of the bits for current cache only
cmp r1, #2 @ see what cache we have at this level
blt skip @ skip if no cache, or just i-cache
#ifdef CONFIG_PREEMPTION
save_and_disable_irqs_notrace r9 @ make cssr&csidr read atomic
#endif
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
isb @ isb to sych the new cssr&csidr
mrc p15, 1, r1, c0, c0, 0 @ read the new csidr
#ifdef CONFIG_PREEMPTION
restore_irqs_notrace r9
#endif
and r2, r1, #7 @ extract the length of the cache lines
add r2, r2, #4 @ add 4 (line length offset)
movw r4, #0x3ff
ands r4, r4, r1, lsr #3 @ find maximum number on the way size
clz r5, r4 @ find bit position of way size increment
movw r6, #0x7fff
and r1, r6, r1, lsr #13 @ extract max number of the index size
mov r6, #1
movne r4, r4, lsl r5 @ # of ways shifted into bits [31:...]
movne r6, r6, lsl r5 @ 1 shifted left by same amount
loop1:
mov r9, r1 @ create working copy of max index
loop2:
mov r5, r9, lsl r2 @ factor set number into r5
orr r5, r5, r4 @ factor way number into r5
orr r5, r5, r10 @ factor cache level into r5
mcr p15, 0, r5, c7, c14, 2 @ clean & invalidate by set/way
subs r9, r9, #1 @ decrement the index
bge loop2
subs r4, r4, r6 @ decrement the way
bcs loop1
skip:
add r10, r10, #2 @ increment cache number
cmp r3, r10
#ifdef CONFIG_ARM_ERRATA_814220
dsb @ erratum 814220: barrier between levels
#endif
bgt flush_levels
finished:
mov r10, #0 @ switch back to cache level 0
mcr p15, 2, r10, c0, c0, 0 @ select current cache level in cssr
dsb st
isb
ret lr
ENDPROC(v7_flush_dcache_all)
0182
0183
0184
0185
0186
0187
0188
0189
0190
0191
0192
0193
/*
 * v7_flush_kern_cache_all()
 *
 * Flush the entire cache system: whole D-cache hierarchy by set/way
 * (via v7_flush_dcache_all) followed by a full I-cache invalidate.
 * Saves/restores the registers v7_flush_dcache_all clobbers beyond
 * the caller-save set (r4-r6, r9-r10).
 */
ENTRY(v7_flush_kern_cache_all)
stmfd sp!, {r4-r6, r9-r10, lr}
bl v7_flush_dcache_all
mov r0, #0
ALT_SMP(mcr p15, 0, r0, c7, c1, 0) @ invalidate I-cache inner shareable
ALT_UP(mcr p15, 0, r0, c7, c5, 0) @ I+BTB cache invalidate
ldmfd sp!, {r4-r6, r9-r10, lr}
ret lr
ENDPROC(v7_flush_kern_cache_all)
0203
0204
0205
0206
0207
0208
0209
/*
 * v7_flush_kern_cache_louis()
 *
 * Same as v7_flush_kern_cache_all, but flush the D-cache only up to
 * the Level of Unification (via v7_flush_dcache_louis) before the
 * full I-cache invalidate.
 */
ENTRY(v7_flush_kern_cache_louis)
stmfd sp!, {r4-r6, r9-r10, lr}
bl v7_flush_dcache_louis
mov r0, #0
ALT_SMP(mcr p15, 0, r0, c7, c1, 0) @ invalidate I-cache inner shareable
ALT_UP(mcr p15, 0, r0, c7, c5, 0) @ I+BTB cache invalidate
ldmfd sp!, {r4-r6, r9-r10, lr}
ret lr
ENDPROC(v7_flush_kern_cache_louis)
0219
0220
0221
0222
0223
0224
0225
0226
/*
 * v7_flush_user_cache_all()
 * v7_flush_user_cache_range(start, end, flags)
 *
 * Both are no-ops on ARMv7: v7_flush_user_cache_all falls straight
 * through into v7_flush_user_cache_range, which just returns.
 * Presumably the required maintenance is performed by the other
 * entry points in this file — TODO confirm against cpu_cache_fns
 * callers in <asm/cacheflush.h>.
 */
ENTRY(v7_flush_user_cache_all)
@ fall through
ENTRY(v7_flush_user_cache_range)
ret lr
ENDPROC(v7_flush_user_cache_all)
ENDPROC(v7_flush_user_cache_range)
0246
0247
0248
0249
0250
0251
0252
0253
0254
0255
0256
0257
0258
0259
/*
 * v7_coherent_kern_range(start, end)
 * v7_coherent_user_range(start, end)
 *
 * Make the region [r0, r1) coherent between the D-cache and I-cache:
 * clean each D-cache line to the point of unification (DCCMVAU), then
 * invalidate the corresponding I-cache lines (ICIMVAU) and the branch
 * predictor.  The kern variant simply falls through to the user one.
 *
 * In:   r0 = start address (inclusive), r1 = end address (exclusive)
 * Out:  r0 = 0 on success, -EFAULT if a USER()-marked access faults
 * Clob: r2, r3, r12 (ip), flags
 *
 * NOTE(review): USER() presumably records an exception-table entry so
 * that a fault on the marked mcr lands at label 9001 below — confirm
 * against the USER() definition in <asm/assembler.h>.
 */
ENTRY(v7_coherent_kern_range)
@ fall through
ENTRY(v7_coherent_user_range)
UNWIND(.fnstart )
dcache_line_size r2, r3 @ r2 = D-cache line size in bytes
sub r3, r2, #1
bic r12, r0, r3 @ align start down to a line boundary
#ifdef CONFIG_ARM_ERRATA_764369
ALT_SMP(W(dsb)) @ erratum 764369: barrier before maintenance by MVA
ALT_UP(W(nop))
#endif
1:
USER( mcr p15, 0, r12, c7, c11, 1 ) @ clean D line to the point of unification
add r12, r12, r2
cmp r12, r1
blo 1b
dsb ishst @ cleans visible before I-cache invalidation
#ifdef CONFIG_CPU_ICACHE_MISMATCH_WORKAROUND
ldr r3, =icache_size @ use the global minimum I-cache line size
ldr r2, [r3, #0]
#else
icache_line_size r2, r3 @ r2 = this CPU's I-cache line size
#endif
sub r3, r2, #1
bic r12, r0, r3 @ re-align start for the I-cache pass
2:
USER( mcr p15, 0, r12, c7, c5, 1 ) @ invalidate I line
add r12, r12, r2
cmp r12, r1
blo 2b
mov r0, #0 @ success return value
ALT_SMP(mcr p15, 0, r0, c7, c1, 6) @ invalidate BTB Inner Shareable
ALT_UP(mcr p15, 0, r0, c7, c5, 6) @ invalidate BTB
dsb ishst
isb
ret lr

/*
 * Fault fixup: a USER()-marked cache op above faulted (address not
 * mapped); report -EFAULT to the caller.
 */
9001:
#ifdef CONFIG_ARM_ERRATA_775420
dsb @ erratum 775420: barrier after aborted cache op
#endif
mov r0, #-EFAULT
ret lr
UNWIND(.fnend )
ENDPROC(v7_coherent_kern_range)
ENDPROC(v7_coherent_user_range)
0324
0325
0326
0327
0328
0329
0330
0331
0332
0333
/*
 * v7_flush_kern_dcache_area(addr, size)
 *
 * Clean & invalidate (DCCIMVAC) every D-cache line in the region
 * [r0, r0 + r1).
 *
 * In:   r0 = start address, r1 = size in bytes
 * Clob: r0-r3, flags
 */
ENTRY(v7_flush_kern_dcache_area)
dcache_line_size r2, r3 @ r2 = D-cache line size in bytes
add r1, r0, r1 @ r1 = end address
sub r3, r2, #1
bic r0, r0, r3 @ align start down to a line boundary
#ifdef CONFIG_ARM_ERRATA_764369
ALT_SMP(W(dsb)) @ erratum 764369: barrier before maintenance by MVA
ALT_UP(W(nop))
#endif
1:
mcr p15, 0, r0, c7, c14, 1 @ clean & invalidate D line / unified line
add r0, r0, r2
cmp r0, r1
blo 1b
dsb st
ret lr
ENDPROC(v7_flush_kern_dcache_area)
0351
0352
0353
0354
0355
0356
0357
0358
0359
0360
0361
/*
 * v7_dma_inv_range(start, end)
 *
 * Invalidate the D-cache over [r0, r1).  Lines that straddle either
 * boundary are cleaned & invalidated (DCCIMVAC) instead of just
 * invalidated, so that unrelated data sharing those lines is not
 * discarded; fully-covered lines are invalidated (DCIMVAC).
 *
 * In:   r0 = start address, r1 = end address
 * Clob: r0-r3, flags
 */
v7_dma_inv_range:
dcache_line_size r2, r3 @ r2 = D-cache line size in bytes
sub r3, r2, #1
tst r0, r3 @ start misaligned within a line?
bic r0, r0, r3
#ifdef CONFIG_ARM_ERRATA_764369
ALT_SMP(W(dsb)) @ erratum 764369: barrier before maintenance by MVA
ALT_UP(W(nop))
#endif
mcrne p15, 0, r0, c7, c14, 1 @ clean & invalidate D / U line
addne r0, r0, r2 @ skip the partial first line

tst r1, r3 @ end misaligned within a line?
bic r1, r1, r3
mcrne p15, 0, r1, c7, c14, 1 @ clean & invalidate D / U line
cmp r0, r1
1:
mcrlo p15, 0, r0, c7, c6, 1 @ invalidate D / U line
addlo r0, r0, r2
cmplo r0, r1
blo 1b
dsb st
ret lr
ENDPROC(v7_dma_inv_range)
0386
0387
0388
0389
0390
0391
/*
 * v7_dma_clean_range(start, end)
 *
 * Clean (write back, DCCMVAC) every D-cache line in [r0, r1).
 *
 * In:   r0 = start address, r1 = end address
 * Clob: r0, r2, r3, flags
 */
v7_dma_clean_range:
dcache_line_size r2, r3 @ r2 = D-cache line size in bytes
sub r3, r2, #1
bic r0, r0, r3 @ align start down to a line boundary
#ifdef CONFIG_ARM_ERRATA_764369
ALT_SMP(W(dsb)) @ erratum 764369: barrier before maintenance by MVA
ALT_UP(W(nop))
#endif
1:
mcr p15, 0, r0, c7, c10, 1 @ clean D / U line
add r0, r0, r2
cmp r0, r1
blo 1b
dsb st
ret lr
ENDPROC(v7_dma_clean_range)
0408
0409
0410
0411
0412
0413
/*
 * v7_dma_flush_range(start, end)
 *
 * Clean & invalidate (DCCIMVAC) every D-cache line in [r0, r1).
 *
 * In:   r0 = start address, r1 = end address
 * Clob: r0, r2, r3, flags
 */
ENTRY(v7_dma_flush_range)
dcache_line_size r2, r3 @ r2 = D-cache line size in bytes
sub r3, r2, #1
bic r0, r0, r3 @ align start down to a line boundary
#ifdef CONFIG_ARM_ERRATA_764369
ALT_SMP(W(dsb)) @ erratum 764369: barrier before maintenance by MVA
ALT_UP(W(nop))
#endif
1:
mcr p15, 0, r0, c7, c14, 1 @ clean & invalidate D / U line
add r0, r0, r2
cmp r0, r1
blo 1b
dsb st
ret lr
ENDPROC(v7_dma_flush_range)
0430
0431
0432
0433
0434
0435
0436
/*
 * v7_dma_map_area(start, size, dir)
 *
 * Prepare [r0, r0 + r1) for a DMA transfer: invalidate for
 * DMA_FROM_DEVICE, otherwise clean.  Tail-calls the range helpers,
 * which expect r1 = end address.
 *
 * In:   r0 = start address, r1 = size in bytes, r2 = DMA direction
 */
ENTRY(v7_dma_map_area)
add r1, r1, r0 @ convert size to end address
teq r2, #DMA_FROM_DEVICE
beq v7_dma_inv_range @ device writes: invalidate stale lines
b v7_dma_clean_range @ otherwise: write dirty lines to memory
ENDPROC(v7_dma_map_area)
0443
0444
0445
0446
0447
0448
0449
/*
 * v7_dma_unmap_area(start, size, dir)
 *
 * Finish a DMA transfer on [r0, r0 + r1): invalidate unless the
 * direction was DMA_TO_DEVICE (in which case nothing is needed).
 *
 * In:   r0 = start address, r1 = size in bytes, r2 = DMA direction
 */
ENTRY(v7_dma_unmap_area)
add r1, r1, r0 @ convert size to end address
teq r2, #DMA_TO_DEVICE
bne v7_dma_inv_range @ device may have written: invalidate
ret lr
ENDPROC(v7_dma_unmap_area)
0456
__INITDATA

@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
define_cache_functions v7

@ Broadcom Brahma-B15 aliases: the b15 cache-function table reuses the v7
@ implementations above.  When CONFIG_CACHE_B15_RAC is enabled,
@ b15_flush_kern_cache_all is presumably provided elsewhere (the RAC
@ driver, per <asm/hardware/cache-b15-rac.h>) — TODO confirm — so the
@ alias is only emitted when that option is off.
#ifndef CONFIG_CACHE_B15_RAC
globl_equ b15_flush_kern_cache_all, v7_flush_kern_cache_all
#endif
globl_equ b15_flush_icache_all, v7_flush_icache_all
globl_equ b15_flush_kern_cache_louis, v7_flush_kern_cache_louis
globl_equ b15_flush_user_cache_all, v7_flush_user_cache_all
globl_equ b15_flush_user_cache_range, v7_flush_user_cache_range
globl_equ b15_coherent_kern_range, v7_coherent_kern_range
globl_equ b15_coherent_user_range, v7_coherent_user_range
globl_equ b15_flush_kern_dcache_area, v7_flush_kern_dcache_area

globl_equ b15_dma_map_area, v7_dma_map_area
globl_equ b15_dma_unmap_area, v7_dma_unmap_area
globl_equ b15_dma_flush_range, v7_dma_flush_range

@ define struct cpu_cache_fns for the b15 variant using the aliases above
define_cache_functions b15