/*
 * ARMv7-M cache maintenance support.
 *
 * Based on the ARMv7-A implementation (cache-v7.S).  ARMv7-M has no
 * CP15 coprocessor interface; cache maintenance is performed through
 * the memory mapped registers of the System Control Block instead.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/errno.h>
#include <asm/unwind.h>
#include <asm/v7m.h>

#include "proc-macros.S"

/* Generic V7M read/write macros for memory mapped cache operations */
.macro v7m_cache_read, rt, reg
        movw    \rt, #:lower16:BASEADDR_V7M_SCB + \reg
        movt    \rt, #:upper16:BASEADDR_V7M_SCB + \reg
        ldr     \rt, [\rt]
.endm

.macro v7m_cacheop, rt, tmp, op, c = al
        movw\c  \tmp, #:lower16:BASEADDR_V7M_SCB + \op
        movt\c  \tmp, #:upper16:BASEADDR_V7M_SCB + \op
        str\c   \rt, [\tmp]
.endm
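
/*
 * Note: where ARMv7-A/R performs these operations with CP15 MCR
 * instructions, ARMv7-M writes the operand to a memory mapped SCB
 * register.  As an illustrative sketch, an invocation such as
 *
 *	v7m_cacheop	r5, r6, V7M_SCB_DCISW
 *
 * expands to:
 *
 *	movw	r6, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_DCISW
 *	movt	r6, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_DCISW
 *	str	r5, [r6]
 *
 * The \tmp register is clobbered by the address formation, which is why
 * some callers below have to re-derive values after a cache op.
 */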

/* Read the Cache Size ID Register for the cache selected by CSSELR */
.macro  read_ccsidr, rt
        v7m_cache_read \rt, V7M_SCB_CCSIDR
.endm

/* Read the Cache Level ID Register */
.macro read_clidr, rt
        v7m_cache_read \rt, V7M_SCB_CLIDR
.endm

/* Select the cache (level and type) that CCSIDR reports on */
.macro  write_csselr, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_CSSELR
.endm

/*
 * dcisw: Invalidate data cache line by set/way
 */
.macro dcisw, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_DCISW
.endm

/*
 * dccisw: Clean and invalidate data cache line by set/way
 */
.macro dccisw, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_DCCISW
.endm

/*
 * dccimvac: Clean and invalidate data cache line by MVA to PoC.
 * Defined once per condition code suffix so callers can predicate it.
 */
.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
.macro dccimvac\c, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_DCCIMVAC, \c
.endm
.endr

/*
 * dcimvac: Invalidate data cache line by MVA to PoC
 */
.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
.macro dcimvac\c, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC, \c
.endm
.endr

/*
 * dccmvau: Clean data cache line by MVA to PoU
 */
.macro dccmvau, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAU
.endm

/*
 * dccmvac: Clean data cache line by MVA to PoC
 */
.macro dccmvac, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAC
.endm

/*
 * icimvau: Invalidate instruction cache line by MVA to PoU
 */
.macro icimvau, rt, tmp
        v7m_cacheop \rt, \tmp, V7M_SCB_ICIMVAU
.endm

/*
 * Invalidate the entire icache.
 * The value written is ignored by ICIALLU, so \rt can double as the
 * address register; it is cleared afterwards.
 */
.macro invalidate_icache, rt
        v7m_cacheop \rt, \rt, V7M_SCB_ICIALLU
        mov \rt, #0
.endm

/*
 * Invalidate the branch predictor.
 * The value written is ignored by BPIALL, so \rt can double as the
 * address register; it is cleared afterwards.
 */
.macro invalidate_bp, rt
        v7m_cacheop \rt, \rt, V7M_SCB_BPIALL
        mov \rt, #0
.endm

/*
 *	v7m_invalidate_l1()
 *
 *	Invalidate the whole L1 data cache by set/way without cleaning it
 *	first - any dirty lines are discarded.  Corrupts r0-r6.
 */
ENTRY(v7m_invalidate_l1)
        mov     r0, #0

        write_csselr r0, r1                     @ select L1 data/unified cache
        read_ccsidr r0

        movw    r1, #0x7fff
        and     r2, r1, r0, lsr #13             @ NumSets - 1

        movw    r1, #0x3ff

        and     r3, r1, r0, lsr #3              @ NumWays - 1
        add     r2, r2, #1                      @ NumSets

        and     r0, r0, #0x7
        add     r0, r0, #4                      @ SetShift

        clz     r1, r3                          @ WayShift
        add     r4, r3, #1                      @ NumWays
1:      sub     r2, r2, #1                      @ NumSets--
        mov     r3, r4                          @ Temp = NumWays
2:      subs    r3, r3, #1                      @ Temp--
        mov     r5, r3, lsl r1
        mov     r6, r2, lsl r0
        orr     r5, r5, r6                      @ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
        dcisw   r5, r6
        bgt     2b
        cmp     r2, #0
        bgt     1b
        dsb     st
        isb
        ret     lr
ENDPROC(v7m_invalidate_l1)
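
/*
 * Worked example of the set/way word built in the loop above (a sketch,
 * assuming a hypothetical 4-way L1 data cache with 32-byte lines):
 *
 *	CCSIDR.LineSize = 1  ->  SetShift = 1 + 4 = 5   (log2(32))
 *	NumWays - 1     = 3  ->  WayShift = clz(3) = 30
 *
 * so each DCISW write is (way << 30) | (set << 5), with the level bits
 * left at zero because only L1 (level 0 in CSSELR terms) is selected.
 */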

/*
 *	v7m_flush_icache_all()
 *
 *	Flush the whole I-cache.
 *
 *	Registers:
 *	r0 - set to 0
 */
ENTRY(v7m_flush_icache_all)
        invalidate_icache r0
        ret     lr
ENDPROC(v7m_flush_icache_all)

/*
 *	v7m_flush_dcache_all()
 *
 *	Flush (clean and invalidate) the whole D-cache, working outwards
 *	from L1 to the Level of Coherency reported by CLIDR.
 *
 *	Corrupted registers: r0-r7, r9-r11
 */
ENTRY(v7m_flush_dcache_all)
        dmb                                     @ ensure ordering with previous memory accesses
        read_clidr r0
        mov     r3, r0, lsr #23                 @ move LoC into position
        ands    r3, r3, #7 << 1                 @ extract LoC*2 from clidr
        beq     finished                        @ if loc is 0, then no need to clean
start_flush_levels:
        mov     r10, #0                         @ start clean at cache level 0
flush_levels:
        add     r2, r10, r10, lsr #1            @ work out 3x current cache level
        mov     r1, r0, lsr r2                  @ extract cache type bits from clidr
        and     r1, r1, #7                      @ mask of the bits for current cache only
        cmp     r1, #2                          @ see what cache we have at this level
        blt     skip                            @ skip if no cache, or just i-cache
#ifdef CONFIG_PREEMPTION
        save_and_disable_irqs_notrace r9        @ make cssr&csidr read atomic
#endif
        write_csselr r10, r1                    @ set current cache level
        isb                                     @ isb to sync the new cssr&csidr
        read_ccsidr r1                          @ read the new csidr
#ifdef CONFIG_PREEMPTION
        restore_irqs_notrace r9
#endif
        and     r2, r1, #7                      @ extract the length of the cache lines
        add     r2, r2, #4                      @ add 4 (line length offset)
        movw    r4, #0x3ff
        ands    r4, r4, r1, lsr #3              @ maximum way number (NumWays - 1)
        clz     r5, r4                          @ find bit position of way size increment
        movw    r7, #0x7fff
        ands    r7, r7, r1, lsr #13             @ maximum set index (NumSets - 1)
loop1:
        mov     r9, r7                          @ create working copy of max index
loop2:
        lsl     r6, r4, r5
        orr     r11, r10, r6                    @ factor way and cache number into r11
        lsl     r6, r9, r2
        orr     r11, r11, r6                    @ factor index number into r11
        dccisw  r11, r6                         @ clean/invalidate by set/way
        subs    r9, r9, #1                      @ decrement the index
        bge     loop2
        subs    r4, r4, #1                      @ decrement the way
        bge     loop1
skip:
        add     r10, r10, #2                    @ increment cache number
        cmp     r3, r10
        bgt     flush_levels
finished:
        mov     r10, #0                         @ switch back to cache level 0
        write_csselr r10, r3                    @ select current cache level in cssr
        dsb     st
        isb
        ret     lr
ENDPROC(v7m_flush_dcache_all)
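
/*
 * For reference, the CCSIDR fields consumed by the loop above (32-bit
 * CCSIDR layout, which is what this code assumes):
 *
 *	[2:0]   LineSize       log2(words per line) - 2
 *	[12:3]  Associativity  NumWays - 1
 *	[27:13] NumSets        NumSets - 1
 *
 * which is why the code masks with 0x7, 0x3ff and 0x7fff after shifting
 * right by 0, 3 and 13 respectively.
 */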

/*
 *	v7m_flush_kern_cache_all()
 *
 *	Flush the entire cache system.
 *	The data cache flush is achieved with atomic clean/invalidate
 *	operations working outwards from the L1 cache, using set/way
 *	based cache maintenance.
 *	The instruction cache can be invalidated back to the point of
 *	unification in a single operation.
 */
ENTRY(v7m_flush_kern_cache_all)
        stmfd   sp!, {r4-r7, r9-r11, lr}
        bl      v7m_flush_dcache_all
        invalidate_icache r0
        ldmfd   sp!, {r4-r7, r9-r11, lr}
        ret     lr
ENDPROC(v7m_flush_kern_cache_all)

/*
 *	v7m_flush_user_cache_all()
 *
 *	Flush all cache entries in a particular address space.
 *
 *	- mm	- mm_struct describing address space
 */
ENTRY(v7m_flush_user_cache_all)
        /* FALLTHROUGH */

/*
 *	v7m_flush_user_cache_range(start, end, flags)
 *
 *	Flush a range of cache entries in the specified address space.
 *
 *	- start - start address (may not be aligned)
 *	- end   - end address (exclusive, may not be aligned)
 *	- flags - vm_area_struct flags describing address space
 *
 *	Both entry points are no-ops here.
 */
ENTRY(v7m_flush_user_cache_range)
        ret     lr
ENDPROC(v7m_flush_user_cache_all)
ENDPROC(v7m_flush_user_cache_range)

/*
 *	v7m_coherent_kern_range(start,end)
 *
 *	Ensure that the I and D caches are coherent within the specified
 *	region.  This is typically used when code has been written to
 *	a memory region, and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
ENTRY(v7m_coherent_kern_range)
        /* FALLTHROUGH */

/*
 *	v7m_coherent_user_range(start,end)
 *
 *	Ensure that the I and D caches are coherent within the specified
 *	region.  This is typically used when code has been written to
 *	a memory region, and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
ENTRY(v7m_coherent_user_range)
 UNWIND(.fnstart)
        dcache_line_size r2, r3
        sub     r3, r2, #1
        bic     r12, r0, r3
1:
        dccmvau r12, r3                         @ clean D line to the point of unification
        add     r12, r12, r2
        cmp     r12, r1
        blo     1b
        dsb     ishst
        icache_line_size r2, r3
        sub     r3, r2, #1
        bic     r12, r0, r3
2:
        icimvau r12, r3                         @ invalidate I line to the point of unification
        add     r12, r12, r2
        cmp     r12, r1
        blo     2b
        invalidate_bp r0
        dsb     ishst
        isb
        ret     lr
 UNWIND(.fnend)
ENDPROC(v7m_coherent_kern_range)
ENDPROC(v7m_coherent_user_range)

/*
 *	v7m_flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure that the data held in the region described by addr and size
 *	is written back to memory and invalidated from the cache.
 *
 *	- addr	- kernel address
 *	- size	- region size
 */
ENTRY(v7m_flush_kern_dcache_area)
        dcache_line_size r2, r3
        add     r1, r0, r1
        sub     r3, r2, #1
        bic     r0, r0, r3
1:
        dccimvac r0, r3                         @ clean & invalidate D line / unified line
        add     r0, r0, r2
        cmp     r0, r1
        blo     1b
        dsb     st
        ret     lr
ENDPROC(v7m_flush_kern_dcache_area)

/*
 *	v7m_dma_inv_range(start,end)
 *
 *	Invalidate the data cache within the specified region; we will
 *	be performing a DMA operation in this region and we want to
 *	purge old data in the cache.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
v7m_dma_inv_range:
        dcache_line_size r2, r3
        sub     r3, r2, #1
        tst     r0, r3
        bic     r0, r0, r3
        dccimvacne r0, r3                       @ clean & invalidate partial line at start
        addne   r0, r0, r2
        subne   r3, r2, #1                      @ restore r3, corrupted by v7m's dccimvac
        tst     r1, r3
        bic     r1, r1, r3
        dccimvacne r1, r3                       @ clean & invalidate partial line at end
        cmp     r0, r1
1:
        dcimvaclo r0, r3                        @ invalidate whole lines in between
        addlo   r0, r0, r2
        cmplo   r0, r1
        blo     1b
        dsb     st
        ret     lr
ENDPROC(v7m_dma_inv_range)
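
/*
 * Design note on v7m_dma_inv_range: lines that straddle the start or end
 * of the buffer are cleaned and invalidated (DCCIMVAC) rather than just
 * invalidated, so that unrelated data sharing those cache lines is written
 * back to memory instead of being discarded.  Only the fully contained
 * lines in between are invalidated outright (DCIMVAC).
 */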

/*
 *	v7m_dma_clean_range(start,end)
 *
 *	Clean (write back) the data cache within the specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
v7m_dma_clean_range:
        dcache_line_size r2, r3
        sub     r3, r2, #1
        bic     r0, r0, r3
1:
        dccmvac r0, r3                          @ clean D / U line
        add     r0, r0, r2
        cmp     r0, r1
        blo     1b
        dsb     st
        ret     lr
ENDPROC(v7m_dma_clean_range)

/*
 *	v7m_dma_flush_range(start,end)
 *
 *	Clean and invalidate the data cache within the specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
ENTRY(v7m_dma_flush_range)
        dcache_line_size r2, r3
        sub     r3, r2, #1
        bic     r0, r0, r3
1:
        dccimvac r0, r3                         @ clean & invalidate D / U line
        add     r0, r0, r2
        cmp     r0, r1
        blo     1b
        dsb     st
        ret     lr
ENDPROC(v7m_dma_flush_range)

/*
 *	v7m_dma_map_area(start, size, dir)
 *
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(v7m_dma_map_area)
        add     r1, r1, r0                      @ convert size to end address
        teq     r2, #DMA_FROM_DEVICE
        beq     v7m_dma_inv_range
        b       v7m_dma_clean_range
ENDPROC(v7m_dma_map_area)

/*
 *	v7m_dma_unmap_area(start, size, dir)
 *
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(v7m_dma_unmap_area)
        add     r1, r1, r0                      @ convert size to end address
        teq     r2, #DMA_TO_DEVICE
        bne     v7m_dma_inv_range
        ret     lr
ENDPROC(v7m_dma_unmap_area)
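
/*
 * Summary of the DMA maintenance choices above:
 *
 *	map,   DMA_FROM_DEVICE  -> invalidate  (device is about to write)
 *	map,   other directions -> clean       (make CPU writes visible)
 *	unmap, DMA_TO_DEVICE    -> nothing     (device only read the buffer)
 *	unmap, other directions -> invalidate  (discard stale lines)
 */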

        @ The LoUIS variant is implemented as a full cache flush
        .globl  v7m_flush_kern_cache_louis
        .equ    v7m_flush_kern_cache_louis, v7m_flush_kern_cache_all

        __INITDATA

        @ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
        define_cache_functions v7m