/* SPDX-License-Identifier: GPL-2.0 */
/*
 * We need asm-offsets.h for:
 *  VMA_VM_MM
 *  VMA_VM_FLAGS
 *  VM_EXEC
 */
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>

#ifdef CONFIG_CPU_V7M
#include <asm/v7m.h>
#endif

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
    .macro  vma_vm_mm, rd, rn
    ldr \rd, [\rn, #VMA_VM_MM]
    .endm

/*
 * vma_vm_flags - get vma->vm_flags
 */
    .macro  vma_vm_flags, rd, rn
    ldr \rd, [\rn, #VMA_VM_FLAGS]
    .endm
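
/*
 * Illustrative use (editorial sketch, not part of the original file):
 * with a struct vm_area_struct pointer in r0, these macros fetch the
 * owning mm and the flags, e.g. to test for an executable mapping:
 *
 *	vma_vm_mm    r2, r0		@ r2 = vma->vm_mm
 *	vma_vm_flags r3, r0		@ r3 = vma->vm_flags
 *	tst          r3, #VM_EXEC	@ executable mapping?
 */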

/*
 * act_mm - get current->active_mm
 */
    .macro  act_mm, rd
    get_current \rd
    .if (TSK_ACTIVE_MM > IMM12_MASK)
    add \rd, \rd, #TSK_ACTIVE_MM & ~IMM12_MASK
    .endif
    ldr \rd, [\rd, #TSK_ACTIVE_MM & IMM12_MASK]
    .endm
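
/*
 * Note (editorial): the .if/.endif above works around the 12-bit
 * immediate offset limit of ldr.  If TSK_ACTIVE_MM does not fit in
 * 12 bits, the high bits are added to the pointer first and only the
 * low 12 bits are used as the load offset; e.g. a (hypothetical)
 * TSK_ACTIVE_MM of 0x1040 becomes add #0x1000 followed by
 * ldr [..., #0x40].
 */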

/*
 * mmid - get context id from mm pointer (mm->context.id)
 * note: this field is 64-bit, so on big-endian the two words are
 * swapped as well.
 */
    .macro  mmid, rd, rn
#ifdef __ARMEB__
    ldr \rd, [\rn, #MM_CONTEXT_ID + 4 ]
#else
    ldr \rd, [\rn, #MM_CONTEXT_ID]
#endif
    .endm
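
/*
 * Worked example (editorial): for a 64-bit context.id value of
 * 0x0000000100000003, the wanted low word 0x00000003 sits at offset
 * MM_CONTEXT_ID on little-endian, but at MM_CONTEXT_ID + 4 on
 * big-endian, hence the __ARMEB__ adjustment above.
 */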

/*
 * asid - mask the ASID out of the context ID
 */
    .macro  asid, rd, rn
    and \rd, \rn, #255
    .endm
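
/*
 * Example (editorial): with the 8-bit ASID in the low byte of the
 * context ID, a value of 0x00000142 yields ASID 0x42:
 *
 *	mmid	r1, r2			@ r1 = mm->context.id (low word)
 *	asid	r1, r1			@ r1 = 0x42
 */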

/*
 * crval - emit the control register clear/set pair: the MMU value
 * is used when CONFIG_MMU is enabled, the uncached value otherwise.
 */
    .macro  crval, clear, mmuset, ucset
#ifdef CONFIG_MMU
    .word   \clear
    .word   \mmuset
#else
    .word   \clear
    .word   \ucset
#endif
    .endm
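
/*
 * Usage sketch (values hypothetical, shown only for illustration):
 * each processor implementation emits its clear/set pair with this
 * macro, e.g.
 *
 *	crval	clear=0x00007f3f, mmuset=0x00003135, ucset=0x00001134
 */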

/*
 * dcache_line_size - get the minimum D-cache line size from the CTR
 * register on ARMv7.
 */
    .macro  dcache_line_size, reg, tmp
#ifdef CONFIG_CPU_V7M
    movw    \tmp, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_CTR
    movt    \tmp, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_CTR
    ldr     \tmp, [\tmp]
#else
    mrc p15, 0, \tmp, c0, c0, 1     @ read ctr
#endif
    lsr \tmp, \tmp, #16
    and \tmp, \tmp, #0xf        @ cache line size encoding
    mov \reg, #4            @ bytes per word
    mov \reg, \reg, lsl \tmp        @ actual cache line size
    .endm
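
/*
 * Worked example (editorial, per the ARMv7 CTR layout): DminLine in
 * CTR[19:16] is log2 of the line length in words, so a field value
 * of 4 gives \reg = 4 << 4 = 64-byte D-cache lines, typical of
 * Cortex-A cores.
 */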

/*
 * icache_line_size - get the minimum I-cache line size from the CTR
 * register on ARMv7.
 */
    .macro  icache_line_size, reg, tmp
#ifdef CONFIG_CPU_V7M
    movw    \tmp, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_CTR
    movt    \tmp, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_CTR
    ldr     \tmp, [\tmp]
#else
    mrc p15, 0, \tmp, c0, c0, 1     @ read ctr
#endif
    and \tmp, \tmp, #0xf        @ cache line size encoding
    mov \reg, #4            @ bytes per word
    mov \reg, \reg, lsl \tmp        @ actual cache line size
    .endm
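
/*
 * Worked example (editorial): IminLine lives in CTR[3:0], so unlike
 * the D-cache case no shift is needed before masking; a field value
 * of 3 gives 4 << 3 = 32-byte I-cache lines.
 */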

/*
 * Sanity check the PTE configuration for the code below - which makes
 * certain assumptions about how these bits are laid out.
 */
#ifdef CONFIG_MMU
#if L_PTE_SHARED != PTE_EXT_SHARED
#error PTE shared bit mismatch
#endif
#if !defined (CONFIG_ARM_LPAE) && \
    (L_PTE_XN+L_PTE_USER+L_PTE_RDONLY+L_PTE_DIRTY+L_PTE_YOUNG+\
     L_PTE_PRESENT) > L_PTE_SHARED
#error Invalid Linux PTE bit settings
#endif
#endif  /* CONFIG_MMU */

/*
 * The ARMv6 and ARMv7 set_pte_ext translation function.
 *
 * Permission translation:
 *  YUWD  APX AP1 AP0   SVC     User
 *  0xxx   0   0   0    no acc  no acc
 *  100x   1   0   1    r/o     no acc
 *  10x0   1   0   1    r/o     no acc
 *  1011   0   0   1    r/w     no acc
 *  110x   1   1   1    r/o     r/o
 *  11x0   1   1   1    r/o     r/o
 *  1111   0   1   1    r/w     r/w
 */
    .macro  armv6_mt_table pfx
\pfx\()_mt_table:
    .long   0x00                                            @ L_PTE_MT_UNCACHED
    .long   PTE_EXT_TEX(1)                                  @ L_PTE_MT_BUFFERABLE
    .long   PTE_CACHEABLE                                   @ L_PTE_MT_WRITETHROUGH
    .long   PTE_CACHEABLE | PTE_BUFFERABLE                  @ L_PTE_MT_WRITEBACK
    .long   PTE_BUFFERABLE                                  @ L_PTE_MT_DEV_SHARED
    .long   0x00                                            @ unused
    .long   0x00                                            @ L_PTE_MT_MINICACHE (not present)
    .long   PTE_EXT_TEX(1) | PTE_CACHEABLE | PTE_BUFFERABLE @ L_PTE_MT_WRITEALLOC
    .long   0x00                                            @ unused
    .long   PTE_EXT_TEX(1)                                  @ L_PTE_MT_DEV_WC
    .long   0x00                                            @ unused
    .long   PTE_CACHEABLE | PTE_BUFFERABLE                  @ L_PTE_MT_DEV_CACHED
    .long   PTE_EXT_TEX(2)                                  @ L_PTE_MT_DEV_NONSHARED
    .long   0x00                                            @ unused
    .long   0x00                                            @ unused
    .long   PTE_CACHEABLE | PTE_BUFFERABLE | PTE_EXT_APX    @ L_PTE_MT_VECTORS
    .endm
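
/*
 * Note (editorial): the L_PTE_MT_* values occupy bits 2..5 of the
 * Linux PTE, so the field masked with L_PTE_MT_MASK is already a
 * byte offset into the word table above; armv6_set_pte_ext below can
 * index it directly with ldr r2, [ip, r2] without shifting.
 */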

    .macro  armv6_set_pte_ext pfx
    str r1, [r0], #2048         @ linux version

    bic r3, r1, #0x000003fc
    bic r3, r3, #PTE_TYPE_MASK
    orr r3, r3, r2
    orr r3, r3, #PTE_EXT_AP0 | 2

    adr ip, \pfx\()_mt_table
    and r2, r1, #L_PTE_MT_MASK
    ldr r2, [ip, r2]

    eor r1, r1, #L_PTE_DIRTY
    tst r1, #L_PTE_DIRTY|L_PTE_RDONLY
    orrne   r3, r3, #PTE_EXT_APX

    tst r1, #L_PTE_USER
    orrne   r3, r3, #PTE_EXT_AP1
    tstne   r3, #PTE_EXT_APX

    @ user read-only -> kernel read-only
    bicne   r3, r3, #PTE_EXT_AP0

    tst r1, #L_PTE_XN
    orrne   r3, r3, #PTE_EXT_XN

    eor r3, r3, r2

    tst r1, #L_PTE_YOUNG
    tstne   r1, #L_PTE_PRESENT
    moveq   r3, #0
    tstne   r1, #L_PTE_NONE
    movne   r3, #0

    str r3, [r0]
    mcr p15, 0, r0, c7, c10, 1      @ flush_pte
    .endm
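
/*
 * Worked example (editorial): for a present, young, dirty, writable
 * page, the eor above clears L_PTE_DIRTY in r1, so the following tst
 * of L_PTE_DIRTY|L_PTE_RDONLY finds both bits clear and APX stays 0
 * (read/write).  A clean page leaves L_PTE_DIRTY set after the eor,
 * so APX is set and the mapping is read-only until the first write
 * faults and marks it dirty.
 */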


/*
 * The ARMv3, ARMv4 and ARMv5 set_pte_ext translation function,
 * covering most CPUs except Xscale and Xscale 3.
 *
 * Permission translation:
 *  YUWD   AP    SVC     User
 *  0xxx  0x00   no acc  no acc
 *  100x  0x00   r/o     no acc
 *  10x0  0x00   r/o     no acc
 *  1011  0x55   r/w     no acc
 *  110x  0xaa   r/w     r/o
 *  11x0  0xaa   r/w     r/o
 *  1111  0xff   r/w     r/w
 */
    .macro  armv3_set_pte_ext wc_disable=1
    str r1, [r0], #2048         @ linux version

    eor r3, r1, #L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY

    bic r2, r1, #PTE_SMALL_AP_MASK  @ keep C, B bits
    bic r2, r2, #PTE_TYPE_MASK
    orr r2, r2, #PTE_TYPE_SMALL

    tst r3, #L_PTE_USER         @ user?
    orrne   r2, r2, #PTE_SMALL_AP_URO_SRW

    tst r3, #L_PTE_RDONLY | L_PTE_DIRTY @ write and dirty?
    orreq   r2, r2, #PTE_SMALL_AP_UNO_SRW

    tst r3, #L_PTE_PRESENT | L_PTE_YOUNG    @ present and young?
    movne   r2, #0

    .if \wc_disable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
    tst r2, #PTE_CACHEABLE
    bicne   r2, r2, #PTE_BUFFERABLE
#endif
    .endif
    str r2, [r0]        @ hardware version
    .endm
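
/*
 * Worked example (editorial): the eor above inverts the PRESENT,
 * YOUNG and DIRTY bits in r3, turning "bit set" tests into "bit
 * clear" tests.  For a present and young page both inverted bits are
 * zero, the tst of L_PTE_PRESENT | L_PTE_YOUNG sets Z, and the movne
 * that would zero the hardware entry is skipped.
 */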


/*
 * Xscale set_pte_ext translation, split into two halves to cope
 * with work-arounds.  r3 must be preserved by code between these
 * two macros.
 *
 * Permission translation:
 *  YUWD  AP    SVC     User
 *  0xxx  00    no acc  no acc
 *  100x  00    r/o     no acc
 *  10x0  00    r/o     no acc
 *  1011  01    r/w     no acc
 *  110x  10    r/w     r/o
 *  11x0  10    r/w     r/o
 *  1111  11    r/w     r/w
 */
    .macro  xscale_set_pte_ext_prologue
    str r1, [r0]            @ linux version

    eor r3, r1, #L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY

    bic r2, r1, #PTE_SMALL_AP_MASK  @ keep C, B bits
    orr r2, r2, #PTE_TYPE_EXT       @ extended page

    tst r3, #L_PTE_USER         @ user?
    orrne   r2, r2, #PTE_EXT_AP_URO_SRW @ yes -> user r/o, system r/w

    tst r3, #L_PTE_RDONLY | L_PTE_DIRTY @ write and dirty?
    orreq   r2, r2, #PTE_EXT_AP_UNO_SRW @ yes -> user n/a, system r/w
                        @ combined with user -> user r/w
    .endm

    .macro  xscale_set_pte_ext_epilogue
    tst r3, #L_PTE_PRESENT | L_PTE_YOUNG    @ present and young?
    movne   r2, #0              @ no -> fault

    str r2, [r0, #2048]!        @ hardware version
    mov ip, #0
    mcr p15, 0, r0, c7, c10, 1      @ clean L1 D line
    mcr p15, 0, ip, c7, c10, 4      @ data write barrier
    .endm
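
/*
 * Usage sketch (editorial; the fixup shown is hypothetical): the
 * split lets CPU-specific erratum code run between the two halves
 * while r3 carries the permission-test value:
 *
 *	xscale_set_pte_ext_prologue
 *	@ ... PTE fixups for this core go here; must preserve r3 ...
 *	xscale_set_pte_ext_epilogue
 */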

.macro define_processor_functions name:req, dabort:req, pabort:req, nommu=0, suspend=0, bugs=0
/*
 * If we are building for big.LITTLE with branch predictor hardening,
 * we need the processor function tables to remain available after boot.
 */
#if defined(CONFIG_BIG_LITTLE) && defined(CONFIG_HARDEN_BRANCH_PREDICTOR)
    .section ".rodata"
#endif
    .type   \name\()_processor_functions, #object
    .align 2
ENTRY(\name\()_processor_functions)
    .word   \dabort
    .word   \pabort
    .word   cpu_\name\()_proc_init
    .word   \bugs
    .word   cpu_\name\()_proc_fin
    .word   cpu_\name\()_reset
    .word   cpu_\name\()_do_idle
    .word   cpu_\name\()_dcache_clean_area
    .word   cpu_\name\()_switch_mm

    .if \nommu
    .word   0
    .else
    .word   cpu_\name\()_set_pte_ext
    .endif

    .if \suspend
    .word   cpu_\name\()_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
    .word   cpu_\name\()_do_suspend
    .word   cpu_\name\()_do_resume
#else
    .word   0
    .word   0
#endif
    .else
    .word   0
    .word   0
    .word   0
    .endif

    .size   \name\()_processor_functions, . - \name\()_processor_functions
#if defined(CONFIG_BIG_LITTLE) && defined(CONFIG_HARDEN_BRANCH_PREDICTOR)
    .previous
#endif
.endm
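
/*
 * Usage sketch (as in proc-v7.S; exact arguments vary by kernel
 * version):
 *
 *	define_processor_functions v7, dabort=v7_early_abort, \
 *		pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
 *
 * which emits the cpu_v7_* function table consumed by the generic
 * processor glue.
 */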

.macro define_cache_functions name:req
    .align 2
    .type   \name\()_cache_fns, #object
ENTRY(\name\()_cache_fns)
    .long   \name\()_flush_icache_all
    .long   \name\()_flush_kern_cache_all
    .long   \name\()_flush_kern_cache_louis
    .long   \name\()_flush_user_cache_all
    .long   \name\()_flush_user_cache_range
    .long   \name\()_coherent_kern_range
    .long   \name\()_coherent_user_range
    .long   \name\()_flush_kern_dcache_area
    .long   \name\()_dma_map_area
    .long   \name\()_dma_unmap_area
    .long   \name\()_dma_flush_range
    .size   \name\()_cache_fns, . - \name\()_cache_fns
.endm
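
/*
 * Usage sketch (illustrative): cache-v7.S instantiates this as
 *
 *	define_cache_functions v7
 *
 * producing v7_cache_fns; the slot order above must match
 * struct cpu_cache_fns in <asm/cacheflush.h>.
 */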

.macro define_tlb_functions name:req, flags_up:req, flags_smp
    .type   \name\()_tlb_fns, #object
    .align 2
ENTRY(\name\()_tlb_fns)
    .long   \name\()_flush_user_tlb_range
    .long   \name\()_flush_kern_tlb_range
    .ifnb \flags_smp
        ALT_SMP(.long   \flags_smp )
        ALT_UP(.long    \flags_up )
    .else
        .long   \flags_up
    .endif
    .size   \name\()_tlb_fns, . - \name\()_tlb_fns
.endm
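
/*
 * Usage sketch (illustrative): tlb-v7.S emits
 *
 *	define_tlb_functions v7wbi, v7wbi_tlb_flags_up, v7wbi_tlb_flags_smp
 *
 * The ALT_SMP/ALT_UP pair lets a single kernel image patch in the
 * correct TLB flags at boot depending on whether it runs SMP or UP.
 */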

/*
 * globl_equ - define \x as a global symbol equal to \y.
 */
.macro globl_equ x, y
    .globl  \x
    .equ    \x, \y
.endm

/*
 * initfn - emit the offset of \func from \base, a position-independent
 * reference to an init function.
 */
.macro  initfn, func, base
    .long   \func - \base
.endm
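
/*
 * Usage sketch (names taken from proc-v7.S, shown for illustration
 * only):
 *
 *	globl_equ	cpu_ca8_proc_init, cpu_v7_proc_init
 *	initfn		__v7_setup, __v7_proc_info
 */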

    /*
     * Macro to calculate the log2 size for the protection region
     * registers.  This calculates rd = log2(size) - 1.  tmp must
     * not be the same register as rd.
     */
.macro  pr_sz, rd, size, tmp
    mov \tmp, \size, lsr #12
    mov \rd, #11
1:  movs    \tmp, \tmp, lsr #1
    addne   \rd, \rd, #1
    bne 1b
.endm
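
    /*
     * Worked example (editorial): for a 64KiB region, \size >> 12 = 16,
     * and the loop shifts it down to zero (16 -> 8 -> 4 -> 2 -> 1 -> 0),
     * incrementing \rd on each of the four non-zero results, so
     * \rd = 11 + 4 = 15 = log2(0x10000) - 1.  A 4KiB region takes the
     * loop once and leaves \rd at the starting value 11.
     */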

    /*
     * Macro to generate a protection region register value
     * given a pre-masked address, size, and enable bit.
     * Corrupts size.
     */
.macro  pr_val, dest, addr, size, enable
    pr_sz   \dest, \size, \size     @ calculate log2(size) - 1
    orr \dest, \addr, \dest, lsl #1 @ mask in the region size
    orr \dest, \dest, \enable
.endm
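
    /*
     * Worked example (editorial, for the region register layout these
     * macros target: bit 0 = enable, bits 5..1 = size field): a 64KiB
     * region at pre-masked base 0x20000 with \enable = 1 gives
     * pr_sz = 15, so
     *
     *	\dest = 0x20000 | (15 << 1) | 1 = 0x2001f
     */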