/*
 * ARM processor support macros.
 *
 * NOTE(review): this file was captured from an LXR source-browser page;
 * the navigation chrome has been removed here.
 */
/*
 * We need asm/asm-offsets.h for:
 *  VMA_VM_MM
 *  VMA_VM_FLAGS
 *  VM_EXEC
 */
0007 #include <asm/asm-offsets.h>
0008 #include <asm/thread_info.h>
0009 
0010 #ifdef CONFIG_CPU_V7M
0011 #include <asm/v7m.h>
0012 #endif
0013 
/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 *
 * rd: destination register
 * rn: register holding the vma pointer (preserved)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]		@ rd = vma->vm_mm
	.endm
0020 
/*
 * vma_vm_flags - get vma->vm_flags
 *
 * rd: destination register
 * rn: register holding the vma pointer (preserved)
 */
	.macro	vma_vm_flags, rd, rn
	ldr	\rd, [\rn, #VMA_VM_FLAGS]	@ rd = vma->vm_flags
	.endm
0027 
/*
 * tsk_mm - get the active mm from a thread_info pointer
 *
 * rd: destination register (receives task->active_mm)
 * rn: register holding the thread_info pointer
 */
	.macro	tsk_mm, rd, rn
	ldr	\rd, [\rn, #TI_TASK]		@ rd = thread_info->task
	ldr	\rd, [\rd, #TSK_ACTIVE_MM]	@ rd = task->active_mm
	.endm
0032 
/*
 * act_mm - get current->active_mm
 *
 * rd: destination register
 *
 * thread_info lives at the base of the kernel stack, so it is found
 * by masking the low bits of sp.  8128 (0x1fc0) and 63 (0x3f)
 * together clear the low 13 bits (0x1fff), split across two BICs
 * because 0x1fff is not encodable as a single ARM immediate.
 * NOTE(review): this assumes 8K kernel stacks (THREAD_SIZE == 8192)
 * — confirm against the current THREAD_SIZE definition.
 */
	.macro	act_mm, rd
	bic	\rd, sp, #8128			@ clear sp bits 12..6 ...
	bic	\rd, \rd, #63			@ ... and 5..0 -> thread_info base
	ldr	\rd, [\rd, #TI_TASK]		@ rd = thread_info->task
	ldr	\rd, [\rd, #TSK_ACTIVE_MM]	@ rd = task->active_mm
	.endm
0042 
/*
 * mmid - get context id from mm pointer (mm->context.id)
 *
 * rd: destination register (receives the low word of the 64-bit id)
 * rn: register holding the mm pointer (preserved)
 *
 * note, this field is 64bit, so in big-endian the two words are
 * swapped too: the least-significant word sits at offset +4 there.
 */
	.macro	mmid, rd, rn
#ifdef __ARMEB__
	ldr	\rd, [\rn, #MM_CONTEXT_ID + 4]	@ BE: low word is the second word
#else
	ldr	\rd, [\rn, #MM_CONTEXT_ID]	@ LE: low word comes first
#endif
	.endm
0054 
/*
 * asid - mask the ASID from the context ID
 *
 * rd: destination register (receives context_id & 0xff)
 * rn: register holding the context ID (preserved)
 */
	.macro	asid, rd, rn
	and	\rd, \rn, #255			@ ASID is the low 8 bits
	.endm
0061 
/*
 * crval - emit a pair of control-register words: bits to clear,
 * followed by bits to set.  The set-word differs depending on
 * whether an MMU is configured (\mmuset) or not (\ucset).
 */
	.macro	crval, clear, mmuset, ucset
#ifdef CONFIG_MMU
	.word	\clear
	.word	\mmuset
#else
	.word	\clear
	.word	\ucset
#endif
	.endm
0071 
/*
 * dcache_line_size - get the minimum D-cache line size from the CTR
 * register on ARMv7.
 *
 * reg: destination register (line size in bytes)
 * tmp: scratch register (corrupted)
 */
	.macro	dcache_line_size, reg, tmp
#ifdef CONFIG_CPU_V7M
	movw	\tmp, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	movt	\tmp, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	ldr	\tmp, [\tmp]			@ v7-M: CTR is memory mapped
#else
	mrc	p15, 0, \tmp, c0, c0, 1		@ read ctr
#endif
	lsr	\tmp, \tmp, #16			@ DminLine is at bits 19:16
	and	\tmp, \tmp, #0xf		@ cache line size encoding
	mov	\reg, #4			@ bytes per word
	mov	\reg, \reg, lsl \tmp		@ actual cache line size
	.endm
0089 
/*
 * icache_line_size - get the minimum I-cache line size from the CTR
 * register on ARMv7.
 *
 * reg: destination register (line size in bytes)
 * tmp: scratch register (corrupted)
 */
	.macro	icache_line_size, reg, tmp
#ifdef CONFIG_CPU_V7M
	movw	\tmp, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	movt	\tmp, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	ldr	\tmp, [\tmp]			@ v7-M: CTR is memory mapped
#else
	mrc	p15, 0, \tmp, c0, c0, 1		@ read ctr
#endif
	and	\tmp, \tmp, #0xf		@ IminLine: cache line size encoding
	mov	\reg, #4			@ bytes per word
	mov	\reg, \reg, lsl \tmp		@ actual cache line size
	.endm
0106 
/*
 * Sanity check the PTE configuration for the code below - which makes
 * certain assumptions about how these bits are laid out.
 */
#ifdef CONFIG_MMU
#if L_PTE_SHARED != PTE_EXT_SHARED
#error PTE shared bit mismatch
#endif
#if !defined (CONFIG_ARM_LPAE) && \
	(L_PTE_XN+L_PTE_USER+L_PTE_RDONLY+L_PTE_DIRTY+L_PTE_YOUNG+\
	 L_PTE_PRESENT) > L_PTE_SHARED
#error Invalid Linux PTE bit settings
#endif
#endif	/* CONFIG_MMU */
0121 
/*
 * The ARMv6 and ARMv7 set_pte_ext translation function.
 *
 * Permission translation:
 *  YUWD  APX AP1 AP0	SVC	User
 *  0xxx   0   0   0	no acc	no acc
 *  100x   1   0   1	r/o	no acc
 *  10x0   1   0   1	r/o	no acc
 *  1011   0   0   1	r/w	no acc
 *  110x   1   1   1	r/o	r/o
 *  11x0   1   1   1	r/o	r/o
 *  1111   0   1   1	r/w	r/w
 */

/*
 * armv6_mt_table - emit the 16-entry memory-type lookup table,
 * indexed by the L_PTE_MT_* value, mapping each Linux memory type
 * to the hardware TEX/C/B (and APX for vectors) PTE bits.
 */
	.macro	armv6_mt_table pfx
\pfx\()_mt_table:
	.long	0x00						@ L_PTE_MT_UNCACHED
	.long	PTE_EXT_TEX(1)					@ L_PTE_MT_BUFFERABLE
	.long	PTE_CACHEABLE					@ L_PTE_MT_WRITETHROUGH
	.long	PTE_CACHEABLE | PTE_BUFFERABLE			@ L_PTE_MT_WRITEBACK
	.long	PTE_BUFFERABLE					@ L_PTE_MT_DEV_SHARED
	.long	0x00						@ unused
	.long	0x00						@ L_PTE_MT_MINICACHE (not present)
	.long	PTE_EXT_TEX(1) | PTE_CACHEABLE | PTE_BUFFERABLE	@ L_PTE_MT_WRITEALLOC
	.long	0x00						@ unused
	.long	PTE_EXT_TEX(1)					@ L_PTE_MT_DEV_WC
	.long	0x00						@ unused
	.long	PTE_CACHEABLE | PTE_BUFFERABLE			@ L_PTE_MT_DEV_CACHED
	.long	PTE_EXT_TEX(2)					@ L_PTE_MT_DEV_NONSHARED
	.long	0x00						@ unused
	.long	0x00						@ unused
	.long	PTE_CACHEABLE | PTE_BUFFERABLE | PTE_EXT_APX	@ L_PTE_MT_VECTORS
	.endm
0154 
/*
 * armv6_set_pte_ext - store the Linux PTE, then build and store the
 * hardware PTE 2048 bytes above it (the h/w table follows the Linux
 * table in the same page).
 *
 * In:    r0 = Linux PTE address, r1 = Linux PTE value,
 *        r2 = CPU-specific extra hardware bits
 * Corrupts: r1, r2, r3, ip
 * See the YUWD/APX/AP permission table above for the AP encoding.
 */
	.macro	armv6_set_pte_ext pfx
	str	r1, [r0], #2048			@ linux version

	bic	r3, r1, #0x000003fc		@ drop the Linux status bits
	bic	r3, r3, #PTE_TYPE_MASK
	orr	r3, r3, r2			@ merge CPU-specific bits
	orr	r3, r3, #PTE_EXT_AP0 | 2	@ small ext page, AP0 set

	adr	ip, \pfx\()_mt_table		@ look up TEX/C/B for the
	and	r2, r1, #L_PTE_MT_MASK		@ Linux memory type
	ldr	r2, [ip, r2]

	eor	r1, r1, #L_PTE_DIRTY		@ invert D so the test below
	tst	r1, #L_PTE_DIRTY|L_PTE_RDONLY	@ means "read-only or clean"
	orrne	r3, r3, #PTE_EXT_APX		@ -> APX: no kernel write

	tst	r1, #L_PTE_USER
	orrne	r3, r3, #PTE_EXT_AP1		@ user accessible
	tstne	r3, #PTE_EXT_APX

	@ user read-only -> kernel read-only
	bicne	r3, r3, #PTE_EXT_AP0

	tst	r1, #L_PTE_XN
	orrne	r3, r3, #PTE_EXT_XN		@ execute never

	eor	r3, r3, r2			@ fold in the memory-type bits

	tst	r1, #L_PTE_YOUNG		@ present + young + !none,
	tstne	r1, #L_PTE_PRESENT		@ otherwise store a faulting
	moveq	r3, #0				@ (zero) hardware entry
	tstne	r1, #L_PTE_NONE
	movne	r3, #0

	str	r3, [r0]			@ hardware version
	mcr	p15, 0, r0, c7, c10, 1		@ flush_pte
	.endm
0192 
0193 
/*
 * The ARMv3, ARMv4 and ARMv5 set_pte_ext translation function,
 * covering most CPUs except Xscale and Xscale 3.
 *
 * In:    r0 = Linux PTE address, r1 = Linux PTE value
 * Corrupts: r2, r3
 * wc_disable: when 1 (default), strip the bufferable bit from
 * cacheable mappings if write-through D-cache is configured.
 *
 * Permission translation:
 *  YUWD   AP	SVC	User
 *  0xxx  0x00	no acc	no acc
 *  100x  0x00	r/o	no acc
 *  10x0  0x00	r/o	no acc
 *  1011  0x55	r/w	no acc
 *  110x  0xaa	r/w	r/o
 *  11x0  0xaa	r/w	r/o
 *  1111  0xff	r/w	r/w
 */
	.macro	armv3_set_pte_ext wc_disable=1
	str	r1, [r0], #2048			@ linux version

	@ invert P/Y/D so the tests below check for the "bad" state
	eor	r3, r1, #L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY

	bic	r2, r1, #PTE_SMALL_AP_MASK	@ keep C, B bits
	bic	r2, r2, #PTE_TYPE_MASK
	orr	r2, r2, #PTE_TYPE_SMALL

	tst	r3, #L_PTE_USER			@ user?
	orrne	r2, r2, #PTE_SMALL_AP_URO_SRW

	tst	r3, #L_PTE_RDONLY | L_PTE_DIRTY	@ write and dirty?
	orreq	r2, r2, #PTE_SMALL_AP_UNO_SRW

	tst	r3, #L_PTE_PRESENT | L_PTE_YOUNG	@ present and young?
	movne	r2, #0				@ no -> faulting entry

	.if	\wc_disable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	tst	r2, #PTE_CACHEABLE
	bicne	r2, r2, #PTE_BUFFERABLE		@ no write-combine on cacheable
#endif
	.endif
	str	r2, [r0]		@ hardware version
	.endm
0234 
0235 
/*
 * Xscale set_pte_ext translation, split into two halves to cope
 * with work-arounds.  r3 must be preserved by code between these
 * two macros.
 *
 * In:    r0 = Linux PTE address, r1 = Linux PTE value
 * Out:   r2 = partial hardware PTE, r3 = inverted P/Y/D test value
 *
 * Permission translation:
 *  YUWD  AP	SVC	User
 *  0xxx  00	no acc	no acc
 *  100x  00	r/o	no acc
 *  10x0  00	r/o	no acc
 *  1011  01	r/w	no acc
 *  110x  10	r/w	r/o
 *  11x0  10	r/w	r/o
 *  1111  11	r/w	r/w
 */
	.macro	xscale_set_pte_ext_prologue
	str	r1, [r0]			@ linux version

	@ invert P/Y/D so the tests below check for the "bad" state
	eor	r3, r1, #L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY

	bic	r2, r1, #PTE_SMALL_AP_MASK	@ keep C, B bits
	orr	r2, r2, #PTE_TYPE_EXT		@ extended page

	tst	r3, #L_PTE_USER			@ user?
	orrne	r2, r2, #PTE_EXT_AP_URO_SRW	@ yes -> user r/o, system r/w

	tst	r3, #L_PTE_RDONLY | L_PTE_DIRTY	@ write and dirty?
	orreq	r2, r2, #PTE_EXT_AP_UNO_SRW	@ yes -> user n/a, system r/w
						@ combined with user -> user r/w
	.endm
0266 
/*
 * xscale_set_pte_ext_epilogue - finish the Xscale PTE store started
 * by xscale_set_pte_ext_prologue: fault non-present/old entries,
 * store the hardware PTE 2048 bytes up, then clean and drain.
 *
 * In:    r0 = Linux PTE address, r2/r3 from the prologue
 * Corrupts: r0 (advanced by 2048), r2, ip
 */
	.macro	xscale_set_pte_ext_epilogue
	tst	r3, #L_PTE_PRESENT | L_PTE_YOUNG	@ present and young?
	movne	r2, #0				@ no -> fault

	str	r2, [r0, #2048]!	@ hardware version
	mov	ip, #0
	mcr	p15, 0, r0, c7, c10, 1		@ clean L1 D line
	mcr	p15, 0, ip, c7, c10, 4		@ data write barrier
	.endm
0276 
/*
 * define_processor_functions - emit the processor_functions table
 * for CPU \name, pointing at the cpu_\name\()_* entry points.
 *
 * dabort/pabort: data/prefetch abort handlers
 * nommu=1:   emit 0 for set_pte_ext (no MMU support)
 * suspend=1: emit the suspend_size/do_suspend/do_resume slots
 *            (the latter two only when CONFIG_ARM_CPU_SUSPEND)
 */
.macro define_processor_functions name:req, dabort:req, pabort:req, nommu=0, suspend=0
	.type	\name\()_processor_functions, #object
	.align 2
ENTRY(\name\()_processor_functions)
	.word	\dabort
	.word	\pabort
	.word	cpu_\name\()_proc_init
	.word	cpu_\name\()_proc_fin
	.word	cpu_\name\()_reset
	.word	cpu_\name\()_do_idle
	.word	cpu_\name\()_dcache_clean_area
	.word	cpu_\name\()_switch_mm

	.if \nommu
	.word	0				@ no set_pte_ext without MMU
	.else
	.word	cpu_\name\()_set_pte_ext
	.endif

	.if \suspend
	.word	cpu_\name\()_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	.word	cpu_\name\()_do_suspend
	.word	cpu_\name\()_do_resume
#else
	.word	0
	.word	0
#endif
	.else
	.word	0
	.word	0
	.word	0
	.endif

	.size	\name\()_processor_functions, . - \name\()_processor_functions
.endm
0313 
/*
 * define_cache_functions - emit the cpu_cache_fns table for \name,
 * pointing at the \name\()_* cache maintenance routines.
 */
.macro define_cache_functions name:req
	.align 2
	.type	\name\()_cache_fns, #object
ENTRY(\name\()_cache_fns)
	.long	\name\()_flush_icache_all
	.long	\name\()_flush_kern_cache_all
	.long	\name\()_flush_kern_cache_louis
	.long	\name\()_flush_user_cache_all
	.long	\name\()_flush_user_cache_range
	.long	\name\()_coherent_kern_range
	.long	\name\()_coherent_user_range
	.long	\name\()_flush_kern_dcache_area
	.long	\name\()_dma_map_area
	.long	\name\()_dma_unmap_area
	.long	\name\()_dma_flush_range
	.size	\name\()_cache_fns, . - \name\()_cache_fns
.endm
0331 
/*
 * define_tlb_functions - emit the cpu_tlb_fns table for \name.
 * The flags word is selected at runtime between \flags_smp and
 * \flags_up when both are given, otherwise \flags_up is used.
 */
.macro define_tlb_functions name:req, flags_up:req, flags_smp
	.type	\name\()_tlb_fns, #object
ENTRY(\name\()_tlb_fns)
	.long	\name\()_flush_user_tlb_range
	.long	\name\()_flush_kern_tlb_range
	.ifnb \flags_smp
		ALT_SMP(.long	\flags_smp )
		ALT_UP(.long	\flags_up )
	.else
		.long	\flags_up
	.endif
	.size	\name\()_tlb_fns, . - \name\()_tlb_fns
.endm
0345 
/*
 * globl_equ - define \x as a global symbol equal to \y
 * (used to alias one processor's entry points to another's).
 */
.macro globl_equ x, y
	.globl	\x
	.equ	\x, \y
.endm
0350 
/*
 * initfn - emit \func as an offset relative to \base
 * (position-independent function reference).
 */
.macro	initfn, func, base
	.long	\func - \base
.endm
0354 
0355     /*
0356      * Macro to calculate the log2 size for the protection region
0357      * registers. This calculates rd = log2(size) - 1.  tmp must
0358      * not be the same register as rd.
0359      */
0360 .macro  pr_sz, rd, size, tmp
0361     mov \tmp, \size, lsr #12
0362     mov \rd, #11
0363 1:  movs    \tmp, \tmp, lsr #1
0364     addne   \rd, \rd, #1
0365     bne 1b
0366 .endm
0367 
0368     /*
0369      * Macro to generate a protection region register value
0370      * given a pre-masked address, size, and enable bit.
0371      * Corrupts size.
0372      */
0373 .macro  pr_val, dest, addr, size, enable
0374     pr_sz   \dest, \size, \size     @ calculate log2(size) - 1
0375     orr \dest, \addr, \dest, lsl #1 @ mask in the region size
0376     orr \dest, \dest, \enable
0377 .endm