/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants. Some
 * are inline in tlbflush.h
 *
 *  - tlbil_va
 *  - tlbil_pid
 *  - tlbil_all
 *  - tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu)
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

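/*
 * As a reading aid: these entry points are called from C. A rough
 * sketch of how the C side sees them, inferred from the register
 * usage in this file (the authoritative prototypes live in
 * tlbflush.h and may differ per platform):
 *
 *   void _tlbil_all(void);
 *   void _tlbil_pid(unsigned int pid);
 *   void __tlbil_va(unsigned long address, unsigned int pid);
 *   void _tlbivax_bcast(unsigned long address, unsigned int pid);
 *
 * The Book3E variants further down also take a page-size (tsize)
 * and an indirect-entry flag (ind) in r5/r6.
 */
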
#if defined(CONFIG_40x)

/*
 * 40x implementation needs only tlbil_va
 */
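/*
 * Arguments, as used below: r3 = virtual address to invalidate,
 * r4 = PID of the address space it belongs to.
 */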
_GLOBAL(__tlbil_va)
    /* We run the search with interrupts disabled because we have to
     * change the PID, and we must not be preempted while the
     * temporary PID is in place.
     */
    mfmsr   r5
    mfspr   r6,SPRN_PID
    wrteei  0
    mtspr   SPRN_PID,r4
    tlbsx.  r3, 0, r3
    mtspr   SPRN_PID,r6
    wrtee   r5
    bne 1f
    sync
    /* There are only 64 TLB entries, so r3 < 64, which means bit 25 is
     * clear. Since 25 is the V bit in the TLB_TAG, loading this value
     * will invalidate the TLB entry. */
    tlbwe   r3, r3, TLB_TAG
    isync
1:  blr

#elif defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
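/*
 * As with the 40x version, r3 = virtual address and r4 = PID, but
 * here the PID is fed to tlbsx through the STID field of MMUCR
 * rather than through SPRN_PID.
 */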
_GLOBAL(__tlbil_va)
    mfspr   r5,SPRN_MMUCR
    mfmsr   r10

    /*
     * We write 16 bits of STID since the 47x supports that many; we
     * should never be passed out-of-bounds values on 440 (hopefully).
     */
    rlwimi  r5,r4,0,16,31

    /* We have to run the search with interrupts disabled, otherwise
     * an interrupt which causes a TLB miss can clobber the MMUCR
     * between the mtspr and the tlbsx.
     *
     * Critical and Machine Check interrupts take care of saving
     * and restoring MMUCR, so only normal interrupts have to be
     * taken care of.
     */
    wrteei  0
    mtspr   SPRN_MMUCR,r5
    tlbsx.  r6,0,r3
    bne 10f
    sync
#ifndef CONFIG_PPC_47x
    /* On 440 there are only 64 TLB entries, so r3 < 64, which means
     * bit 22 is clear. Since 22 is the V bit in the TLB_PAGEID, loading
     * this value will invalidate the TLB entry.
     */
    tlbwe   r6,r6,PPC44x_TLB_PAGEID
#else
    oris    r7,r6,0x8000    /* specify way explicitly */
    clrrwi  r4,r3,12    /* get an EPN for the hashing with V = 0 */
    ori r4,r4,PPC47x_TLBE_SIZE
    tlbwe   r4,r7,0     /* write it */
#endif /* !CONFIG_PPC_47x */
    isync
10: wrtee   r10
    blr

_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
#ifndef CONFIG_PPC_47x
    li  r3,0
    sync

    /* Load high watermark */
    lis r4,tlb_44x_hwater@ha
    lwz r5,tlb_44x_hwater@l(r4)

1:  tlbwe   r3,r3,PPC44x_TLB_PAGEID
    addi    r3,r3,1
    cmpw    0,r3,r5
    ble 1b

    isync
    blr
#else
    /* 476 variant. There's no simple way to do this; hopefully we
     * can limit the number of such full invalidations.
     */
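    /*
     * A rough C rendition of the sweep below, as a reading aid only
     * (tlbre/tlbwe have no C equivalents; the boltmap layout, one
     * bit per set covering way 0 only, is inferred from the code):
     *
     *   for (set = 0; set < 256; set++) {
     *       for (way = 0; way < 4; way++) {
     *           if (way == 0 && boltmap_bit(set))
     *               continue;  // bolted entry, keep it
     *           entry = tlbre(set, way);
     *           if (entry & PPC47x_TLB0_VALID)
     *               tlbwe(set, way, entry & ~PPC47x_TLB0_VALID);
     *       }
     *   }
     */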
    mfmsr   r11     /* Interrupts off */
    wrteei  0
    li  r3,-1       /* Current set */
    lis r10,tlb_47x_boltmap@h
    ori r10,r10,tlb_47x_boltmap@l
    lis r7,0x8000   /* Specify way explicitly */

    b   9f      /* For each set */

1:  li  r9,4        /* Number of ways */
    li  r4,0        /* Current way */
    li  r6,0        /* Default entry value 0 */
    andi.   r0,r8,1     /* Check if way 0 is bolted */
    mtctr   r9      /* Load way counter */
    bne-    3f      /* Bolted, skip loading it */

2:  /* For each way */
    or  r5,r3,r4    /* Make way|index for tlbre */
    rlwimi  r5,r5,16,8,15   /* Copy index into position */
    tlbre   r6,r5,0     /* Read entry */
3:  addis   r4,r4,0x2000    /* Next way */
    andi.   r0,r6,PPC47x_TLB0_VALID /* Valid entry ? */
    beq 4f      /* Nope, skip it */
    rlwimi  r7,r5,0,1,2 /* Insert way number */
    rlwinm  r6,r6,0,21,19   /* Clear V */
    tlbwe   r6,r7,0     /* Write it */
4:  bdnz    2b      /* Loop for each way */
    srwi    r8,r8,1     /* Next boltmap bit */
9:  cmpwi   cr1,r3,255  /* Last set done ? */
    addi    r3,r3,1     /* Next set */
    beq cr1,1f      /* End of loop */
    andi.   r0,r3,0x1f  /* Need to load a new boltmap word ? */
    bne 1b      /* No, loop */
    lwz r8,0(r10)   /* Load boltmap entry */
    addi    r10,r10,4   /* Next word */
    b   1b      /* Then loop */
1:  isync           /* Sync shadows */
    wrtee   r11
    blr
#endif /* !CONFIG_PPC_47x */

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check, though; it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
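/*
 * For reference: tlbivax broadcasts the invalidation to the other
 * processors, and the mbar/tlbsync/sync sequence below waits until
 * all of them have completed it before we return. This is the
 * standard Book-E pattern; see the architecture's description of
 * tlbivax and tlbsync.
 */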
_GLOBAL(_tlbivax_bcast)
    mfspr   r5,SPRN_MMUCR
    mfmsr   r10
    rlwimi  r5,r4,0,16,31
    wrteei  0
    mtspr   SPRN_MMUCR,r5
    isync
    PPC_TLBIVAX(0, R3)
    isync
    mbar
    tlbsync
BEGIN_FTR_SECTION
    b   1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
    sync
    wrtee   r10
    blr
/*
 * DD2 HW could hang if an instruction fetch happens before the msync
 * completes. Touch enough instruction cache lines to ensure cache hits.
 */
1:  mflr    r9
    bcl 20,31,$+4
2:  mflr    r6
    li  r7,32
    PPC_ICBT(0,R6,R7)       /* touch next cache line */
    add r6,r6,r7
    PPC_ICBT(0,R6,R7)       /* touch next cache line */
    add r6,r6,r7
    PPC_ICBT(0,R6,R7)       /* touch next cache line */
    sync
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    mtlr    r9
    wrtee   r10
    blr
#endif /* CONFIG_PPC_47x */

#elif defined(CONFIG_FSL_BOOKE)
/*
 * FSL BookE implementations.
 *
 * Since the feature sections use _SECTION_ELSE, we need to place
 * the larger code path before the _SECTION_ELSE.
 */

/*
 * Flush MMU TLB on the local processor
 */
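/*
 * The non-tlbilx path below amounts to this (a sketch; MMUCSR0_TLBFI
 * is the flash-invalidate command/status bit, per the polling loop):
 *
 *   mtspr(SPRN_MMUCSR0, MMUCSR0_TLBFI);
 *   while (mfspr(SPRN_MMUCSR0) & MMUCSR0_TLBFI)
 *       ;  // hardware clears the bit when done
 */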
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
    li  r3,(MMUCSR0_TLBFI)@l
    mtspr   SPRN_MMUCSR0, r3
1:
    mfspr   r3,SPRN_MMUCSR0
    andi.   r3,r3,MMUCSR0_TLBFI@l
    bne 1b
MMU_FTR_SECTION_ELSE
    PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
    msync
    isync
    blr

_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
    slwi    r3,r3,16
    mfmsr   r10
    wrteei  0
    mfspr   r4,SPRN_MAS6    /* save MAS6 */
    mtspr   SPRN_MAS6,r3
    PPC_TLBILX_PID(0,R0)
    mtspr   SPRN_MAS6,r4    /* restore MAS6 */
    wrtee   r10
MMU_FTR_SECTION_ELSE
    li  r3,(MMUCSR0_TLBFI)@l
    mtspr   SPRN_MMUCSR0, r3
1:
    mfspr   r3,SPRN_MMUCSR0
    andi.   r3,r3,MMUCSR0_TLBFI@l
    bne 1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
    msync
    isync
    blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
_GLOBAL(__tlbil_va)
    mfmsr   r10
    wrteei  0
    slwi    r4,r4,16
    ori r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
    mtspr   SPRN_MAS6,r4        /* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
    tlbsx   0,r3
    mfspr   r4,SPRN_MAS1        /* check valid */
    andis.  r3,r4,MAS1_VALID@h
    beq 1f
    rlwinm  r4,r4,0,1,31
    mtspr   SPRN_MAS1,r4
    tlbwe
MMU_FTR_SECTION_ELSE
    PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
    msync
    isync
1:  wrtee   r10
    blr
#elif defined(CONFIG_PPC_BOOK3E)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
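/*
 * Register conventions for the entry points below, as inferred from
 * the MAS6 packing they perform (this file doesn't spell them out):
 *
 *   _tlbil_pid / _tlbil_pid_noind: r3 = PID
 *   _tlbil_va / _tlbivax_bcast:    r3 = virtual address, r4 = PID,
 *                                  r5 = tsize, r6 = indirect flag
 */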
_GLOBAL(_tlbil_pid)
    slwi    r4,r3,MAS6_SPID_SHIFT
    mfmsr   r10
    wrteei  0
    mtspr   SPRN_MAS6,r4
    PPC_TLBILX_PID(0,R0)
    wrtee   r10
    msync
    isync
    blr

_GLOBAL(_tlbil_pid_noind)
    slwi    r4,r3,MAS6_SPID_SHIFT
    mfmsr   r10
    ori r4,r4,MAS6_SIND
    wrteei  0
    mtspr   SPRN_MAS6,r4
    PPC_TLBILX_PID(0,R0)
    wrtee   r10
    msync
    isync
    blr

_GLOBAL(_tlbil_all)
    PPC_TLBILX_ALL(0,R0)
    msync
    isync
    blr

_GLOBAL(_tlbil_va)
    mfmsr   r10
    wrteei  0
    cmpwi   cr0,r6,0
    slwi    r4,r4,MAS6_SPID_SHIFT
    rlwimi  r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
    beq 1f
    rlwimi  r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:  mtspr   SPRN_MAS6,r4        /* assume AS=0 for now */
    PPC_TLBILX_VA(0,R3)
    msync
    isync
    wrtee   r10
    blr

_GLOBAL(_tlbivax_bcast)
    mfmsr   r10
    wrteei  0
    cmpwi   cr0,r6,0
    slwi    r4,r4,MAS6_SPID_SHIFT
    rlwimi  r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
    beq 1f
    rlwimi  r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:  mtspr   SPRN_MAS6,r4        /* assume AS=0 for now */
    PPC_TLBIVAX(0,R3)
    mbar
    tlbsync
    sync
    wrtee   r10
    blr
#else
#error Unsupported processor type !
#endif

#if defined(CONFIG_PPC_FSL_BOOK3E)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load TLBCAM[index] entry into the L2 CAM MMU
 * Must preserve r7, r8, r9, r10, r11, r12
 */
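/*
 * The TLBCAM_MAS* offsets below come from asm-offsets and index one
 * element of the TLBCAM array. A sketch of the struct they imply
 * (field types are an assumption; the authoritative definition lives
 * in the C side of the MMU code):
 *
 *   struct tlbcam {
 *       u32           MAS0;
 *       u32           MAS1;
 *       unsigned long MAS2;   // hence the PPC_LL (long-sized) load
 *       u32           MAS3;
 *       u32           MAS7;
 *   };
 */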
_GLOBAL(loadcam_entry)
    mflr    r5
    LOAD_REG_ADDR_PIC(r4, TLBCAM)
    mtlr    r5
    mulli   r5,r3,TLBCAM_SIZE
    add r3,r5,r4
    lwz r4,TLBCAM_MAS0(r3)
    mtspr   SPRN_MAS0,r4
    lwz r4,TLBCAM_MAS1(r3)
    mtspr   SPRN_MAS1,r4
    PPC_LL  r4,TLBCAM_MAS2(r3)
    mtspr   SPRN_MAS2,r4
    lwz r4,TLBCAM_MAS3(r3)
    mtspr   SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
    lwz r4,TLBCAM_MAS7(r3)
    mtspr   SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
    isync
    tlbwe
    isync
    blr

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry (0 means no switch to AS1)
 */
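/*
 * Rough control flow, as a reading aid:
 *
 *   if (!(MSR & MSR_IS) && temp_entry != 0) {
 *       // clone the TLB entry we are executing from into slot
 *       // 'temp_entry' with TS=1, then set MSR[IS|DS] so we run
 *       // from the AS=1 copy
 *   }
 *   for (i = first; i < first + count; i++)
 *       loadcam_entry(i);   // may rewrite the AS=0 text mapping
 *   if (we switched to AS=1) {
 *       // clear MSR[IS|DS] and invalidate the temporary entry
 *   }
 */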
_GLOBAL(loadcam_multi)
    mflr    r8
    /* Don't switch to AS=1 if already there */
    mfmsr   r11
    andi.   r11,r11,MSR_IS
    bne 10f
    mr. r12, r5
    beq 10f

    /*
     * Set up temporary TLB entry that is the same as what we're
     * running from, but in AS=1.
     */
    bcl 20,31,$+4
1:  mflr    r6
    tlbsx   0,r8
    mfspr   r6,SPRN_MAS1
    ori r6,r6,MAS1_TS
    mtspr   SPRN_MAS1,r6
    mfspr   r6,SPRN_MAS0
    rlwimi  r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
    mr  r7,r5
    mtspr   SPRN_MAS0,r6
    isync
    tlbwe
    isync

    /* Switch to AS=1 */
    mfmsr   r6
    ori r6,r6,MSR_IS|MSR_DS
    mtmsr   r6
    isync

10:
    mr  r9,r3
    add r10,r3,r4
2:  bl  loadcam_entry
    addi    r9,r9,1
    cmpw    r9,r10
    mr  r3,r9
    blt 2b

    /* Don't return to AS=0 if we were in AS=1 at function start */
    andi.   r11,r11,MSR_IS
    bne 3f
    cmpwi   r12, 0
    beq 3f

    /* Return to AS=0 and clear the temporary entry */
    mfmsr   r6
    rlwinm. r6,r6,0,~(MSR_IS|MSR_DS)
    mtmsr   r6
    isync

    li  r6,0
    mtspr   SPRN_MAS1,r6
    rlwinm  r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
    oris    r6,r6,MAS0_TLBSEL(1)@h
    mtspr   SPRN_MAS0,r6
    isync
    tlbwe
    isync

3:
    mtlr    r8
    blr
#endif