/*
 * MMU initialization for the PPC 44x and 47x (Book E) processors.
 *
 * Sets up the pinned ("bolted") 256M TLB entries that cover kernel
 * lowmem and keeps the TLB miss handlers' watermark up to date.
 */
#include <linux/init.h>
#include <linux/memblock.h>

#include <asm/mmu.h>
#include <asm/page.h>
#include <asm/cacheflush.h>
#include <asm/code-patching.h>
#include <asm/smp.h>

#include <mm/mmu_decl.h>

/* Used by the 44x TLB replacement exception handlers */
unsigned int tlb_44x_index;
unsigned int tlb_44x_hwater = PPC44x_TLB_SIZE - 1 - PPC44x_EARLY_TLBS;
int icache_44x_need_flush;

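/* Bitmap of 47x TLB entries that are bolted (pinned) */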
unsigned long tlb_47x_boltmap[1024/8];

static void __init ppc44x_update_tlb_hwater(void)
{
	/*
	 * The TLB miss handlers encode the watermark as an immediate
	 * operand, so patch the new value into both the data-side and
	 * instruction-side handlers rather than reloading it from memory.
	 */
	modify_instruction_site(&patch__tlb_44x_hwater_D, 0xffff, tlb_44x_hwater);
	modify_instruction_site(&patch__tlb_44x_hwater_I, 0xffff, tlb_44x_hwater);
}

/*
 * "Pins" a 256MB TLB entry for kernel lowmem on 44x-type MMUs.
 */
static void __init ppc44x_pin_tlb(unsigned int virt, unsigned int phys)
{
	unsigned int entry = tlb_44x_hwater--;

	ppc44x_update_tlb_hwater();

	mtspr(SPRN_MMUCR, 0);

	__asm__ __volatile__(
		"tlbwe %2,%3,%4\n"
		"tlbwe %1,%3,%5\n"
		"tlbwe %0,%3,%6\n"
	:
	: "r" (PPC44x_TLB_SW | PPC44x_TLB_SR | PPC44x_TLB_SX | PPC44x_TLB_G),
	  "r" (phys),
	  "r" (virt | PPC44x_TLB_VALID | PPC44x_TLB_256M),
	  "r" (entry),
	  "i" (PPC44x_TLB_PAGEID),
	  "i" (PPC44x_TLB_XLAT),
	  "i" (PPC44x_TLB_ATTRIB));
}

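/*
 * Return the index (0-5) of the first bolted TLB slot whose valid bit
 * is clear in MMUBE0/MMUBE1, or -1 if all six slots are in use.
 */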
static int __init ppc47x_find_free_bolted(void)
{
	unsigned int mmube0 = mfspr(SPRN_MMUBE0);
	unsigned int mmube1 = mfspr(SPRN_MMUBE1);

	if (!(mmube0 & MMUBE0_VBE0))
		return 0;
	if (!(mmube0 & MMUBE0_VBE1))
		return 1;
	if (!(mmube0 & MMUBE0_VBE2))
		return 2;
	if (!(mmube1 & MMUBE1_VBE3))
		return 3;
	if (!(mmube1 & MMUBE1_VBE4))
		return 4;
	if (!(mmube1 & MMUBE1_VBE5))
		return 5;
	return -1;
}

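/*
 * Record every valid bolted entry reported by MMUBE0/MMUBE1 in
 * tlb_47x_boltmap.
 */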
static void __init ppc47x_update_boltmap(void)
{
	unsigned int mmube0 = mfspr(SPRN_MMUBE0);
	unsigned int mmube1 = mfspr(SPRN_MMUBE1);

	if (mmube0 & MMUBE0_VBE0)
		__set_bit((mmube0 >> MMUBE0_IBE0_SHIFT) & 0xff,
			  tlb_47x_boltmap);
	if (mmube0 & MMUBE0_VBE1)
		__set_bit((mmube0 >> MMUBE0_IBE1_SHIFT) & 0xff,
			  tlb_47x_boltmap);
	if (mmube0 & MMUBE0_VBE2)
		__set_bit((mmube0 >> MMUBE0_IBE2_SHIFT) & 0xff,
			  tlb_47x_boltmap);
	if (mmube1 & MMUBE1_VBE3)
		__set_bit((mmube1 >> MMUBE1_IBE3_SHIFT) & 0xff,
			  tlb_47x_boltmap);
	if (mmube1 & MMUBE1_VBE4)
		__set_bit((mmube1 >> MMUBE1_IBE4_SHIFT) & 0xff,
			  tlb_47x_boltmap);
	if (mmube1 & MMUBE1_VBE5)
		__set_bit((mmube1 >> MMUBE1_IBE5_SHIFT) & 0xff,
			  tlb_47x_boltmap);
}

/*
 * "Pins" a 256MB TLB entry for kernel lowmem on 47x-type MMUs.
 */
static void __init ppc47x_pin_tlb(unsigned int virt, unsigned int phys)
{
	unsigned int rA;
	int bolted;

	/* Base rA value; the bolted slot number is inserted below */
	rA = 0x88000000;

	/* Look for a free bolted slot */
	bolted = ppc47x_find_free_bolted();
	BUG_ON(bolted < 0);

	/* Insert the bolted slot number */
	rA |= bolted << 24;

	pr_debug("256M TLB entry for 0x%08x->0x%08x in bolt slot %d\n",
		 virt, phys, bolted);

	mtspr(SPRN_MMUCR, 0);

	__asm__ __volatile__(
		"tlbwe %2,%3,0\n"
		"tlbwe %1,%3,1\n"
		"tlbwe %0,%3,2\n"
		:
		: "r" (PPC47x_TLB2_SW | PPC47x_TLB2_SR |
		       PPC47x_TLB2_SX
#ifdef CONFIG_SMP
		       | PPC47x_TLB2_M
#endif
		       ),
		  "r" (phys),
		  "r" (virt | PPC47x_TLB0_VALID | PPC47x_TLB0_256M),
		  "r" (rA));
}

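/*
 * Early hardware MMU setup: patch the current TLB watermark into the
 * miss handlers and flush the instruction cache so the patched code
 * takes effect.
 */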
void __init MMU_init_hw(void)
{
	ppc44x_update_tlb_hwater();

	flush_instruction_cache();
}

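/*
 * Cover lowmem with pinned 256M TLB entries and return the amount of
 * lowmem that is mapped.
 */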
unsigned long __init mmu_mapin_ram(unsigned long base, unsigned long top)
{
	unsigned long addr;
	unsigned long memstart = memstart_addr & ~(PPC_PIN_SIZE - 1);

	/*
	 * Pin enough 256M TLB entries to cover any lowmem that is not
	 * already covered by the initial pinned mapping.
	 */
	for (addr = memstart + PPC_PIN_SIZE; addr < lowmem_end_addr;
	     addr += PPC_PIN_SIZE) {
		if (mmu_has_feature(MMU_FTR_TYPE_47x))
			ppc47x_pin_tlb(addr + PAGE_OFFSET, addr);
		else
			ppc44x_pin_tlb(addr + PAGE_OFFSET, addr);
	}
	if (mmu_has_feature(MMU_FTR_TYPE_47x)) {
		ppc47x_update_boltmap();

#ifdef DEBUG
		{
			int i;

			printk(KERN_DEBUG "bolted entries: ");
			for (i = 0; i < 255; i++) {
				if (test_bit(i, tlb_47x_boltmap))
					printk("%d ", i);
			}
			printk("\n");
		}
#endif /* DEBUG */
	}
	return total_lowmem;
}

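/*
 * Restrict early memblock allocations to the range covered by the
 * boot-time pinned TLB entry.
 */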
void setup_initial_memory_limit(phys_addr_t first_memblock_base,
				phys_addr_t first_memblock_size)
{
	u64 size;

#ifndef CONFIG_NONSTATIC_KERNEL
	/*
	 * We don't currently support the first memblock not mapping
	 * physical address zero on these processors.
	 */
	BUG_ON(first_memblock_base != 0);
#endif

	/* The first PPC_PIN_SIZE of memory is covered by the entry pinned at boot */
	size = min_t(u64, first_memblock_size, PPC_PIN_SIZE);
	memblock_set_current_limit(first_memblock_base + size);
}

#ifdef CONFIG_SMP
void __init mmu_init_secondary(int cpu)
{
	unsigned long addr;
	unsigned long memstart = memstart_addr & ~(PPC_PIN_SIZE - 1);

	/*
	 * Pin lowmem on this secondary CPU, mirroring what
	 * mmu_mapin_ram() did on the boot CPU.
	 */
	for (addr = memstart + PPC_PIN_SIZE; addr < lowmem_end_addr;
	     addr += PPC_PIN_SIZE) {
		if (mmu_has_feature(MMU_FTR_TYPE_47x))
			ppc47x_pin_tlb(addr + PAGE_OFFSET, addr);
		else
			ppc44x_pin_tlb(addr + PAGE_OFFSET, addr);
	}
}
#endif /* CONFIG_SMP */