// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.
0004 #include <linux/spinlock.h>
0005 #include <linux/smp.h>
0006 #include <linux/mm.h>
0007 #include <asm/cache.h>
0008 #include <asm/barrier.h>
0009
0010
0011 #define INS_CACHE (1 << 0)
0012 #define DATA_CACHE (1 << 1)
0013 #define CACHE_INV (1 << 4)
0014 #define CACHE_CLR (1 << 5)
0015 #define CACHE_OMS (1 << 6)
0016
/*
 * Invalidate the entire instruction cache on the local CPU.
 *
 * @priv: unused; the signature matches the on_each_cpu() callback type
 *        so this can also be broadcast as a cross-CPU call.
 *
 * Writing INS_CACHE|CACHE_INV to control register cr17 triggers the
 * whole-icache invalidate; sync_is() then waits for the operation to
 * complete before returning.
 */
void local_icache_inv_all(void *priv)
{
	mtcr("cr17", INS_CACHE|CACHE_INV);
	sync_is();
}
0022
0023 #ifdef CONFIG_CPU_HAS_ICACHE_INS
0024 void icache_inv_range(unsigned long start, unsigned long end)
0025 {
0026 unsigned long i = start & ~(L1_CACHE_BYTES - 1);
0027
0028 for (; i < end; i += L1_CACHE_BYTES)
0029 asm volatile("icache.iva %0\n"::"r"(i):"memory");
0030 sync_is();
0031 }
0032 #else
/* Argument bundle for the per-CPU icache invalidate callback. */
struct cache_range {
	unsigned long start;	/* first address of the range */
	unsigned long end;	/* one past the last address */
};

/* Serializes the cr22/cr17 register write pair in cache_op_line(). */
static DEFINE_SPINLOCK(cache_lock);
0039
/*
 * Issue one cache operation on the line containing address @i:
 * cr22 selects the target address, then cr17 starts operation @val.
 * The two writes form a non-interruptible sequence — callers hold
 * cache_lock with IRQs disabled so the pair is never split.
 */
static inline void cache_op_line(unsigned long i, unsigned int val)
{
	mtcr("cr22", i);
	mtcr("cr17", val);
}
0045
/*
 * Invalidate the icache lines covering [param->start, param->end) on
 * the local CPU.  Runs as an on_each_cpu() callback; @priv points at
 * a struct cache_range.
 *
 * cache_lock is taken with IRQs off so each cr22/cr17 write pair in
 * cache_op_line() executes atomically; sync_is() afterwards (outside
 * the lock) waits for the cache operations to complete.
 */
void local_icache_inv_range(void *priv)
{
	struct cache_range *param = priv;
	unsigned long i = param->start & ~(L1_CACHE_BYTES - 1);
	unsigned long flags;

	spin_lock_irqsave(&cache_lock, flags);

	for (; i < param->end; i += L1_CACHE_BYTES)
		cache_op_line(i, INS_CACHE | CACHE_INV | CACHE_OMS);

	spin_unlock_irqrestore(&cache_lock, flags);

	sync_is();
}
0061
0062 void icache_inv_range(unsigned long start, unsigned long end)
0063 {
0064 struct cache_range param = { start, end };
0065
0066 if (irqs_disabled())
0067 local_icache_inv_range(¶m);
0068 else
0069 on_each_cpu(local_icache_inv_range, ¶m, 1);
0070 }
0071 #endif
0072
/*
 * Write back (clean) the single data-cache line containing @start,
 * then synchronize with sync_is().
 */
inline void dcache_wb_line(unsigned long start)
{
	asm volatile("dcache.cval1 %0\n"::"r"(start):"memory");
	sync_is();
}
0078
0079 void dcache_wb_range(unsigned long start, unsigned long end)
0080 {
0081 unsigned long i = start & ~(L1_CACHE_BYTES - 1);
0082
0083 for (; i < end; i += L1_CACHE_BYTES)
0084 asm volatile("dcache.cval1 %0\n"::"r"(i):"memory");
0085 sync_is();
0086 }
0087
/*
 * Write back the dcache then invalidate the icache for [start, end).
 * The dcache clean must come first so that newly written data (e.g.
 * freshly generated instructions) reaches memory before the icache
 * refetches it.
 */
void cache_wbinv_range(unsigned long start, unsigned long end)
{
	dcache_wb_range(start, end);
	icache_inv_range(start, end);
}
EXPORT_SYMBOL(cache_wbinv_range);
0094
0095 void dma_wbinv_range(unsigned long start, unsigned long end)
0096 {
0097 unsigned long i = start & ~(L1_CACHE_BYTES - 1);
0098
0099 for (; i < end; i += L1_CACHE_BYTES)
0100 asm volatile("dcache.civa %0\n"::"r"(i):"memory");
0101 sync_is();
0102 }
0103
0104 void dma_inv_range(unsigned long start, unsigned long end)
0105 {
0106 unsigned long i = start & ~(L1_CACHE_BYTES - 1);
0107
0108 for (; i < end; i += L1_CACHE_BYTES)
0109 asm volatile("dcache.iva %0\n"::"r"(i):"memory");
0110 sync_is();
0111 }
0112
0113 void dma_wb_range(unsigned long start, unsigned long end)
0114 {
0115 unsigned long i = start & ~(L1_CACHE_BYTES - 1);
0116
0117 for (; i < end; i += L1_CACHE_BYTES)
0118 asm volatile("dcache.cva %0\n"::"r"(i):"memory");
0119 sync_is();
0120 }