// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/spinlock.h>
#include <asm/cache.h>
#include <abi/reg_ops.h>
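
/*
 * L1 cache operations are driven through the cr22/cr17 control
 * registers: cr22 holds the target line address and cr17 takes the
 * operation bits defined below.  The optional L2 cache is driven
 * through cr24 and is only touched when CCR2 reports it enabled.
 */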

/* for L1-cache */
#define INS_CACHE       (1 << 0)
#define DATA_CACHE      (1 << 1)
#define CACHE_INV       (1 << 4)
#define CACHE_CLR       (1 << 5)
#define CACHE_OMS       (1 << 6)
#define CACHE_ITS       (1 << 7)
#define CACHE_LICF      (1 << 31)

/* for L2-cache */
#define CR22_LEVEL_SHIFT    (1)
#define CR22_SET_SHIFT      (7)
#define CR22_WAY_SHIFT      (30)
#define CR22_WAY_SHIFT_L2   (29)

static DEFINE_SPINLOCK(cache_lock);

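/*
 * Operate on a single line: latch the line address in cr22, then issue
 * the requested operation through cr17.
 */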
static inline void cache_op_line(unsigned long i, unsigned int val)
{
    mtcr("cr22", i);
    mtcr("cr17", val);
}

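/*
 * Operate on the entire cache: cr17 alone (no address in cr22) applies
 * the operation to the whole L1.  When the caller asks for it and CCR2
 * reports the L2 enabled, the same operation is issued to the L2 via
 * cr24.
 */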
#define CCR2_L2E (1 << 3)
static void cache_op_all(unsigned int value, unsigned int l2)
{
    mtcr("cr17", value | CACHE_CLR);
    mb();

    if (l2 && (mfcr_ccr2() & CCR2_L2E)) {
        mtcr("cr24", value | CACHE_CLR);
        mb();
    }
}

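/*
 * Operate line by line over [start, end).  Ranges of a page or more,
 * or ranges outside the lowmem direct mapping, fall back to a
 * whole-cache operation instead.
 */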
static void cache_op_range(
    unsigned int start,
    unsigned int end,
    unsigned int value,
    unsigned int l2)
{
    unsigned long i, flags;
    unsigned int val = value | CACHE_CLR | CACHE_OMS;
    bool l2_sync;

    if (unlikely((end - start) >= PAGE_SIZE) ||
        unlikely(start < PAGE_OFFSET) ||
        unlikely(start >= PAGE_OFFSET + LOWMEM_LIMIT)) {
        cache_op_all(value, l2);
        return;
    }

    if ((mfcr_ccr2() & CCR2_L2E) && l2)
        l2_sync = 1;
    else
        l2_sync = 0;

    spin_lock_irqsave(&cache_lock, flags);

    i = start & ~(L1_CACHE_BYTES - 1);
    for (; i < end; i += L1_CACHE_BYTES) {
        cache_op_line(i, val);
        if (l2_sync) {
            mb();
            mtcr("cr24", val);
        }
    }
    spin_unlock_irqrestore(&cache_lock, flags);

    mb();
}

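/* Write back (clear) a single D-cache line. */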
void dcache_wb_line(unsigned long start)
{
    asm volatile("idly4\n":::"memory");
    cache_op_line(start, DATA_CACHE|CACHE_CLR);
    mb();
}

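/*
 * I-cache invalidation (L1 only).  local_icache_inv_all() takes an
 * unused argument so it matches the callback signature expected by
 * cross-CPU call helpers.
 */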
void icache_inv_range(unsigned long start, unsigned long end)
{
    cache_op_range(start, end, INS_CACHE|CACHE_INV, 0);
}

void icache_inv_all(void)
{
    cache_op_all(INS_CACHE|CACHE_INV, 0);
}

void local_icache_inv_all(void *priv)
{
    cache_op_all(INS_CACHE|CACHE_INV, 0);
}

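/*
 * D-cache write-back/invalidate and combined I+D write-back/invalidate
 * helpers (L1 only).
 */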
void dcache_wb_range(unsigned long start, unsigned long end)
{
    cache_op_range(start, end, DATA_CACHE|CACHE_CLR, 0);
}

void dcache_wbinv_all(void)
{
    cache_op_all(DATA_CACHE|CACHE_CLR|CACHE_INV, 0);
}

void cache_wbinv_range(unsigned long start, unsigned long end)
{
    cache_op_range(start, end, INS_CACHE|DATA_CACHE|CACHE_CLR|CACHE_INV, 0);
}
EXPORT_SYMBOL(cache_wbinv_range);

void cache_wbinv_all(void)
{
    cache_op_all(INS_CACHE|DATA_CACHE|CACHE_CLR|CACHE_INV, 0);
}

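/*
 * DMA streaming helpers, used by the arch DMA sync code.  All three
 * are implemented here as the same write-back + invalidate, and they
 * are the only operations that also target the L2 cache (l2 = 1).
 * Typical use (sketch): dma_wb_range() before a CPU-to-device
 * transfer, dma_inv_range() after a device-to-CPU transfer.
 */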
void dma_wbinv_range(unsigned long start, unsigned long end)
{
    cache_op_range(start, end, DATA_CACHE|CACHE_CLR|CACHE_INV, 1);
}

void dma_inv_range(unsigned long start, unsigned long end)
{
    cache_op_range(start, end, DATA_CACHE|CACHE_CLR|CACHE_INV, 1);
}

void dma_wb_range(unsigned long start, unsigned long end)
{
    cache_op_range(start, end, DATA_CACHE|CACHE_CLR|CACHE_INV, 1);
}