0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012 #include <asm/addrspace.h>
0013 #include <asm/bug.h>
0014 #include <asm/cacheflush.h>
0015
0016 #ifndef CKSEG2
0017 #define CKSEG2 CKSSEG
0018 #endif
0019 #ifndef TO_PHYS_MASK
0020 #define TO_PHYS_MASK -1
0021 #endif
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
/*
 * run_uncached() - call func() entirely uncached.
 *
 * Both the current stack pointer and the target function's address are
 * remapped to their uncached aliases before the call, so func() executes
 * with an uncached instruction stream and an uncached stack.  Used on
 * MIPS during early cache setup, when running cached would be unsafe.
 *
 * @func: function to invoke (called with no arguments); whatever long
 *        value it leaves in $2/v0 is returned to the caller.
 *
 * Returns: func()'s return value, read directly from register $2 (v0).
 *
 * NOTE(review): func must not touch data reachable only through cached
 * aliases while caches are in an inconsistent state — TODO confirm the
 * exact contract against the callers of this helper.
 */
unsigned long run_uncached(void *func)
{
	/* Hard-bind ret to $2 (v0), the MIPS return-value register, so
	 * the value func() returns is picked up without extra moves. */
	register long ret __asm__("$2");
	long lfunc = (long)func, ufunc;	/* ufunc: uncached alias of func */
	long usp;			/* uncached alias of the stack pointer */
	long sp;

	/* Capture the current stack pointer. */
	__asm__("move %0, $sp" : "=r" (sp));

	/* Map sp to its uncached alias.  In the 32-bit kernel segments
	 * (CKSEG0..CKSEG2) the uncached alias is the CKSEG1 address. */
	if (sp >= (long)CKSEG0 && sp < (long)CKSEG2)
		usp = CKSEG1ADDR(sp);
#ifdef CONFIG_64BIT
	/* On 64-bit kernels the stack may instead live in XKPHYS; rebuild
	 * the address with the uncached cache-coherency attribute.  The
	 * PHYS_TO_XKPHYS(0,0)..PHYS_TO_XKPHYS(8,0) bounds span all eight
	 * XKPHYS cache-attribute regions. */
	else if ((long long)sp >= (long long)PHYS_TO_XKPHYS(0, 0) &&
		 (long long)sp < (long long)PHYS_TO_XKPHYS(8, 0))
		usp = PHYS_TO_XKPHYS(K_CALG_UNCACHED,
				     XKPHYS_TO_PHYS((long long)sp));
#endif
	else {
		/* Stack pointer in a segment we cannot remap. */
		BUG();
		usp = sp;	/* fall back to the original address if BUG() returns */
	}
	/* Same translation for the function's address. */
	if (lfunc >= (long)CKSEG0 && lfunc < (long)CKSEG2)
		ufunc = CKSEG1ADDR(lfunc);
#ifdef CONFIG_64BIT
	else if ((long long)lfunc >= (long long)PHYS_TO_XKPHYS(0, 0) &&
		 (long long)lfunc < (long long)PHYS_TO_XKPHYS(8, 0))
		ufunc = PHYS_TO_XKPHYS(K_CALG_UNCACHED,
				     XKPHYS_TO_PHYS((long long)lfunc));
#endif
	else {
		/* Function address in a segment we cannot remap. */
		BUG();
		ufunc = lfunc;	/* fall back to the cached address if BUG() returns */
	}

	/* Switch to the uncached stack, call the uncached alias of func,
	 * then restore the original stack pointer.
	 *
	 * $16 (s0, callee-saved) holds the original sp across the call, so
	 * func() itself will preserve it; $31 (ra) is clobbered by jalr —
	 * hence the two entries in the clobber list.  NOTE(review): the
	 * trailing "move $sp, $16" relies on the assembler's default
	 * reorder mode to handle the jalr delay slot — confirm no
	 * .set noreorder is in effect at this point. */
	__asm__ __volatile__ (
		"	move	$16, $sp\n"
		"	move	$sp, %1\n"
		"	jalr	%2\n"
		"	move	$sp, $16"
		: "=r" (ret)
		: "r" (usp), "r" (ufunc)
		: "$16", "$31");

	return ret;
}