#include <linux/ptrace.h>
#include <linux/uaccess.h>
#include <abi/reg_ops.h>

#define MTCR_MASK 0xFC00FFE0
#define MFCR_MASK 0xFC00FFE0
#define MTCR_DIST 0xC0006420
#define MFCR_DIST 0xC0006020
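/*
 * fpu_libc_helper() emulates the FPU control register accesses that
 * libc issues but that trap in user mode:
 *
 *	mfcr rz, cr<1, 2>	- read fcr
 *	mfcr rz, cr<2, 2>	- read fesr
 *	mtcr rz, cr<1, 2>	- write fcr
 *	mtcr rz, cr<2, 2>	- write fesr
 *
 * It decodes the trapping instruction, performs the access on behalf
 * of the task and advances the PC past it.  Bits [25:21] must select
 * bank 2 (cr<x, 2>), i.e. the FPU control registers.  Returns 1 if
 * the instruction was emulated, 0 otherwise.
 */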
int fpu_libc_helper(struct pt_regs *regs)
{
	int fault;
	unsigned long instrptr, regx = 0;
	unsigned long index = 0, tmp = 0;
	unsigned long tinstr = 0;
	u16 instr_hi, instr_low;

	instrptr = instruction_pointer(regs);
	if (instrptr & 1)
		return 0;

	fault = __get_user(instr_low, (u16 *)instrptr);
	if (fault)
		return 0;

	fault = __get_user(instr_hi, (u16 *)(instrptr + 2));
	if (fault)
		return 0;

	tinstr = instr_hi | ((unsigned long)instr_low << 16);

	if (((tinstr >> 21) & 0x1F) != 2)
		return 0;

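	/*
	 * mtcr rz, cr<x, 2>: bits [20:16] give the index of the source
	 * GPR (0-13, read from pt_regs starting at a0), bits [4:0]
	 * select the control register: 1 for fcr, 2 for fesr.
	 */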
	if ((tinstr & MTCR_MASK) == MTCR_DIST) {
		index = (tinstr >> 16) & 0x1F;
		if (index > 13)
			return 0;

		tmp = tinstr & 0x1F;
		if (tmp > 2)
			return 0;

		regx = *(&regs->a0 + index);

		if (tmp == 1)
			mtcr("cr<1, 2>", regx);
		else if (tmp == 2)
			mtcr("cr<2, 2>", regx);
		else
			return 0;

		regs->pc += 4;
		return 1;
	}

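	/*
	 * mfcr rz, cr<x, 2>: the fields are swapped, bits [4:0] give the
	 * destination GPR index (0-13) and bits [20:16] the control
	 * register.  The result is written back into pt_regs so the
	 * task picks it up on return from the trap.
	 */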
	if ((tinstr & MFCR_MASK) == MFCR_DIST) {
		index = tinstr & 0x1F;
		if (index > 13)
			return 0;

		tmp = ((tinstr >> 16) & 0x1F);
		if (tmp > 2)
			return 0;

		if (tmp == 1)
			regx = mfcr("cr<1, 2>");
		else if (tmp == 2)
			regx = mfcr("cr<2, 2>");
		else
			return 0;

		*(&regs->a0 + index) = regx;

		regs->pc += 4;
		return 1;
	}

	return 0;
}

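/*
 * FP exception handler: read fesr (cr<2, 2>) to find out why the FPU
 * trapped and turn the status bits into a signal/si_code pair for the
 * current task.  Illegal-instruction and illegal-operand conditions
 * raise SIGILL; arithmetic exceptions raise SIGFPE with the matching
 * FPE_FLT* code, falling back to FPE_FLTUNK when no cause bit is set.
 */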
void fpu_fpe(struct pt_regs *regs)
{
	int sig, code;
	unsigned int fesr;

	fesr = mfcr("cr<2, 2>");

	sig = SIGFPE;
	code = FPE_FLTUNK;

	if (fesr & FPE_ILLE) {
		sig = SIGILL;
		code = ILL_ILLOPC;
	} else if (fesr & FPE_IDC) {
		sig = SIGILL;
		code = ILL_ILLOPN;
	} else if (fesr & FPE_FEC) {
		sig = SIGFPE;
		if (fesr & FPE_IOC)
			code = FPE_FLTINV;
		else if (fesr & FPE_DZC)
			code = FPE_FLTDIV;
		else if (fesr & FPE_UFC)
			code = FPE_FLTUND;
		else if (fesr & FPE_OFC)
			code = FPE_FLTOVF;
		else if (fesr & FPE_IXC)
			code = FPE_FLTRES;
	}

	force_sig_fault(sig, code, (void __user *)regs->pc);
}

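/*
 * Helpers for the !CONFIG_CPU_HAS_FPUV2 save/restore paths below:
 * FMFVR/FMTVR move the low and high 32-bit halves of two 64-bit FPU
 * registers to/from four GPR scratch operands (%0-%3), and STW/LDW
 * store or load those four words at the given byte offsets from the
 * fpregs pointer (%4).
 */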
#define FMFVR_FPU_REGS(vrx, vry) \
	"fmfvrl %0, "#vrx"\n" \
	"fmfvrh %1, "#vrx"\n" \
	"fmfvrl %2, "#vry"\n" \
	"fmfvrh %3, "#vry"\n"

#define FMTVR_FPU_REGS(vrx, vry) \
	"fmtvrl "#vrx", %0\n" \
	"fmtvrh "#vrx", %1\n" \
	"fmtvrl "#vry", %2\n" \
	"fmtvrh "#vry", %3\n"

#define STW_FPU_REGS(a, b, c, d) \
	"stw %0, (%4, "#a")\n" \
	"stw %1, (%4, "#b")\n" \
	"stw %2, (%4, "#c")\n" \
	"stw %3, (%4, "#d")\n"

#define LDW_FPU_REGS(a, b, c, d) \
	"ldw %0, (%4, "#a")\n" \
	"ldw %1, (%4, "#b")\n" \
	"ldw %2, (%4, "#c")\n" \
	"ldw %3, (%4, "#d")\n"

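/*
 * Dump the FPU state (fcr, fesr and the vr register file) into a
 * struct user_fp with interrupts disabled, so the state cannot change
 * underneath us.
 */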
void save_to_user_fp(struct user_fp *user_fp)
{
	unsigned long flg;
	unsigned long tmp1, tmp2;
	unsigned long *fpregs;

	local_irq_save(flg);

	tmp1 = mfcr("cr<1, 2>");
	tmp2 = mfcr("cr<2, 2>");

	user_fp->fcr = tmp1;
	user_fp->fesr = tmp2;

	fpregs = &user_fp->vr[0];
#ifdef CONFIG_CPU_HAS_FPUV2
#ifdef CONFIG_CPU_HAS_VDSP
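	/*
	 * With VDSP, vr0-vr15 double as the wider vector registers and
	 * are saved with vstmu.32; vr16-vr31 are plain 64-bit FPU
	 * registers saved with fstmu.64.  Each store updates %0, so
	 * fpregs walks through user_fp->vr.
	 */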
	asm volatile(
		"vstmu.32 vr0-vr3, (%0)\n"
		"vstmu.32 vr4-vr7, (%0)\n"
		"vstmu.32 vr8-vr11, (%0)\n"
		"vstmu.32 vr12-vr15, (%0)\n"
		"fstmu.64 vr16-vr31, (%0)\n"
		: "+a"(fpregs)
		::"memory");
#else
	asm volatile(
		"fstmu.64 vr0-vr31, (%0)\n"
		: "+a"(fpregs)
		::"memory");
#endif
#else
	{
	unsigned long tmp3, tmp4;

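	/*
	 * No multi-register FPU store without FPUV2: move each 64-bit
	 * register out as two 32-bit halves (fmfvrl/fmfvrh) through the
	 * four GPR scratch operands and stw them into user_fp->vr.
	 * Slots are 16 bytes apart (low word at +0, high word at +4),
	 * and the base pointer is bumped by 128 after vr0-vr7 so the
	 * same offsets are reused for vr8-vr15.
	 */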
	asm volatile(
		FMFVR_FPU_REGS(vr0, vr1)
		STW_FPU_REGS(0, 4, 16, 20)
		FMFVR_FPU_REGS(vr2, vr3)
		STW_FPU_REGS(32, 36, 48, 52)
		FMFVR_FPU_REGS(vr4, vr5)
		STW_FPU_REGS(64, 68, 80, 84)
		FMFVR_FPU_REGS(vr6, vr7)
		STW_FPU_REGS(96, 100, 112, 116)
		"addi %4, 128\n"
		FMFVR_FPU_REGS(vr8, vr9)
		STW_FPU_REGS(0, 4, 16, 20)
		FMFVR_FPU_REGS(vr10, vr11)
		STW_FPU_REGS(32, 36, 48, 52)
		FMFVR_FPU_REGS(vr12, vr13)
		STW_FPU_REGS(64, 68, 80, 84)
		FMFVR_FPU_REGS(vr14, vr15)
		STW_FPU_REGS(96, 100, 112, 116)
		: "=a"(tmp1), "=a"(tmp2), "=a"(tmp3),
		  "=a"(tmp4), "+a"(fpregs)
		::"memory");
	}
#endif

	local_irq_restore(flg);
}

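/*
 * Inverse of save_to_user_fp(): reload fcr, fesr and the vr register
 * file from a struct user_fp, again with interrupts disabled.
 */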
void restore_from_user_fp(struct user_fp *user_fp)
{
	unsigned long flg;
	unsigned long tmp1, tmp2;
	unsigned long *fpregs;

	local_irq_save(flg);

	tmp1 = user_fp->fcr;
	tmp2 = user_fp->fesr;

	mtcr("cr<1, 2>", tmp1);
	mtcr("cr<2, 2>", tmp2);

	fpregs = &user_fp->vr[0];
#ifdef CONFIG_CPU_HAS_FPUV2
#ifdef CONFIG_CPU_HAS_VDSP
	asm volatile(
		"vldmu.32 vr0-vr3, (%0)\n"
		"vldmu.32 vr4-vr7, (%0)\n"
		"vldmu.32 vr8-vr11, (%0)\n"
		"vldmu.32 vr12-vr15, (%0)\n"
		"fldmu.64 vr16-vr31, (%0)\n"
		: "+a"(fpregs)
		::"memory");
#else
	asm volatile(
		"fldmu.64 vr0-vr31, (%0)\n"
		: "+a"(fpregs)
		::"memory");
#endif
#else
	{
	unsigned long tmp3, tmp4;

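	/*
	 * Mirror of the save path: ldw the word pairs from user_fp->vr
	 * and move them back into the 64-bit FPU registers with
	 * fmtvrl/fmtvrh.
	 */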
	asm volatile(
		LDW_FPU_REGS(0, 4, 16, 20)
		FMTVR_FPU_REGS(vr0, vr1)
		LDW_FPU_REGS(32, 36, 48, 52)
		FMTVR_FPU_REGS(vr2, vr3)
		LDW_FPU_REGS(64, 68, 80, 84)
		FMTVR_FPU_REGS(vr4, vr5)
		LDW_FPU_REGS(96, 100, 112, 116)
		FMTVR_FPU_REGS(vr6, vr7)
		"addi %4, 128\n"
		LDW_FPU_REGS(0, 4, 16, 20)
		FMTVR_FPU_REGS(vr8, vr9)
		LDW_FPU_REGS(32, 36, 48, 52)
		FMTVR_FPU_REGS(vr10, vr11)
		LDW_FPU_REGS(64, 68, 80, 84)
		FMTVR_FPU_REGS(vr12, vr13)
		LDW_FPU_REGS(96, 100, 112, 116)
		FMTVR_FPU_REGS(vr14, vr15)
		: "=a"(tmp1), "=a"(tmp2), "=a"(tmp3),
		  "=a"(tmp4), "+a"(fpregs)
		::"memory");
	}
#endif
	local_irq_restore(flg);
}