#ifndef _ASM_MSA_H
#define _ASM_MSA_H

#include <asm/mipsregs.h>

#ifndef __ASSEMBLY__

#include <asm/inst.h>

extern void _save_msa(struct task_struct *);
extern void _restore_msa(struct task_struct *);
extern void _init_msa_upper(void);

extern void read_msa_wr_b(unsigned idx, union fpureg *to);
extern void read_msa_wr_h(unsigned idx, union fpureg *to);
extern void read_msa_wr_w(unsigned idx, union fpureg *to);
extern void read_msa_wr_d(unsigned idx, union fpureg *to);

/**
 * read_msa_wr() - Read a single MSA vector register
 * @idx:	The index of the vector register to read
 * @to:		The FPU register union to store the register's value in
 * @fmt:	The data format held in the vector register
 *
 * Read the value of MSA vector register idx into the FPU register
 * union to, using the format fmt.
 */
static inline void read_msa_wr(unsigned idx, union fpureg *to,
			       enum msa_2b_fmt fmt)
{
	switch (fmt) {
	case msa_fmt_b:
		read_msa_wr_b(idx, to);
		break;

	case msa_fmt_h:
		read_msa_wr_h(idx, to);
		break;

	case msa_fmt_w:
		read_msa_wr_w(idx, to);
		break;

	case msa_fmt_d:
		read_msa_wr_d(idx, to);
		break;

	default:
		BUG();
	}
}

extern void write_msa_wr_b(unsigned idx, union fpureg *from);
extern void write_msa_wr_h(unsigned idx, union fpureg *from);
extern void write_msa_wr_w(unsigned idx, union fpureg *from);
extern void write_msa_wr_d(unsigned idx, union fpureg *from);

/**
 * write_msa_wr() - Write a single MSA vector register
 * @idx:	The index of the vector register to write
 * @from:	The FPU register union to take the register's value from
 * @fmt:	The data format held in the vector register
 *
 * Write the value from the FPU register union from into MSA vector
 * register idx, using the format fmt.
 */
static inline void write_msa_wr(unsigned idx, union fpureg *from,
				enum msa_2b_fmt fmt)
{
	switch (fmt) {
	case msa_fmt_b:
		write_msa_wr_b(idx, from);
		break;

	case msa_fmt_h:
		write_msa_wr_h(idx, from);
		break;

	case msa_fmt_w:
		write_msa_wr_w(idx, from);
		break;

	case msa_fmt_d:
		write_msa_wr_d(idx, from);
		break;

	default:
		BUG();
	}
}

static inline void enable_msa(void)
{
	if (cpu_has_msa) {
		set_c0_config5(MIPS_CONF5_MSAEN);
		enable_fpu_hazard();
	}
}

static inline void disable_msa(void)
{
	if (cpu_has_msa) {
		clear_c0_config5(MIPS_CONF5_MSAEN);
		disable_fpu_hazard();
	}
}

static inline int is_msa_enabled(void)
{
	if (!cpu_has_msa)
		return 0;

	return read_c0_config5() & MIPS_CONF5_MSAEN;
}

static inline int thread_msa_context_live(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This allows the code
	 * for systems without MSA to be optimised out entirely, whilst
	 * systems where MSA support is detected at runtime fall through
	 * to the TIF_MSA_CTX_LIVE test below.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return 0;

	return test_thread_flag(TIF_MSA_CTX_LIVE);
}

static inline void save_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_save_msa(t);
}

static inline void restore_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_restore_msa(t);
}

static inline void init_msa_upper(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant, so that the call to
	 * _init_msa_upper() can be optimised out entirely on systems known
	 * at build time not to support MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return;

	_init_msa_upper();
}

#ifndef TOOLCHAIN_SUPPORTS_MSA
/*
 * Define assembler macros which allow the cfcmsa & ctcmsa instructions
 * to be used in inline asm when the toolchain used to build the kernel
 * doesn't itself understand MSA. The raw encodings used are those of
 * the MIPS32/MIPS64 and 32-bit microMIPS instruction sets.
 */
#define _ASM_SET_CFCMSA						\
	_ASM_MACRO_2R(cfcmsa, rd, cs,				\
		      _ASM_INSN_IF_MIPS(0x787e0019 | __cs << 11 | __rd << 6)	\
		      _ASM_INSN32_IF_MM(0x587e0016 | __cs << 11 | __rd << 6))
#define _ASM_UNSET_CFCMSA	".purgem cfcmsa\n\t"
#define _ASM_SET_CTCMSA						\
	_ASM_MACRO_2R(ctcmsa, cd, rs,				\
		      _ASM_INSN_IF_MIPS(0x783e0019 | __rs << 11 | __cd << 6)	\
		      _ASM_INSN32_IF_MM(0x583e0016 | __rs << 11 | __cd << 6))
#define _ASM_UNSET_CTCMSA	".purgem ctcmsa\n\t"
#else
#define _ASM_SET_CFCMSA		\
	".set\tfp=64\n\t"	\
	".set\tmsa\n\t"
#define _ASM_UNSET_CFCMSA
#define _ASM_SET_CTCMSA		\
	".set\tfp=64\n\t"	\
	".set\tmsa\n\t"
#define _ASM_UNSET_CTCMSA
#endif

#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_CFCMSA						\
	"	cfcmsa	%0, $" #cs "\n"				\
	_ASM_UNSET_CFCMSA					\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_CTCMSA						\
	"	ctcmsa	$" #cs ", %0\n"				\
	_ASM_UNSET_CTCMSA					\
	"	.set	pop\n"					\
	: : "r"(val));						\
}

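/*
 * Each __BUILD_MSA_CTL_REG(name, cs) invocation below expands to a
 * read_msa_<name>()/write_msa_<name>() accessor pair for MSA control
 * register $<cs>; for example read_msa_csr() returns the current value
 * of the MSA Control & Status Register (MSACSR, control register $1).
 */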
__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)

#endif /* !__ASSEMBLY__ */

#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)

#endif /* _ASM_MSA_H */