// SPDX-License-Identifier: GPL-2.0-only
/*
 *
 * Copyright Novell Inc 2010
 *
 * Authors: Alexander Graf <agraf@suse.de>
 */

#include <asm/kvm.h>
#include <asm/kvm_ppc.h>
#include <asm/disassemble.h>
#include <asm/kvm_book3s.h>
#include <asm/kvm_fpu.h>
#include <asm/reg.h>
#include <asm/cacheflush.h>
#include <asm/switch_to.h>
#include <linux/vmalloc.h>

/* #define DEBUG */

#ifdef DEBUG
#define dprintk printk
#else
#define dprintk(...) do { } while(0);
#endif

#define OP_LFS          48
#define OP_LFSU         49
#define OP_LFD          50
#define OP_LFDU         51
#define OP_STFS         52
#define OP_STFSU        53
#define OP_STFD         54
#define OP_STFDU        55
#define OP_PSQ_L        56
#define OP_PSQ_LU       57
#define OP_PSQ_ST       60
#define OP_PSQ_STU      61

#define OP_31_LFSX      535
#define OP_31_LFSUX     567
#define OP_31_LFDX      599
#define OP_31_LFDUX     631
#define OP_31_STFSX     663
#define OP_31_STFSUX        695
#define OP_31_STFX      727
#define OP_31_STFUX     759
#define OP_31_LWIZX     887
#define OP_31_STFIWX        983

#define OP_59_FADDS     21
#define OP_59_FSUBS     20
#define OP_59_FSQRTS        22
#define OP_59_FDIVS     18
#define OP_59_FRES      24
#define OP_59_FMULS     25
#define OP_59_FRSQRTES      26
#define OP_59_FMSUBS        28
#define OP_59_FMADDS        29
#define OP_59_FNMSUBS       30
#define OP_59_FNMADDS       31

#define OP_63_FCMPU     0
#define OP_63_FCPSGN        8
#define OP_63_FRSP      12
#define OP_63_FCTIW     14
#define OP_63_FCTIWZ        15
#define OP_63_FDIV      18
#define OP_63_FADD      21
#define OP_63_FSQRT     22
#define OP_63_FSEL      23
#define OP_63_FRE       24
#define OP_63_FMUL      25
#define OP_63_FRSQRTE       26
#define OP_63_FMSUB     28
#define OP_63_FMADD     29
#define OP_63_FNMSUB        30
#define OP_63_FNMADD        31
#define OP_63_FCMPO     32
#define OP_63_MTFSB1        38 // XXX
#define OP_63_FSUB      20
#define OP_63_FNEG      40
#define OP_63_MCRFS     64
#define OP_63_MTFSB0        70
#define OP_63_FMR       72
#define OP_63_MTFSFI        134
#define OP_63_FABS      264
#define OP_63_MFFS      583
#define OP_63_MTFSF     711

#define OP_4X_PS_CMPU0      0
#define OP_4X_PSQ_LX        6
#define OP_4XW_PSQ_STX      7
#define OP_4A_PS_SUM0       10
#define OP_4A_PS_SUM1       11
#define OP_4A_PS_MULS0      12
#define OP_4A_PS_MULS1      13
#define OP_4A_PS_MADDS0     14
#define OP_4A_PS_MADDS1     15
#define OP_4A_PS_DIV        18
#define OP_4A_PS_SUB        20
#define OP_4A_PS_ADD        21
#define OP_4A_PS_SEL        23
#define OP_4A_PS_RES        24
#define OP_4A_PS_MUL        25
#define OP_4A_PS_RSQRTE     26
#define OP_4A_PS_MSUB       28
#define OP_4A_PS_MADD       29
#define OP_4A_PS_NMSUB      30
#define OP_4A_PS_NMADD      31
#define OP_4X_PS_CMPO0      32
#define OP_4X_PSQ_LUX       38
#define OP_4XW_PSQ_STUX     39
#define OP_4X_PS_NEG        40
#define OP_4X_PS_CMPU1      64
#define OP_4X_PS_MR     72
#define OP_4X_PS_CMPO1      96
#define OP_4X_PS_NABS       136
#define OP_4X_PS_ABS        264
#define OP_4X_PS_MERGE00    528
#define OP_4X_PS_MERGE01    560
#define OP_4X_PS_MERGE10    592
#define OP_4X_PS_MERGE11    624

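/*
 * Flags for the kvmppc_ps_*_in() helpers below: pick which slot a scalar
 * operand is taken from and suppress writing PS0 or PS1 of the result.
 */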
#define SCALAR_NONE     0
#define SCALAR_HIGH     (1 << 0)
#define SCALAR_LOW      (1 << 1)
#define SCALAR_NO_PS0       (1 << 2)
#define SCALAR_NO_PS1       (1 << 3)

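/*
 * Field layout of a graphics quantization register (GQR), which selects
 * type and scale for quantized paired-single loads and stores. As an
 * example, a GQR value of 0x00070004 decodes to load type 7
 * (GQR_QUANTIZE_S16) with load scale 0 and store type 4 (GQR_QUANTIZE_U8)
 * with store scale 0. Note that the emulation in this file does not (yet)
 * consume these definitions.
 */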
#define GQR_ST_TYPE_MASK    0x00000007
#define GQR_ST_TYPE_SHIFT   0
#define GQR_ST_SCALE_MASK   0x00003f00
#define GQR_ST_SCALE_SHIFT  8
#define GQR_LD_TYPE_MASK    0x00070000
#define GQR_LD_TYPE_SHIFT   16
#define GQR_LD_SCALE_MASK   0x3f000000
#define GQR_LD_SCALE_SHIFT  24

#define GQR_QUANTIZE_FLOAT  0
#define GQR_QUANTIZE_U8     4
#define GQR_QUANTIZE_U16    5
#define GQR_QUANTIZE_S8     6
#define GQR_QUANTIZE_S16    7

#define FPU_LS_SINGLE       0
#define FPU_LS_DOUBLE       1
#define FPU_LS_SINGLE_LOW   2

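/* Keep QPR[rt] (the PS1 slot) in sync by converting FPR[rt] to single precision. */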
static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt)
{
    kvm_cvt_df(&VCPU_FPR(vcpu, rt), &vcpu->arch.qpr[rt]);
}

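/*
 * Make the guest take a data storage interrupt for a faulting access:
 * clear the relevant MSR bits, load DAR/DSISR with the fault address and
 * page-fault (and, for stores, store) bits, and queue the DSI.
 */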
static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store)
{
    u32 dsisr;
    u64 msr = kvmppc_get_msr(vcpu);

    msr = kvmppc_set_field(msr, 33, 36, 0);
    msr = kvmppc_set_field(msr, 42, 47, 0);
    kvmppc_set_msr(vcpu, msr);
    kvmppc_set_dar(vcpu, eaddr);
    /* Page Fault */
    dsisr = kvmppc_set_field(0, 33, 33, 1);
    if (is_store)
        dsisr = kvmppc_set_field(dsisr, 38, 38, 1);
    kvmppc_set_dsisr(vcpu, dsisr);
    kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
}

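/*
 * Emulate a floating point load into FPR[rs]. Reads 4 or 8 bytes from the
 * guest, injecting a page fault or deferring to MMIO emulation as needed;
 * single-precision loads also update the QPR shadow of the register.
 */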
static int kvmppc_emulate_fpr_load(struct kvm_vcpu *vcpu,
                   int rs, ulong addr, int ls_type)
{
    int emulated = EMULATE_FAIL;
    int r;
    char tmp[8];
    int len = sizeof(u32);

    if (ls_type == FPU_LS_DOUBLE)
        len = sizeof(u64);

    /* read from memory */
    r = kvmppc_ld(vcpu, &addr, len, tmp, true);
    vcpu->arch.paddr_accessed = addr;

    if (r < 0) {
        kvmppc_inject_pf(vcpu, addr, false);
        goto done_load;
    } else if (r == EMULATE_DO_MMIO) {
        emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FPR | rs,
                          len, 1);
        goto done_load;
    }

    emulated = EMULATE_DONE;

    /* put in registers */
    switch (ls_type) {
    case FPU_LS_SINGLE:
        kvm_cvt_fd((u32*)tmp, &VCPU_FPR(vcpu, rs));
        vcpu->arch.qpr[rs] = *((u32*)tmp);
        break;
    case FPU_LS_DOUBLE:
        VCPU_FPR(vcpu, rs) = *((u64*)tmp);
        break;
    }

    dprintk(KERN_INFO "KVM: FPR_LD [0x%llx] at 0x%lx (%d)\n", *(u64*)tmp,
              addr, len);

done_load:
    return emulated;
}

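/*
 * Emulate a floating point store from FPR[rs]. Converts the register to
 * the requested memory format (single, low word or double) and writes it
 * out, again falling back to page fault injection or MMIO emulation.
 */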
static int kvmppc_emulate_fpr_store(struct kvm_vcpu *vcpu,
                    int rs, ulong addr, int ls_type)
{
    int emulated = EMULATE_FAIL;
    int r;
    char tmp[8];
    u64 val;
    int len;

    switch (ls_type) {
    case FPU_LS_SINGLE:
        kvm_cvt_df(&VCPU_FPR(vcpu, rs), (u32*)tmp);
        val = *((u32*)tmp);
        len = sizeof(u32);
        break;
    case FPU_LS_SINGLE_LOW:
        *((u32*)tmp) = VCPU_FPR(vcpu, rs);
        val = VCPU_FPR(vcpu, rs) & 0xffffffff;
        len = sizeof(u32);
        break;
    case FPU_LS_DOUBLE:
        *((u64*)tmp) = VCPU_FPR(vcpu, rs);
        val = VCPU_FPR(vcpu, rs);
        len = sizeof(u64);
        break;
    default:
        val = 0;
        len = 0;
    }

    r = kvmppc_st(vcpu, &addr, len, tmp, true);
    vcpu->arch.paddr_accessed = addr;
    if (r < 0) {
        kvmppc_inject_pf(vcpu, addr, true);
    } else if (r == EMULATE_DO_MMIO) {
        emulated = kvmppc_handle_store(vcpu, val, len, 1);
    } else {
        emulated = EMULATE_DONE;
    }

    dprintk(KERN_INFO "KVM: FPR_ST [0x%llx] at 0x%lx (%d)\n",
              val, addr, len);

    return emulated;
}

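/*
 * Emulate a paired-single quantized load. With w set only one word is
 * read and PS1 is forced to 1.0; otherwise two words are loaded into
 * PS0/PS1. The GQR index i is currently not used for dequantization.
 */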
static int kvmppc_emulate_psq_load(struct kvm_vcpu *vcpu,
                   int rs, ulong addr, bool w, int i)
{
    int emulated = EMULATE_FAIL;
    int r;
    float one = 1.0;
    u32 tmp[2];

    /* read from memory */
    if (w) {
        r = kvmppc_ld(vcpu, &addr, sizeof(u32), tmp, true);
        memcpy(&tmp[1], &one, sizeof(u32));
    } else {
        r = kvmppc_ld(vcpu, &addr, sizeof(u32) * 2, tmp, true);
    }
    vcpu->arch.paddr_accessed = addr;
    if (r < 0) {
        kvmppc_inject_pf(vcpu, addr, false);
        goto done_load;
    } else if ((r == EMULATE_DO_MMIO) && w) {
        emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FPR | rs,
                          4, 1);
        vcpu->arch.qpr[rs] = tmp[1];
        goto done_load;
    } else if (r == EMULATE_DO_MMIO) {
        emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FQPR | rs,
                          8, 1);
        goto done_load;
    }

    emulated = EMULATE_DONE;

    /* put in registers */
    kvm_cvt_fd(&tmp[0], &VCPU_FPR(vcpu, rs));
    vcpu->arch.qpr[rs] = tmp[1];

    dprintk(KERN_INFO "KVM: PSQ_LD [0x%x, 0x%x] at 0x%lx (%d)\n", tmp[0],
              tmp[1], addr, w ? 4 : 8);

done_load:
    return emulated;
}

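/*
 * Emulate a paired-single quantized store: PS0 (converted to single
 * precision) and PS1 are written out, or only PS0 when w is set. As with
 * the load path, the GQR index i is ignored.
 */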
static int kvmppc_emulate_psq_store(struct kvm_vcpu *vcpu,
                    int rs, ulong addr, bool w, int i)
{
    int emulated = EMULATE_FAIL;
    int r;
    u32 tmp[2];
    int len = w ? sizeof(u32) : sizeof(u64);

    kvm_cvt_df(&VCPU_FPR(vcpu, rs), &tmp[0]);
    tmp[1] = vcpu->arch.qpr[rs];

    r = kvmppc_st(vcpu, &addr, len, tmp, true);
    vcpu->arch.paddr_accessed = addr;
    if (r < 0) {
        kvmppc_inject_pf(vcpu, addr, true);
    } else if ((r == EMULATE_DO_MMIO) && w) {
        emulated = kvmppc_handle_store(vcpu, tmp[0], 4, 1);
    } else if (r == EMULATE_DO_MMIO) {
        u64 val = ((u64)tmp[0] << 32) | tmp[1];
        emulated = kvmppc_handle_store(vcpu, val, 8, 1);
    } else {
        emulated = EMULATE_DONE;
    }

    dprintk(KERN_INFO "KVM: PSQ_ST [0x%x, 0x%x] at 0x%lx (%d)\n",
              tmp[0], tmp[1], addr, len);

    return emulated;
}

/*
 * Extracts an instruction field using the ISA's big-endian bit numbering,
 * in which bit 0 is the leftmost (most significant) bit. Both msb and lsb
 * are included in the extracted field.
 */
static inline u32 inst_get_field(u32 inst, int msb, int lsb)
{
    return kvmppc_get_field(inst, msb + 32, lsb + 32);
}

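/*
 * Decide whether an instruction should go through the paired-single
 * emulation: only when the vcpu is in paired-single mode, and only for
 * the load/store and arithmetic forms listed in the switch below.
 */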
static bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst)
{
    if (!(vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE))
        return false;

    switch (get_op(inst)) {
    case OP_PSQ_L:
    case OP_PSQ_LU:
    case OP_PSQ_ST:
    case OP_PSQ_STU:
    case OP_LFS:
    case OP_LFSU:
    case OP_LFD:
    case OP_LFDU:
    case OP_STFS:
    case OP_STFSU:
    case OP_STFD:
    case OP_STFDU:
        return true;
    case 4:
        /* X form */
        switch (inst_get_field(inst, 21, 30)) {
        case OP_4X_PS_CMPU0:
        case OP_4X_PSQ_LX:
        case OP_4X_PS_CMPO0:
        case OP_4X_PSQ_LUX:
        case OP_4X_PS_NEG:
        case OP_4X_PS_CMPU1:
        case OP_4X_PS_MR:
        case OP_4X_PS_CMPO1:
        case OP_4X_PS_NABS:
        case OP_4X_PS_ABS:
        case OP_4X_PS_MERGE00:
        case OP_4X_PS_MERGE01:
        case OP_4X_PS_MERGE10:
        case OP_4X_PS_MERGE11:
            return true;
        }
        /* XW form */
        switch (inst_get_field(inst, 25, 30)) {
        case OP_4XW_PSQ_STX:
        case OP_4XW_PSQ_STUX:
            return true;
        }
        /* A form */
        switch (inst_get_field(inst, 26, 30)) {
        case OP_4A_PS_SUM1:
        case OP_4A_PS_SUM0:
        case OP_4A_PS_MULS0:
        case OP_4A_PS_MULS1:
        case OP_4A_PS_MADDS0:
        case OP_4A_PS_MADDS1:
        case OP_4A_PS_DIV:
        case OP_4A_PS_SUB:
        case OP_4A_PS_ADD:
        case OP_4A_PS_SEL:
        case OP_4A_PS_RES:
        case OP_4A_PS_MUL:
        case OP_4A_PS_RSQRTE:
        case OP_4A_PS_MSUB:
        case OP_4A_PS_MADD:
        case OP_4A_PS_NMSUB:
        case OP_4A_PS_NMADD:
            return true;
        }
        break;
    case 59:
        switch (inst_get_field(inst, 21, 30)) {
        case OP_59_FADDS:
        case OP_59_FSUBS:
        case OP_59_FDIVS:
        case OP_59_FRES:
        case OP_59_FRSQRTES:
            return true;
        }
        switch (inst_get_field(inst, 26, 30)) {
        case OP_59_FMULS:
        case OP_59_FMSUBS:
        case OP_59_FMADDS:
        case OP_59_FNMSUBS:
        case OP_59_FNMADDS:
            return true;
        }
        break;
    case 63:
        switch (inst_get_field(inst, 21, 30)) {
        case OP_63_MTFSB0:
        case OP_63_MTFSB1:
        case OP_63_MTFSF:
        case OP_63_MTFSFI:
        case OP_63_MCRFS:
        case OP_63_MFFS:
        case OP_63_FCMPU:
        case OP_63_FCMPO:
        case OP_63_FNEG:
        case OP_63_FMR:
        case OP_63_FABS:
        case OP_63_FRSP:
        case OP_63_FDIV:
        case OP_63_FADD:
        case OP_63_FSUB:
        case OP_63_FCTIW:
        case OP_63_FCTIWZ:
        case OP_63_FRSQRTE:
        case OP_63_FCPSGN:
            return true;
        }
        switch (inst_get_field(inst, 26, 30)) {
        case OP_63_FMUL:
        case OP_63_FSEL:
        case OP_63_FMSUB:
        case OP_63_FMADD:
        case OP_63_FNMSUB:
        case OP_63_FNMADD:
            return true;
        }
        break;
    case 31:
        switch (inst_get_field(inst, 21, 30)) {
        case OP_31_LFSX:
        case OP_31_LFSUX:
        case OP_31_LFDX:
        case OP_31_LFDUX:
        case OP_31_STFSX:
        case OP_31_STFSUX:
        case OP_31_STFX:
        case OP_31_STFUX:
        case OP_31_STFIWX:
            return true;
        }
        break;
    }

    return false;
}

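/* Extract and sign-extend the displacement field used by the psq load/store emulation below. */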
static int get_d_signext(u32 inst)
{
    int d = inst & 0x8ff;

    if (d & 0x800)
        return -(d & 0x7ff);

    return (d & 0x7ff);
}

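/*
 * Apply a three-operand single-precision primitive (e.g. fmadds) to both
 * slots of a paired-single destination, honouring the SCALAR_* flags for
 * operand selection and result suppression.
 */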
static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc,
                      int reg_out, int reg_in1, int reg_in2,
                      int reg_in3, int scalar,
                      void (*func)(u64 *fpscr,
                         u32 *dst, u32 *src1,
                         u32 *src2, u32 *src3))
{
    u32 *qpr = vcpu->arch.qpr;
    u32 ps0_out;
    u32 ps0_in1, ps0_in2, ps0_in3;
    u32 ps1_in1, ps1_in2, ps1_in3;

    /* RC */
    WARN_ON(rc);

    /* PS0 */
    kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);
    kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
    kvm_cvt_df(&VCPU_FPR(vcpu, reg_in3), &ps0_in3);

    if (scalar & SCALAR_LOW)
        ps0_in2 = qpr[reg_in2];

    func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2, &ps0_in3);

    dprintk(KERN_INFO "PS3 ps0 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
              ps0_in1, ps0_in2, ps0_in3, ps0_out);

    if (!(scalar & SCALAR_NO_PS0))
        kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));

    /* PS1 */
    ps1_in1 = qpr[reg_in1];
    ps1_in2 = qpr[reg_in2];
    ps1_in3 = qpr[reg_in3];

    if (scalar & SCALAR_HIGH)
        ps1_in2 = ps0_in2;

    if (!(scalar & SCALAR_NO_PS1))
        func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in1, &ps1_in2, &ps1_in3);

    dprintk(KERN_INFO "PS3 ps1 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
              ps1_in1, ps1_in2, ps1_in3, qpr[reg_out]);

    return EMULATE_DONE;
}

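/* As kvmppc_ps_three_in(), but for two-operand primitives (add, sub, mul, div). */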
static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc,
                    int reg_out, int reg_in1, int reg_in2,
                    int scalar,
                    void (*func)(u64 *fpscr,
                         u32 *dst, u32 *src1,
                         u32 *src2))
{
    u32 *qpr = vcpu->arch.qpr;
    u32 ps0_out;
    u32 ps0_in1, ps0_in2;
    u32 ps1_out;
    u32 ps1_in1, ps1_in2;

    /* RC */
    WARN_ON(rc);

    /* PS0 */
    kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);

    if (scalar & SCALAR_LOW)
        ps0_in2 = qpr[reg_in2];
    else
        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);

    func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2);

    if (!(scalar & SCALAR_NO_PS0)) {
        dprintk(KERN_INFO "PS2 ps0 -> f(0x%x, 0x%x) = 0x%x\n",
                  ps0_in1, ps0_in2, ps0_out);

        kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
    }

    /* PS1 */
    ps1_in1 = qpr[reg_in1];
    ps1_in2 = qpr[reg_in2];

    if (scalar & SCALAR_HIGH)
        ps1_in2 = ps0_in2;

    func(&vcpu->arch.fp.fpscr, &ps1_out, &ps1_in1, &ps1_in2);

    if (!(scalar & SCALAR_NO_PS1)) {
        qpr[reg_out] = ps1_out;

        dprintk(KERN_INFO "PS2 ps1 -> f(0x%x, 0x%x) = 0x%x\n",
                  ps1_in1, ps1_in2, qpr[reg_out]);
    }

    return EMULATE_DONE;
}

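/* As kvmppc_ps_three_in(), but for one-operand primitives (fres, frsqrte). */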
static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc,
                    int reg_out, int reg_in,
                    void (*func)(u64 *t,
                         u32 *dst, u32 *src1))
{
    u32 *qpr = vcpu->arch.qpr;
    u32 ps0_out, ps0_in;
    u32 ps1_in;

    /* RC */
    WARN_ON(rc);

    /* PS0 */
    kvm_cvt_df(&VCPU_FPR(vcpu, reg_in), &ps0_in);
    func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in);

    dprintk(KERN_INFO "PS1 ps0 -> f(0x%x) = 0x%x\n",
              ps0_in, ps0_out);

    kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));

    /* PS1 */
    ps1_in = qpr[reg_in];
    func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in);

    dprintk(KERN_INFO "PS1 ps1 -> f(0x%x) = 0x%x\n",
              ps1_in, qpr[reg_out]);

    return EMULATE_DONE;
}

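/*
 * Main entry point: fetch the last guest instruction, decode its register
 * fields, ensure the FPU is available, and dispatch to the load/store and
 * arithmetic emulation helpers above.
 */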
int kvmppc_emulate_paired_single(struct kvm_vcpu *vcpu)
{
    u32 inst;
    enum emulation_result emulated = EMULATE_DONE;
    int ax_rd, ax_ra, ax_rb, ax_rc;
    short full_d;
    u64 *fpr_d, *fpr_a, *fpr_b, *fpr_c;

    bool rcomp;
    u32 cr;
#ifdef DEBUG
    int i;
#endif

    emulated = kvmppc_get_last_inst(vcpu, INST_GENERIC, &inst);
    if (emulated != EMULATE_DONE)
        return emulated;

    ax_rd = inst_get_field(inst, 6, 10);
    ax_ra = inst_get_field(inst, 11, 15);
    ax_rb = inst_get_field(inst, 16, 20);
    ax_rc = inst_get_field(inst, 21, 25);
    full_d = inst_get_field(inst, 16, 31);

    fpr_d = &VCPU_FPR(vcpu, ax_rd);
    fpr_a = &VCPU_FPR(vcpu, ax_ra);
    fpr_b = &VCPU_FPR(vcpu, ax_rb);
    fpr_c = &VCPU_FPR(vcpu, ax_rc);

    rcomp = (inst & 1) ? true : false;
    cr = kvmppc_get_cr(vcpu);

    if (!kvmppc_inst_is_paired_single(vcpu, inst))
        return EMULATE_FAIL;

    if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {
        kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL);
        return EMULATE_AGAIN;
    }

    kvmppc_giveup_ext(vcpu, MSR_FP);
    preempt_disable();
    enable_kernel_fp();
    /* Do we need to clear FE0 / FE1 here? Don't think so. */

#ifdef DEBUG
    for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
        u32 f;
        kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
        dprintk(KERN_INFO "FPR[%d] = 0x%x / 0x%llx    QPR[%d] = 0x%x\n",
            i, f, VCPU_FPR(vcpu, i), i, vcpu->arch.qpr[i]);
    }
#endif

    switch (get_op(inst)) {
    case OP_PSQ_L:
    {
        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
        bool w = inst_get_field(inst, 16, 16) ? true : false;
        int i = inst_get_field(inst, 17, 19);

        addr += get_d_signext(inst);
        emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
        break;
    }
    case OP_PSQ_LU:
    {
        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
        bool w = inst_get_field(inst, 16, 16) ? true : false;
        int i = inst_get_field(inst, 17, 19);

        addr += get_d_signext(inst);
        emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);

        if (emulated == EMULATE_DONE)
            kvmppc_set_gpr(vcpu, ax_ra, addr);
        break;
    }
    case OP_PSQ_ST:
    {
        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
        bool w = inst_get_field(inst, 16, 16) ? true : false;
        int i = inst_get_field(inst, 17, 19);

        addr += get_d_signext(inst);
        emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
        break;
    }
    case OP_PSQ_STU:
    {
        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
        bool w = inst_get_field(inst, 16, 16) ? true : false;
        int i = inst_get_field(inst, 17, 19);

        addr += get_d_signext(inst);
        emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);

        if (emulated == EMULATE_DONE)
            kvmppc_set_gpr(vcpu, ax_ra, addr);
        break;
    }
    case 4:
        /* X form */
        switch (inst_get_field(inst, 21, 30)) {
        case OP_4X_PS_CMPU0:
            /* XXX */
            emulated = EMULATE_FAIL;
            break;
        case OP_4X_PSQ_LX:
        {
            ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
            bool w = inst_get_field(inst, 21, 21) ? true : false;
            int i = inst_get_field(inst, 22, 24);

            addr += kvmppc_get_gpr(vcpu, ax_rb);
            emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
            break;
        }
        case OP_4X_PS_CMPO0:
            /* XXX */
            emulated = EMULATE_FAIL;
            break;
        case OP_4X_PSQ_LUX:
        {
            ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
            bool w = inst_get_field(inst, 21, 21) ? true : false;
            int i = inst_get_field(inst, 22, 24);

            addr += kvmppc_get_gpr(vcpu, ax_rb);
            emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);

            if (emulated == EMULATE_DONE)
                kvmppc_set_gpr(vcpu, ax_ra, addr);
            break;
        }
        case OP_4X_PS_NEG:
            VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
            VCPU_FPR(vcpu, ax_rd) ^= 0x8000000000000000ULL;
            vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
            vcpu->arch.qpr[ax_rd] ^= 0x80000000;
            break;
        case OP_4X_PS_CMPU1:
            /* XXX */
            emulated = EMULATE_FAIL;
            break;
        case OP_4X_PS_MR:
            WARN_ON(rcomp);
            VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
            vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
            break;
        case OP_4X_PS_CMPO1:
            /* XXX */
            emulated = EMULATE_FAIL;
            break;
        case OP_4X_PS_NABS:
            WARN_ON(rcomp);
            VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
            VCPU_FPR(vcpu, ax_rd) |= 0x8000000000000000ULL;
            vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
            vcpu->arch.qpr[ax_rd] |= 0x80000000;
            break;
        case OP_4X_PS_ABS:
            WARN_ON(rcomp);
            VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
            VCPU_FPR(vcpu, ax_rd) &= ~0x8000000000000000ULL;
            vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
            vcpu->arch.qpr[ax_rd] &= ~0x80000000;
            break;
        case OP_4X_PS_MERGE00:
            WARN_ON(rcomp);
            VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
            /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
            kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
                   &vcpu->arch.qpr[ax_rd]);
            break;
        case OP_4X_PS_MERGE01:
            WARN_ON(rcomp);
            VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
            vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
            break;
        case OP_4X_PS_MERGE10:
            WARN_ON(rcomp);
            /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
            kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
                   &VCPU_FPR(vcpu, ax_rd));
            /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
            kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
                   &vcpu->arch.qpr[ax_rd]);
            break;
        case OP_4X_PS_MERGE11:
            WARN_ON(rcomp);
            /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
            kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
                   &VCPU_FPR(vcpu, ax_rd));
            vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
            break;
        }
        /* XW form */
        switch (inst_get_field(inst, 25, 30)) {
        case OP_4XW_PSQ_STX:
        {
            ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
            bool w = inst_get_field(inst, 21, 21) ? true : false;
            int i = inst_get_field(inst, 22, 24);

            addr += kvmppc_get_gpr(vcpu, ax_rb);
            emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
            break;
        }
        case OP_4XW_PSQ_STUX:
        {
            ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
            bool w = inst_get_field(inst, 21, 21) ? true : false;
            int i = inst_get_field(inst, 22, 24);

            addr += kvmppc_get_gpr(vcpu, ax_rb);
            emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);

            if (emulated == EMULATE_DONE)
                kvmppc_set_gpr(vcpu, ax_ra, addr);
            break;
        }
        }
        /* A form */
        switch (inst_get_field(inst, 26, 30)) {
        case OP_4A_PS_SUM1:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_rb, ax_ra, SCALAR_NO_PS0 | SCALAR_HIGH, fps_fadds);
            VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rc);
            break;
        case OP_4A_PS_SUM0:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rb, SCALAR_NO_PS1 | SCALAR_LOW, fps_fadds);
            vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rc];
            break;
        case OP_4A_PS_MULS0:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, SCALAR_HIGH, fps_fmuls);
            break;
        case OP_4A_PS_MULS1:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, SCALAR_LOW, fps_fmuls);
            break;
        case OP_4A_PS_MADDS0:
            emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, ax_rb, SCALAR_HIGH, fps_fmadds);
            break;
        case OP_4A_PS_MADDS1:
            emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, ax_rb, SCALAR_LOW, fps_fmadds);
            break;
        case OP_4A_PS_DIV:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rb, SCALAR_NONE, fps_fdivs);
            break;
        case OP_4A_PS_SUB:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rb, SCALAR_NONE, fps_fsubs);
            break;
        case OP_4A_PS_ADD:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rb, SCALAR_NONE, fps_fadds);
            break;
        case OP_4A_PS_SEL:
            emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fsel);
            break;
        case OP_4A_PS_RES:
            emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
                    ax_rb, fps_fres);
            break;
        case OP_4A_PS_MUL:
            emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, SCALAR_NONE, fps_fmuls);
            break;
        case OP_4A_PS_RSQRTE:
            emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
                    ax_rb, fps_frsqrte);
            break;
        case OP_4A_PS_MSUB:
            emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmsubs);
            break;
        case OP_4A_PS_MADD:
            emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmadds);
            break;
        case OP_4A_PS_NMSUB:
            emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmsubs);
            break;
        case OP_4A_PS_NMADD:
            emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
                    ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmadds);
            break;
        }
        break;

    /* Real FPU operations */

    case OP_LFS:
    {
        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
                           FPU_LS_SINGLE);
        break;
    }
    case OP_LFSU:
    {
        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
                           FPU_LS_SINGLE);

        if (emulated == EMULATE_DONE)
            kvmppc_set_gpr(vcpu, ax_ra, addr);
        break;
    }
    case OP_LFD:
    {
        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
                           FPU_LS_DOUBLE);
        break;
    }
    case OP_LFDU:
    {
        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
                           FPU_LS_DOUBLE);

        if (emulated == EMULATE_DONE)
            kvmppc_set_gpr(vcpu, ax_ra, addr);
        break;
    }
    case OP_STFS:
    {
        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
                            FPU_LS_SINGLE);
        break;
    }
    case OP_STFSU:
    {
        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
                            FPU_LS_SINGLE);

        if (emulated == EMULATE_DONE)
            kvmppc_set_gpr(vcpu, ax_ra, addr);
        break;
    }
    case OP_STFD:
    {
        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;

        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
                            FPU_LS_DOUBLE);
        break;
    }
    case OP_STFDU:
    {
        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;

        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
                            FPU_LS_DOUBLE);

        if (emulated == EMULATE_DONE)
            kvmppc_set_gpr(vcpu, ax_ra, addr);
        break;
    }
    case 31:
        switch (inst_get_field(inst, 21, 30)) {
        case OP_31_LFSX:
        {
            ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;

            addr += kvmppc_get_gpr(vcpu, ax_rb);
            emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
                               addr, FPU_LS_SINGLE);
            break;
        }
        case OP_31_LFSUX:
        {
            ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
                               addr, FPU_LS_SINGLE);

            if (emulated == EMULATE_DONE)
                kvmppc_set_gpr(vcpu, ax_ra, addr);
            break;
        }
        case OP_31_LFDX:
        {
            ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
                               addr, FPU_LS_DOUBLE);
            break;
        }
        case OP_31_LFDUX:
        {
            ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
                               addr, FPU_LS_DOUBLE);

            if (emulated == EMULATE_DONE)
                kvmppc_set_gpr(vcpu, ax_ra, addr);
            break;
        }
        case OP_31_STFSX:
        {
            ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
                                addr, FPU_LS_SINGLE);
            break;
        }
        case OP_31_STFSUX:
        {
            ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
                                addr, FPU_LS_SINGLE);

            if (emulated == EMULATE_DONE)
                kvmppc_set_gpr(vcpu, ax_ra, addr);
            break;
        }
        case OP_31_STFX:
        {
            ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
                                addr, FPU_LS_DOUBLE);
            break;
        }
        case OP_31_STFUX:
        {
            ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
                                addr, FPU_LS_DOUBLE);

            if (emulated == EMULATE_DONE)
                kvmppc_set_gpr(vcpu, ax_ra, addr);
            break;
        }
        case OP_31_STFIWX:
        {
            ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
                     kvmppc_get_gpr(vcpu, ax_rb);

            emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
                                addr,
                                FPU_LS_SINGLE_LOW);
            break;
        }
            break;
        }
        break;
    case 59:
        switch (inst_get_field(inst, 21, 30)) {
        case OP_59_FADDS:
            fpd_fadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FSUBS:
            fpd_fsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FDIVS:
            fpd_fdivs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FRES:
            fpd_fres(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FRSQRTES:
            fpd_frsqrtes(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        }
        switch (inst_get_field(inst, 26, 30)) {
        case OP_59_FMULS:
            fpd_fmuls(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FMSUBS:
            fpd_fmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FMADDS:
            fpd_fmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FNMSUBS:
            fpd_fnmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_59_FNMADDS:
            fpd_fnmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        }
        break;
    case 63:
        switch (inst_get_field(inst, 21, 30)) {
        case OP_63_MTFSB0:
        case OP_63_MTFSB1:
        case OP_63_MCRFS:
        case OP_63_MTFSFI:
            /* XXX need to implement */
            break;
        case OP_63_MFFS:
            /* XXX missing CR */
            *fpr_d = vcpu->arch.fp.fpscr;
            break;
        case OP_63_MTFSF:
            /* XXX missing fm bits */
            /* XXX missing CR */
            vcpu->arch.fp.fpscr = *fpr_b;
            break;
        case OP_63_FCMPU:
        {
            u32 tmp_cr;
            u32 cr0_mask = 0xf0000000;
            u32 cr_shift = inst_get_field(inst, 6, 8) * 4;

            fpd_fcmpu(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
            cr &= ~(cr0_mask >> cr_shift);
            cr |= (cr & cr0_mask) >> cr_shift;
            break;
        }
        case OP_63_FCMPO:
        {
            u32 tmp_cr;
            u32 cr0_mask = 0xf0000000;
            u32 cr_shift = inst_get_field(inst, 6, 8) * 4;

            fpd_fcmpo(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
            cr &= ~(cr0_mask >> cr_shift);
            cr |= (cr & cr0_mask) >> cr_shift;
            break;
        }
        case OP_63_FNEG:
            fpd_fneg(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            break;
        case OP_63_FMR:
            *fpr_d = *fpr_b;
            break;
        case OP_63_FABS:
            fpd_fabs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            break;
        case OP_63_FCPSGN:
            fpd_fcpsgn(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
            break;
        case OP_63_FDIV:
            fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
            break;
        case OP_63_FADD:
            fpd_fadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
            break;
        case OP_63_FSUB:
            fpd_fsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
            break;
        case OP_63_FCTIW:
            fpd_fctiw(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            break;
        case OP_63_FCTIWZ:
            fpd_fctiwz(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            break;
        case OP_63_FRSP:
            fpd_frsp(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            kvmppc_sync_qpr(vcpu, ax_rd);
            break;
        case OP_63_FRSQRTE:
        {
            double one = 1.0f;

            /* fD = sqrt(fB) */
            fpd_fsqrt(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
            /* fD = 1.0f / fD */
            fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, (u64*)&one, fpr_d);
            break;
        }
        }
        switch (inst_get_field(inst, 26, 30)) {
        case OP_63_FMUL:
            fpd_fmul(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
            break;
        case OP_63_FSEL:
            fpd_fsel(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            break;
        case OP_63_FMSUB:
            fpd_fmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            break;
        case OP_63_FMADD:
            fpd_fmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            break;
        case OP_63_FNMSUB:
            fpd_fnmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            break;
        case OP_63_FNMADD:
            fpd_fnmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
            break;
        }
        break;
    }

#ifdef DEBUG
    for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
        u32 f;
        kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
        dprintk(KERN_INFO "FPR[%d] = 0x%x\n", i, f);
    }
#endif

    if (rcomp)
        kvmppc_set_cr(vcpu, cr);

    disable_kernel_fp();
    preempt_enable();

    return emulated;
}