linux/arch/powerpc/kvm/book3s_paired_singles.c
   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 *
   4 * Copyright Novell Inc 2010
   5 *
   6 * Authors: Alexander Graf <agraf@suse.de>
   7 */
   8
   9#include <asm/kvm.h>
  10#include <asm/kvm_ppc.h>
  11#include <asm/disassemble.h>
  12#include <asm/kvm_book3s.h>
  13#include <asm/kvm_fpu.h>
  14#include <asm/reg.h>
  15#include <asm/cacheflush.h>
  16#include <asm/switch_to.h>
  17#include <linux/vmalloc.h>
  18
  19/* #define DEBUG */
  20
  21#ifdef DEBUG
  22#define dprintk printk
  23#else
  24#define dprintk(...) do { } while(0);
  25#endif
  26
  27#define OP_LFS                  48
  28#define OP_LFSU                 49
  29#define OP_LFD                  50
  30#define OP_LFDU                 51
  31#define OP_STFS                 52
  32#define OP_STFSU                53
  33#define OP_STFD                 54
  34#define OP_STFDU                55
  35#define OP_PSQ_L                56
  36#define OP_PSQ_LU               57
  37#define OP_PSQ_ST               60
  38#define OP_PSQ_STU              61
  39
  40#define OP_31_LFSX              535
  41#define OP_31_LFSUX             567
  42#define OP_31_LFDX              599
  43#define OP_31_LFDUX             631
  44#define OP_31_STFSX             663
  45#define OP_31_STFSUX            695
  46#define OP_31_STFX              727
  47#define OP_31_STFUX             759
  48#define OP_31_LWIZX             887
  49#define OP_31_STFIWX            983
  50
  51#define OP_59_FADDS             21
  52#define OP_59_FSUBS             20
  53#define OP_59_FSQRTS            22
  54#define OP_59_FDIVS             18
  55#define OP_59_FRES              24
  56#define OP_59_FMULS             25
  57#define OP_59_FRSQRTES          26
  58#define OP_59_FMSUBS            28
  59#define OP_59_FMADDS            29
  60#define OP_59_FNMSUBS           30
  61#define OP_59_FNMADDS           31
  62
  63#define OP_63_FCMPU             0
  64#define OP_63_FCPSGN            8
  65#define OP_63_FRSP              12
  66#define OP_63_FCTIW             14
  67#define OP_63_FCTIWZ            15
  68#define OP_63_FDIV              18
  69#define OP_63_FADD              21
  70#define OP_63_FSQRT             22
  71#define OP_63_FSEL              23
  72#define OP_63_FRE               24
  73#define OP_63_FMUL              25
  74#define OP_63_FRSQRTE           26
  75#define OP_63_FMSUB             28
  76#define OP_63_FMADD             29
  77#define OP_63_FNMSUB            30
  78#define OP_63_FNMADD            31
  79#define OP_63_FCMPO             32
  80#define OP_63_MTFSB1            38 // XXX
  81#define OP_63_FSUB              20
  82#define OP_63_FNEG              40
  83#define OP_63_MCRFS             64
  84#define OP_63_MTFSB0            70
  85#define OP_63_FMR               72
  86#define OP_63_MTFSFI            134
  87#define OP_63_FABS              264
  88#define OP_63_MFFS              583
  89#define OP_63_MTFSF             711
  90
  91#define OP_4X_PS_CMPU0          0
  92#define OP_4X_PSQ_LX            6
  93#define OP_4XW_PSQ_STX          7
  94#define OP_4A_PS_SUM0           10
  95#define OP_4A_PS_SUM1           11
  96#define OP_4A_PS_MULS0          12
  97#define OP_4A_PS_MULS1          13
  98#define OP_4A_PS_MADDS0         14
  99#define OP_4A_PS_MADDS1         15
 100#define OP_4A_PS_DIV            18
 101#define OP_4A_PS_SUB            20
 102#define OP_4A_PS_ADD            21
 103#define OP_4A_PS_SEL            23
 104#define OP_4A_PS_RES            24
 105#define OP_4A_PS_MUL            25
 106#define OP_4A_PS_RSQRTE         26
 107#define OP_4A_PS_MSUB           28
 108#define OP_4A_PS_MADD           29
 109#define OP_4A_PS_NMSUB          30
 110#define OP_4A_PS_NMADD          31
 111#define OP_4X_PS_CMPO0          32
 112#define OP_4X_PSQ_LUX           38
 113#define OP_4XW_PSQ_STUX         39
 114#define OP_4X_PS_NEG            40
 115#define OP_4X_PS_CMPU1          64
 116#define OP_4X_PS_MR             72
 117#define OP_4X_PS_CMPO1          96
 118#define OP_4X_PS_NABS           136
 119#define OP_4X_PS_ABS            264
 120#define OP_4X_PS_MERGE00        528
 121#define OP_4X_PS_MERGE01        560
 122#define OP_4X_PS_MERGE10        592
 123#define OP_4X_PS_MERGE11        624
 124
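     /*
      * Scalar flags for kvmppc_ps_two_in()/kvmppc_ps_three_in() below:
      * SCALAR_HIGH feeds src2's PS0 value into the PS1 computation,
      * SCALAR_LOW feeds src2's PS1 (QPR) value into the PS0 computation,
      * and SCALAR_NO_PS0/SCALAR_NO_PS1 suppress writing the respective
      * half of the result.
      */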
 125#define SCALAR_NONE             0
 126#define SCALAR_HIGH             (1 << 0)
 127#define SCALAR_LOW              (1 << 1)
 128#define SCALAR_NO_PS0           (1 << 2)
 129#define SCALAR_NO_PS1           (1 << 3)
 130
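     /*
      * Field layout of the guest's graphics quantization registers (GQRs),
      * which describe the quantization type and scale for the psq load and
      * store instructions.
      */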
 131#define GQR_ST_TYPE_MASK        0x00000007
 132#define GQR_ST_TYPE_SHIFT       0
 133#define GQR_ST_SCALE_MASK       0x00003f00
 134#define GQR_ST_SCALE_SHIFT      8
 135#define GQR_LD_TYPE_MASK        0x00070000
 136#define GQR_LD_TYPE_SHIFT       16
 137#define GQR_LD_SCALE_MASK       0x3f000000
 138#define GQR_LD_SCALE_SHIFT      24
 139
 140#define GQR_QUANTIZE_FLOAT      0
 141#define GQR_QUANTIZE_U8         4
 142#define GQR_QUANTIZE_U16        5
 143#define GQR_QUANTIZE_S8         6
 144#define GQR_QUANTIZE_S16        7
 145
 146#define FPU_LS_SINGLE           0
 147#define FPU_LS_DOUBLE           1
 148#define FPU_LS_SINGLE_LOW       2
 149
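     /*
      * Mirror the double-precision FPR value into the matching QPR slot as
      * a single, so the PS1 half tracks the scalar result of an operation.
      */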
 150static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt)
 151{
 152        kvm_cvt_df(&VCPU_FPR(vcpu, rt), &vcpu->arch.qpr[rt]);
 153}
 154
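     /*
      * Raise a data storage interrupt for the guest: set DAR to the
      * faulting effective address and flag the page-fault (and, for
      * stores, the store) bit in DSISR.
      */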
 155static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store)
 156{
 157        u32 dsisr;
 158        u64 msr = kvmppc_get_msr(vcpu);
 159
 160        msr = kvmppc_set_field(msr, 33, 36, 0);
 161        msr = kvmppc_set_field(msr, 42, 47, 0);
 162        kvmppc_set_msr(vcpu, msr);
 163        kvmppc_set_dar(vcpu, eaddr);
 164        /* Page Fault */
 165        dsisr = kvmppc_set_field(0, 33, 33, 1);
 166        if (is_store)
 167                dsisr = kvmppc_set_field(dsisr, 38, 38, 1);
 168        kvmppc_set_dsisr(vcpu, dsisr);
 169        kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
 170}
 171
 172static int kvmppc_emulate_fpr_load(struct kvm_vcpu *vcpu,
 173                                   int rs, ulong addr, int ls_type)
 174{
 175        int emulated = EMULATE_FAIL;
 176        int r;
 177        char tmp[8];
 178        int len = sizeof(u32);
 179
 180        if (ls_type == FPU_LS_DOUBLE)
 181                len = sizeof(u64);
 182
 183        /* read from memory */
 184        r = kvmppc_ld(vcpu, &addr, len, tmp, true);
 185        vcpu->arch.paddr_accessed = addr;
 186
 187        if (r < 0) {
 188                kvmppc_inject_pf(vcpu, addr, false);
 189                goto done_load;
 190        } else if (r == EMULATE_DO_MMIO) {
 191                emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FPR | rs,
 192                                              len, 1);
 193                goto done_load;
 194        }
 195
 196        emulated = EMULATE_DONE;
 197
 198        /* put in registers */
 199        switch (ls_type) {
 200        case FPU_LS_SINGLE:
 201                kvm_cvt_fd((u32*)tmp, &VCPU_FPR(vcpu, rs));
 202                vcpu->arch.qpr[rs] = *((u32*)tmp);
 203                break;
 204        case FPU_LS_DOUBLE:
 205                VCPU_FPR(vcpu, rs) = *((u64*)tmp);
 206                break;
 207        }
 208
 209        dprintk(KERN_INFO "KVM: FPR_LD [0x%llx] at 0x%lx (%d)\n", *(u64*)tmp,
 210                          addr, len);
 211
 212done_load:
 213        return emulated;
 214}
 215
 216static int kvmppc_emulate_fpr_store(struct kvm_vcpu *vcpu,
 217                                    int rs, ulong addr, int ls_type)
 218{
 219        int emulated = EMULATE_FAIL;
 220        int r;
 221        char tmp[8];
 222        u64 val;
 223        int len;
 224
 225        switch (ls_type) {
 226        case FPU_LS_SINGLE:
 227                kvm_cvt_df(&VCPU_FPR(vcpu, rs), (u32*)tmp);
 228                val = *((u32*)tmp);
 229                len = sizeof(u32);
 230                break;
 231        case FPU_LS_SINGLE_LOW:
 232                *((u32*)tmp) = VCPU_FPR(vcpu, rs);
 233                val = VCPU_FPR(vcpu, rs) & 0xffffffff;
 234                len = sizeof(u32);
 235                break;
 236        case FPU_LS_DOUBLE:
 237                *((u64*)tmp) = VCPU_FPR(vcpu, rs);
 238                val = VCPU_FPR(vcpu, rs);
 239                len = sizeof(u64);
 240                break;
 241        default:
 242                val = 0;
 243                len = 0;
 244        }
 245
 246        r = kvmppc_st(vcpu, &addr, len, tmp, true);
 247        vcpu->arch.paddr_accessed = addr;
 248        if (r < 0) {
 249                kvmppc_inject_pf(vcpu, addr, true);
 250        } else if (r == EMULATE_DO_MMIO) {
 251                emulated = kvmppc_handle_store(vcpu, val, len, 1);
 252        } else {
 253                emulated = EMULATE_DONE;
 254        }
 255
 256        dprintk(KERN_INFO "KVM: FPR_ST [0x%llx] at 0x%lx (%d)\n",
 257                          val, addr, len);
 258
 259        return emulated;
 260}
 261
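     /*
      * Emulate a paired-single quantized load: PS0 and PS1 are read as two
      * consecutive 32-bit words; with w set, only PS0 is loaded and PS1 is
      * forced to 1.0f. The GQR index 'i' is not used here, so no
      * dequantization is applied.
      */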
 262static int kvmppc_emulate_psq_load(struct kvm_vcpu *vcpu,
 263                                   int rs, ulong addr, bool w, int i)
 264{
 265        int emulated = EMULATE_FAIL;
 266        int r;
 267        float one = 1.0;
 268        u32 tmp[2];
 269
 270        /* read from memory */
 271        if (w) {
 272                r = kvmppc_ld(vcpu, &addr, sizeof(u32), tmp, true);
 273                memcpy(&tmp[1], &one, sizeof(u32));
 274        } else {
 275                r = kvmppc_ld(vcpu, &addr, sizeof(u32) * 2, tmp, true);
 276        }
 277        vcpu->arch.paddr_accessed = addr;
 278        if (r < 0) {
 279                kvmppc_inject_pf(vcpu, addr, false);
 280                goto done_load;
 281        } else if ((r == EMULATE_DO_MMIO) && w) {
 282                emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FPR | rs,
 283                                              4, 1);
 284                vcpu->arch.qpr[rs] = tmp[1];
 285                goto done_load;
 286        } else if (r == EMULATE_DO_MMIO) {
 287                emulated = kvmppc_handle_load(vcpu, KVM_MMIO_REG_FQPR | rs,
 288                                              8, 1);
 289                goto done_load;
 290        }
 291
 292        emulated = EMULATE_DONE;
 293
 294        /* put in registers */
 295        kvm_cvt_fd(&tmp[0], &VCPU_FPR(vcpu, rs));
 296        vcpu->arch.qpr[rs] = tmp[1];
 297
 298        dprintk(KERN_INFO "KVM: PSQ_LD [0x%x, 0x%x] at 0x%lx (%d)\n", tmp[0],
 299                          tmp[1], addr, w ? 4 : 8);
 300
 301done_load:
 302        return emulated;
 303}
 304
 305static int kvmppc_emulate_psq_store(struct kvm_vcpu *vcpu,
 306                                    int rs, ulong addr, bool w, int i)
 307{
 308        int emulated = EMULATE_FAIL;
 309        int r;
 310        u32 tmp[2];
 311        int len = w ? sizeof(u32) : sizeof(u64);
 312
 313        kvm_cvt_df(&VCPU_FPR(vcpu, rs), &tmp[0]);
 314        tmp[1] = vcpu->arch.qpr[rs];
 315
 316        r = kvmppc_st(vcpu, &addr, len, tmp, true);
 317        vcpu->arch.paddr_accessed = addr;
 318        if (r < 0) {
 319                kvmppc_inject_pf(vcpu, addr, true);
 320        } else if ((r == EMULATE_DO_MMIO) && w) {
 321                emulated = kvmppc_handle_store(vcpu, tmp[0], 4, 1);
 322        } else if (r == EMULATE_DO_MMIO) {
 323                u64 val = ((u64)tmp[0] << 32) | tmp[1];
 324                emulated = kvmppc_handle_store(vcpu, val, 8, 1);
 325        } else {
 326                emulated = EMULATE_DONE;
 327        }
 328
 329        dprintk(KERN_INFO "KVM: PSQ_ST [0x%x, 0x%x] at 0x%lx (%d)\n",
 330                          tmp[0], tmp[1], addr, len);
 331
 332        return emulated;
 333}
 334
 335/*
 336 * Cuts out inst bits with ordering according to spec.
 337 * That means the leftmost bit is zero. All given bits are included.
 338 */
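     /*
      * For example, inst_get_field(inst, 6, 10) extracts the five-bit
      * RT/FRT field of a D-form instruction, which is how ax_rd is decoded
      * in kvmppc_emulate_paired_single() below.
      */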
 339static inline u32 inst_get_field(u32 inst, int msb, int lsb)
 340{
 341        return kvmppc_get_field(inst, msb + 32, lsb + 32);
 342}
 343
 344static bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst)
 345{
 346        if (!(vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE))
 347                return false;
 348
 349        switch (get_op(inst)) {
 350        case OP_PSQ_L:
 351        case OP_PSQ_LU:
 352        case OP_PSQ_ST:
 353        case OP_PSQ_STU:
 354        case OP_LFS:
 355        case OP_LFSU:
 356        case OP_LFD:
 357        case OP_LFDU:
 358        case OP_STFS:
 359        case OP_STFSU:
 360        case OP_STFD:
 361        case OP_STFDU:
 362                return true;
 363        case 4:
 364                /* X form */
 365                switch (inst_get_field(inst, 21, 30)) {
 366                case OP_4X_PS_CMPU0:
 367                case OP_4X_PSQ_LX:
 368                case OP_4X_PS_CMPO0:
 369                case OP_4X_PSQ_LUX:
 370                case OP_4X_PS_NEG:
 371                case OP_4X_PS_CMPU1:
 372                case OP_4X_PS_MR:
 373                case OP_4X_PS_CMPO1:
 374                case OP_4X_PS_NABS:
 375                case OP_4X_PS_ABS:
 376                case OP_4X_PS_MERGE00:
 377                case OP_4X_PS_MERGE01:
 378                case OP_4X_PS_MERGE10:
 379                case OP_4X_PS_MERGE11:
 380                        return true;
 381                }
 382                /* XW form */
 383                switch (inst_get_field(inst, 25, 30)) {
 384                case OP_4XW_PSQ_STX:
 385                case OP_4XW_PSQ_STUX:
 386                        return true;
 387                }
 388                /* A form */
 389                switch (inst_get_field(inst, 26, 30)) {
 390                case OP_4A_PS_SUM1:
 391                case OP_4A_PS_SUM0:
 392                case OP_4A_PS_MULS0:
 393                case OP_4A_PS_MULS1:
 394                case OP_4A_PS_MADDS0:
 395                case OP_4A_PS_MADDS1:
 396                case OP_4A_PS_DIV:
 397                case OP_4A_PS_SUB:
 398                case OP_4A_PS_ADD:
 399                case OP_4A_PS_SEL:
 400                case OP_4A_PS_RES:
 401                case OP_4A_PS_MUL:
 402                case OP_4A_PS_RSQRTE:
 403                case OP_4A_PS_MSUB:
 404                case OP_4A_PS_MADD:
 405                case OP_4A_PS_NMSUB:
 406                case OP_4A_PS_NMADD:
 407                        return true;
 408                }
 409                break;
 410        case 59:
 411                switch (inst_get_field(inst, 21, 30)) {
 412                case OP_59_FADDS:
 413                case OP_59_FSUBS:
 414                case OP_59_FDIVS:
 415                case OP_59_FRES:
 416                case OP_59_FRSQRTES:
 417                        return true;
 418                }
 419                switch (inst_get_field(inst, 26, 30)) {
 420                case OP_59_FMULS:
 421                case OP_59_FMSUBS:
 422                case OP_59_FMADDS:
 423                case OP_59_FNMSUBS:
 424                case OP_59_FNMADDS:
 425                        return true;
 426                }
 427                break;
 428        case 63:
 429                switch (inst_get_field(inst, 21, 30)) {
 430                case OP_63_MTFSB0:
 431                case OP_63_MTFSB1:
 432                case OP_63_MTFSF:
 433                case OP_63_MTFSFI:
 434                case OP_63_MCRFS:
 435                case OP_63_MFFS:
 436                case OP_63_FCMPU:
 437                case OP_63_FCMPO:
 438                case OP_63_FNEG:
 439                case OP_63_FMR:
 440                case OP_63_FABS:
 441                case OP_63_FRSP:
 442                case OP_63_FDIV:
 443                case OP_63_FADD:
 444                case OP_63_FSUB:
 445                case OP_63_FCTIW:
 446                case OP_63_FCTIWZ:
 447                case OP_63_FRSQRTE:
 448                case OP_63_FCPSGN:
 449                        return true;
 450                }
 451                switch (inst_get_field(inst, 26, 30)) {
 452                case OP_63_FMUL:
 453                case OP_63_FSEL:
 454                case OP_63_FMSUB:
 455                case OP_63_FMADD:
 456                case OP_63_FNMSUB:
 457                case OP_63_FNMADD:
 458                        return true;
 459                }
 460                break;
 461        case 31:
 462                switch (inst_get_field(inst, 21, 30)) {
 463                case OP_31_LFSX:
 464                case OP_31_LFSUX:
 465                case OP_31_LFDX:
 466                case OP_31_LFDUX:
 467                case OP_31_STFSX:
 468                case OP_31_STFSUX:
 469                case OP_31_STFX:
 470                case OP_31_STFUX:
 471                case OP_31_STFIWX:
 472                        return true;
 473                }
 474                break;
 475        }
 476
 477        return false;
 478}
 479
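     /*
      * Decode the displacement field of psq_l/psq_lu/psq_st/psq_stu: bit
      * 0x800 acts as a sign bit that negates the remaining magnitude
      * (rather than two's-complement sign extension).
      */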
 480static int get_d_signext(u32 inst)
 481{
 482        int d = inst & 0x8ff;
 483
 484        if (d & 0x800)
 485                return -(d & 0x7ff);
 486
 487        return (d & 0x7ff);
 488}
 489
 490static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc,
 491                                      int reg_out, int reg_in1, int reg_in2,
 492                                      int reg_in3, int scalar,
 493                                      void (*func)(u64 *fpscr,
 494                                                 u32 *dst, u32 *src1,
 495                                                 u32 *src2, u32 *src3))
 496{
 497        u32 *qpr = vcpu->arch.qpr;
 498        u32 ps0_out;
 499        u32 ps0_in1, ps0_in2, ps0_in3;
 500        u32 ps1_in1, ps1_in2, ps1_in3;
 501
 502        /* RC */
 503        WARN_ON(rc);
 504
 505        /* PS0 */
 506        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);
 507        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
 508        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in3), &ps0_in3);
 509
 510        if (scalar & SCALAR_LOW)
 511                ps0_in2 = qpr[reg_in2];
 512
 513        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2, &ps0_in3);
 514
 515        dprintk(KERN_INFO "PS3 ps0 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
 516                          ps0_in1, ps0_in2, ps0_in3, ps0_out);
 517
 518        if (!(scalar & SCALAR_NO_PS0))
 519                kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 520
 521        /* PS1 */
 522        ps1_in1 = qpr[reg_in1];
 523        ps1_in2 = qpr[reg_in2];
 524        ps1_in3 = qpr[reg_in3];
 525
 526        if (scalar & SCALAR_HIGH)
 527                ps1_in2 = ps0_in2;
 528
 529        if (!(scalar & SCALAR_NO_PS1))
 530                func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in1, &ps1_in2, &ps1_in3);
 531
 532        dprintk(KERN_INFO "PS3 ps1 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
 533                          ps1_in1, ps1_in2, ps1_in3, qpr[reg_out]);
 534
 535        return EMULATE_DONE;
 536}
 537
 538static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc,
 539                                    int reg_out, int reg_in1, int reg_in2,
 540                                    int scalar,
 541                                    void (*func)(u64 *fpscr,
 542                                                 u32 *dst, u32 *src1,
 543                                                 u32 *src2))
 544{
 545        u32 *qpr = vcpu->arch.qpr;
 546        u32 ps0_out;
 547        u32 ps0_in1, ps0_in2;
 548        u32 ps1_out;
 549        u32 ps1_in1, ps1_in2;
 550
 551        /* RC */
 552        WARN_ON(rc);
 553
 554        /* PS0 */
 555        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);
 556
 557        if (scalar & SCALAR_LOW)
 558                ps0_in2 = qpr[reg_in2];
 559        else
 560                kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
 561
 562        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2);
 563
 564        if (!(scalar & SCALAR_NO_PS0)) {
 565                dprintk(KERN_INFO "PS2 ps0 -> f(0x%x, 0x%x) = 0x%x\n",
 566                                  ps0_in1, ps0_in2, ps0_out);
 567
 568                kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 569        }
 570
 571        /* PS1 */
 572        ps1_in1 = qpr[reg_in1];
 573        ps1_in2 = qpr[reg_in2];
 574
 575        if (scalar & SCALAR_HIGH)
 576                ps1_in2 = ps0_in2;
 577
 578        func(&vcpu->arch.fp.fpscr, &ps1_out, &ps1_in1, &ps1_in2);
 579
 580        if (!(scalar & SCALAR_NO_PS1)) {
 581                qpr[reg_out] = ps1_out;
 582
 583                dprintk(KERN_INFO "PS2 ps1 -> f(0x%x, 0x%x) = 0x%x\n",
 584                                  ps1_in1, ps1_in2, qpr[reg_out]);
 585        }
 586
 587        return EMULATE_DONE;
 588}
 589
 590static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc,
 591                                    int reg_out, int reg_in,
 592                                    void (*func)(u64 *t,
 593                                                 u32 *dst, u32 *src1))
 594{
 595        u32 *qpr = vcpu->arch.qpr;
 596        u32 ps0_out, ps0_in;
 597        u32 ps1_in;
 598
 599        /* RC */
 600        WARN_ON(rc);
 601
 602        /* PS0 */
 603        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in), &ps0_in);
 604        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in);
 605
 606        dprintk(KERN_INFO "PS1 ps0 -> f(0x%x) = 0x%x\n",
 607                          ps0_in, ps0_out);
 608
 609        kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 610
 611        /* PS1 */
 612        ps1_in = qpr[reg_in];
 613        func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in);
 614
 615        dprintk(KERN_INFO "PS1 ps1 -> f(0x%x) = 0x%x\n",
 616                          ps1_in, qpr[reg_out]);
 617
 618        return EMULATE_DONE;
 619}
 620
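     /*
      * Top-level emulation entry point: fetch the last guest instruction,
      * decode its register fields, return EMULATE_FAIL for anything that
      * is not a paired-single capable instruction, raise an FP-unavailable
      * interrupt if MSR_FP is clear, and otherwise dispatch on the primary
      * opcode with the host FPU enabled.
      */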
 621int kvmppc_emulate_paired_single(struct kvm_vcpu *vcpu)
 622{
 623        u32 inst;
 624        enum emulation_result emulated = EMULATE_DONE;
 625        int ax_rd, ax_ra, ax_rb, ax_rc;
 626        short full_d;
 627        u64 *fpr_d, *fpr_a, *fpr_b, *fpr_c;
 628
 629        bool rcomp;
 630        u32 cr;
 631#ifdef DEBUG
 632        int i;
 633#endif
 634
 635        emulated = kvmppc_get_last_inst(vcpu, INST_GENERIC, &inst);
 636        if (emulated != EMULATE_DONE)
 637                return emulated;
 638
 639        ax_rd = inst_get_field(inst, 6, 10);
 640        ax_ra = inst_get_field(inst, 11, 15);
 641        ax_rb = inst_get_field(inst, 16, 20);
 642        ax_rc = inst_get_field(inst, 21, 25);
 643        full_d = inst_get_field(inst, 16, 31);
 644
 645        fpr_d = &VCPU_FPR(vcpu, ax_rd);
 646        fpr_a = &VCPU_FPR(vcpu, ax_ra);
 647        fpr_b = &VCPU_FPR(vcpu, ax_rb);
 648        fpr_c = &VCPU_FPR(vcpu, ax_rc);
 649
 650        rcomp = (inst & 1) ? true : false;
 651        cr = kvmppc_get_cr(vcpu);
 652
 653        if (!kvmppc_inst_is_paired_single(vcpu, inst))
 654                return EMULATE_FAIL;
 655
 656        if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {
 657                kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL);
 658                return EMULATE_AGAIN;
 659        }
 660
 661        kvmppc_giveup_ext(vcpu, MSR_FP);
 662        preempt_disable();
 663        enable_kernel_fp();
 664        /* Do we need to clear FE0 / FE1 here? Don't think so. */
 665
 666#ifdef DEBUG
 667        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
 668                u32 f;
 669                kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
 670                dprintk(KERN_INFO "FPR[%d] = 0x%x / 0x%llx    QPR[%d] = 0x%x\n",
 671                        i, f, VCPU_FPR(vcpu, i), i, vcpu->arch.qpr[i]);
 672        }
 673#endif
 674
 675        switch (get_op(inst)) {
 676        case OP_PSQ_L:
 677        {
 678                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 679                bool w = inst_get_field(inst, 16, 16) ? true : false;
 680                int i = inst_get_field(inst, 17, 19);
 681
 682                addr += get_d_signext(inst);
 683                emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 684                break;
 685        }
 686        case OP_PSQ_LU:
 687        {
 688                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 689                bool w = inst_get_field(inst, 16, 16) ? true : false;
 690                int i = inst_get_field(inst, 17, 19);
 691
 692                addr += get_d_signext(inst);
 693                emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 694
 695                if (emulated == EMULATE_DONE)
 696                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 697                break;
 698        }
 699        case OP_PSQ_ST:
 700        {
 701                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 702                bool w = inst_get_field(inst, 16, 16) ? true : false;
 703                int i = inst_get_field(inst, 17, 19);
 704
 705                addr += get_d_signext(inst);
 706                emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 707                break;
 708        }
 709        case OP_PSQ_STU:
 710        {
 711                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 712                bool w = inst_get_field(inst, 16, 16) ? true : false;
 713                int i = inst_get_field(inst, 17, 19);
 714
 715                addr += get_d_signext(inst);
 716                emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 717
 718                if (emulated == EMULATE_DONE)
 719                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 720                break;
 721        }
 722        case 4:
 723                /* X form */
 724                switch (inst_get_field(inst, 21, 30)) {
 725                case OP_4X_PS_CMPU0:
 726                        /* XXX */
 727                        emulated = EMULATE_FAIL;
 728                        break;
 729                case OP_4X_PSQ_LX:
 730                {
 731                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 732                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 733                        int i = inst_get_field(inst, 22, 24);
 734
 735                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 736                        emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 737                        break;
 738                }
 739                case OP_4X_PS_CMPO0:
 740                        /* XXX */
 741                        emulated = EMULATE_FAIL;
 742                        break;
 743                case OP_4X_PSQ_LUX:
 744                {
 745                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 746                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 747                        int i = inst_get_field(inst, 22, 24);
 748
 749                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 750                        emulated = kvmppc_emulate_psq_load(vcpu, ax_rd, addr, w, i);
 751
 752                        if (emulated == EMULATE_DONE)
 753                                kvmppc_set_gpr(vcpu, ax_ra, addr);
 754                        break;
 755                }
 756                case OP_4X_PS_NEG:
 757                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 758                        VCPU_FPR(vcpu, ax_rd) ^= 0x8000000000000000ULL;
 759                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 760                        vcpu->arch.qpr[ax_rd] ^= 0x80000000;
 761                        break;
 762                case OP_4X_PS_CMPU1:
 763                        /* XXX */
 764                        emulated = EMULATE_FAIL;
 765                        break;
 766                case OP_4X_PS_MR:
 767                        WARN_ON(rcomp);
 768                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 769                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 770                        break;
 771                case OP_4X_PS_CMPO1:
 772                        /* XXX */
 773                        emulated = EMULATE_FAIL;
 774                        break;
 775                case OP_4X_PS_NABS:
 776                        WARN_ON(rcomp);
 777                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 778                        VCPU_FPR(vcpu, ax_rd) |= 0x8000000000000000ULL;
 779                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 780                        vcpu->arch.qpr[ax_rd] |= 0x80000000;
 781                        break;
 782                case OP_4X_PS_ABS:
 783                        WARN_ON(rcomp);
 784                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 785                        VCPU_FPR(vcpu, ax_rd) &= ~0x8000000000000000ULL;
 786                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 787                        vcpu->arch.qpr[ax_rd] &= ~0x80000000;
 788                        break;
 789                case OP_4X_PS_MERGE00:
 790                        WARN_ON(rcomp);
 791                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
 792                        /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
 793                        kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
 794                                   &vcpu->arch.qpr[ax_rd]);
 795                        break;
 796                case OP_4X_PS_MERGE01:
 797                        WARN_ON(rcomp);
 798                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
 799                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 800                        break;
 801                case OP_4X_PS_MERGE10:
 802                        WARN_ON(rcomp);
 803                        /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
 804                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
 805                                   &VCPU_FPR(vcpu, ax_rd));
 806                        /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
 807                        kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
 808                                   &vcpu->arch.qpr[ax_rd]);
 809                        break;
 810                case OP_4X_PS_MERGE11:
 811                        WARN_ON(rcomp);
 812                        /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
 813                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
 814                                   &VCPU_FPR(vcpu, ax_rd));
 815                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 816                        break;
 817                }
 818                /* XW form */
 819                switch (inst_get_field(inst, 25, 30)) {
 820                case OP_4XW_PSQ_STX:
 821                {
 822                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 823                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 824                        int i = inst_get_field(inst, 22, 24);
 825
 826                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 827                        emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 828                        break;
 829                }
 830                case OP_4XW_PSQ_STUX:
 831                {
 832                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 833                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 834                        int i = inst_get_field(inst, 22, 24);
 835
 836                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 837                        emulated = kvmppc_emulate_psq_store(vcpu, ax_rd, addr, w, i);
 838
 839                        if (emulated == EMULATE_DONE)
 840                                kvmppc_set_gpr(vcpu, ax_ra, addr);
 841                        break;
 842                }
 843                }
 844                /* A form */
 845                switch (inst_get_field(inst, 26, 30)) {
 846                case OP_4A_PS_SUM1:
 847                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 848                                        ax_rb, ax_ra, SCALAR_NO_PS0 | SCALAR_HIGH, fps_fadds);
 849                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rc);
 850                        break;
 851                case OP_4A_PS_SUM0:
 852                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 853                                        ax_ra, ax_rb, SCALAR_NO_PS1 | SCALAR_LOW, fps_fadds);
 854                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rc];
 855                        break;
 856                case OP_4A_PS_MULS0:
 857                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 858                                        ax_ra, ax_rc, SCALAR_HIGH, fps_fmuls);
 859                        break;
 860                case OP_4A_PS_MULS1:
 861                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 862                                        ax_ra, ax_rc, SCALAR_LOW, fps_fmuls);
 863                        break;
 864                case OP_4A_PS_MADDS0:
 865                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 866                                        ax_ra, ax_rc, ax_rb, SCALAR_HIGH, fps_fmadds);
 867                        break;
 868                case OP_4A_PS_MADDS1:
 869                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 870                                        ax_ra, ax_rc, ax_rb, SCALAR_LOW, fps_fmadds);
 871                        break;
 872                case OP_4A_PS_DIV:
 873                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 874                                        ax_ra, ax_rb, SCALAR_NONE, fps_fdivs);
 875                        break;
 876                case OP_4A_PS_SUB:
 877                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 878                                        ax_ra, ax_rb, SCALAR_NONE, fps_fsubs);
 879                        break;
 880                case OP_4A_PS_ADD:
 881                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 882                                        ax_ra, ax_rb, SCALAR_NONE, fps_fadds);
 883                        break;
 884                case OP_4A_PS_SEL:
 885                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 886                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fsel);
 887                        break;
 888                case OP_4A_PS_RES:
 889                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
 890                                        ax_rb, fps_fres);
 891                        break;
 892                case OP_4A_PS_MUL:
 893                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 894                                        ax_ra, ax_rc, SCALAR_NONE, fps_fmuls);
 895                        break;
 896                case OP_4A_PS_RSQRTE:
 897                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
 898                                        ax_rb, fps_frsqrte);
 899                        break;
 900                case OP_4A_PS_MSUB:
 901                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 902                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmsubs);
 903                        break;
 904                case OP_4A_PS_MADD:
 905                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 906                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmadds);
 907                        break;
 908                case OP_4A_PS_NMSUB:
 909                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 910                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmsubs);
 911                        break;
 912                case OP_4A_PS_NMADD:
 913                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 914                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmadds);
 915                        break;
 916                }
 917                break;
 918
 919        /* Real FPU operations */
 920
 921        case OP_LFS:
 922        {
 923                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 924
 925                emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 926                                                   FPU_LS_SINGLE);
 927                break;
 928        }
 929        case OP_LFSU:
 930        {
 931                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 932
 933                emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 934                                                   FPU_LS_SINGLE);
 935
 936                if (emulated == EMULATE_DONE)
 937                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 938                break;
 939        }
 940        case OP_LFD:
 941        {
 942                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 943
 944                emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 945                                                   FPU_LS_DOUBLE);
 946                break;
 947        }
 948        case OP_LFDU:
 949        {
 950                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 951
 952                emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd, addr,
 953                                                   FPU_LS_DOUBLE);
 954
 955                if (emulated == EMULATE_DONE)
 956                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 957                break;
 958        }
 959        case OP_STFS:
 960        {
 961                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 962
 963                emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 964                                                    FPU_LS_SINGLE);
 965                break;
 966        }
 967        case OP_STFSU:
 968        {
 969                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 970
 971                emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 972                                                    FPU_LS_SINGLE);
 973
 974                if (emulated == EMULATE_DONE)
 975                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 976                break;
 977        }
 978        case OP_STFD:
 979        {
 980                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 981
 982                emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 983                                                    FPU_LS_DOUBLE);
 984                break;
 985        }
 986        case OP_STFDU:
 987        {
 988                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 989
 990                emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd, addr,
 991                                                    FPU_LS_DOUBLE);
 992
 993                if (emulated == EMULATE_DONE)
 994                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 995                break;
 996        }
 997        case 31:
 998                switch (inst_get_field(inst, 21, 30)) {
 999                case OP_31_LFSX:
1000                {
1001                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
1002
1003                        addr += kvmppc_get_gpr(vcpu, ax_rb);
1004                        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
1005                                                           addr, FPU_LS_SINGLE);
1006                        break;
1007                }
1008                case OP_31_LFSUX:
1009                {
1010                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1011                                     kvmppc_get_gpr(vcpu, ax_rb);
1012
1013                        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
1014                                                           addr, FPU_LS_SINGLE);
1015
1016                        if (emulated == EMULATE_DONE)
1017                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1018                        break;
1019                }
1020                case OP_31_LFDX:
1021                {
1022                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1023                                     kvmppc_get_gpr(vcpu, ax_rb);
1024
1025                        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
1026                                                           addr, FPU_LS_DOUBLE);
1027                        break;
1028                }
1029                case OP_31_LFDUX:
1030                {
1031                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1032                                     kvmppc_get_gpr(vcpu, ax_rb);
1033
1034                        emulated = kvmppc_emulate_fpr_load(vcpu, ax_rd,
1035                                                           addr, FPU_LS_DOUBLE);
1036
1037                        if (emulated == EMULATE_DONE)
1038                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1039                        break;
1040                }
1041                case OP_31_STFSX:
1042                {
1043                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1044                                     kvmppc_get_gpr(vcpu, ax_rb);
1045
1046                        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
1047                                                            addr, FPU_LS_SINGLE);
1048                        break;
1049                }
1050                case OP_31_STFSUX:
1051                {
1052                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1053                                     kvmppc_get_gpr(vcpu, ax_rb);
1054
1055                        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
1056                                                            addr, FPU_LS_SINGLE);
1057
1058                        if (emulated == EMULATE_DONE)
1059                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1060                        break;
1061                }
1062                case OP_31_STFX:
1063                {
1064                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1065                                     kvmppc_get_gpr(vcpu, ax_rb);
1066
1067                        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
1068                                                            addr, FPU_LS_DOUBLE);
1069                        break;
1070                }
1071                case OP_31_STFUX:
1072                {
1073                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1074                                     kvmppc_get_gpr(vcpu, ax_rb);
1075
1076                        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
1077                                                            addr, FPU_LS_DOUBLE);
1078
1079                        if (emulated == EMULATE_DONE)
1080                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1081                        break;
1082                }
1083                case OP_31_STFIWX:
1084                {
1085                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1086                                     kvmppc_get_gpr(vcpu, ax_rb);
1087
1088                        emulated = kvmppc_emulate_fpr_store(vcpu, ax_rd,
1089                                                            addr,
1090                                                            FPU_LS_SINGLE_LOW);
1091                        break;
1092                }
1093                        break;
1094                }
1095                break;
1096        case 59:
1097                switch (inst_get_field(inst, 21, 30)) {
1098                case OP_59_FADDS:
1099                        fpd_fadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1100                        kvmppc_sync_qpr(vcpu, ax_rd);
1101                        break;
1102                case OP_59_FSUBS:
1103                        fpd_fsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1104                        kvmppc_sync_qpr(vcpu, ax_rd);
1105                        break;
1106                case OP_59_FDIVS:
1107                        fpd_fdivs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1108                        kvmppc_sync_qpr(vcpu, ax_rd);
1109                        break;
1110                case OP_59_FRES:
1111                        fpd_fres(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1112                        kvmppc_sync_qpr(vcpu, ax_rd);
1113                        break;
1114                case OP_59_FRSQRTES:
1115                        fpd_frsqrtes(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1116                        kvmppc_sync_qpr(vcpu, ax_rd);
1117                        break;
1118                }
1119                switch (inst_get_field(inst, 26, 30)) {
1120                case OP_59_FMULS:
1121                        fpd_fmuls(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1122                        kvmppc_sync_qpr(vcpu, ax_rd);
1123                        break;
1124                case OP_59_FMSUBS:
1125                        fpd_fmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1126                        kvmppc_sync_qpr(vcpu, ax_rd);
1127                        break;
1128                case OP_59_FMADDS:
1129                        fpd_fmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1130                        kvmppc_sync_qpr(vcpu, ax_rd);
1131                        break;
1132                case OP_59_FNMSUBS:
1133                        fpd_fnmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1134                        kvmppc_sync_qpr(vcpu, ax_rd);
1135                        break;
1136                case OP_59_FNMADDS:
1137                        fpd_fnmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1138                        kvmppc_sync_qpr(vcpu, ax_rd);
1139                        break;
1140                }
1141                break;
1142        case 63:
1143                switch (inst_get_field(inst, 21, 30)) {
1144                case OP_63_MTFSB0:
1145                case OP_63_MTFSB1:
1146                case OP_63_MCRFS:
1147                case OP_63_MTFSFI:
1148                        /* XXX need to implement */
1149                        break;
1150                case OP_63_MFFS:
1151                        /* XXX missing CR */
1152                        *fpr_d = vcpu->arch.fp.fpscr;
1153                        break;
1154                case OP_63_MTFSF:
1155                        /* XXX missing fm bits */
1156                        /* XXX missing CR */
1157                        vcpu->arch.fp.fpscr = *fpr_b;
1158                        break;
1159                case OP_63_FCMPU:
1160                {
1161                        u32 tmp_cr;
1162                        u32 cr0_mask = 0xf0000000;
1163                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1164
1165                        fpd_fcmpu(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
1166                        cr &= ~(cr0_mask >> cr_shift);
1167                        cr |= (cr & cr0_mask) >> cr_shift;
1168                        break;
1169                }
1170                case OP_63_FCMPO:
1171                {
1172                        u32 tmp_cr;
1173                        u32 cr0_mask = 0xf0000000;
1174                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1175
1176                        fpd_fcmpo(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
1177                        cr &= ~(cr0_mask >> cr_shift);
1178                        cr |= (cr & cr0_mask) >> cr_shift;
1179                        break;
1180                }
1181                case OP_63_FNEG:
1182                        fpd_fneg(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1183                        break;
1184                case OP_63_FMR:
1185                        *fpr_d = *fpr_b;
1186                        break;
1187                case OP_63_FABS:
1188                        fpd_fabs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1189                        break;
1190                case OP_63_FCPSGN:
1191                        fpd_fcpsgn(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1192                        break;
1193                case OP_63_FDIV:
1194                        fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1195                        break;
1196                case OP_63_FADD:
1197                        fpd_fadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1198                        break;
1199                case OP_63_FSUB:
1200                        fpd_fsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1201                        break;
1202                case OP_63_FCTIW:
1203                        fpd_fctiw(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1204                        break;
1205                case OP_63_FCTIWZ:
1206                        fpd_fctiwz(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1207                        break;
1208                case OP_63_FRSP:
1209                        fpd_frsp(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1210                        kvmppc_sync_qpr(vcpu, ax_rd);
1211                        break;
1212                case OP_63_FRSQRTE:
1213                {
1214                        double one = 1.0f;
1215
1216                        /* fD = sqrt(fB) */
1217                        fpd_fsqrt(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1218                        /* fD = 1.0f / fD */
1219                        fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, (u64*)&one, fpr_d);
1220                        break;
1221                }
1222                }
1223                switch (inst_get_field(inst, 26, 30)) {
1224                case OP_63_FMUL:
1225                        fpd_fmul(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1226                        break;
1227                case OP_63_FSEL:
1228                        fpd_fsel(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1229                        break;
1230                case OP_63_FMSUB:
1231                        fpd_fmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1232                        break;
1233                case OP_63_FMADD:
1234                        fpd_fmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1235                        break;
1236                case OP_63_FNMSUB:
1237                        fpd_fnmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1238                        break;
1239                case OP_63_FNMADD:
1240                        fpd_fnmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1241                        break;
1242                }
1243                break;
1244        }
1245
1246#ifdef DEBUG
1247        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
1248                u32 f;
1249                kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
1250                dprintk(KERN_INFO "FPR[%d] = 0x%x\n", i, f);
1251        }
1252#endif
1253
1254        if (rcomp)
1255                kvmppc_set_cr(vcpu, cr);
1256
1257        disable_kernel_fp();
1258        preempt_enable();
1259
1260        return emulated;
1261}
1262