linux/arch/powerpc/kvm/book3s_paired_singles.c
   1/*
   2 * This program is free software; you can redistribute it and/or modify
   3 * it under the terms of the GNU General Public License, version 2, as
   4 * published by the Free Software Foundation.
   5 *
   6 * This program is distributed in the hope that it will be useful,
   7 * but WITHOUT ANY WARRANTY; without even the implied warranty of
   8 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   9 * GNU General Public License for more details.
  10 *
  11 * You should have received a copy of the GNU General Public License
  12 * along with this program; if not, write to the Free Software
  13 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
  14 *
  15 * Copyright Novell Inc 2010
  16 *
  17 * Authors: Alexander Graf <agraf@suse.de>
  18 */
  19
  20#include <asm/kvm.h>
  21#include <asm/kvm_ppc.h>
  22#include <asm/disassemble.h>
  23#include <asm/kvm_book3s.h>
  24#include <asm/kvm_fpu.h>
  25#include <asm/reg.h>
  26#include <asm/cacheflush.h>
  27#include <asm/switch_to.h>
  28#include <linux/vmalloc.h>
  29
  30/* #define DEBUG */
  31
  32#ifdef DEBUG
  33#define dprintk printk
  34#else
   35#define dprintk(...) do { } while (0)
  36#endif
  37
  38#define OP_LFS                  48
  39#define OP_LFSU                 49
  40#define OP_LFD                  50
  41#define OP_LFDU                 51
  42#define OP_STFS                 52
  43#define OP_STFSU                53
  44#define OP_STFD                 54
  45#define OP_STFDU                55
  46#define OP_PSQ_L                56
  47#define OP_PSQ_LU               57
  48#define OP_PSQ_ST               60
  49#define OP_PSQ_STU              61
  50
  51#define OP_31_LFSX              535
  52#define OP_31_LFSUX             567
  53#define OP_31_LFDX              599
  54#define OP_31_LFDUX             631
  55#define OP_31_STFSX             663
  56#define OP_31_STFSUX            695
  57#define OP_31_STFX              727
  58#define OP_31_STFUX             759
  59#define OP_31_LWIZX             887
  60#define OP_31_STFIWX            983
  61
  62#define OP_59_FADDS             21
  63#define OP_59_FSUBS             20
  64#define OP_59_FSQRTS            22
  65#define OP_59_FDIVS             18
  66#define OP_59_FRES              24
  67#define OP_59_FMULS             25
  68#define OP_59_FRSQRTES          26
  69#define OP_59_FMSUBS            28
  70#define OP_59_FMADDS            29
  71#define OP_59_FNMSUBS           30
  72#define OP_59_FNMADDS           31
  73
  74#define OP_63_FCMPU             0
  75#define OP_63_FCPSGN            8
  76#define OP_63_FRSP              12
  77#define OP_63_FCTIW             14
  78#define OP_63_FCTIWZ            15
  79#define OP_63_FDIV              18
  80#define OP_63_FADD              21
  81#define OP_63_FSQRT             22
  82#define OP_63_FSEL              23
  83#define OP_63_FRE               24
  84#define OP_63_FMUL              25
  85#define OP_63_FRSQRTE           26
  86#define OP_63_FMSUB             28
  87#define OP_63_FMADD             29
  88#define OP_63_FNMSUB            30
  89#define OP_63_FNMADD            31
  90#define OP_63_FCMPO             32
  91#define OP_63_MTFSB1            38 // XXX
  92#define OP_63_FSUB              20
  93#define OP_63_FNEG              40
  94#define OP_63_MCRFS             64
  95#define OP_63_MTFSB0            70
  96#define OP_63_FMR               72
  97#define OP_63_MTFSFI            134
  98#define OP_63_FABS              264
  99#define OP_63_MFFS              583
 100#define OP_63_MTFSF             711
 101
 102#define OP_4X_PS_CMPU0          0
 103#define OP_4X_PSQ_LX            6
 104#define OP_4XW_PSQ_STX          7
 105#define OP_4A_PS_SUM0           10
 106#define OP_4A_PS_SUM1           11
 107#define OP_4A_PS_MULS0          12
 108#define OP_4A_PS_MULS1          13
 109#define OP_4A_PS_MADDS0         14
 110#define OP_4A_PS_MADDS1         15
 111#define OP_4A_PS_DIV            18
 112#define OP_4A_PS_SUB            20
 113#define OP_4A_PS_ADD            21
 114#define OP_4A_PS_SEL            23
 115#define OP_4A_PS_RES            24
 116#define OP_4A_PS_MUL            25
 117#define OP_4A_PS_RSQRTE         26
 118#define OP_4A_PS_MSUB           28
 119#define OP_4A_PS_MADD           29
 120#define OP_4A_PS_NMSUB          30
 121#define OP_4A_PS_NMADD          31
 122#define OP_4X_PS_CMPO0          32
 123#define OP_4X_PSQ_LUX           38
 124#define OP_4XW_PSQ_STUX         39
 125#define OP_4X_PS_NEG            40
 126#define OP_4X_PS_CMPU1          64
 127#define OP_4X_PS_MR             72
 128#define OP_4X_PS_CMPO1          96
 129#define OP_4X_PS_NABS           136
 130#define OP_4X_PS_ABS            264
 131#define OP_4X_PS_MERGE00        528
 132#define OP_4X_PS_MERGE01        560
 133#define OP_4X_PS_MERGE10        592
 134#define OP_4X_PS_MERGE11        624
 135
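/*
 * Flags for the kvmppc_ps_*_in() helpers below.  SCALAR_HIGH/SCALAR_LOW
 * make both slots use the high (PS0) or low (PS1) half of the second
 * operand, while SCALAR_NO_PS0/SCALAR_NO_PS1 suppress the write-back of
 * the corresponding result slot.
 */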
 136#define SCALAR_NONE             0
 137#define SCALAR_HIGH             (1 << 0)
 138#define SCALAR_LOW              (1 << 1)
 139#define SCALAR_NO_PS0           (1 << 2)
 140#define SCALAR_NO_PS1           (1 << 3)
 141
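/*
 * Field layout of the Graphics Quantization Registers (GQRs) that
 * parameterize psq_l/psq_st: a type and a scale each for stores
 * (quantization) and for loads (dequantization).  The load/store
 * helpers below move raw values and do not apply this conversion
 * themselves.
 */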
 142#define GQR_ST_TYPE_MASK        0x00000007
 143#define GQR_ST_TYPE_SHIFT       0
 144#define GQR_ST_SCALE_MASK       0x00003f00
 145#define GQR_ST_SCALE_SHIFT      8
 146#define GQR_LD_TYPE_MASK        0x00070000
 147#define GQR_LD_TYPE_SHIFT       16
 148#define GQR_LD_SCALE_MASK       0x3f000000
 149#define GQR_LD_SCALE_SHIFT      24
 150
 151#define GQR_QUANTIZE_FLOAT      0
 152#define GQR_QUANTIZE_U8         4
 153#define GQR_QUANTIZE_U16        5
 154#define GQR_QUANTIZE_S8         6
 155#define GQR_QUANTIZE_S16        7
 156
 157#define FPU_LS_SINGLE           0
 158#define FPU_LS_DOUBLE           1
 159#define FPU_LS_SINGLE_LOW       2
 160
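/*
 * Mirror the double-precision FPR value into the register's QPR (PS1)
 * shadow as a single-precision value.  Used after single-precision FP
 * instructions so that PS1 follows the scalar result.
 */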
 161static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt)
 162{
 163        kvm_cvt_df(&VCPU_FPR(vcpu, rt), &vcpu->arch.qpr[rt]);
 164}
 165
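/*
 * Deliver a Data Storage Interrupt for a faulting emulated access:
 * clear the relevant MSR bits, set DAR to the faulting address, flag
 * DSISR as a page fault (and as a store where appropriate) and queue
 * the DSI for injection into the guest.
 */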
 166static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store)
 167{
 168        u32 dsisr;
 169        u64 msr = kvmppc_get_msr(vcpu);
 170
 171        msr = kvmppc_set_field(msr, 33, 36, 0);
 172        msr = kvmppc_set_field(msr, 42, 47, 0);
 173        kvmppc_set_msr(vcpu, msr);
 174        kvmppc_set_dar(vcpu, eaddr);
 175        /* Page Fault */
 176        dsisr = kvmppc_set_field(0, 33, 33, 1);
 177        if (is_store)
 178                dsisr = kvmppc_set_field(dsisr, 38, 38, 1);
 179        kvmppc_set_dsisr(vcpu, dsisr);
 180        kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
 181}
 182
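/*
 * Emulate a floating point load (lfs/lfd and their update/indexed
 * forms): read through the guest MMU, fall back to MMIO handling when
 * necessary, convert single-precision data to the double format kept in
 * the FPR and mirror it into the QPR shadow.
 */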
 183static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
 184                                   int rs, ulong addr, int ls_type)
 185{
 186        int emulated = EMULATE_FAIL;
 187        int r;
 188        char tmp[8];
 189        int len = sizeof(u32);
 190
 191        if (ls_type == FPU_LS_DOUBLE)
 192                len = sizeof(u64);
 193
 194        /* read from memory */
 195        r = kvmppc_ld(vcpu, &addr, len, tmp, true);
 196        vcpu->arch.paddr_accessed = addr;
 197
 198        if (r < 0) {
 199                kvmppc_inject_pf(vcpu, addr, false);
 200                goto done_load;
 201        } else if (r == EMULATE_DO_MMIO) {
 202                emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs,
 203                                              len, 1);
 204                goto done_load;
 205        }
 206
 207        emulated = EMULATE_DONE;
 208
 209        /* put in registers */
 210        switch (ls_type) {
 211        case FPU_LS_SINGLE:
 212                kvm_cvt_fd((u32*)tmp, &VCPU_FPR(vcpu, rs));
 213                vcpu->arch.qpr[rs] = *((u32*)tmp);
 214                break;
 215        case FPU_LS_DOUBLE:
 216                VCPU_FPR(vcpu, rs) = *((u64*)tmp);
 217                break;
 218        }
 219
 220        dprintk(KERN_INFO "KVM: FPR_LD [0x%llx] at 0x%lx (%d)\n", *(u64*)tmp,
 221                          addr, len);
 222
 223done_load:
 224        return emulated;
 225}
 226
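/*
 * Emulate a floating point store (stfs/stfd/stfiwx and their variants):
 * convert the FPR to the requested memory format, write through the
 * guest MMU and fall back to MMIO handling when the target is not
 * backed by guest RAM.
 */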
 227static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
 228                                    int rs, ulong addr, int ls_type)
 229{
 230        int emulated = EMULATE_FAIL;
 231        int r;
 232        char tmp[8];
 233        u64 val;
 234        int len;
 235
 236        switch (ls_type) {
 237        case FPU_LS_SINGLE:
 238                kvm_cvt_df(&VCPU_FPR(vcpu, rs), (u32*)tmp);
 239                val = *((u32*)tmp);
 240                len = sizeof(u32);
 241                break;
 242        case FPU_LS_SINGLE_LOW:
 243                *((u32*)tmp) = VCPU_FPR(vcpu, rs);
 244                val = VCPU_FPR(vcpu, rs) & 0xffffffff;
 245                len = sizeof(u32);
 246                break;
 247        case FPU_LS_DOUBLE:
 248                *((u64*)tmp) = VCPU_FPR(vcpu, rs);
 249                val = VCPU_FPR(vcpu, rs);
 250                len = sizeof(u64);
 251                break;
 252        default:
 253                val = 0;
 254                len = 0;
 255        }
 256
 257        r = kvmppc_st(vcpu, &addr, len, tmp, true);
 258        vcpu->arch.paddr_accessed = addr;
 259        if (r < 0) {
 260                kvmppc_inject_pf(vcpu, addr, true);
 261        } else if (r == EMULATE_DO_MMIO) {
 262                emulated = kvmppc_handle_store(run, vcpu, val, len, 1);
 263        } else {
 264                emulated = EMULATE_DONE;
 265        }
 266
 267        dprintk(KERN_INFO "KVM: FPR_ST [0x%llx] at 0x%lx (%d)\n",
 268                          val, addr, len);
 269
 270        return emulated;
 271}
 272
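/*
 * Emulate a paired-single quantized load (psq_l and friends): load one
 * 32-bit value into PS0 with PS1 forced to 1.0 when w is set, or two
 * 32-bit values into PS0/PS1 otherwise.  The GQR index i is accepted
 * but no dequantization is applied here.
 */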
 273static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
 274                                   int rs, ulong addr, bool w, int i)
 275{
 276        int emulated = EMULATE_FAIL;
 277        int r;
 278        float one = 1.0;
 279        u32 tmp[2];
 280
 281        /* read from memory */
 282        if (w) {
 283                r = kvmppc_ld(vcpu, &addr, sizeof(u32), tmp, true);
 284                memcpy(&tmp[1], &one, sizeof(u32));
 285        } else {
 286                r = kvmppc_ld(vcpu, &addr, sizeof(u32) * 2, tmp, true);
 287        }
 288        vcpu->arch.paddr_accessed = addr;
 289        if (r < 0) {
 290                kvmppc_inject_pf(vcpu, addr, false);
 291                goto done_load;
 292        } else if ((r == EMULATE_DO_MMIO) && w) {
 293                emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs,
 294                                              4, 1);
 295                vcpu->arch.qpr[rs] = tmp[1];
 296                goto done_load;
 297        } else if (r == EMULATE_DO_MMIO) {
 298                emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FQPR | rs,
 299                                              8, 1);
 300                goto done_load;
 301        }
 302
 303        emulated = EMULATE_DONE;
 304
 305        /* put in registers */
 306        kvm_cvt_fd(&tmp[0], &VCPU_FPR(vcpu, rs));
 307        vcpu->arch.qpr[rs] = tmp[1];
 308
 309        dprintk(KERN_INFO "KVM: PSQ_LD [0x%x, 0x%x] at 0x%lx (%d)\n", tmp[0],
 310                          tmp[1], addr, w ? 4 : 8);
 311
 312done_load:
 313        return emulated;
 314}
 315
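/*
 * Emulate a paired-single quantized store (psq_st and friends): store
 * PS0 as a single 32-bit value when w is set, or PS0 and PS1 as two
 * consecutive 32-bit values otherwise.  As on the load side, the GQR
 * index i is not used for quantization here.
 */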
 316static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
 317                                    int rs, ulong addr, bool w, int i)
 318{
 319        int emulated = EMULATE_FAIL;
 320        int r;
 321        u32 tmp[2];
 322        int len = w ? sizeof(u32) : sizeof(u64);
 323
 324        kvm_cvt_df(&VCPU_FPR(vcpu, rs), &tmp[0]);
 325        tmp[1] = vcpu->arch.qpr[rs];
 326
 327        r = kvmppc_st(vcpu, &addr, len, tmp, true);
 328        vcpu->arch.paddr_accessed = addr;
 329        if (r < 0) {
 330                kvmppc_inject_pf(vcpu, addr, true);
 331        } else if ((r == EMULATE_DO_MMIO) && w) {
 332                emulated = kvmppc_handle_store(run, vcpu, tmp[0], 4, 1);
 333        } else if (r == EMULATE_DO_MMIO) {
 334                u64 val = ((u64)tmp[0] << 32) | tmp[1];
 335                emulated = kvmppc_handle_store(run, vcpu, val, 8, 1);
 336        } else {
 337                emulated = EMULATE_DONE;
 338        }
 339
 340        dprintk(KERN_INFO "KVM: PSQ_ST [0x%x, 0x%x] at 0x%lx (%d)\n",
 341                          tmp[0], tmp[1], addr, len);
 342
 343        return emulated;
 344}
 345
 346/*
  347 * Extracts an instruction field using the ISA's big-endian bit numbering,
  348 * where bit 0 is the leftmost (most significant) bit; msb and lsb are both included.
 349 */
 350static inline u32 inst_get_field(u32 inst, int msb, int lsb)
 351{
 352        return kvmppc_get_field(inst, msb + 32, lsb + 32);
 353}
 354
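/*
 * Filter deciding whether this emulator handles an instruction at all:
 * only when the vcpu is in paired-single mode, and only for the FP
 * load/store opcodes, the opcode 4 paired-single forms and the opcode
 * 31/59/63 FP forms listed below.
 */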
 355static bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst)
 356{
 357        if (!(vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE))
 358                return false;
 359
 360        switch (get_op(inst)) {
 361        case OP_PSQ_L:
 362        case OP_PSQ_LU:
 363        case OP_PSQ_ST:
 364        case OP_PSQ_STU:
 365        case OP_LFS:
 366        case OP_LFSU:
 367        case OP_LFD:
 368        case OP_LFDU:
 369        case OP_STFS:
 370        case OP_STFSU:
 371        case OP_STFD:
 372        case OP_STFDU:
 373                return true;
 374        case 4:
 375                /* X form */
 376                switch (inst_get_field(inst, 21, 30)) {
 377                case OP_4X_PS_CMPU0:
 378                case OP_4X_PSQ_LX:
 379                case OP_4X_PS_CMPO0:
 380                case OP_4X_PSQ_LUX:
 381                case OP_4X_PS_NEG:
 382                case OP_4X_PS_CMPU1:
 383                case OP_4X_PS_MR:
 384                case OP_4X_PS_CMPO1:
 385                case OP_4X_PS_NABS:
 386                case OP_4X_PS_ABS:
 387                case OP_4X_PS_MERGE00:
 388                case OP_4X_PS_MERGE01:
 389                case OP_4X_PS_MERGE10:
 390                case OP_4X_PS_MERGE11:
 391                        return true;
 392                }
 393                /* XW form */
 394                switch (inst_get_field(inst, 25, 30)) {
 395                case OP_4XW_PSQ_STX:
 396                case OP_4XW_PSQ_STUX:
 397                        return true;
 398                }
 399                /* A form */
 400                switch (inst_get_field(inst, 26, 30)) {
 401                case OP_4A_PS_SUM1:
 402                case OP_4A_PS_SUM0:
 403                case OP_4A_PS_MULS0:
 404                case OP_4A_PS_MULS1:
 405                case OP_4A_PS_MADDS0:
 406                case OP_4A_PS_MADDS1:
 407                case OP_4A_PS_DIV:
 408                case OP_4A_PS_SUB:
 409                case OP_4A_PS_ADD:
 410                case OP_4A_PS_SEL:
 411                case OP_4A_PS_RES:
 412                case OP_4A_PS_MUL:
 413                case OP_4A_PS_RSQRTE:
 414                case OP_4A_PS_MSUB:
 415                case OP_4A_PS_MADD:
 416                case OP_4A_PS_NMSUB:
 417                case OP_4A_PS_NMADD:
 418                        return true;
 419                }
 420                break;
 421        case 59:
 422                switch (inst_get_field(inst, 21, 30)) {
 423                case OP_59_FADDS:
 424                case OP_59_FSUBS:
 425                case OP_59_FDIVS:
 426                case OP_59_FRES:
 427                case OP_59_FRSQRTES:
 428                        return true;
 429                }
 430                switch (inst_get_field(inst, 26, 30)) {
 431                case OP_59_FMULS:
 432                case OP_59_FMSUBS:
 433                case OP_59_FMADDS:
 434                case OP_59_FNMSUBS:
 435                case OP_59_FNMADDS:
 436                        return true;
 437                }
 438                break;
 439        case 63:
 440                switch (inst_get_field(inst, 21, 30)) {
 441                case OP_63_MTFSB0:
 442                case OP_63_MTFSB1:
 443                case OP_63_MTFSF:
 444                case OP_63_MTFSFI:
 445                case OP_63_MCRFS:
 446                case OP_63_MFFS:
 447                case OP_63_FCMPU:
 448                case OP_63_FCMPO:
 449                case OP_63_FNEG:
 450                case OP_63_FMR:
 451                case OP_63_FABS:
 452                case OP_63_FRSP:
 453                case OP_63_FDIV:
 454                case OP_63_FADD:
 455                case OP_63_FSUB:
 456                case OP_63_FCTIW:
 457                case OP_63_FCTIWZ:
 458                case OP_63_FRSQRTE:
 459                case OP_63_FCPSGN:
 460                        return true;
 461                }
 462                switch (inst_get_field(inst, 26, 30)) {
 463                case OP_63_FMUL:
 464                case OP_63_FSEL:
 465                case OP_63_FMSUB:
 466                case OP_63_FMADD:
 467                case OP_63_FNMSUB:
 468                case OP_63_FNMADD:
 469                        return true;
 470                }
 471                break;
 472        case 31:
 473                switch (inst_get_field(inst, 21, 30)) {
 474                case OP_31_LFSX:
 475                case OP_31_LFSUX:
 476                case OP_31_LFDX:
 477                case OP_31_LFDUX:
 478                case OP_31_STFSX:
 479                case OP_31_STFSUX:
 480                case OP_31_STFX:
 481                case OP_31_STFUX:
 482                case OP_31_STFIWX:
 483                        return true;
 484                }
 485                break;
 486        }
 487
 488        return false;
 489}
 490
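/* Sign extend the 12 bit displacement of psq_l/psq_lu/psq_st/psq_stu. */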
 491static int get_d_signext(u32 inst)
 492{
  493        int d = inst & 0xfff;
  494
  495        if (d & 0x800)
  496                return d - 0x1000;
  497
  498        return d;
 499}
 500
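/*
 * Apply a three-operand single-precision helper to both halves of a
 * paired-single operation: PS0 operands come from the FPRs, PS1
 * operands from the QPR shadow.  The scalar flags select ps_madds0/1
 * style broadcasting of one half of the second operand and allow
 * skipping the write-back of either result slot.
 */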
 501static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc,
 502                                      int reg_out, int reg_in1, int reg_in2,
 503                                      int reg_in3, int scalar,
 504                                      void (*func)(u64 *fpscr,
 505                                                 u32 *dst, u32 *src1,
 506                                                 u32 *src2, u32 *src3))
 507{
 508        u32 *qpr = vcpu->arch.qpr;
 509        u32 ps0_out;
 510        u32 ps0_in1, ps0_in2, ps0_in3;
 511        u32 ps1_in1, ps1_in2, ps1_in3;
 512
 513        /* RC */
 514        WARN_ON(rc);
 515
 516        /* PS0 */
 517        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);
 518        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
 519        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in3), &ps0_in3);
 520
 521        if (scalar & SCALAR_LOW)
 522                ps0_in2 = qpr[reg_in2];
 523
 524        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2, &ps0_in3);
 525
 526        dprintk(KERN_INFO "PS3 ps0 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
 527                          ps0_in1, ps0_in2, ps0_in3, ps0_out);
 528
 529        if (!(scalar & SCALAR_NO_PS0))
 530                kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 531
 532        /* PS1 */
 533        ps1_in1 = qpr[reg_in1];
 534        ps1_in2 = qpr[reg_in2];
 535        ps1_in3 = qpr[reg_in3];
 536
 537        if (scalar & SCALAR_HIGH)
 538                ps1_in2 = ps0_in2;
 539
 540        if (!(scalar & SCALAR_NO_PS1))
 541                func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in1, &ps1_in2, &ps1_in3);
 542
 543        dprintk(KERN_INFO "PS3 ps1 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
 544                          ps1_in1, ps1_in2, ps1_in3, qpr[reg_out]);
 545
 546        return EMULATE_DONE;
 547}
 548
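/*
 * Two-operand counterpart of kvmppc_ps_three_in(), used for ps_add,
 * ps_sub, ps_mul, ps_div, ps_muls0/1 and ps_sum0/1.
 */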
 549static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc,
 550                                    int reg_out, int reg_in1, int reg_in2,
 551                                    int scalar,
 552                                    void (*func)(u64 *fpscr,
 553                                                 u32 *dst, u32 *src1,
 554                                                 u32 *src2))
 555{
 556        u32 *qpr = vcpu->arch.qpr;
 557        u32 ps0_out;
 558        u32 ps0_in1, ps0_in2;
 559        u32 ps1_out;
 560        u32 ps1_in1, ps1_in2;
 561
 562        /* RC */
 563        WARN_ON(rc);
 564
 565        /* PS0 */
 566        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);
 567
 568        if (scalar & SCALAR_LOW)
 569                ps0_in2 = qpr[reg_in2];
 570        else
 571                kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
 572
 573        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2);
 574
 575        if (!(scalar & SCALAR_NO_PS0)) {
 576                dprintk(KERN_INFO "PS2 ps0 -> f(0x%x, 0x%x) = 0x%x\n",
 577                                  ps0_in1, ps0_in2, ps0_out);
 578
 579                kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 580        }
 581
 582        /* PS1 */
 583        ps1_in1 = qpr[reg_in1];
 584        ps1_in2 = qpr[reg_in2];
 585
 586        if (scalar & SCALAR_HIGH)
 587                ps1_in2 = ps0_in2;
 588
 589        func(&vcpu->arch.fp.fpscr, &ps1_out, &ps1_in1, &ps1_in2);
 590
 591        if (!(scalar & SCALAR_NO_PS1)) {
 592                qpr[reg_out] = ps1_out;
 593
 594                dprintk(KERN_INFO "PS2 ps1 -> f(0x%x, 0x%x) = 0x%x\n",
 595                                  ps1_in1, ps1_in2, qpr[reg_out]);
 596        }
 597
 598        return EMULATE_DONE;
 599}
 600
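/*
 * Single-operand variant, used for ps_res and ps_rsqrte: the helper is
 * applied to PS0 and PS1 independently.
 */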
 601static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc,
 602                                    int reg_out, int reg_in,
 603                                    void (*func)(u64 *t,
 604                                                 u32 *dst, u32 *src1))
 605{
 606        u32 *qpr = vcpu->arch.qpr;
 607        u32 ps0_out, ps0_in;
 608        u32 ps1_in;
 609
 610        /* RC */
 611        WARN_ON(rc);
 612
 613        /* PS0 */
 614        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in), &ps0_in);
 615        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in);
 616
 617        dprintk(KERN_INFO "PS1 ps0 -> f(0x%x) = 0x%x\n",
 618                          ps0_in, ps0_out);
 619
 620        kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 621
 622        /* PS1 */
 623        ps1_in = qpr[reg_in];
 624        func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in);
 625
 626        dprintk(KERN_INFO "PS1 ps1 -> f(0x%x) = 0x%x\n",
 627                          ps1_in, qpr[reg_out]);
 628
 629        return EMULATE_DONE;
 630}
 631
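/*
 * Main entry point for paired-single emulation: fetch the last guest
 * instruction, check that it is one we handle and that the guest is
 * allowed to use FP, then borrow the host FPU and dispatch on the
 * primary/extended opcode.  Loads and stores go through the helpers
 * above, arithmetic through the fps_*/fpd_* host FPU wrappers.
 */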
 632int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu)
 633{
 634        u32 inst;
 635        enum emulation_result emulated = EMULATE_DONE;
 636        int ax_rd, ax_ra, ax_rb, ax_rc;
 637        short full_d;
 638        u64 *fpr_d, *fpr_a, *fpr_b, *fpr_c;
 639
 640        bool rcomp;
 641        u32 cr;
 642#ifdef DEBUG
 643        int i;
 644#endif
 645
 646        emulated = kvmppc_get_last_inst(vcpu, INST_GENERIC, &inst);
 647        if (emulated != EMULATE_DONE)
 648                return emulated;
 649
 650        ax_rd = inst_get_field(inst, 6, 10);
 651        ax_ra = inst_get_field(inst, 11, 15);
 652        ax_rb = inst_get_field(inst, 16, 20);
 653        ax_rc = inst_get_field(inst, 21, 25);
 654        full_d = inst_get_field(inst, 16, 31);
 655
 656        fpr_d = &VCPU_FPR(vcpu, ax_rd);
 657        fpr_a = &VCPU_FPR(vcpu, ax_ra);
 658        fpr_b = &VCPU_FPR(vcpu, ax_rb);
 659        fpr_c = &VCPU_FPR(vcpu, ax_rc);
 660
 661        rcomp = (inst & 1) ? true : false;
 662        cr = kvmppc_get_cr(vcpu);
 663
 664        if (!kvmppc_inst_is_paired_single(vcpu, inst))
 665                return EMULATE_FAIL;
 666
 667        if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {
 668                kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL);
 669                return EMULATE_AGAIN;
 670        }
 671
 672        kvmppc_giveup_ext(vcpu, MSR_FP);
 673        preempt_disable();
 674        enable_kernel_fp();
 675        /* Do we need to clear FE0 / FE1 here? Don't think so. */
 676
 677#ifdef DEBUG
 678        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
 679                u32 f;
 680                kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
 681                dprintk(KERN_INFO "FPR[%d] = 0x%x / 0x%llx    QPR[%d] = 0x%x\n",
 682                        i, f, VCPU_FPR(vcpu, i), i, vcpu->arch.qpr[i]);
 683        }
 684#endif
 685
 686        switch (get_op(inst)) {
 687        case OP_PSQ_L:
 688        {
 689                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 690                bool w = inst_get_field(inst, 16, 16) ? true : false;
 691                int i = inst_get_field(inst, 17, 19);
 692
 693                addr += get_d_signext(inst);
 694                emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 695                break;
 696        }
 697        case OP_PSQ_LU:
 698        {
 699                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 700                bool w = inst_get_field(inst, 16, 16) ? true : false;
 701                int i = inst_get_field(inst, 17, 19);
 702
 703                addr += get_d_signext(inst);
 704                emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 705
 706                if (emulated == EMULATE_DONE)
 707                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 708                break;
 709        }
 710        case OP_PSQ_ST:
 711        {
 712                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 713                bool w = inst_get_field(inst, 16, 16) ? true : false;
 714                int i = inst_get_field(inst, 17, 19);
 715
 716                addr += get_d_signext(inst);
 717                emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 718                break;
 719        }
 720        case OP_PSQ_STU:
 721        {
 722                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 723                bool w = inst_get_field(inst, 16, 16) ? true : false;
 724                int i = inst_get_field(inst, 17, 19);
 725
 726                addr += get_d_signext(inst);
 727                emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 728
 729                if (emulated == EMULATE_DONE)
 730                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 731                break;
 732        }
 733        case 4:
 734                /* X form */
 735                switch (inst_get_field(inst, 21, 30)) {
 736                case OP_4X_PS_CMPU0:
 737                        /* XXX */
 738                        emulated = EMULATE_FAIL;
 739                        break;
 740                case OP_4X_PSQ_LX:
 741                {
 742                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 743                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 744                        int i = inst_get_field(inst, 22, 24);
 745
 746                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 747                        emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 748                        break;
 749                }
 750                case OP_4X_PS_CMPO0:
 751                        /* XXX */
 752                        emulated = EMULATE_FAIL;
 753                        break;
 754                case OP_4X_PSQ_LUX:
 755                {
 756                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 757                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 758                        int i = inst_get_field(inst, 22, 24);
 759
 760                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 761                        emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 762
 763                        if (emulated == EMULATE_DONE)
 764                                kvmppc_set_gpr(vcpu, ax_ra, addr);
 765                        break;
 766                }
 767                case OP_4X_PS_NEG:
 768                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 769                        VCPU_FPR(vcpu, ax_rd) ^= 0x8000000000000000ULL;
 770                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 771                        vcpu->arch.qpr[ax_rd] ^= 0x80000000;
 772                        break;
 773                case OP_4X_PS_CMPU1:
 774                        /* XXX */
 775                        emulated = EMULATE_FAIL;
 776                        break;
 777                case OP_4X_PS_MR:
 778                        WARN_ON(rcomp);
 779                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 780                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 781                        break;
 782                case OP_4X_PS_CMPO1:
 783                        /* XXX */
 784                        emulated = EMULATE_FAIL;
 785                        break;
 786                case OP_4X_PS_NABS:
 787                        WARN_ON(rcomp);
 788                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 789                        VCPU_FPR(vcpu, ax_rd) |= 0x8000000000000000ULL;
 790                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 791                        vcpu->arch.qpr[ax_rd] |= 0x80000000;
 792                        break;
 793                case OP_4X_PS_ABS:
 794                        WARN_ON(rcomp);
 795                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 796                        VCPU_FPR(vcpu, ax_rd) &= ~0x8000000000000000ULL;
 797                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 798                        vcpu->arch.qpr[ax_rd] &= ~0x80000000;
 799                        break;
 800                case OP_4X_PS_MERGE00:
 801                        WARN_ON(rcomp);
 802                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
 803                        /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
 804                        kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
 805                                   &vcpu->arch.qpr[ax_rd]);
 806                        break;
 807                case OP_4X_PS_MERGE01:
 808                        WARN_ON(rcomp);
 809                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
 810                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 811                        break;
 812                case OP_4X_PS_MERGE10:
 813                        WARN_ON(rcomp);
 814                        /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
 815                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
 816                                   &VCPU_FPR(vcpu, ax_rd));
 817                        /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
 818                        kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
 819                                   &vcpu->arch.qpr[ax_rd]);
 820                        break;
 821                case OP_4X_PS_MERGE11:
 822                        WARN_ON(rcomp);
 823                        /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
 824                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
 825                                   &VCPU_FPR(vcpu, ax_rd));
 826                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 827                        break;
 828                }
 829                /* XW form */
 830                switch (inst_get_field(inst, 25, 30)) {
 831                case OP_4XW_PSQ_STX:
 832                {
 833                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 834                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 835                        int i = inst_get_field(inst, 22, 24);
 836
 837                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 838                        emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 839                        break;
 840                }
 841                case OP_4XW_PSQ_STUX:
 842                {
 843                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 844                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 845                        int i = inst_get_field(inst, 22, 24);
 846
 847                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 848                        emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 849
 850                        if (emulated == EMULATE_DONE)
 851                                kvmppc_set_gpr(vcpu, ax_ra, addr);
 852                        break;
 853                }
 854                }
 855                /* A form */
 856                switch (inst_get_field(inst, 26, 30)) {
 857                case OP_4A_PS_SUM1:
 858                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 859                                        ax_rb, ax_ra, SCALAR_NO_PS0 | SCALAR_HIGH, fps_fadds);
 860                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rc);
 861                        break;
 862                case OP_4A_PS_SUM0:
 863                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 864                                        ax_ra, ax_rb, SCALAR_NO_PS1 | SCALAR_LOW, fps_fadds);
 865                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rc];
 866                        break;
 867                case OP_4A_PS_MULS0:
 868                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 869                                        ax_ra, ax_rc, SCALAR_HIGH, fps_fmuls);
 870                        break;
 871                case OP_4A_PS_MULS1:
 872                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 873                                        ax_ra, ax_rc, SCALAR_LOW, fps_fmuls);
 874                        break;
 875                case OP_4A_PS_MADDS0:
 876                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 877                                        ax_ra, ax_rc, ax_rb, SCALAR_HIGH, fps_fmadds);
 878                        break;
 879                case OP_4A_PS_MADDS1:
 880                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 881                                        ax_ra, ax_rc, ax_rb, SCALAR_LOW, fps_fmadds);
 882                        break;
 883                case OP_4A_PS_DIV:
 884                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 885                                        ax_ra, ax_rb, SCALAR_NONE, fps_fdivs);
 886                        break;
 887                case OP_4A_PS_SUB:
 888                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 889                                        ax_ra, ax_rb, SCALAR_NONE, fps_fsubs);
 890                        break;
 891                case OP_4A_PS_ADD:
 892                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 893                                        ax_ra, ax_rb, SCALAR_NONE, fps_fadds);
 894                        break;
 895                case OP_4A_PS_SEL:
 896                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 897                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fsel);
 898                        break;
 899                case OP_4A_PS_RES:
 900                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
 901                                        ax_rb, fps_fres);
 902                        break;
 903                case OP_4A_PS_MUL:
 904                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 905                                        ax_ra, ax_rc, SCALAR_NONE, fps_fmuls);
 906                        break;
 907                case OP_4A_PS_RSQRTE:
 908                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
 909                                        ax_rb, fps_frsqrte);
 910                        break;
 911                case OP_4A_PS_MSUB:
 912                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 913                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmsubs);
 914                        break;
 915                case OP_4A_PS_MADD:
 916                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 917                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmadds);
 918                        break;
 919                case OP_4A_PS_NMSUB:
 920                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 921                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmsubs);
 922                        break;
 923                case OP_4A_PS_NMADD:
 924                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 925                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmadds);
 926                        break;
 927                }
 928                break;
 929
 930        /* Real FPU operations */
 931
 932        case OP_LFS:
 933        {
 934                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 935
 936                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 937                                                   FPU_LS_SINGLE);
 938                break;
 939        }
 940        case OP_LFSU:
 941        {
 942                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 943
 944                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 945                                                   FPU_LS_SINGLE);
 946
 947                if (emulated == EMULATE_DONE)
 948                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 949                break;
 950        }
 951        case OP_LFD:
 952        {
 953                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 954
 955                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 956                                                   FPU_LS_DOUBLE);
 957                break;
 958        }
 959        case OP_LFDU:
 960        {
 961                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 962
 963                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 964                                                   FPU_LS_DOUBLE);
 965
 966                if (emulated == EMULATE_DONE)
 967                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 968                break;
 969        }
 970        case OP_STFS:
 971        {
 972                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 973
 974                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
 975                                                    FPU_LS_SINGLE);
 976                break;
 977        }
 978        case OP_STFSU:
 979        {
 980                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 981
 982                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
 983                                                    FPU_LS_SINGLE);
 984
 985                if (emulated == EMULATE_DONE)
 986                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 987                break;
 988        }
 989        case OP_STFD:
 990        {
 991                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 992
 993                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
 994                                                    FPU_LS_DOUBLE);
 995                break;
 996        }
 997        case OP_STFDU:
 998        {
 999                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
1000
1001                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
1002                                                    FPU_LS_DOUBLE);
1003
1004                if (emulated == EMULATE_DONE)
1005                        kvmppc_set_gpr(vcpu, ax_ra, addr);
1006                break;
1007        }
1008        case 31:
1009                switch (inst_get_field(inst, 21, 30)) {
1010                case OP_31_LFSX:
1011                {
1012                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
1013
1014                        addr += kvmppc_get_gpr(vcpu, ax_rb);
1015                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1016                                                           addr, FPU_LS_SINGLE);
1017                        break;
1018                }
1019                case OP_31_LFSUX:
1020                {
1021                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1022                                     kvmppc_get_gpr(vcpu, ax_rb);
1023
1024                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1025                                                           addr, FPU_LS_SINGLE);
1026
1027                        if (emulated == EMULATE_DONE)
1028                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1029                        break;
1030                }
1031                case OP_31_LFDX:
1032                {
1033                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1034                                     kvmppc_get_gpr(vcpu, ax_rb);
1035
1036                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1037                                                           addr, FPU_LS_DOUBLE);
1038                        break;
1039                }
1040                case OP_31_LFDUX:
1041                {
1042                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1043                                     kvmppc_get_gpr(vcpu, ax_rb);
1044
1045                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1046                                                           addr, FPU_LS_DOUBLE);
1047
1048                        if (emulated == EMULATE_DONE)
1049                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1050                        break;
1051                }
1052                case OP_31_STFSX:
1053                {
1054                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1055                                     kvmppc_get_gpr(vcpu, ax_rb);
1056
1057                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1058                                                            addr, FPU_LS_SINGLE);
1059                        break;
1060                }
1061                case OP_31_STFSUX:
1062                {
1063                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1064                                     kvmppc_get_gpr(vcpu, ax_rb);
1065
1066                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1067                                                            addr, FPU_LS_SINGLE);
1068
1069                        if (emulated == EMULATE_DONE)
1070                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1071                        break;
1072                }
1073                case OP_31_STFX:
1074                {
1075                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1076                                     kvmppc_get_gpr(vcpu, ax_rb);
1077
1078                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1079                                                            addr, FPU_LS_DOUBLE);
1080                        break;
1081                }
1082                case OP_31_STFUX:
1083                {
1084                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1085                                     kvmppc_get_gpr(vcpu, ax_rb);
1086
1087                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1088                                                            addr, FPU_LS_DOUBLE);
1089
1090                        if (emulated == EMULATE_DONE)
1091                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1092                        break;
1093                }
1094                case OP_31_STFIWX:
1095                {
1096                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1097                                     kvmppc_get_gpr(vcpu, ax_rb);
1098
1099                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1100                                                            addr,
1101                                                            FPU_LS_SINGLE_LOW);
1102                        break;
1103                }
1104                        break;
1105                }
1106                break;
1107        case 59:
1108                switch (inst_get_field(inst, 21, 30)) {
1109                case OP_59_FADDS:
1110                        fpd_fadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1111                        kvmppc_sync_qpr(vcpu, ax_rd);
1112                        break;
1113                case OP_59_FSUBS:
1114                        fpd_fsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1115                        kvmppc_sync_qpr(vcpu, ax_rd);
1116                        break;
1117                case OP_59_FDIVS:
1118                        fpd_fdivs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1119                        kvmppc_sync_qpr(vcpu, ax_rd);
1120                        break;
1121                case OP_59_FRES:
1122                        fpd_fres(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1123                        kvmppc_sync_qpr(vcpu, ax_rd);
1124                        break;
1125                case OP_59_FRSQRTES:
1126                        fpd_frsqrtes(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1127                        kvmppc_sync_qpr(vcpu, ax_rd);
1128                        break;
1129                }
1130                switch (inst_get_field(inst, 26, 30)) {
1131                case OP_59_FMULS:
1132                        fpd_fmuls(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1133                        kvmppc_sync_qpr(vcpu, ax_rd);
1134                        break;
1135                case OP_59_FMSUBS:
1136                        fpd_fmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1137                        kvmppc_sync_qpr(vcpu, ax_rd);
1138                        break;
1139                case OP_59_FMADDS:
1140                        fpd_fmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1141                        kvmppc_sync_qpr(vcpu, ax_rd);
1142                        break;
1143                case OP_59_FNMSUBS:
1144                        fpd_fnmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1145                        kvmppc_sync_qpr(vcpu, ax_rd);
1146                        break;
1147                case OP_59_FNMADDS:
1148                        fpd_fnmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1149                        kvmppc_sync_qpr(vcpu, ax_rd);
1150                        break;
1151                }
1152                break;
1153        case 63:
1154                switch (inst_get_field(inst, 21, 30)) {
1155                case OP_63_MTFSB0:
1156                case OP_63_MTFSB1:
1157                case OP_63_MCRFS:
1158                case OP_63_MTFSFI:
1159                        /* XXX need to implement */
1160                        break;
1161                case OP_63_MFFS:
1162                        /* XXX missing CR */
1163                        *fpr_d = vcpu->arch.fp.fpscr;
1164                        break;
1165                case OP_63_MTFSF:
1166                        /* XXX missing fm bits */
1167                        /* XXX missing CR */
1168                        vcpu->arch.fp.fpscr = *fpr_b;
1169                        break;
1170                case OP_63_FCMPU:
1171                {
1172                        u32 tmp_cr;
1173                        u32 cr0_mask = 0xf0000000;
1174                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1175
1176                        fpd_fcmpu(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
1177                        cr &= ~(cr0_mask >> cr_shift);
1178                        cr |= (cr & cr0_mask) >> cr_shift;
1179                        break;
1180                }
1181                case OP_63_FCMPO:
1182                {
1183                        u32 tmp_cr;
1184                        u32 cr0_mask = 0xf0000000;
1185                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1186
1187                        fpd_fcmpo(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
1188                        cr &= ~(cr0_mask >> cr_shift);
 1189                        cr |= (tmp_cr & cr0_mask) >> cr_shift;
1190                        break;
1191                }
1192                case OP_63_FNEG:
1193                        fpd_fneg(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1194                        break;
1195                case OP_63_FMR:
1196                        *fpr_d = *fpr_b;
1197                        break;
1198                case OP_63_FABS:
1199                        fpd_fabs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1200                        break;
1201                case OP_63_FCPSGN:
1202                        fpd_fcpsgn(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1203                        break;
1204                case OP_63_FDIV:
1205                        fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1206                        break;
1207                case OP_63_FADD:
1208                        fpd_fadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1209                        break;
1210                case OP_63_FSUB:
1211                        fpd_fsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1212                        break;
1213                case OP_63_FCTIW:
1214                        fpd_fctiw(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1215                        break;
1216                case OP_63_FCTIWZ:
1217                        fpd_fctiwz(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1218                        break;
1219                case OP_63_FRSP:
1220                        fpd_frsp(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1221                        kvmppc_sync_qpr(vcpu, ax_rd);
1222                        break;
1223                case OP_63_FRSQRTE:
1224                {
1225                        double one = 1.0f;
1226
1227                        /* fD = sqrt(fB) */
1228                        fpd_fsqrt(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1229                        /* fD = 1.0f / fD */
1230                        fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, (u64*)&one, fpr_d);
1231                        break;
1232                }
1233                }
1234                switch (inst_get_field(inst, 26, 30)) {
1235                case OP_63_FMUL:
1236                        fpd_fmul(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1237                        break;
1238                case OP_63_FSEL:
1239                        fpd_fsel(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1240                        break;
1241                case OP_63_FMSUB:
1242                        fpd_fmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1243                        break;
1244                case OP_63_FMADD:
1245                        fpd_fmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1246                        break;
1247                case OP_63_FNMSUB:
1248                        fpd_fnmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1249                        break;
1250                case OP_63_FNMADD:
1251                        fpd_fnmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1252                        break;
1253                }
1254                break;
1255        }
1256
1257#ifdef DEBUG
1258        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
1259                u32 f;
1260                kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
1261                dprintk(KERN_INFO "FPR[%d] = 0x%x\n", i, f);
1262        }
1263#endif
1264
1265        if (rcomp)
1266                kvmppc_set_cr(vcpu, cr);
1267
1268        disable_kernel_fp();
1269        preempt_enable();
1270
1271        return emulated;
1272}
1273