linux/arch/powerpc/kvm/book3s_paired_singles.c
   1/*
   2 * This program is free software; you can redistribute it and/or modify
   3 * it under the terms of the GNU General Public License, version 2, as
   4 * published by the Free Software Foundation.
   5 *
   6 * This program is distributed in the hope that it will be useful,
   7 * but WITHOUT ANY WARRANTY; without even the implied warranty of
   8 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   9 * GNU General Public License for more details.
  10 *
  11 * You should have received a copy of the GNU General Public License
  12 * along with this program; if not, write to the Free Software
  13 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
  14 *
  15 * Copyright Novell Inc 2010
  16 *
  17 * Authors: Alexander Graf <agraf@suse.de>
  18 */
  19
  20#include <asm/kvm.h>
  21#include <asm/kvm_ppc.h>
  22#include <asm/disassemble.h>
  23#include <asm/kvm_book3s.h>
  24#include <asm/kvm_fpu.h>
  25#include <asm/reg.h>
  26#include <asm/cacheflush.h>
  27#include <asm/switch_to.h>
  28#include <linux/vmalloc.h>
  29
  30/* #define DEBUG */
  31
  32#ifdef DEBUG
  33#define dprintk printk
  34#else
   35#define dprintk(...) do { } while(0)
  36#endif
  37
  38#define OP_LFS                  48
  39#define OP_LFSU                 49
  40#define OP_LFD                  50
  41#define OP_LFDU                 51
  42#define OP_STFS                 52
  43#define OP_STFSU                53
  44#define OP_STFD                 54
  45#define OP_STFDU                55
  46#define OP_PSQ_L                56
  47#define OP_PSQ_LU               57
  48#define OP_PSQ_ST               60
  49#define OP_PSQ_STU              61
  50
  51#define OP_31_LFSX              535
  52#define OP_31_LFSUX             567
  53#define OP_31_LFDX              599
  54#define OP_31_LFDUX             631
  55#define OP_31_STFSX             663
  56#define OP_31_STFSUX            695
  57#define OP_31_STFX              727
  58#define OP_31_STFUX             759
  59#define OP_31_LWIZX             887
  60#define OP_31_STFIWX            983
  61
  62#define OP_59_FADDS             21
  63#define OP_59_FSUBS             20
  64#define OP_59_FSQRTS            22
  65#define OP_59_FDIVS             18
  66#define OP_59_FRES              24
  67#define OP_59_FMULS             25
  68#define OP_59_FRSQRTES          26
  69#define OP_59_FMSUBS            28
  70#define OP_59_FMADDS            29
  71#define OP_59_FNMSUBS           30
  72#define OP_59_FNMADDS           31
  73
  74#define OP_63_FCMPU             0
  75#define OP_63_FCPSGN            8
  76#define OP_63_FRSP              12
  77#define OP_63_FCTIW             14
  78#define OP_63_FCTIWZ            15
  79#define OP_63_FDIV              18
  80#define OP_63_FADD              21
  81#define OP_63_FSQRT             22
  82#define OP_63_FSEL              23
  83#define OP_63_FRE               24
  84#define OP_63_FMUL              25
  85#define OP_63_FRSQRTE           26
  86#define OP_63_FMSUB             28
  87#define OP_63_FMADD             29
  88#define OP_63_FNMSUB            30
  89#define OP_63_FNMADD            31
  90#define OP_63_FCMPO             32
  91#define OP_63_MTFSB1            38 // XXX
  92#define OP_63_FSUB              20
  93#define OP_63_FNEG              40
  94#define OP_63_MCRFS             64
  95#define OP_63_MTFSB0            70
  96#define OP_63_FMR               72
  97#define OP_63_MTFSFI            134
  98#define OP_63_FABS              264
  99#define OP_63_MFFS              583
 100#define OP_63_MTFSF             711
 101
 102#define OP_4X_PS_CMPU0          0
 103#define OP_4X_PSQ_LX            6
 104#define OP_4XW_PSQ_STX          7
 105#define OP_4A_PS_SUM0           10
 106#define OP_4A_PS_SUM1           11
 107#define OP_4A_PS_MULS0          12
 108#define OP_4A_PS_MULS1          13
 109#define OP_4A_PS_MADDS0         14
 110#define OP_4A_PS_MADDS1         15
 111#define OP_4A_PS_DIV            18
 112#define OP_4A_PS_SUB            20
 113#define OP_4A_PS_ADD            21
 114#define OP_4A_PS_SEL            23
 115#define OP_4A_PS_RES            24
 116#define OP_4A_PS_MUL            25
 117#define OP_4A_PS_RSQRTE         26
 118#define OP_4A_PS_MSUB           28
 119#define OP_4A_PS_MADD           29
 120#define OP_4A_PS_NMSUB          30
 121#define OP_4A_PS_NMADD          31
 122#define OP_4X_PS_CMPO0          32
 123#define OP_4X_PSQ_LUX           38
 124#define OP_4XW_PSQ_STUX         39
 125#define OP_4X_PS_NEG            40
 126#define OP_4X_PS_CMPU1          64
 127#define OP_4X_PS_MR             72
 128#define OP_4X_PS_CMPO1          96
 129#define OP_4X_PS_NABS           136
 130#define OP_4X_PS_ABS            264
 131#define OP_4X_PS_MERGE00        528
 132#define OP_4X_PS_MERGE01        560
 133#define OP_4X_PS_MERGE10        592
 134#define OP_4X_PS_MERGE11        624
 135
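/*
 * Hints for the kvmppc_ps_* helpers below: SCALAR_HIGH feeds ps0 of the
 * second input into the ps1 lane as well, SCALAR_LOW feeds its ps1 into
 * the ps0 lane, and SCALAR_NO_PS0/SCALAR_NO_PS1 suppress the write-back
 * of the corresponding half of the result.
 */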
 136#define SCALAR_NONE             0
 137#define SCALAR_HIGH             (1 << 0)
 138#define SCALAR_LOW              (1 << 1)
 139#define SCALAR_NO_PS0           (1 << 2)
 140#define SCALAR_NO_PS1           (1 << 3)
 141
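/*
 * Layout of the Gekko graphics quantization registers (GQRs).  An
 * illustrative decode of the load half would look like:
 *
 *   ld_type  = (gqr & GQR_LD_TYPE_MASK)  >> GQR_LD_TYPE_SHIFT;
 *   ld_scale = (gqr & GQR_LD_SCALE_MASK) >> GQR_LD_SCALE_SHIFT;
 *
 * Note that the psq load/store emulation below does not currently apply
 * the quantization type or scale; only the raw float format is handled.
 */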
 142#define GQR_ST_TYPE_MASK        0x00000007
 143#define GQR_ST_TYPE_SHIFT       0
 144#define GQR_ST_SCALE_MASK       0x00003f00
 145#define GQR_ST_SCALE_SHIFT      8
 146#define GQR_LD_TYPE_MASK        0x00070000
 147#define GQR_LD_TYPE_SHIFT       16
 148#define GQR_LD_SCALE_MASK       0x3f000000
 149#define GQR_LD_SCALE_SHIFT      24
 150
 151#define GQR_QUANTIZE_FLOAT      0
 152#define GQR_QUANTIZE_U8         4
 153#define GQR_QUANTIZE_U16        5
 154#define GQR_QUANTIZE_S8         6
 155#define GQR_QUANTIZE_S16        7
 156
 157#define FPU_LS_SINGLE           0
 158#define FPU_LS_DOUBLE           1
 159#define FPU_LS_SINGLE_LOW       2
 160
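/*
 * Paired-single register model: VCPU_FPR() holds ps0 as a double while
 * vcpu->arch.qpr[] holds ps1 as a raw 32-bit single.  kvmppc_sync_qpr()
 * mirrors a freshly written FPR into the matching QPR slot so that scalar
 * single-precision results are reflected in ps1 as well.
 */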
 161static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt)
 162{
 163        kvm_cvt_df(&VCPU_FPR(vcpu, rt), &vcpu->arch.qpr[rt]);
 164}
 165
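/*
 * Raise a data storage interrupt for the guest: DAR gets the faulting
 * effective address and DSISR flags a translation miss, plus the store
 * bit when the failed access was a write.
 */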
 166static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store)
 167{
 168        u32 dsisr;
 169        u64 msr = kvmppc_get_msr(vcpu);
 170
 171        msr = kvmppc_set_field(msr, 33, 36, 0);
 172        msr = kvmppc_set_field(msr, 42, 47, 0);
 173        kvmppc_set_msr(vcpu, msr);
 174        kvmppc_set_dar(vcpu, eaddr);
 175        /* Page Fault */
 176        dsisr = kvmppc_set_field(0, 33, 33, 1);
 177        if (is_store)
 178                dsisr = kvmppc_set_field(dsisr, 38, 38, 1);
 179        kvmppc_set_dsisr(vcpu, dsisr);
 180        kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
 181}
 182
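/*
 * Emulate a scalar floating point load: fetch a single or a double from
 * the guest effective address, place it in FPR[rs] (singles are converted
 * to the double format the FPR file uses) and, for singles, mirror the
 * raw 32-bit pattern into qpr[rs].  Faults are forwarded to the guest and
 * MMIO accesses are deferred to the regular MMIO load path.
 */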
 183static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
 184                                   int rs, ulong addr, int ls_type)
 185{
 186        int emulated = EMULATE_FAIL;
 187        int r;
 188        char tmp[8];
 189        int len = sizeof(u32);
 190
 191        if (ls_type == FPU_LS_DOUBLE)
 192                len = sizeof(u64);
 193
 194        /* read from memory */
 195        r = kvmppc_ld(vcpu, &addr, len, tmp, true);
 196        vcpu->arch.paddr_accessed = addr;
 197
 198        if (r < 0) {
 199                kvmppc_inject_pf(vcpu, addr, false);
 200                goto done_load;
 201        } else if (r == EMULATE_DO_MMIO) {
 202                emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs,
 203                                              len, 1);
 204                goto done_load;
 205        }
 206
 207        emulated = EMULATE_DONE;
 208
 209        /* put in registers */
 210        switch (ls_type) {
 211        case FPU_LS_SINGLE:
 212                kvm_cvt_fd((u32*)tmp, &VCPU_FPR(vcpu, rs));
 213                vcpu->arch.qpr[rs] = *((u32*)tmp);
 214                break;
 215        case FPU_LS_DOUBLE:
 216                VCPU_FPR(vcpu, rs) = *((u64*)tmp);
 217                break;
 218        }
 219
 220        dprintk(KERN_INFO "KVM: FPR_LD [0x%llx] at 0x%lx (%d)\n", *(u64*)tmp,
 221                          addr, len);
 222
 223done_load:
 224        return emulated;
 225}
 226
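/*
 * Emulate a scalar floating point store.  FPU_LS_SINGLE converts the FPR
 * back to single precision, FPU_LS_SINGLE_LOW stores the low 32 bits of
 * the FPR unchanged (used for stfiwx), and FPU_LS_DOUBLE stores all 64
 * bits.
 */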
 227static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
 228                                    int rs, ulong addr, int ls_type)
 229{
 230        int emulated = EMULATE_FAIL;
 231        int r;
 232        char tmp[8];
 233        u64 val;
 234        int len;
 235
 236        switch (ls_type) {
 237        case FPU_LS_SINGLE:
 238                kvm_cvt_df(&VCPU_FPR(vcpu, rs), (u32*)tmp);
 239                val = *((u32*)tmp);
 240                len = sizeof(u32);
 241                break;
 242        case FPU_LS_SINGLE_LOW:
 243                *((u32*)tmp) = VCPU_FPR(vcpu, rs);
 244                val = VCPU_FPR(vcpu, rs) & 0xffffffff;
 245                len = sizeof(u32);
 246                break;
 247        case FPU_LS_DOUBLE:
 248                *((u64*)tmp) = VCPU_FPR(vcpu, rs);
 249                val = VCPU_FPR(vcpu, rs);
 250                len = sizeof(u64);
 251                break;
 252        default:
 253                val = 0;
 254                len = 0;
 255        }
 256
 257        r = kvmppc_st(vcpu, &addr, len, tmp, true);
 258        vcpu->arch.paddr_accessed = addr;
 259        if (r < 0) {
 260                kvmppc_inject_pf(vcpu, addr, true);
 261        } else if (r == EMULATE_DO_MMIO) {
 262                emulated = kvmppc_handle_store(run, vcpu, val, len, 1);
 263        } else {
 264                emulated = EMULATE_DONE;
 265        }
 266
 267        dprintk(KERN_INFO "KVM: FPR_ST [0x%llx] at 0x%lx (%d)\n",
 268                          val, addr, len);
 269
 270        return emulated;
 271}
 272
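/*
 * Emulate a paired-single quantized load: read two 32-bit singles, or a
 * single one when w is set, in which case ps1 is loaded with 1.0.  The
 * GQR index i is accepted but not used; no dequantization is done here.
 */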
 273static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
 274                                   int rs, ulong addr, bool w, int i)
 275{
 276        int emulated = EMULATE_FAIL;
 277        int r;
 278        float one = 1.0;
 279        u32 tmp[2];
 280
 281        /* read from memory */
 282        if (w) {
 283                r = kvmppc_ld(vcpu, &addr, sizeof(u32), tmp, true);
 284                memcpy(&tmp[1], &one, sizeof(u32));
 285        } else {
 286                r = kvmppc_ld(vcpu, &addr, sizeof(u32) * 2, tmp, true);
 287        }
 288        vcpu->arch.paddr_accessed = addr;
 289        if (r < 0) {
 290                kvmppc_inject_pf(vcpu, addr, false);
 291                goto done_load;
 292        } else if ((r == EMULATE_DO_MMIO) && w) {
 293                emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs,
 294                                              4, 1);
 295                vcpu->arch.qpr[rs] = tmp[1];
 296                goto done_load;
 297        } else if (r == EMULATE_DO_MMIO) {
 298                emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FQPR | rs,
 299                                              8, 1);
 300                goto done_load;
 301        }
 302
 303        emulated = EMULATE_DONE;
 304
 305        /* put in registers */
 306        kvm_cvt_fd(&tmp[0], &VCPU_FPR(vcpu, rs));
 307        vcpu->arch.qpr[rs] = tmp[1];
 308
 309        dprintk(KERN_INFO "KVM: PSQ_LD [0x%x, 0x%x] at 0x%lx (%d)\n", tmp[0],
 310                          tmp[1], addr, w ? 4 : 8);
 311
 312done_load:
 313        return emulated;
 314}
 315
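/*
 * Emulate a paired-single quantized store: ps0 is converted back to a
 * 32-bit single and, unless w is set, ps1 from qpr[] is stored alongside
 * it.  As on the load side, the GQR index i is not used.
 */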
 316static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
 317                                    int rs, ulong addr, bool w, int i)
 318{
 319        int emulated = EMULATE_FAIL;
 320        int r;
 321        u32 tmp[2];
 322        int len = w ? sizeof(u32) : sizeof(u64);
 323
 324        kvm_cvt_df(&VCPU_FPR(vcpu, rs), &tmp[0]);
 325        tmp[1] = vcpu->arch.qpr[rs];
 326
 327        r = kvmppc_st(vcpu, &addr, len, tmp, true);
 328        vcpu->arch.paddr_accessed = addr;
 329        if (r < 0) {
 330                kvmppc_inject_pf(vcpu, addr, true);
 331        } else if ((r == EMULATE_DO_MMIO) && w) {
 332                emulated = kvmppc_handle_store(run, vcpu, tmp[0], 4, 1);
 333        } else if (r == EMULATE_DO_MMIO) {
 334                u64 val = ((u64)tmp[0] << 32) | tmp[1];
 335                emulated = kvmppc_handle_store(run, vcpu, val, 8, 1);
 336        } else {
 337                emulated = EMULATE_DONE;
 338        }
 339
 340        dprintk(KERN_INFO "KVM: PSQ_ST [0x%x, 0x%x] at 0x%lx (%d)\n",
 341                          tmp[0], tmp[1], addr, len);
 342
 343        return emulated;
 344}
 345
 346/*
 347 * Cuts out inst bits with ordering according to spec.
 348 * That means the leftmost bit is zero. All given bits are included.
 349 */
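/*
 * Example (illustrative): with this IBM-style numbering over a 32-bit
 * instruction word, inst_get_field(inst, 6, 10) extracts the RT/FRT
 * field, i.e. (inst >> 21) & 0x1f in LSB-0 terms.
 */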
 350static inline u32 inst_get_field(u32 inst, int msb, int lsb)
 351{
 352        return kvmppc_get_field(inst, msb + 32, lsb + 32);
 353}
 354
 355/*
 356 * Replaces inst bits with ordering according to spec.
 357 */
 358static inline u32 inst_set_field(u32 inst, int msb, int lsb, int value)
 359{
 360        return kvmppc_set_field(inst, msb + 32, lsb + 32, value);
 361}
 362
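/*
 * Filter: return true only for instructions that need the paired single
 * treatment, and only when the vcpu actually runs in paired single mode
 * (BOOK3S_HFLAG_PAIRED_SINGLE), so callers can fall back to the regular
 * emulation path for everything else.
 */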
 363bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst)
 364{
 365        if (!(vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE))
 366                return false;
 367
 368        switch (get_op(inst)) {
 369        case OP_PSQ_L:
 370        case OP_PSQ_LU:
 371        case OP_PSQ_ST:
 372        case OP_PSQ_STU:
 373        case OP_LFS:
 374        case OP_LFSU:
 375        case OP_LFD:
 376        case OP_LFDU:
 377        case OP_STFS:
 378        case OP_STFSU:
 379        case OP_STFD:
 380        case OP_STFDU:
 381                return true;
 382        case 4:
 383                /* X form */
 384                switch (inst_get_field(inst, 21, 30)) {
 385                case OP_4X_PS_CMPU0:
 386                case OP_4X_PSQ_LX:
 387                case OP_4X_PS_CMPO0:
 388                case OP_4X_PSQ_LUX:
 389                case OP_4X_PS_NEG:
 390                case OP_4X_PS_CMPU1:
 391                case OP_4X_PS_MR:
 392                case OP_4X_PS_CMPO1:
 393                case OP_4X_PS_NABS:
 394                case OP_4X_PS_ABS:
 395                case OP_4X_PS_MERGE00:
 396                case OP_4X_PS_MERGE01:
 397                case OP_4X_PS_MERGE10:
 398                case OP_4X_PS_MERGE11:
 399                        return true;
 400                }
 401                /* XW form */
 402                switch (inst_get_field(inst, 25, 30)) {
 403                case OP_4XW_PSQ_STX:
 404                case OP_4XW_PSQ_STUX:
 405                        return true;
 406                }
 407                /* A form */
 408                switch (inst_get_field(inst, 26, 30)) {
 409                case OP_4A_PS_SUM1:
 410                case OP_4A_PS_SUM0:
 411                case OP_4A_PS_MULS0:
 412                case OP_4A_PS_MULS1:
 413                case OP_4A_PS_MADDS0:
 414                case OP_4A_PS_MADDS1:
 415                case OP_4A_PS_DIV:
 416                case OP_4A_PS_SUB:
 417                case OP_4A_PS_ADD:
 418                case OP_4A_PS_SEL:
 419                case OP_4A_PS_RES:
 420                case OP_4A_PS_MUL:
 421                case OP_4A_PS_RSQRTE:
 422                case OP_4A_PS_MSUB:
 423                case OP_4A_PS_MADD:
 424                case OP_4A_PS_NMSUB:
 425                case OP_4A_PS_NMADD:
 426                        return true;
 427                }
 428                break;
 429        case 59:
 430                switch (inst_get_field(inst, 21, 30)) {
 431                case OP_59_FADDS:
 432                case OP_59_FSUBS:
 433                case OP_59_FDIVS:
 434                case OP_59_FRES:
 435                case OP_59_FRSQRTES:
 436                        return true;
 437                }
 438                switch (inst_get_field(inst, 26, 30)) {
 439                case OP_59_FMULS:
 440                case OP_59_FMSUBS:
 441                case OP_59_FMADDS:
 442                case OP_59_FNMSUBS:
 443                case OP_59_FNMADDS:
 444                        return true;
 445                }
 446                break;
 447        case 63:
 448                switch (inst_get_field(inst, 21, 30)) {
 449                case OP_63_MTFSB0:
 450                case OP_63_MTFSB1:
 451                case OP_63_MTFSF:
 452                case OP_63_MTFSFI:
 453                case OP_63_MCRFS:
 454                case OP_63_MFFS:
 455                case OP_63_FCMPU:
 456                case OP_63_FCMPO:
 457                case OP_63_FNEG:
 458                case OP_63_FMR:
 459                case OP_63_FABS:
 460                case OP_63_FRSP:
 461                case OP_63_FDIV:
 462                case OP_63_FADD:
 463                case OP_63_FSUB:
 464                case OP_63_FCTIW:
 465                case OP_63_FCTIWZ:
 466                case OP_63_FRSQRTE:
 467                case OP_63_FCPSGN:
 468                        return true;
 469                }
 470                switch (inst_get_field(inst, 26, 30)) {
 471                case OP_63_FMUL:
 472                case OP_63_FSEL:
 473                case OP_63_FMSUB:
 474                case OP_63_FMADD:
 475                case OP_63_FNMSUB:
 476                case OP_63_FNMADD:
 477                        return true;
 478                }
 479                break;
 480        case 31:
 481                switch (inst_get_field(inst, 21, 30)) {
 482                case OP_31_LFSX:
 483                case OP_31_LFSUX:
 484                case OP_31_LFDX:
 485                case OP_31_LFDUX:
 486                case OP_31_STFSX:
 487                case OP_31_STFSUX:
 488                case OP_31_STFX:
 489                case OP_31_STFUX:
 490                case OP_31_STFIWX:
 491                        return true;
 492                }
 493                break;
 494        }
 495
 496        return false;
 497}
 498
  499static int get_d_signext(u32 inst)
  500{
  501        int d = inst & 0xfff;   /* 12-bit signed displacement */
  502
  503        if (d & 0x800)
  504                return d - 0x1000;      /* two's complement sign extension */
  505
  506        return d;
  507}
 508
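/*
 * Common pattern for the one-, two- and three-input ps_* arithmetic ops:
 * run the given fps_* single-precision helper once on the ps0 lane (taken
 * from the FPRs) and once on the ps1 lane (taken from qpr[]), honouring
 * the SCALAR_* hints described above.  Record forms (Rc=1) are not
 * implemented and only trigger a WARN_ON.
 */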
 509static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc,
 510                                      int reg_out, int reg_in1, int reg_in2,
 511                                      int reg_in3, int scalar,
 512                                      void (*func)(u64 *fpscr,
 513                                                 u32 *dst, u32 *src1,
 514                                                 u32 *src2, u32 *src3))
 515{
 516        u32 *qpr = vcpu->arch.qpr;
 517        u32 ps0_out;
 518        u32 ps0_in1, ps0_in2, ps0_in3;
 519        u32 ps1_in1, ps1_in2, ps1_in3;
 520
 521        /* RC */
 522        WARN_ON(rc);
 523
 524        /* PS0 */
 525        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);
 526        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
 527        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in3), &ps0_in3);
 528
 529        if (scalar & SCALAR_LOW)
 530                ps0_in2 = qpr[reg_in2];
 531
 532        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2, &ps0_in3);
 533
 534        dprintk(KERN_INFO "PS3 ps0 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
 535                          ps0_in1, ps0_in2, ps0_in3, ps0_out);
 536
 537        if (!(scalar & SCALAR_NO_PS0))
 538                kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 539
 540        /* PS1 */
 541        ps1_in1 = qpr[reg_in1];
 542        ps1_in2 = qpr[reg_in2];
 543        ps1_in3 = qpr[reg_in3];
 544
 545        if (scalar & SCALAR_HIGH)
 546                ps1_in2 = ps0_in2;
 547
 548        if (!(scalar & SCALAR_NO_PS1))
 549                func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in1, &ps1_in2, &ps1_in3);
 550
 551        dprintk(KERN_INFO "PS3 ps1 -> f(0x%x, 0x%x, 0x%x) = 0x%x\n",
 552                          ps1_in1, ps1_in2, ps1_in3, qpr[reg_out]);
 553
 554        return EMULATE_DONE;
 555}
 556
 557static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc,
 558                                    int reg_out, int reg_in1, int reg_in2,
 559                                    int scalar,
 560                                    void (*func)(u64 *fpscr,
 561                                                 u32 *dst, u32 *src1,
 562                                                 u32 *src2))
 563{
 564        u32 *qpr = vcpu->arch.qpr;
 565        u32 ps0_out;
 566        u32 ps0_in1, ps0_in2;
 567        u32 ps1_out;
 568        u32 ps1_in1, ps1_in2;
 569
 570        /* RC */
 571        WARN_ON(rc);
 572
 573        /* PS0 */
 574        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);
 575
 576        if (scalar & SCALAR_LOW)
 577                ps0_in2 = qpr[reg_in2];
 578        else
 579                kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
 580
 581        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2);
 582
 583        if (!(scalar & SCALAR_NO_PS0)) {
 584                dprintk(KERN_INFO "PS2 ps0 -> f(0x%x, 0x%x) = 0x%x\n",
 585                                  ps0_in1, ps0_in2, ps0_out);
 586
 587                kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 588        }
 589
 590        /* PS1 */
 591        ps1_in1 = qpr[reg_in1];
 592        ps1_in2 = qpr[reg_in2];
 593
 594        if (scalar & SCALAR_HIGH)
 595                ps1_in2 = ps0_in2;
 596
 597        func(&vcpu->arch.fp.fpscr, &ps1_out, &ps1_in1, &ps1_in2);
 598
 599        if (!(scalar & SCALAR_NO_PS1)) {
 600                qpr[reg_out] = ps1_out;
 601
 602                dprintk(KERN_INFO "PS2 ps1 -> f(0x%x, 0x%x) = 0x%x\n",
 603                                  ps1_in1, ps1_in2, qpr[reg_out]);
 604        }
 605
 606        return EMULATE_DONE;
 607}
 608
 609static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc,
 610                                    int reg_out, int reg_in,
 611                                    void (*func)(u64 *t,
 612                                                 u32 *dst, u32 *src1))
 613{
 614        u32 *qpr = vcpu->arch.qpr;
 615        u32 ps0_out, ps0_in;
 616        u32 ps1_in;
 617
 618        /* RC */
 619        WARN_ON(rc);
 620
 621        /* PS0 */
 622        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in), &ps0_in);
 623        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in);
 624
 625        dprintk(KERN_INFO "PS1 ps0 -> f(0x%x) = 0x%x\n",
 626                          ps0_in, ps0_out);
 627
 628        kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));
 629
 630        /* PS1 */
 631        ps1_in = qpr[reg_in];
 632        func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in);
 633
 634        dprintk(KERN_INFO "PS1 ps1 -> f(0x%x) = 0x%x\n",
 635                          ps1_in, qpr[reg_out]);
 636
 637        return EMULATE_DONE;
 638}
 639
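/*
 * Top-level entry point: fetch the last guest instruction, bail out unless
 * it is a paired-single candidate, raise an FP unavailable interrupt if the
 * guest has MSR[FP] clear, then take over the host FPU (with preemption
 * disabled) and dispatch on the primary/extended opcode.  CR is written
 * back at the end for record forms.
 */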
 640int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu)
 641{
 642        u32 inst;
 643        enum emulation_result emulated = EMULATE_DONE;
 644        int ax_rd, ax_ra, ax_rb, ax_rc;
 645        short full_d;
 646        u64 *fpr_d, *fpr_a, *fpr_b, *fpr_c;
 647
 648        bool rcomp;
 649        u32 cr;
 650#ifdef DEBUG
 651        int i;
 652#endif
 653
 654        emulated = kvmppc_get_last_inst(vcpu, INST_GENERIC, &inst);
 655        if (emulated != EMULATE_DONE)
 656                return emulated;
 657
 658        ax_rd = inst_get_field(inst, 6, 10);
 659        ax_ra = inst_get_field(inst, 11, 15);
 660        ax_rb = inst_get_field(inst, 16, 20);
 661        ax_rc = inst_get_field(inst, 21, 25);
 662        full_d = inst_get_field(inst, 16, 31);
 663
 664        fpr_d = &VCPU_FPR(vcpu, ax_rd);
 665        fpr_a = &VCPU_FPR(vcpu, ax_ra);
 666        fpr_b = &VCPU_FPR(vcpu, ax_rb);
 667        fpr_c = &VCPU_FPR(vcpu, ax_rc);
 668
 669        rcomp = (inst & 1) ? true : false;
 670        cr = kvmppc_get_cr(vcpu);
 671
 672        if (!kvmppc_inst_is_paired_single(vcpu, inst))
 673                return EMULATE_FAIL;
 674
 675        if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {
 676                kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL);
 677                return EMULATE_AGAIN;
 678        }
 679
 680        kvmppc_giveup_ext(vcpu, MSR_FP);
 681        preempt_disable();
 682        enable_kernel_fp();
 683        /* Do we need to clear FE0 / FE1 here? Don't think so. */
 684
 685#ifdef DEBUG
 686        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
 687                u32 f;
 688                kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
 689                dprintk(KERN_INFO "FPR[%d] = 0x%x / 0x%llx    QPR[%d] = 0x%x\n",
 690                        i, f, VCPU_FPR(vcpu, i), i, vcpu->arch.qpr[i]);
 691        }
 692#endif
 693
 694        switch (get_op(inst)) {
 695        case OP_PSQ_L:
 696        {
 697                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 698                bool w = inst_get_field(inst, 16, 16) ? true : false;
 699                int i = inst_get_field(inst, 17, 19);
 700
 701                addr += get_d_signext(inst);
 702                emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 703                break;
 704        }
 705        case OP_PSQ_LU:
 706        {
 707                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 708                bool w = inst_get_field(inst, 16, 16) ? true : false;
 709                int i = inst_get_field(inst, 17, 19);
 710
 711                addr += get_d_signext(inst);
 712                emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 713
 714                if (emulated == EMULATE_DONE)
 715                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 716                break;
 717        }
 718        case OP_PSQ_ST:
 719        {
 720                ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 721                bool w = inst_get_field(inst, 16, 16) ? true : false;
 722                int i = inst_get_field(inst, 17, 19);
 723
 724                addr += get_d_signext(inst);
 725                emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 726                break;
 727        }
 728        case OP_PSQ_STU:
 729        {
 730                ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 731                bool w = inst_get_field(inst, 16, 16) ? true : false;
 732                int i = inst_get_field(inst, 17, 19);
 733
 734                addr += get_d_signext(inst);
 735                emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 736
 737                if (emulated == EMULATE_DONE)
 738                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 739                break;
 740        }
 741        case 4:
 742                /* X form */
 743                switch (inst_get_field(inst, 21, 30)) {
 744                case OP_4X_PS_CMPU0:
 745                        /* XXX */
 746                        emulated = EMULATE_FAIL;
 747                        break;
 748                case OP_4X_PSQ_LX:
 749                {
 750                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 751                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 752                        int i = inst_get_field(inst, 22, 24);
 753
 754                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 755                        emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 756                        break;
 757                }
 758                case OP_4X_PS_CMPO0:
 759                        /* XXX */
 760                        emulated = EMULATE_FAIL;
 761                        break;
 762                case OP_4X_PSQ_LUX:
 763                {
 764                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 765                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 766                        int i = inst_get_field(inst, 22, 24);
 767
 768                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 769                        emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i);
 770
 771                        if (emulated == EMULATE_DONE)
 772                                kvmppc_set_gpr(vcpu, ax_ra, addr);
 773                        break;
 774                }
 775                case OP_4X_PS_NEG:
 776                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 777                        VCPU_FPR(vcpu, ax_rd) ^= 0x8000000000000000ULL;
 778                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 779                        vcpu->arch.qpr[ax_rd] ^= 0x80000000;
 780                        break;
 781                case OP_4X_PS_CMPU1:
 782                        /* XXX */
 783                        emulated = EMULATE_FAIL;
 784                        break;
 785                case OP_4X_PS_MR:
 786                        WARN_ON(rcomp);
 787                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 788                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 789                        break;
 790                case OP_4X_PS_CMPO1:
 791                        /* XXX */
 792                        emulated = EMULATE_FAIL;
 793                        break;
 794                case OP_4X_PS_NABS:
 795                        WARN_ON(rcomp);
 796                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 797                        VCPU_FPR(vcpu, ax_rd) |= 0x8000000000000000ULL;
 798                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 799                        vcpu->arch.qpr[ax_rd] |= 0x80000000;
 800                        break;
 801                case OP_4X_PS_ABS:
 802                        WARN_ON(rcomp);
 803                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
 804                        VCPU_FPR(vcpu, ax_rd) &= ~0x8000000000000000ULL;
 805                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 806                        vcpu->arch.qpr[ax_rd] &= ~0x80000000;
 807                        break;
 808                case OP_4X_PS_MERGE00:
 809                        WARN_ON(rcomp);
 810                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
 811                        /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
 812                        kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
 813                                   &vcpu->arch.qpr[ax_rd]);
 814                        break;
 815                case OP_4X_PS_MERGE01:
 816                        WARN_ON(rcomp);
 817                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra);
 818                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 819                        break;
 820                case OP_4X_PS_MERGE10:
 821                        WARN_ON(rcomp);
 822                        /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
 823                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
 824                                   &VCPU_FPR(vcpu, ax_rd));
 825                        /* vcpu->arch.qpr[ax_rd] = VCPU_FPR(vcpu, ax_rb); */
 826                        kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb),
 827                                   &vcpu->arch.qpr[ax_rd]);
 828                        break;
 829                case OP_4X_PS_MERGE11:
 830                        WARN_ON(rcomp);
 831                        /* VCPU_FPR(vcpu, ax_rd) = vcpu->arch.qpr[ax_ra]; */
 832                        kvm_cvt_fd(&vcpu->arch.qpr[ax_ra],
 833                                   &VCPU_FPR(vcpu, ax_rd));
 834                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb];
 835                        break;
 836                }
 837                /* XW form */
 838                switch (inst_get_field(inst, 25, 30)) {
 839                case OP_4XW_PSQ_STX:
 840                {
 841                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
 842                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 843                        int i = inst_get_field(inst, 22, 24);
 844
 845                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 846                        emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 847                        break;
 848                }
 849                case OP_4XW_PSQ_STUX:
 850                {
 851                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra);
 852                        bool w = inst_get_field(inst, 21, 21) ? true : false;
 853                        int i = inst_get_field(inst, 22, 24);
 854
 855                        addr += kvmppc_get_gpr(vcpu, ax_rb);
 856                        emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i);
 857
 858                        if (emulated == EMULATE_DONE)
 859                                kvmppc_set_gpr(vcpu, ax_ra, addr);
 860                        break;
 861                }
 862                }
 863                /* A form */
 864                switch (inst_get_field(inst, 26, 30)) {
 865                case OP_4A_PS_SUM1:
 866                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 867                                        ax_rb, ax_ra, SCALAR_NO_PS0 | SCALAR_HIGH, fps_fadds);
 868                        VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rc);
 869                        break;
 870                case OP_4A_PS_SUM0:
 871                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 872                                        ax_ra, ax_rb, SCALAR_NO_PS1 | SCALAR_LOW, fps_fadds);
 873                        vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rc];
 874                        break;
 875                case OP_4A_PS_MULS0:
 876                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 877                                        ax_ra, ax_rc, SCALAR_HIGH, fps_fmuls);
 878                        break;
 879                case OP_4A_PS_MULS1:
 880                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 881                                        ax_ra, ax_rc, SCALAR_LOW, fps_fmuls);
 882                        break;
 883                case OP_4A_PS_MADDS0:
 884                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 885                                        ax_ra, ax_rc, ax_rb, SCALAR_HIGH, fps_fmadds);
 886                        break;
 887                case OP_4A_PS_MADDS1:
 888                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 889                                        ax_ra, ax_rc, ax_rb, SCALAR_LOW, fps_fmadds);
 890                        break;
 891                case OP_4A_PS_DIV:
 892                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 893                                        ax_ra, ax_rb, SCALAR_NONE, fps_fdivs);
 894                        break;
 895                case OP_4A_PS_SUB:
 896                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 897                                        ax_ra, ax_rb, SCALAR_NONE, fps_fsubs);
 898                        break;
 899                case OP_4A_PS_ADD:
 900                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 901                                        ax_ra, ax_rb, SCALAR_NONE, fps_fadds);
 902                        break;
 903                case OP_4A_PS_SEL:
 904                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 905                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fsel);
 906                        break;
 907                case OP_4A_PS_RES:
 908                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
 909                                        ax_rb, fps_fres);
 910                        break;
 911                case OP_4A_PS_MUL:
 912                        emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd,
 913                                        ax_ra, ax_rc, SCALAR_NONE, fps_fmuls);
 914                        break;
 915                case OP_4A_PS_RSQRTE:
 916                        emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd,
 917                                        ax_rb, fps_frsqrte);
 918                        break;
 919                case OP_4A_PS_MSUB:
 920                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 921                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmsubs);
 922                        break;
 923                case OP_4A_PS_MADD:
 924                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 925                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fmadds);
 926                        break;
 927                case OP_4A_PS_NMSUB:
 928                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 929                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmsubs);
 930                        break;
 931                case OP_4A_PS_NMADD:
 932                        emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd,
 933                                        ax_ra, ax_rc, ax_rb, SCALAR_NONE, fps_fnmadds);
 934                        break;
 935                }
 936                break;
 937
 938        /* Real FPU operations */
 939
 940        case OP_LFS:
 941        {
 942                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 943
 944                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 945                                                   FPU_LS_SINGLE);
 946                break;
 947        }
 948        case OP_LFSU:
 949        {
 950                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 951
 952                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 953                                                   FPU_LS_SINGLE);
 954
 955                if (emulated == EMULATE_DONE)
 956                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 957                break;
 958        }
 959        case OP_LFD:
 960        {
 961                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 962
 963                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 964                                                   FPU_LS_DOUBLE);
 965                break;
 966        }
 967        case OP_LFDU:
 968        {
 969                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 970
 971                emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr,
 972                                                   FPU_LS_DOUBLE);
 973
 974                if (emulated == EMULATE_DONE)
 975                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 976                break;
 977        }
 978        case OP_STFS:
 979        {
 980                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
 981
 982                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
 983                                                    FPU_LS_SINGLE);
 984                break;
 985        }
 986        case OP_STFSU:
 987        {
 988                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
 989
 990                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
 991                                                    FPU_LS_SINGLE);
 992
 993                if (emulated == EMULATE_DONE)
 994                        kvmppc_set_gpr(vcpu, ax_ra, addr);
 995                break;
 996        }
 997        case OP_STFD:
 998        {
 999                ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d;
1000
1001                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
1002                                                    FPU_LS_DOUBLE);
1003                break;
1004        }
1005        case OP_STFDU:
1006        {
1007                ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d;
1008
1009                emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr,
1010                                                    FPU_LS_DOUBLE);
1011
1012                if (emulated == EMULATE_DONE)
1013                        kvmppc_set_gpr(vcpu, ax_ra, addr);
1014                break;
1015        }
1016        case 31:
1017                switch (inst_get_field(inst, 21, 30)) {
1018                case OP_31_LFSX:
1019                {
1020                        ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0;
1021
1022                        addr += kvmppc_get_gpr(vcpu, ax_rb);
1023                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1024                                                           addr, FPU_LS_SINGLE);
1025                        break;
1026                }
1027                case OP_31_LFSUX:
1028                {
1029                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1030                                     kvmppc_get_gpr(vcpu, ax_rb);
1031
1032                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1033                                                           addr, FPU_LS_SINGLE);
1034
1035                        if (emulated == EMULATE_DONE)
1036                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1037                        break;
1038                }
1039                case OP_31_LFDX:
1040                {
1041                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1042                                     kvmppc_get_gpr(vcpu, ax_rb);
1043
1044                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1045                                                           addr, FPU_LS_DOUBLE);
1046                        break;
1047                }
1048                case OP_31_LFDUX:
1049                {
1050                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1051                                     kvmppc_get_gpr(vcpu, ax_rb);
1052
1053                        emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd,
1054                                                           addr, FPU_LS_DOUBLE);
1055
1056                        if (emulated == EMULATE_DONE)
1057                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1058                        break;
1059                }
1060                case OP_31_STFSX:
1061                {
1062                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1063                                     kvmppc_get_gpr(vcpu, ax_rb);
1064
1065                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1066                                                            addr, FPU_LS_SINGLE);
1067                        break;
1068                }
1069                case OP_31_STFSUX:
1070                {
1071                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1072                                     kvmppc_get_gpr(vcpu, ax_rb);
1073
1074                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1075                                                            addr, FPU_LS_SINGLE);
1076
1077                        if (emulated == EMULATE_DONE)
1078                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1079                        break;
1080                }
1081                case OP_31_STFX:
1082                {
1083                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1084                                     kvmppc_get_gpr(vcpu, ax_rb);
1085
1086                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1087                                                            addr, FPU_LS_DOUBLE);
1088                        break;
1089                }
1090                case OP_31_STFUX:
1091                {
1092                        ulong addr = kvmppc_get_gpr(vcpu, ax_ra) +
1093                                     kvmppc_get_gpr(vcpu, ax_rb);
1094
1095                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1096                                                            addr, FPU_LS_DOUBLE);
1097
1098                        if (emulated == EMULATE_DONE)
1099                                kvmppc_set_gpr(vcpu, ax_ra, addr);
1100                        break;
1101                }
1102                case OP_31_STFIWX:
1103                {
1104                        ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) +
1105                                     kvmppc_get_gpr(vcpu, ax_rb);
1106
1107                        emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd,
1108                                                            addr,
1109                                                            FPU_LS_SINGLE_LOW);
1110                        break;
1111                }
1113                }
1114                break;
1115        case 59:
1116                switch (inst_get_field(inst, 21, 30)) {
1117                case OP_59_FADDS:
1118                        fpd_fadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1119                        kvmppc_sync_qpr(vcpu, ax_rd);
1120                        break;
1121                case OP_59_FSUBS:
1122                        fpd_fsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1123                        kvmppc_sync_qpr(vcpu, ax_rd);
1124                        break;
1125                case OP_59_FDIVS:
1126                        fpd_fdivs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1127                        kvmppc_sync_qpr(vcpu, ax_rd);
1128                        break;
1129                case OP_59_FRES:
1130                        fpd_fres(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1131                        kvmppc_sync_qpr(vcpu, ax_rd);
1132                        break;
1133                case OP_59_FRSQRTES:
1134                        fpd_frsqrtes(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1135                        kvmppc_sync_qpr(vcpu, ax_rd);
1136                        break;
1137                }
1138                switch (inst_get_field(inst, 26, 30)) {
1139                case OP_59_FMULS:
1140                        fpd_fmuls(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1141                        kvmppc_sync_qpr(vcpu, ax_rd);
1142                        break;
1143                case OP_59_FMSUBS:
1144                        fpd_fmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1145                        kvmppc_sync_qpr(vcpu, ax_rd);
1146                        break;
1147                case OP_59_FMADDS:
1148                        fpd_fmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1149                        kvmppc_sync_qpr(vcpu, ax_rd);
1150                        break;
1151                case OP_59_FNMSUBS:
1152                        fpd_fnmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1153                        kvmppc_sync_qpr(vcpu, ax_rd);
1154                        break;
1155                case OP_59_FNMADDS:
1156                        fpd_fnmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1157                        kvmppc_sync_qpr(vcpu, ax_rd);
1158                        break;
1159                }
1160                break;
1161        case 63:
1162                switch (inst_get_field(inst, 21, 30)) {
1163                case OP_63_MTFSB0:
1164                case OP_63_MTFSB1:
1165                case OP_63_MCRFS:
1166                case OP_63_MTFSFI:
1167                        /* XXX need to implement */
1168                        break;
1169                case OP_63_MFFS:
1170                        /* XXX missing CR */
1171                        *fpr_d = vcpu->arch.fp.fpscr;
1172                        break;
1173                case OP_63_MTFSF:
1174                        /* XXX missing fm bits */
1175                        /* XXX missing CR */
1176                        vcpu->arch.fp.fpscr = *fpr_b;
1177                        break;
1178                case OP_63_FCMPU:
1179                {
1180                        u32 tmp_cr;
1181                        u32 cr0_mask = 0xf0000000;
1182                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1183
1184                        fpd_fcmpu(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
1185                        cr &= ~(cr0_mask >> cr_shift);
 1186                        cr |= (tmp_cr & cr0_mask) >> cr_shift;
1187                        break;
1188                }
1189                case OP_63_FCMPO:
1190                {
1191                        u32 tmp_cr;
1192                        u32 cr0_mask = 0xf0000000;
1193                        u32 cr_shift = inst_get_field(inst, 6, 8) * 4;
1194
1195                        fpd_fcmpo(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b);
1196                        cr &= ~(cr0_mask >> cr_shift);
 1197                        cr |= (tmp_cr & cr0_mask) >> cr_shift;
1198                        break;
1199                }
1200                case OP_63_FNEG:
1201                        fpd_fneg(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1202                        break;
1203                case OP_63_FMR:
1204                        *fpr_d = *fpr_b;
1205                        break;
1206                case OP_63_FABS:
1207                        fpd_fabs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1208                        break;
1209                case OP_63_FCPSGN:
1210                        fpd_fcpsgn(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1211                        break;
1212                case OP_63_FDIV:
1213                        fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1214                        break;
1215                case OP_63_FADD:
1216                        fpd_fadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1217                        break;
1218                case OP_63_FSUB:
1219                        fpd_fsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b);
1220                        break;
1221                case OP_63_FCTIW:
1222                        fpd_fctiw(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1223                        break;
1224                case OP_63_FCTIWZ:
1225                        fpd_fctiwz(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1226                        break;
1227                case OP_63_FRSP:
1228                        fpd_frsp(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1229                        kvmppc_sync_qpr(vcpu, ax_rd);
1230                        break;
1231                case OP_63_FRSQRTE:
1232                {
1233                        double one = 1.0f;
1234
1235                        /* fD = sqrt(fB) */
1236                        fpd_fsqrt(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b);
1237                        /* fD = 1.0f / fD */
1238                        fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, (u64*)&one, fpr_d);
1239                        break;
1240                }
1241                }
1242                switch (inst_get_field(inst, 26, 30)) {
1243                case OP_63_FMUL:
1244                        fpd_fmul(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c);
1245                        break;
1246                case OP_63_FSEL:
1247                        fpd_fsel(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1248                        break;
1249                case OP_63_FMSUB:
1250                        fpd_fmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1251                        break;
1252                case OP_63_FMADD:
1253                        fpd_fmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1254                        break;
1255                case OP_63_FNMSUB:
1256                        fpd_fnmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1257                        break;
1258                case OP_63_FNMADD:
1259                        fpd_fnmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b);
1260                        break;
1261                }
1262                break;
1263        }
1264
1265#ifdef DEBUG
1266        for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) {
1267                u32 f;
1268                kvm_cvt_df(&VCPU_FPR(vcpu, i), &f);
1269                dprintk(KERN_INFO "FPR[%d] = 0x%x\n", i, f);
1270        }
1271#endif
1272
1273        if (rcomp)
1274                kvmppc_set_cr(vcpu, cr);
1275
1276        preempt_enable();
1277
1278        return emulated;
1279}
1280