qemu/target-ppc/translate/vsx-impl.inc.c
/***                           VSX extension                               ***/

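/*
 * Each 128-bit VSR is modelled as a pair of 64-bit TCG globals.  For
 * VSRs 0-31 the high doubleword overlays the FPRs (cpu_fpr) and the low
 * doubleword lives in cpu_vsr; VSRs 32-63 overlay the AltiVec registers
 * (cpu_avrh/cpu_avrl).
 */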
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

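/*
 * Generate a handler for an indexed scalar VSX load: raise the VSX
 * unavailable exception if VSX is disabled, compute the effective
 * address from (rA|0) + rB, and load into the high doubleword of XT
 * with the given gen_qemu_* operation.  The low doubleword of the
 * target is left undefined (see the NOTE in the body).
 */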
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

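/*
 * lxvw4x loads four word elements.  In big-endian mode two big-endian
 * 64-bit loads give the correct element order directly; in
 * little-endian mode each doubleword is loaded little-endian and its
 * two words are then exchanged with a shift/deposit pair.
 */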
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

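/*
 * Swap the bytes within each of the eight 16-bit lanes of a doubleword
 * pair using the usual mask/shift/or idiom, e.g.
 * 0x0123456789ABCDEF -> 0x23016745AB89EFCD.
 */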
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

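/*
 * Swap the bytes within each of the four 32-bit lanes: bswap64 reverses
 * all eight bytes of a doubleword, then the shift/deposit pair puts the
 * two words back in their original order, e.g.
 * 0x0011223344556677 -> 0x3322110077665544.
 */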
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

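/*
 * Store counterpart of VSX_LOAD_SCALAR: the high doubleword of XS is
 * written to memory with the given gen_qemu_* store operation.
 */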
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

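/*
 * Word-sized moves between GPRs and VSRs.  The unavailable-exception
 * check depends on which half of the VSR file is addressed: FPU for
 * VSRs 0-31 (the FPR overlay) and VPU for VSRs 32-63 (the VR overlay).
 */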
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

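/*
 * xxpermdi: DM bit 1 selects which doubleword of xA supplies the high
 * doubleword of xT, and DM bit 0 selects the doubleword of xB for the
 * low half.  When xT aliases xA or xB the result is staged in
 * temporaries so the first move cannot clobber a source.
 */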
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

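/*
 * Scalar sign-bit manipulation on the high doubleword only: abs clears
 * the sign bit (andc), nabs sets it (or), neg flips it (xor), and
 * copysign splices the sign of xA onto the magnitude of xB.
 */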
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

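/*
 * Vector forms of the same sign-bit operations, applied to both
 * doublewords.  SGN_MASK_SP has the sign bit set in each 32-bit half,
 * so one 64-bit operation covers two single-precision elements.
 */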
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);         \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);         \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

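/*
 * Most VSX arithmetic is done in out-of-line helpers; the raw opcode
 * is passed so the helper can extract the register fields itself.  The
 * op1/op2/inval/type arguments are unused in the generated body and
 * are apparently consumed by the opcode-table macros elsewhere in the
 * translator.
 */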
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

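/*
 * xxbr[dhqw] reverse the bytes within each element.  xxbrq also swaps
 * the two doublewords, so a temporary is used in case xT aliases xB.
 */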
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
            cpu_vsrh(xB(ctx->opcode)));                              \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
            cpu_vsrl(xB(ctx->opcode)));                              \
    }

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1;                            \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        if (high) {                                         \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
        } else {                                            \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),      \
                            b0, a0, 32, 32);                \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),      \
                            b1, a1, 32, 32);                \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

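/* xxsel computes xT = (xA & ~xC) | (xB & xC), one doubleword at a time. */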
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

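/*
 * xxspltw replicates one word of xB into all four words of xT: UIM
 * bit 1 selects the doubleword, UIM bit 0 the word within it.
 */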
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

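/*
 * pattern(x) replicates the low byte of x across a 64-bit value:
 * ~(uint64_t)0 / 0xff is 0x0101010101010101, so e.g.
 * pattern(0xAB) == 0xABABABABABABABAB.
 */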
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

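/*
 * xxspltib writes XT, so the availability check follows the target
 * register number: VSX for VSRs 0-31, AltiVec for VSRs 32-63, matching
 * mtvsrdd/mtvsrws above.
 */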
static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

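/*
 * xxsldwi shifts the 256-bit concatenation xA:xB left by SHW words and
 * keeps the high 128 bits: SHW == 0 copies xA, SHW == 2 takes the low
 * doubleword of xA and the high doubleword of xB, and the odd cases
 * stitch each result doubleword together from two neighbours.
 */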
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL