qemu/target/ppc/translate/vsx-impl.inc.c
/***                           VSX extension                               ***/

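/*
 * The 64 VSX registers are backed by the existing register files: the
 * high doubleword of VSR[0..31] aliases FPR[0..31] (the low doubleword
 * lives in the separate cpu_vsr[] array), while VSR[32..63] alias the
 * Altivec registers (cpu_avrh[]/cpu_avrl[]).  These accessors hide that
 * split and return the TCG global backing the requested doubleword.
 */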
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

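/*
 * Indexed scalar loads (X-form, EA = (RA|0) + RB).  The loaded value
 * goes into the high doubleword of XT; the low doubleword is left
 * architecturally undefined, so nothing is written to cpu_vsrl().
 */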
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

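/* lxvdsx: load a doubleword and splat it into both halves of XT. */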
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

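/*
 * lxvw4x loads four words.  In little-endian mode each doubleword is
 * loaded LE and its two words are then swapped back with a
 * shift/deposit pair, giving the word-ordered image the instruction
 * requires.
 */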
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

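/*
 * Byte-swap each of the eight 16-bit halfwords of the 128-bit value
 * inh:inl, using the usual mask/shift/or idiom on both doublewords.
 */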
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

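/*
 * Byte-swap each of the four 32-bit words of inh:inl.  bswap64
 * reverses all eight bytes of a doubleword, so a shift/deposit pair
 * swaps the two words back into place afterwards.
 */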
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

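/*
 * lxv/stxv (DQ-form) and lxvx/stxvx (X-form) move a full 128 bits.
 * Targets 0..31 require VSX, 32..63 require Altivec, mirroring the
 * underlying register files.  The two doublewords are accessed in
 * reverse order in little-endian mode so the in-register image is the
 * same either way.
 */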
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth, xtl;                                      \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
    xth = cpu_vsrh(xt);                                     \
    xtl = cpu_vsrl(xt);                                     \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
    }                                                       \
    tcg_temp_free(EA);                                      \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

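/*
 * Load/store vector with length (lxvl, lxvll, stxvl, stxvll): the byte
 * count lives in the high-order byte of RB and is only known at run
 * time, so these are implemented entirely in out-of-line helpers that
 * receive the EA, the target register number and RB.
 */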
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                      \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv EA, xt;                                                \
                                                                \
    if (xT(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->vsx_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_VSXU);              \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    EA = tcg_temp_new();                                        \
    xt = tcg_const_tl(xT(ctx->opcode));                         \
    gen_set_access_type(ctx, ACCESS_INT);                       \
    gen_addr_register(ctx, EA);                                 \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
    tcg_temp_free(EA);                                          \
    tcg_temp_free(xt);                                          \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: only the high doubleword is stored */                \
    tcg_temp_free(EA);                                            \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

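/*
 * Moves between GPRs and the 32-bit elements of VSRs.  Which facility
 * must be enabled depends on the half of the register file involved:
 * FP for VSR 0..31, Altivec for VSR 32..63.
 */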
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

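/*
 * xxpermdi: DM is a two-bit selector.  Its high bit picks the high or
 * low doubleword of XA for XT's high half; its low bit does the same
 * with XB for XT's low half.  Temporaries are only needed when XT
 * overlaps one of the sources.
 */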
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

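/*
 * Scalar sign manipulation is done with plain logic ops on a sign
 * mask: abs clears the sign bit (andc), nabs sets it (or), neg flips
 * it (xor), and cpsgn copies it over from XA.
 */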
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));   \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm;                                  \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xb));                           \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xb));                           \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm);                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xt), xbh);                           \
    tcg_gen_mov_i64(cpu_vsrl(xt), xbl);                           \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
                tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);         \
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);         \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

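/*
 * Most VSX arithmetic is done in out-of-line helpers.  GEN_VSX_HELPER_2
 * hands the helper the raw opcode so it can decode its own operands;
 * the op1/op2/inval arguments are not used by the macro body and only
 * mirror the opcode table entry.
 */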
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext * ctx)                                    \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

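/* xxbrd/xxbrh/xxbrq/xxbrw: ISA 3.0 byte-reverse within each element. */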
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
            cpu_vsrh(xB(ctx->opcode)));                              \
        tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
            cpu_vsrl(xB(ctx->opcode)));                              \
    }

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1;                            \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        if (high) {                                         \
            tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
        } else {                                            \
            tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
            tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
            tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
        }                                                   \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),      \
                            b0, a0, 32, 32);                \
        tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),      \
                            b1, a1, 32, 32);                \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

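/*
 * xxsel: bitwise select, XT = (XA & ~XC) | (XB & XC), computed one
 * doubleword at a time.
 */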
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

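/*
 * pattern(x) replicates the low byte of x into all eight bytes of a
 * doubleword, e.g. pattern(0xAB) == 0xABABABABABABABABULL.
 */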
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

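/*
 * xxsldwi: shift the 256-bit concatenation XA:XB left by SHW words and
 * keep the high 128 bits.  Each case below spells out the word-aligned
 * funnel shift with 64-bit moves, shifts and ors.
 */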
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv xt, xb;                                                \
    TCGv_i32 t0;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    /* A 4-bit field cannot exceed 15; if it somehow does, the  \
     * target is zeroed.  12 < uimm <= 15 is handled as per     \
     * hardware in the helper.                                  \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);         \
        tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), 0);         \
        return;                                                 \
    }                                                           \
    /* Allocate the temporaries only after the early returns    \
     * so that none of them can leak.                           \
     */                                                         \
    xt = tcg_const_tl(xT(ctx->opcode));                         \
    xb = tcg_const_tl(xB(ctx->opcode));                         \
    t0 = tcg_temp_new_i32();                                    \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free(xb);                                          \
    tcg_temp_free(xt);                                          \
    tcg_temp_free_i32(t0);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
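/* xsxexpdp: copy the 11-bit biased exponent of the DP value in
 * VSR[XB] (bits 62:52) into GPR[RT].
 */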
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_extract_i64(rt, cpu_vsrh(xB(ctx->opcode)), 52, 11);
}

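/* xsxexpqp: copy the 15-bit biased exponent of the QP value in
 * VSR[VRB+32] into the high doubleword of VSR[VRT+32] and clear the
 * low doubleword.
 */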
static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
    TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);
    TCGv_i64 xbh = cpu_vsrh(rB(ctx->opcode) + 32);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_extract_i64(xth, xbh, 48, 15);
    tcg_gen_movi_i64(xtl, 0);
}

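/* xsiexpdp: assemble a DP value from the sign and fraction bits of
 * GPR[RA] (mask 0x800FFFFFFFFFFFFF) and the low 11 bits of GPR[RB]
 * as the exponent.  For example, RA = 0 and RB = 0x3FF produce
 * 0x3FF0000000000000, i.e. 1.0.
 */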
static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
}

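/* xsiexpqp: the QP analogue of xsiexpdp: keep the sign and 112-bit
 * fraction of VSR[VRA+32] and insert the low 15 bits of
 * VSR[VRB+32]'s high doubleword as the exponent.
 */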
static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
    TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);
    TCGv_i64 xah = cpu_vsrh(rA(ctx->opcode) + 32);
    TCGv_i64 xal = cpu_vsrl(rA(ctx->opcode) + 32);
    TCGv_i64 xbh = cpu_vsrh(rB(ctx->opcode) + 32);
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_mov_i64(xtl, xal);
    tcg_temp_free_i64(t0);
}

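/* xsxsigdp: extract the 53-bit significand of the DP value in
 * VSR[XB]: the 52 fraction bits plus the implicit leading 1
 * (0x0010000000000000), which is suppressed when the biased
 * exponent is 0 (zero/denormal) or 2047 (infinity/NaN).  E.g. 1.0
 * (0x3FF0000000000000) yields 0x0010000000000000.
 */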
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, cpu_vsrh(xB(ctx->opcode)), 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(rt, cpu_vsrh(xB(ctx->opcode)), 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(rt, rt, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

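/* xsxsigqp: QP version of xsxsigdp; the implicit bit is bit 112 of
 * the 128-bit value (0x0001000000000000 in the high doubleword) and
 * the all-ones exponent is 32767.
 */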
static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
    TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, cpu_vsrh(rB(ctx->opcode) + 32), 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, cpu_vsrh(rB(ctx->opcode) + 32), 0x0000FFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_mov_i64(xtl, cpu_vsrl(rB(ctx->opcode) + 32));

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}
#endif

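/* xviexpsp: for each of the four SP words, keep the sign and
 * fraction from VSR[XA] (mask 0x807FFFFF807FFFFF) and OR in the low
 * 8 bits of the corresponding word of VSR[XB], shifted up 23 bits
 * into the exponent field.
 */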
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xah = cpu_vsrh(xA(ctx->opcode));
    TCGv_i64 xal = cpu_vsrl(xA(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    tcg_temp_free_i64(t0);
}

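/* xviexpdp: DP analogue of xviexpsp, one element per doubleword:
 * sign and fraction from VSR[XA], 11-bit exponent from VSR[XB].
 */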
static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xah = cpu_vsrh(xA(ctx->opcode));
    TCGv_i64 xal = cpu_vsrl(xA(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, xah, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_andi_i64(xtl, xal, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbl, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xtl, xtl, t0);
    tcg_temp_free_i64(t0);
}

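/* xvxexpsp: extract the 8-bit biased exponent of each SP word of
 * VSR[XB] into the corresponding word of VSR[XT].
 */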
static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
}

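/* xvxexpdp: extract the 11-bit biased exponent of each DP
 * doubleword of VSR[XB] into the corresponding doubleword of
 * VSR[XT].
 */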
static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_extract_i64(xth, xbh, 52, 11);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

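/* xvxsigdp: extract the significand of each DP element of VSR[XB],
 * ORing in the implicit leading 1 unless the biased exponent is 0
 * (zero/denormal) or 2047 (infinity/NaN).
 */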
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xtl, xbl, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xtl, xtl, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL