/* qemu/target/sparc/translate.c */
   1/*
   2   SPARC translation
   3
   4   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   5   Copyright (C) 2003-2005 Fabrice Bellard
   6
   7   This library is free software; you can redistribute it and/or
   8   modify it under the terms of the GNU Lesser General Public
   9   License as published by the Free Software Foundation; either
  10   version 2 of the License, or (at your option) any later version.
  11
  12   This library is distributed in the hope that it will be useful,
  13   but WITHOUT ANY WARRANTY; without even the implied warranty of
  14   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  15   Lesser General Public License for more details.
  16
  17   You should have received a copy of the GNU Lesser General Public
  18   License along with this library; if not, see <http://www.gnu.org/licenses/>.
  19 */
  20
  21#include "qemu/osdep.h"
  22
  23#include "cpu.h"
  24#include "disas/disas.h"
  25#include "exec/helper-proto.h"
  26#include "exec/exec-all.h"
  27#include "tcg-op.h"
  28#include "exec/cpu_ldst.h"
  29
  30#include "exec/helper-gen.h"
  31
  32#include "trace-tcg.h"
  33#include "exec/log.h"
  34#include "asi.h"
  35
  36
  37#define DEBUG_DISAS
  38
/* Sentinel values stored in DisasContext.pc/npc in place of a real
   target address.  Real SPARC addresses are 4-byte aligned, so these
   small constants cannot collide with them. */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
  42
/* global register indexes (TCG globals mapping CPUSPARCState fields) */
static TCGv_ptr cpu_regwptr;                     /* pointer to current window regs */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst; /* lazy condition-code operands/result */
static TCGv_i32 cpu_cc_op;                       /* current CC_OP_* for lazy flags */
static TCGv_i32 cpu_psr;                         /* processor state register (icc) */
static TCGv cpu_fsr, cpu_pc, cpu_npc;            /* FP state reg, PC, next PC */
static TCGv cpu_regs[32];   /* GPRs; index 0 is never read or written here
                               (reads of reg 0 yield zero — see gen_load_gpr) */
static TCGv cpu_y;                               /* Y register (multiply helpers) */
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;                             /* trap base register */
#endif
static TCGv cpu_cond;       /* evaluated branch condition (see gen_branch_*) */
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;               /* 64-bit cc, FP register state */
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;                             /* window invalid mask (sparc32) */
#endif
/* Floating point registers, stored as 64-bit halves of the FP file */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
  65
  66#include "exec/gen-icount.h"
  67
/* Per-translation-block state of the SPARC instruction decoder. */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;          /* set when the TB must end here (branch/exception) */
    int mem_idx;        /* MMU index for memory accesses */
    bool fpu_enabled;
    bool address_mask_32bit;  /* truncate addresses to 32 bits
                                 (presumably PSTATE.AM — see gen_address_mask) */
    bool singlestep;    /* disables direct TB chaining (see use_goto_tb) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    /* Scratch temporaries handed out by get_temp_i32/get_temp_tl;
       presumably released in bulk by the translator main loop
       (not visible in this chunk). */
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;
#endif
} DisasContext;
  96
/* A comparison broken out into a TCG condition and its two operands,
   ready for brcond/movcond emission. */
typedef struct {
    TCGCond cond;
    bool is_bool;   /* NOTE(review): presumably "c1 already holds 0/1" —
                       confirm at the use sites (outside this chunk) */
    bool g1, g2;    /* c1/c2 are globals; do not free (see free_compare) */
    TCGv c1, c2;
} DisasCompare;
 103
/* Extract bits FROM..TO of X with the non-native numbering in which
   bit 0 is the most-significant bit (as in instruction diagrams). */
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

/* Same extraction with the manuals' numbering, i.e. bit 0 is 2^0. */
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map a 5-bit double/quad FP register specifier to its flat register
   number: on sparc64 the low bit selects the upper bank. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Masks applied to software trap numbers (use sites not in this chunk
   — confirm against the trap-instruction decoder). */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
 125
 126static int sign_extend(int x, int len)
 127{
 128    len = 32 - len;
 129    return (x << len) >> len;
 130}
 131
/* Bit 13 of an arithmetic/memory instruction selects the immediate form. */
#define IS_IMM (insn & (1<<13))
 133
 134static inline TCGv_i32 get_temp_i32(DisasContext *dc)
 135{
 136    TCGv_i32 t;
 137    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
 138    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
 139    return t;
 140}
 141
 142static inline TCGv get_temp_tl(DisasContext *dc)
 143{
 144    TCGv t;
 145    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
 146    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
 147    return t;
 148}
 149
/* Mark half of the FP register file dirty in FPRS (sparc64 only):
   bit 1 for rd < 32, bit 2 for the upper bank (presumably FPRS.DL/DU
   — confirm against the V9 spec).  No-op on sparc32. */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
 162
/* floating point registers moves */

/* Read single-precision register SRC.  Singles are packed two to a
   64-bit cpu_fpr[] slot: the even-numbered register in the high half,
   the odd-numbered one in the low half. */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    /* On a 32-bit host an i64 global is a register pair; alias the
       half directly instead of copying. */
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    TCGv_i32 ret = get_temp_i32(dc);
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
#endif
}
 182
/* Write V into single-precision register DST (one half of a 64-bit
   cpu_fpr[] slot) and mark the corresponding FPRS bit dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* NOTE(review): reinterprets the i32 temp as an i64 handle so the
       deposit below can use it; relies on TCG's internal temp layout. */
    TCGv_i64 t = (TCGv_i64)v;
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
 198
/* Scratch destination for a single-precision result; the caller stores
   it back with gen_store_fpr_F. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
 203
/* Read double-precision register SRC: a double occupies one whole
   cpu_fpr[] slot, so the global itself is returned (no copy). */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
 209
/* Write V to double-precision register DST and mark FPRS dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
 216
/* Destination for a double-precision result: the register global is
   written directly, so the caller must still call gen_update_fprs_dirty
   (via gen_store_fpr_D or explicitly). */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
 221
/* Copy quad register SRC (two 64-bit halves) into the env->qt0 staging
   area used by the quad-precision helper functions. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
 229
/* Copy quad register SRC into the env->qt1 staging area (second operand
   of quad-precision helpers). */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
 237
/* Copy the quad-precision helper result from env->qt0 back into quad
   register DST.  Note: does NOT mark FPRS dirty; callers must do so. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
 245
/* Write the two 64-bit halves V1 (upper) and V2 (lower) to quad
   register DST and mark FPRS dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
 255
#ifdef TARGET_SPARC64
/* Upper 64-bit half of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Lower 64-bit half of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Quad-register move RD <- RS (both halves), marking FPRS dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif
 279
/* moves */
/* Privilege predicates over the translation context.  In user-only
   builds nothing is privileged; otherwise they reflect the dc flags
   captured at TB start.  On sparc64 hypervisor privilege implies
   supervisor privilege. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* Should addresses be truncated to 32 bits?  Always true for the
   32-bit ABI; otherwise governed by the dc flag. */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
 302
 303static inline void gen_address_mask(DisasContext *dc, TCGv addr)
 304{
 305#ifdef TARGET_SPARC64
 306    if (AM_CHECK(dc))
 307        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
 308#endif
 309}
 310
 311static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
 312{
 313    if (reg > 0) {
 314        assert(reg < 32);
 315        return cpu_regs[reg];
 316    } else {
 317        TCGv t = get_temp_tl(dc);
 318        tcg_gen_movi_tl(t, 0);
 319        return t;
 320    }
 321}
 322
 323static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
 324{
 325    if (reg > 0) {
 326        assert(reg < 32);
 327        tcg_gen_mov_tl(cpu_regs[reg], v);
 328    }
 329}
 330
 331static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
 332{
 333    if (reg > 0) {
 334        assert(reg < 32);
 335        return cpu_regs[reg];
 336    } else {
 337        return get_temp_tl(dc);
 338    }
 339}
 340
/* May a direct (chained) jump be used to reach PC/NPC from this TB?
   Never while single-stepping.  In system mode both targets must lie
   on the same guest page as the TB itself, so the link is invalidated
   together with the page; user mode has no such constraint. */
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
 355
/* End the TB jumping to (PC, NPC).  When allowed, emit a chainable
   goto_tb slot TB_NUM; otherwise fall back to setting pc/npc and
   returning to the main loop with exit_tb(0). */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* tb pointer | slot index identifies the link to patch */
        tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
 372
/* Extract one icc flag bit from a PSR-format 32-bit value SRC into the
   target-long REG (result is 0 or 1). */
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
 397
/* DST = SRC1 + SRC2, recording operands and result in cpu_cc_* so the
   condition codes can be computed lazily later (CC_OP_ADD family). */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 405
/* Recompute the 32-bit carry-out of the addition that last set the
   condition codes: carry iff (u32)cc_dst < (u32)cc_src.  Returns a new
   i32 temp holding 0/1; the caller frees it. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Narrow the 64-bit cc values to their low 32 bits first. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
 431
/* Recompute the 32-bit borrow of the subtraction that last set the
   condition codes: borrow iff (u32)cc_src < (u32)cc_src2.  Returns a
   new i32 temp holding 0/1; the caller frees it. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
 457
/* Emit DST = SRC1 + SRC2 + carry-in (ADDX/ADDXcc).  How the carry-in
   is obtained depends on which operation last set the condition codes
   (dc->cc_op).  If UPDATE_CC, this op becomes the new CC source
   (CC_OP_ADDX). */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 carry to target-long width if necessary. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
 528
/* DST = SRC1 - SRC2, recording operands and result in cpu_cc_* for
   lazy condition-code computation (CC_OP_SUB family). */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 536
/* Emit DST = SRC1 - SRC2 - borrow-in (SUBX/SUBXcc).  Mirrors
   gen_op_addx_int: the borrow source depends on dc->cc_op, and
   UPDATE_CC makes this op the new CC source (CC_OP_SUBX). */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 borrow to target-long width if necessary. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
 607
/* Emit one multiply step (MULScc): shift the Y register right by one
   bit feeding in the low bit of SRC1, conditionally add SRC2 (only if
   Y's old low bit was set), and shift (N xor V) into the top of the
   partial product.  Operands/result land in cpu_cc_* for flag setting. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 649
/* 32x32 -> 64 multiply: DST gets the full product (low 32 bits on a
   32-bit target), and the high 32 bits go into the Y register.
   SIGN_EXT selects signed vs unsigned extension of the operands. */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    /* mul[su]2 yields the 64-bit product directly as a low/high pair. */
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    /* TCGv is i64 here, so one i64 multiply suffices. */
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
 677
/* UMUL: unsigned 32x32 -> 64 multiply (high half into Y). */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
 683
/* SMUL: signed 32x32 -> 64 multiply (high half into Y). */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
 689
/* Integer condition-code evaluators: each computes its branch
   condition as a 0/1 value in DST from the PSR-format flags in SRC.
   The negated forms simply invert their positive counterpart. */

// ba: always taken -> 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// be: equal -> Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// ble: less or equal -> Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// bl: less -> N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// bleu: less or equal unsigned -> C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// bcs: carry set -> C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// bvs: overflow set -> V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// bn: never taken -> 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// bneg: negative -> N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// bne: not equal -> !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// bg: greater -> !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// bge: greater or equal -> !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// bgu: greater unsigned -> !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// bcc: carry clear -> !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// bpos: positive -> !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// bvc: overflow clear -> !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
 806
/*
  Floating-point condition-code evaluators.  Each FSR condition field
  is a 2-bit value {FCC1, FCC0} with FCC_OFFSET selecting which of the
  (up to four) fields to use:
   0 =          (equal)
   1 <          (less)
   2 >          (greater)
   3 unordered
  Each evaluator below leaves 0/1 in DST.
*/
/* Extract FCC0 (low bit of the selected field) from FSR value SRC. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract FCC1 (high bit of the selected field) from FSR value SRC. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
 972
/* End the TB with a two-way static branch: continue at PC1 when
   R_COND is non-zero, else at PC2 (each with its trivial npc). */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    /* condition true: fall through to pc1 */
    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
 985
/* Conditional branch with the annul bit set: when cpu_cond holds,
   execute the delay slot at npc then jump to PC1; otherwise the delay
   slot is annulled and execution continues at npc + 4.  Ends the TB. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    /* taken: delay slot at npc, then branch target pc1 */
    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    /* not taken: skip (annul) the delay slot */
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}
1000
/* Conditional branch without annul: the delay slot always executes.
   When npc is statically known, defer the decision by recording both
   candidate next-PCs and setting npc = JUMP_PC (resolved later by
   gen_generic_branch); otherwise select the new npc at runtime with a
   movcond on cpu_cond. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;       /* taken */
        dc->jump_pc[1] = npc + 4;   /* not taken */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        /* npc = (cond != 0) ? pc1 : npc + 4 */
        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1025
/* Resolve a pending JUMP_PC: set cpu_npc to jump_pc[0] if cpu_cond is
   non-zero, else jump_pc[1]. */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1038
/* call this function before using the condition register as it may
   have been set for a jump */
/* After this, npc is DYNAMIC_PC and cpu_cond is free for reuse. */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1048
/* Make cpu_npc hold the correct next-PC value: materialize a pending
   JUMP_PC, store a static npc, or leave an already-dynamic npc alone. */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1058
/* Force lazily-tracked condition codes into env (CC_OP_FLAGS) so the
   PSR flags can be read directly. */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1066
/* Synchronize cpu_pc/cpu_npc with the decoder state, e.g. before
   calling a helper that may raise an exception. */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1072
/* Raise trap WHICH at the current pc/npc and terminate the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->is_br = 1;
}
1083
/* Emit a runtime alignment check: the helper traps if ADDR has any of
   the MASK bits set. */
static void gen_check_align(TCGv addr, int mask)
{
    TCGv_i32 r_mask = tcg_const_i32(mask);
    gen_helper_check_align(cpu_env, addr, r_mask);
    tcg_temp_free_i32(r_mask);
}
1090
/* Advance pc to npc (the delay-slot step).  If npc is only known at
   run time (JUMP_PC or DYNAMIC_PC), copy the cpu_npc global into
   cpu_pc and mark pc dynamic; otherwise just update the static value.  */
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1104
/* Emit pc = npc; npc += 4 — the generic sequential-advance sequence.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1110
/* Release the temporaries held by a DisasCompare.  Operands flagged as
   global (g1/g2) alias long-lived TCG globals and must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1120
1121static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1122                        DisasContext *dc)
1123{
1124    static int subcc_cond[16] = {
1125        TCG_COND_NEVER,
1126        TCG_COND_EQ,
1127        TCG_COND_LE,
1128        TCG_COND_LT,
1129        TCG_COND_LEU,
1130        TCG_COND_LTU,
1131        -1, /* neg */
1132        -1, /* overflow */
1133        TCG_COND_ALWAYS,
1134        TCG_COND_NE,
1135        TCG_COND_GT,
1136        TCG_COND_GE,
1137        TCG_COND_GTU,
1138        TCG_COND_GEU,
1139        -1, /* pos */
1140        -1, /* no overflow */
1141    };
1142
1143    static int logic_cond[16] = {
1144        TCG_COND_NEVER,
1145        TCG_COND_EQ,     /* eq:  Z */
1146        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1147        TCG_COND_LT,     /* lt:  N ^ V -> N */
1148        TCG_COND_EQ,     /* leu: C | Z -> Z */
1149        TCG_COND_NEVER,  /* ltu: C -> 0 */
1150        TCG_COND_LT,     /* neg: N */
1151        TCG_COND_NEVER,  /* vs:  V -> 0 */
1152        TCG_COND_ALWAYS,
1153        TCG_COND_NE,     /* ne:  !Z */
1154        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1155        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1156        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1157        TCG_COND_ALWAYS, /* geu: !C -> 1 */
1158        TCG_COND_GE,     /* pos: !N */
1159        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1160    };
1161
1162    TCGv_i32 r_src;
1163    TCGv r_dst;
1164
1165#ifdef TARGET_SPARC64
1166    if (xcc) {
1167        r_src = cpu_xcc;
1168    } else {
1169        r_src = cpu_psr;
1170    }
1171#else
1172    r_src = cpu_psr;
1173#endif
1174
1175    switch (dc->cc_op) {
1176    case CC_OP_LOGIC:
1177        cmp->cond = logic_cond[cond];
1178    do_compare_dst_0:
1179        cmp->is_bool = false;
1180        cmp->g2 = false;
1181        cmp->c2 = tcg_const_tl(0);
1182#ifdef TARGET_SPARC64
1183        if (!xcc) {
1184            cmp->g1 = false;
1185            cmp->c1 = tcg_temp_new();
1186            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1187            break;
1188        }
1189#endif
1190        cmp->g1 = true;
1191        cmp->c1 = cpu_cc_dst;
1192        break;
1193
1194    case CC_OP_SUB:
1195        switch (cond) {
1196        case 6:  /* neg */
1197        case 14: /* pos */
1198            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1199            goto do_compare_dst_0;
1200
1201        case 7: /* overflow */
1202        case 15: /* !overflow */
1203            goto do_dynamic;
1204
1205        default:
1206            cmp->cond = subcc_cond[cond];
1207            cmp->is_bool = false;
1208#ifdef TARGET_SPARC64
1209            if (!xcc) {
1210                /* Note that sign-extension works for unsigned compares as
1211                   long as both operands are sign-extended.  */
1212                cmp->g1 = cmp->g2 = false;
1213                cmp->c1 = tcg_temp_new();
1214                cmp->c2 = tcg_temp_new();
1215                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1216                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1217                break;
1218            }
1219#endif
1220            cmp->g1 = cmp->g2 = true;
1221            cmp->c1 = cpu_cc_src;
1222            cmp->c2 = cpu_cc_src2;
1223            break;
1224        }
1225        break;
1226
1227    default:
1228    do_dynamic:
1229        gen_helper_compute_psr(cpu_env);
1230        dc->cc_op = CC_OP_FLAGS;
1231        /* FALLTHRU */
1232
1233    case CC_OP_FLAGS:
1234        /* We're going to generate a boolean result.  */
1235        cmp->cond = TCG_COND_NE;
1236        cmp->is_bool = true;
1237        cmp->g1 = cmp->g2 = false;
1238        cmp->c1 = r_dst = tcg_temp_new();
1239        cmp->c2 = tcg_const_tl(0);
1240
1241        switch (cond) {
1242        case 0x0:
1243            gen_op_eval_bn(r_dst);
1244            break;
1245        case 0x1:
1246            gen_op_eval_be(r_dst, r_src);
1247            break;
1248        case 0x2:
1249            gen_op_eval_ble(r_dst, r_src);
1250            break;
1251        case 0x3:
1252            gen_op_eval_bl(r_dst, r_src);
1253            break;
1254        case 0x4:
1255            gen_op_eval_bleu(r_dst, r_src);
1256            break;
1257        case 0x5:
1258            gen_op_eval_bcs(r_dst, r_src);
1259            break;
1260        case 0x6:
1261            gen_op_eval_bneg(r_dst, r_src);
1262            break;
1263        case 0x7:
1264            gen_op_eval_bvs(r_dst, r_src);
1265            break;
1266        case 0x8:
1267            gen_op_eval_ba(r_dst);
1268            break;
1269        case 0x9:
1270            gen_op_eval_bne(r_dst, r_src);
1271            break;
1272        case 0xa:
1273            gen_op_eval_bg(r_dst, r_src);
1274            break;
1275        case 0xb:
1276            gen_op_eval_bge(r_dst, r_src);
1277            break;
1278        case 0xc:
1279            gen_op_eval_bgu(r_dst, r_src);
1280            break;
1281        case 0xd:
1282            gen_op_eval_bcc(r_dst, r_src);
1283            break;
1284        case 0xe:
1285            gen_op_eval_bpos(r_dst, r_src);
1286            break;
1287        case 0xf:
1288            gen_op_eval_bvc(r_dst, r_src);
1289            break;
1290        }
1291        break;
1292    }
1293}
1294
/* Build a DisasCompare for floating-point condition 'cond' on fcc[cc].
   Always produces a boolean temporary tested against zero.  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Select the FSR bit offset of the requested fcc field, relative to
       fcc0.  NOTE(review): the "32 - 10" style constants suggest the
       gen_op_eval_fb* helpers add fcc0's bit position (10) internally —
       confirm against their definitions.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1374
/* Evaluate integer condition 'cond' on icc/xcc 'cc' into boolean r_dst.  */
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
1390
/* Evaluate fp condition 'cond' on fcc[cc] into boolean r_dst.  */
static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
1405
1406#ifdef TARGET_SPARC64
/* Map the 3-bit register-condition field (BPr/MOVr/FMOVr) to a TCG
   condition.  The table stores the INVERSE condition; users invert it
   (see gen_compare_reg).  Entries 0 and 4 are reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1418
/* Build a DisasCompare testing register r_src against zero for the
   register-condition encoding 'cond' (table above holds the inverse).  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;   /* r_src is caller-owned; don't free it */
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1428
/* Evaluate register-condition 'cond' on r_src into boolean r_dst.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1439#endif
1440
/* Translate an integer conditional branch (Bicc/BPcc).  'a' is the
   annul bit: for a conditional branch it annuls the delay slot when the
   branch is NOT taken; for "bn"/"ba" its effect is resolved statically.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* 32-bit address masking in effect: wrap the target.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: jump straight to the target.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1480
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).
   Identical structure to do_branch, but the condition comes from
   fcc[cc] via gen_fcond.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* 32-bit address masking in effect: wrap the target.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: jump straight to the target.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1520
1521#ifdef TARGET_SPARC64
/* Translate a branch-on-register-condition (BPr).  Unlike Bicc there is
   no unconditional encoding, so the condition is always evaluated.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* 32-bit address masking in effect: wrap the target.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1539
/* FCMPs: compare single-precision operands, updating fcc[fccno] in FSR
   (v9 provides four fcc fields, each with its own helper).  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1557
/* FCMPd: compare double-precision operands, updating fcc[fccno].  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1575
/* FCMPq: compare quad operands (passed via the env QT0/QT1 staging
   slots), updating fcc[fccno].  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1593
/* FCMPEs: single-precision compare, "E" (exception) variant helper.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1611
/* FCMPEd: double-precision compare, "E" (exception) variant helper.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1629
/* FCMPEq: quad compare via QT0/QT1, "E" (exception) variant helper.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1647
1648#else
1649
/* Pre-v9: only fcc0 exists, so fccno is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1654
/* Pre-v9 FCMPd; fccno is ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1659
/* Pre-v9 FCMPq (operands via QT0/QT1); fccno is ignored.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1664
/* Pre-v9 FCMPEs; fccno is ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1669
/* Pre-v9 FCMPEd; fccno is ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1674
/* Pre-v9 FCMPEq (operands via QT0/QT1); fccno is ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1679#endif
1680
/* Raise an FP exception with the given FTT flags: clear the old FTT
   field in FSR, set the new flags, then trap to TT_FP_EXCP.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1687
/* If the FPU is disabled, emit an fp-disabled trap and return 1 so the
   caller can abandon translation of the insn; otherwise return 0.
   User-mode emulation always has the FPU enabled.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1698
/* Clear the FTT and current-exception (cexc) fields of FSR.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1703
/* Unary single-precision op: frd = gen(env, frs), followed by the IEEE
   exception check helper.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1717
/* Unary single-precision op with no env and no exception check
   ("ne" = no exceptions), e.g. moves/abs/neg.  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
1730
/* Binary single-precision op with IEEE exception check.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1745
1746#ifdef TARGET_SPARC64
/* Binary single-precision op, no env, no exception check (VIS ops).  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1760#endif
1761
/* Unary double-precision op with IEEE exception check.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1775
1776#ifdef TARGET_SPARC64
/* Unary double-precision op, no env, no exception check.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1789#endif
1790
/* Binary double-precision op with IEEE exception check.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1805
1806#ifdef TARGET_SPARC64
/* Binary double-precision op, no env, no exception check (VIS ops).  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1820
/* Binary double-precision op that also reads GSR (e.g. faligndata).  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1834
/* Three-operand double-precision op: the destination register's old
   value (src0) is an input as well, i.e. rd = gen(rd, rs1, rs2).  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1849#endif
1850
/* Unary quad op: operand goes in via the env QT1 staging slot, result
   comes back via QT0; IEEE exceptions checked.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1862
1863#ifdef TARGET_SPARC64
/* Unary quad op via QT1/QT0, no exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1874#endif
1875
/* Binary quad op: operands staged in QT0/QT1, result in QT0; IEEE
   exceptions checked.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1888
/* Two single-precision sources producing a double result (e.g. fsmuld);
   IEEE exceptions checked.  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1904
/* Two double-precision sources producing a quad result in QT0
   (e.g. fdmulq); IEEE exceptions checked.  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1919
1920#ifdef TARGET_SPARC64
/* Single-precision source, double-precision result; IEEE exceptions
   checked.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1935#endif
1936
/* Single source, double result.  "ne" here means only that the IEEE
   exception check is skipped — the callback still receives cpu_env.  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1950
/* Double-precision source, single-precision result; IEEE exceptions
   checked.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1965
/* Quad source (via QT1), single-precision result; IEEE exceptions
   checked.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1979
/* Quad source (via QT1), double-precision result; IEEE exceptions
   checked.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1993
/* Single-precision source, quad result in QT0; no exception check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2006
/* Double-precision source, quad result in QT0; no exception check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2019
/* SWAP: atomically exchange 'src' with memory at 'addr', old value to
   'dst', after applying address masking.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2026
/* LDSTUB: atomically load the byte at 'addr' into 'dst' and store 0xff
   to it (implemented as an atomic exchange with 0xff).  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
    tcg_temp_free(m1);
}
2034
2035/* asi moves */
2036#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI access, chosen by get_asi(): either handled
   inline (direct/twinx/block/short/bcopy/bfill), via the generic helper,
   or already turned into an exception.  */
typedef enum {
    GET_ASI_HELPER,
    GET_ASI_EXCP,
    GET_ASI_DIRECT,
    GET_ASI_DTWINX,
    GET_ASI_BLOCK,
    GET_ASI_SHORT,
    GET_ASI_BCOPY,
    GET_ASI_BFILL,
} ASIType;
2047
/* Decoded ASI access: how to handle it, the raw ASI number, the MMU
   index to use, and the (possibly byte-swapped) memory operation.  */
typedef struct {
    ASIType type;
    int asi;
    int mem_idx;
    TCGMemOp memop;
} DisasASI;
2054
2055static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
2056{
2057    int asi = GET_FIELD(insn, 19, 26);
2058    ASIType type = GET_ASI_HELPER;
2059    int mem_idx = dc->mem_idx;
2060
2061#ifndef TARGET_SPARC64
2062    /* Before v9, all asis are immediate and privileged.  */
2063    if (IS_IMM) {
2064        gen_exception(dc, TT_ILL_INSN);
2065        type = GET_ASI_EXCP;
2066    } else if (supervisor(dc)
2067               /* Note that LEON accepts ASI_USERDATA in user mode, for
2068                  use with CASA.  Also note that previous versions of
2069                  QEMU allowed (and old versions of gcc emitted) ASI_P
2070                  for LEON, which is incorrect.  */
2071               || (asi == ASI_USERDATA
2072                   && (dc->def->features & CPU_FEATURE_CASA))) {
2073        switch (asi) {
2074        case ASI_USERDATA:   /* User data access */
2075            mem_idx = MMU_USER_IDX;
2076            type = GET_ASI_DIRECT;
2077            break;
2078        case ASI_KERNELDATA: /* Supervisor data access */
2079            mem_idx = MMU_KERNEL_IDX;
2080            type = GET_ASI_DIRECT;
2081            break;
2082        case ASI_M_BYPASS:    /* MMU passthrough */
2083        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2084            mem_idx = MMU_PHYS_IDX;
2085            type = GET_ASI_DIRECT;
2086            break;
2087        case ASI_M_BCOPY: /* Block copy, sta access */
2088            mem_idx = MMU_KERNEL_IDX;
2089            type = GET_ASI_BCOPY;
2090            break;
2091        case ASI_M_BFILL: /* Block fill, stda access */
2092            mem_idx = MMU_KERNEL_IDX;
2093            type = GET_ASI_BFILL;
2094            break;
2095        }
2096    } else {
2097        gen_exception(dc, TT_PRIV_INSN);
2098        type = GET_ASI_EXCP;
2099    }
2100#else
2101    if (IS_IMM) {
2102        asi = dc->asi;
2103    }
2104    /* With v9, all asis below 0x80 are privileged.  */
2105    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2106       down that bit into DisasContext.  For the moment that's ok,
2107       since the direct implementations below doesn't have any ASIs
2108       in the restricted [0x30, 0x7f] range, and the check will be
2109       done properly in the helper.  */
2110    if (!supervisor(dc) && asi < 0x80) {
2111        gen_exception(dc, TT_PRIV_ACT);
2112        type = GET_ASI_EXCP;
2113    } else {
2114        switch (asi) {
2115        case ASI_REAL:      /* Bypass */
2116        case ASI_REAL_IO:   /* Bypass, non-cacheable */
2117        case ASI_REAL_L:    /* Bypass LE */
2118        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2119        case ASI_TWINX_REAL:   /* Real address, twinx */
2120        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2121        case ASI_QUAD_LDD_PHYS:
2122        case ASI_QUAD_LDD_PHYS_L:
2123            mem_idx = MMU_PHYS_IDX;
2124            break;
2125        case ASI_N:  /* Nucleus */
2126        case ASI_NL: /* Nucleus LE */
2127        case ASI_TWINX_N:
2128        case ASI_TWINX_NL:
2129        case ASI_NUCLEUS_QUAD_LDD:
2130        case ASI_NUCLEUS_QUAD_LDD_L:
2131            if (hypervisor(dc)) {
2132                mem_idx = MMU_PHYS_IDX;
2133            } else {
2134                mem_idx = MMU_NUCLEUS_IDX;
2135            }
2136            break;
2137        case ASI_AIUP:  /* As if user primary */
2138        case ASI_AIUPL: /* As if user primary LE */
2139        case ASI_TWINX_AIUP:
2140        case ASI_TWINX_AIUP_L:
2141        case ASI_BLK_AIUP_4V:
2142        case ASI_BLK_AIUP_L_4V:
2143        case ASI_BLK_AIUP:
2144        case ASI_BLK_AIUPL:
2145            mem_idx = MMU_USER_IDX;
2146            break;
2147        case ASI_AIUS:  /* As if user secondary */
2148        case ASI_AIUSL: /* As if user secondary LE */
2149        case ASI_TWINX_AIUS:
2150        case ASI_TWINX_AIUS_L:
2151        case ASI_BLK_AIUS_4V:
2152        case ASI_BLK_AIUS_L_4V:
2153        case ASI_BLK_AIUS:
2154        case ASI_BLK_AIUSL:
2155            mem_idx = MMU_USER_SECONDARY_IDX;
2156            break;
2157        case ASI_S:  /* Secondary */
2158        case ASI_SL: /* Secondary LE */
2159        case ASI_TWINX_S:
2160        case ASI_TWINX_SL:
2161        case ASI_BLK_COMMIT_S:
2162        case ASI_BLK_S:
2163        case ASI_BLK_SL:
2164        case ASI_FL8_S:
2165        case ASI_FL8_SL:
2166        case ASI_FL16_S:
2167        case ASI_FL16_SL:
2168            if (mem_idx == MMU_USER_IDX) {
2169                mem_idx = MMU_USER_SECONDARY_IDX;
2170            } else if (mem_idx == MMU_KERNEL_IDX) {
2171                mem_idx = MMU_KERNEL_SECONDARY_IDX;
2172            }
2173            break;
2174        case ASI_P:  /* Primary */
2175        case ASI_PL: /* Primary LE */
2176        case ASI_TWINX_P:
2177        case ASI_TWINX_PL:
2178        case ASI_BLK_COMMIT_P:
2179        case ASI_BLK_P:
2180        case ASI_BLK_PL:
2181        case ASI_FL8_P:
2182        case ASI_FL8_PL:
2183        case ASI_FL16_P:
2184        case ASI_FL16_PL:
2185            break;
2186        }
2187        switch (asi) {
2188        case ASI_REAL:
2189        case ASI_REAL_IO:
2190        case ASI_REAL_L:
2191        case ASI_REAL_IO_L:
2192        case ASI_N:
2193        case ASI_NL:
2194        case ASI_AIUP:
2195        case ASI_AIUPL:
2196        case ASI_AIUS:
2197        case ASI_AIUSL:
2198        case ASI_S:
2199        case ASI_SL:
2200        case ASI_P:
2201        case ASI_PL:
2202            type = GET_ASI_DIRECT;
2203            break;
2204        case ASI_TWINX_REAL:
2205        case ASI_TWINX_REAL_L:
2206        case ASI_TWINX_N:
2207        case ASI_TWINX_NL:
2208        case ASI_TWINX_AIUP:
2209        case ASI_TWINX_AIUP_L:
2210        case ASI_TWINX_AIUS:
2211        case ASI_TWINX_AIUS_L:
2212        case ASI_TWINX_P:
2213        case ASI_TWINX_PL:
2214        case ASI_TWINX_S:
2215        case ASI_TWINX_SL:
2216        case ASI_QUAD_LDD_PHYS:
2217        case ASI_QUAD_LDD_PHYS_L:
2218        case ASI_NUCLEUS_QUAD_LDD:
2219        case ASI_NUCLEUS_QUAD_LDD_L:
2220            type = GET_ASI_DTWINX;
2221            break;
2222        case ASI_BLK_COMMIT_P:
2223        case ASI_BLK_COMMIT_S:
2224        case ASI_BLK_AIUP_4V:
2225        case ASI_BLK_AIUP_L_4V:
2226        case ASI_BLK_AIUP:
2227        case ASI_BLK_AIUPL:
2228        case ASI_BLK_AIUS_4V:
2229        case ASI_BLK_AIUS_L_4V:
2230        case ASI_BLK_AIUS:
2231        case ASI_BLK_AIUSL:
2232        case ASI_BLK_S:
2233        case ASI_BLK_SL:
2234        case ASI_BLK_P:
2235        case ASI_BLK_PL:
2236            type = GET_ASI_BLOCK;
2237            break;
2238        case ASI_FL8_S:
2239        case ASI_FL8_SL:
2240        case ASI_FL8_P:
2241        case ASI_FL8_PL:
2242            memop = MO_UB;
2243            type = GET_ASI_SHORT;
2244            break;
2245        case ASI_FL16_S:
2246        case ASI_FL16_SL:
2247        case ASI_FL16_P:
2248        case ASI_FL16_PL:
2249            memop = MO_TEUW;
2250            type = GET_ASI_SHORT;
2251            break;
2252        }
2253        /* The little-endian asis all have bit 3 set.  */
2254        if (asi & 8) {
2255            memop ^= MO_BSWAP;
2256        }
2257    }
2258#endif
2259
2260    return (DisasASI){ type, asi, mem_idx, memop };
2261}
2262
/* Emit code for an integer load through an alternate ASI.
 * DST receives the loaded value, ADDR is the guest address, INSN is the
 * raw instruction word (decoded by get_asi to find the ASI), and MEMOP
 * gives the access size and endianness.  Directly-mappable ASIs become
 * an inline qemu load; anything else goes through the slow-path helper.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; nothing to emit.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            /* The helper may raise an exception; sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; truncate back to
                   the 32-bit target word.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2300
/* Emit code for an integer store through an alternate ASI.
 * SRC is the value to store, ADDR the guest address, INSN the raw
 * instruction word (decoded for the ASI), MEMOP the size/endianness.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
        /* fall through */
#endif
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            /* Force 4-byte alignment on both source and destination.  */
            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            /* The helper may raise an exception; sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* Widen the store value to the 64 bits the helper takes.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2384
2385static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2386                         TCGv addr, int insn)
2387{
2388    DisasASI da = get_asi(dc, insn, MO_TEUL);
2389
2390    switch (da.type) {
2391    case GET_ASI_EXCP:
2392        break;
2393    case GET_ASI_DIRECT:
2394        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2395        break;
2396    default:
2397        /* ??? Should be DAE_invalid_asi.  */
2398        gen_exception(dc, TT_DATA_ACCESS);
2399        break;
2400    }
2401}
2402
2403static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2404                        int insn, int rd)
2405{
2406    DisasASI da = get_asi(dc, insn, MO_TEUL);
2407    TCGv oldv;
2408
2409    switch (da.type) {
2410    case GET_ASI_EXCP:
2411        return;
2412    case GET_ASI_DIRECT:
2413        oldv = tcg_temp_new();
2414        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2415                                  da.mem_idx, da.memop);
2416        gen_store_gpr(dc, rd, oldv);
2417        tcg_temp_free(oldv);
2418        break;
2419    default:
2420        /* ??? Should be DAE_invalid_asi.  */
2421        gen_exception(dc, TT_DATA_ACCESS);
2422        break;
2423    }
2424}
2425
/* Emit code for LDSTUBA: atomically load the byte at ADDR (through the
 * ASI in INSN) into DST and store 0xff back to the same location.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->tb) & CF_PARALLEL) {
            /* The load+store helper pair below is not atomic; with
               parallel cpus, punt to the exclusive slow path.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            /* Store the 0xff marker byte back through the same ASI.  */
            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2465#endif
2466
2467#ifdef TARGET_SPARC64
/* Emit code for an FP load through an alternate ASI (ldfa/lddfa/ldqfa).
 * SIZE is the access width in bytes (4, 8 or 16) and RD the FP register
 * number.  Note: the 16-byte and block paths advance ADDR in place.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the first half into a temp so that a fault on the
               second load leaves cpu_fpr[rd/2] unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the direct case, stage the first half in a temp
                   until both loads have succeeded.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2581
/* Emit code for an FP store through an alternate ASI (stfa/stdfa/stqfa).
 * SIZE is the access width in bytes (4, 8 or 16) and RD the FP register
 * number.  Note: the 16-byte path advances ADDR in place.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2664
/* Emit code for sparc64 LDDA/LDTXA through an alternate ASI.  The
 * result is written to the even/odd register pair RD, RD+1.  Twinx
 * ASIs load two full 64-bit words; legacy ldda loads one 64-bit word
 * and splits it into two 32-bit halves.  Note: the twinx path advances
 * ADDR in place.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;

    case GET_ASI_DTWINX:
        gen_address_mask(dc, addr);
        /* The first access enforces the required 16-byte alignment.  */
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2730
/* Emit code for sparc64 STDA/STTXA through an alternate ASI.  HI holds
 * register RD; RD+1 is loaded here as the low word.  Twinx ASIs store
 * two full 64-bit words; legacy stda combines the two 32-bit halves
 * into one 64-bit store.  Note: the twinx path advances ADDR in place.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DTWINX:
        gen_address_mask(dc, addr);
        /* The first access enforces the required 16-byte alignment.  */
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2790
2791static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2792                         int insn, int rd)
2793{
2794    DisasASI da = get_asi(dc, insn, MO_TEQ);
2795    TCGv oldv;
2796
2797    switch (da.type) {
2798    case GET_ASI_EXCP:
2799        return;
2800    case GET_ASI_DIRECT:
2801        oldv = tcg_temp_new();
2802        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2803                                  da.mem_idx, da.memop);
2804        gen_store_gpr(dc, rd, oldv);
2805        tcg_temp_free(oldv);
2806        break;
2807    default:
2808        /* ??? Should be DAE_invalid_asi.  */
2809        gen_exception(dc, TT_DATA_ACCESS);
2810        break;
2811    }
2812}
2813
2814#elif !defined(CONFIG_USER_ONLY)
/* Emit code for 32-bit sparc LDDA through an alternate ASI: one 64-bit
 * load whose halves are written back to the register pair RD, RD+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already generated; release the temp and bail.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* Split the 64-bit value into its high/low 32-bit register halves.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2852
/* Emit code for 32-bit sparc STDA through an alternate ASI: the pair
 * RD (in HI), RD+1 is combined into one 64-bit store.  GET_ASI_BFILL
 * replicates that value across a 32-byte block.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2905#endif
2906
2907static TCGv get_src1(DisasContext *dc, unsigned int insn)
2908{
2909    unsigned int rs1 = GET_FIELD(insn, 13, 17);
2910    return gen_load_gpr(dc, rs1);
2911}
2912
2913static TCGv get_src2(DisasContext *dc, unsigned int insn)
2914{
2915    if (IS_IMM) { /* immediate */
2916        target_long simm = GET_FIELDs(insn, 19, 31);
2917        TCGv t = get_temp_tl(dc);
2918        tcg_gen_movi_tl(t, simm);
2919        return t;
2920    } else {      /* register */
2921        unsigned int rs2 = GET_FIELD(insn, 27, 31);
2922        return gen_load_gpr(dc, rs2);
2923    }
2924}
2925
2926#ifdef TARGET_SPARC64
/* Emit a conditional single-precision FP move: frd = CMP ? frs : frd.  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds a 0/1 boolean; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0) ? s1 : s2 */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2955
2956static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2957{
2958    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2959    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2960                        gen_load_fpr_D(dc, rs),
2961                        gen_load_fpr_D(dc, rd));
2962    gen_store_fpr_D(dc, rd, dst);
2963}
2964
2965static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2966{
2967    int qd = QFPREG(rd);
2968    int qs = QFPREG(rs);
2969
2970    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2971                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2972    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2973                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2974
2975    gen_update_fprs_dirty(dc, qd);
2976}
2977
2978#ifndef CONFIG_USER_ONLY
/* Compute r_tsptr = &env->ts[env->tl & MAXTL_MASK], a pointer to the
   trap state for the current trap level.  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the byte offset to pointer width before the add.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
3003#endif
3004
/* Emit code for the VIS EDGE instructions (edge8/16/32 and their
 * left/"l" variants), producing the partial-store byte mask in DST.
 * WIDTH selects the element size, CC requests condition-code update
 * (as for subcc), LEFT selects the left-edge table.
 * Note: clobbers S1 and S2 (the addresses are masked in place).
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* edge*cc also sets the integer condition codes as s1 - s2.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* Look up the edge masks for both input addresses.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Align both addresses to 8 bytes (and to 32 bits in AM mode)
       before comparing them for same-doubleword.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3107
3108static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3109{
3110    TCGv tmp = tcg_temp_new();
3111
3112    tcg_gen_add_tl(tmp, s1, s2);
3113    tcg_gen_andi_tl(dst, tmp, -8);
3114    if (left) {
3115        tcg_gen_neg_tl(tmp, tmp);
3116    }
3117    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3118
3119    tcg_temp_free(tmp);
3120}
3121
3122static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3123{
3124    TCGv t1, t2, shift;
3125
3126    t1 = tcg_temp_new();
3127    t2 = tcg_temp_new();
3128    shift = tcg_temp_new();
3129
3130    tcg_gen_andi_tl(shift, gsr, 7);
3131    tcg_gen_shli_tl(shift, shift, 3);
3132    tcg_gen_shl_tl(t1, s1, shift);
3133
3134    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3135       shift of (up to 63) followed by a constant shift of 1.  */
3136    tcg_gen_xori_tl(shift, shift, 63);
3137    tcg_gen_shr_tl(t2, s2, shift);
3138    tcg_gen_shri_tl(t2, t2, 1);
3139
3140    tcg_gen_or_tl(dst, t1, t2);
3141
3142    tcg_temp_free(t1);
3143    tcg_temp_free(t2);
3144    tcg_temp_free(shift);
3145}
3146#endif
3147
/* Bail out of decoding with an illegal-instruction trap when the CPU
   model lacks the named integer-unit feature.  Usable only inside
   disas_sparc_insn, which defines the illegal_insn label.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise for FPU features, jumping to the nfpu_insn handler
   (also defined in disas_sparc_insn) instead.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3154
3155/* before an instruction, dc->pc must be static */
3156static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3157{
3158    unsigned int opc, rs1, rs2, rd;
3159    TCGv cpu_src1, cpu_src2;
3160    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3161    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3162    target_long simm;
3163
3164    opc = GET_FIELD(insn, 0, 1);
3165    rd = GET_FIELD(insn, 2, 6);
3166
3167    switch (opc) {
3168    case 0:                     /* branches/sethi */
3169        {
3170            unsigned int xop = GET_FIELD(insn, 7, 9);
3171            int32_t target;
3172            switch (xop) {
3173#ifdef TARGET_SPARC64
3174            case 0x1:           /* V9 BPcc */
3175                {
3176                    int cc;
3177
3178                    target = GET_FIELD_SP(insn, 0, 18);
3179                    target = sign_extend(target, 19);
3180                    target <<= 2;
3181                    cc = GET_FIELD_SP(insn, 20, 21);
3182                    if (cc == 0)
3183                        do_branch(dc, target, insn, 0);
3184                    else if (cc == 2)
3185                        do_branch(dc, target, insn, 1);
3186                    else
3187                        goto illegal_insn;
3188                    goto jmp_insn;
3189                }
3190            case 0x3:           /* V9 BPr */
3191                {
3192                    target = GET_FIELD_SP(insn, 0, 13) |
3193                        (GET_FIELD_SP(insn, 20, 21) << 14);
3194                    target = sign_extend(target, 16);
3195                    target <<= 2;
3196                    cpu_src1 = get_src1(dc, insn);
3197                    do_branch_reg(dc, target, insn, cpu_src1);
3198                    goto jmp_insn;
3199                }
3200            case 0x5:           /* V9 FBPcc */
3201                {
3202                    int cc = GET_FIELD_SP(insn, 20, 21);
3203                    if (gen_trap_ifnofpu(dc)) {
3204                        goto jmp_insn;
3205                    }
3206                    target = GET_FIELD_SP(insn, 0, 18);
3207                    target = sign_extend(target, 19);
3208                    target <<= 2;
3209                    do_fbranch(dc, target, insn, cc);
3210                    goto jmp_insn;
3211                }
3212#else
3213            case 0x7:           /* CBN+x */
3214                {
3215                    goto ncp_insn;
3216                }
3217#endif
3218            case 0x2:           /* BN+x */
3219                {
3220                    target = GET_FIELD(insn, 10, 31);
3221                    target = sign_extend(target, 22);
3222                    target <<= 2;
3223                    do_branch(dc, target, insn, 0);
3224                    goto jmp_insn;
3225                }
3226            case 0x6:           /* FBN+x */
3227                {
3228                    if (gen_trap_ifnofpu(dc)) {
3229                        goto jmp_insn;
3230                    }
3231                    target = GET_FIELD(insn, 10, 31);
3232                    target = sign_extend(target, 22);
3233                    target <<= 2;
3234                    do_fbranch(dc, target, insn, 0);
3235                    goto jmp_insn;
3236                }
3237            case 0x4:           /* SETHI */
3238                /* Special-case %g0 because that's the canonical nop.  */
3239                if (rd) {
3240                    uint32_t value = GET_FIELD(insn, 10, 31);
3241                    TCGv t = gen_dest_gpr(dc, rd);
3242                    tcg_gen_movi_tl(t, value << 10);
3243                    gen_store_gpr(dc, rd, t);
3244                }
3245                break;
3246            case 0x0:           /* UNIMPL */
3247            default:
3248                goto illegal_insn;
3249            }
3250            break;
3251        }
3252        break;
3253    case 1:                     /*CALL*/
3254        {
3255            target_long target = GET_FIELDs(insn, 2, 31) << 2;
3256            TCGv o7 = gen_dest_gpr(dc, 15);
3257
3258            tcg_gen_movi_tl(o7, dc->pc);
3259            gen_store_gpr(dc, 15, o7);
3260            target += dc->pc;
3261            gen_mov_pc_npc(dc);
3262#ifdef TARGET_SPARC64
3263            if (unlikely(AM_CHECK(dc))) {
3264                target &= 0xffffffffULL;
3265            }
3266#endif
3267            dc->npc = target;
3268        }
3269        goto jmp_insn;
3270    case 2:                     /* FPU & Logical Operations */
3271        {
3272            unsigned int xop = GET_FIELD(insn, 7, 12);
3273            TCGv cpu_dst = get_temp_tl(dc);
3274            TCGv cpu_tmp0;
3275
3276            if (xop == 0x3a) {  /* generate trap */
3277                int cond = GET_FIELD(insn, 3, 6);
3278                TCGv_i32 trap;
3279                TCGLabel *l1 = NULL;
3280                int mask;
3281
3282                if (cond == 0) {
3283                    /* Trap never.  */
3284                    break;
3285                }
3286
3287                save_state(dc);
3288
3289                if (cond != 8) {
3290                    /* Conditional trap.  */
3291                    DisasCompare cmp;
3292#ifdef TARGET_SPARC64
3293                    /* V9 icc/xcc */
3294                    int cc = GET_FIELD_SP(insn, 11, 12);
3295                    if (cc == 0) {
3296                        gen_compare(&cmp, 0, cond, dc);
3297                    } else if (cc == 2) {
3298                        gen_compare(&cmp, 1, cond, dc);
3299                    } else {
3300                        goto illegal_insn;
3301                    }
3302#else
3303                    gen_compare(&cmp, 0, cond, dc);
3304#endif
3305                    l1 = gen_new_label();
3306                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3307                                      cmp.c1, cmp.c2, l1);
3308                    free_compare(&cmp);
3309                }
3310
3311                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3312                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3313
3314                /* Don't use the normal temporaries, as they may well have
3315                   gone out of scope with the branch above.  While we're
3316                   doing that we might as well pre-truncate to 32-bit.  */
3317                trap = tcg_temp_new_i32();
3318
3319                rs1 = GET_FIELD_SP(insn, 14, 18);
3320                if (IS_IMM) {
3321                    rs2 = GET_FIELD_SP(insn, 0, 7);
3322                    if (rs1 == 0) {
3323                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3324                        /* Signal that the trap value is fully constant.  */
3325                        mask = 0;
3326                    } else {
3327                        TCGv t1 = gen_load_gpr(dc, rs1);
3328                        tcg_gen_trunc_tl_i32(trap, t1);
3329                        tcg_gen_addi_i32(trap, trap, rs2);
3330                    }
3331                } else {
3332                    TCGv t1, t2;
3333                    rs2 = GET_FIELD_SP(insn, 0, 4);
3334                    t1 = gen_load_gpr(dc, rs1);
3335                    t2 = gen_load_gpr(dc, rs2);
3336                    tcg_gen_add_tl(t1, t1, t2);
3337                    tcg_gen_trunc_tl_i32(trap, t1);
3338                }
3339                if (mask != 0) {
3340                    tcg_gen_andi_i32(trap, trap, mask);
3341                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
3342                }
3343
3344                gen_helper_raise_exception(cpu_env, trap);
3345                tcg_temp_free_i32(trap);
3346
3347                if (cond == 8) {
3348                    /* An unconditional trap ends the TB.  */
3349                    dc->is_br = 1;
3350                    goto jmp_insn;
3351                } else {
3352                    /* A conditional trap falls through to the next insn.  */
3353                    gen_set_label(l1);
3354                    break;
3355                }
3356            } else if (xop == 0x28) {
3357                rs1 = GET_FIELD(insn, 13, 17);
3358                switch(rs1) {
3359                case 0: /* rdy */
3360#ifndef TARGET_SPARC64
3361                case 0x01 ... 0x0e: /* undefined in the SPARCv8
3362                                       manual, rdy on the microSPARC
3363                                       II */
3364                case 0x0f:          /* stbar in the SPARCv8 manual,
3365                                       rdy on the microSPARC II */
3366                case 0x10 ... 0x1f: /* implementation-dependent in the
3367                                       SPARCv8 manual, rdy on the
3368                                       microSPARC II */
3369                    /* Read Asr17 */
3370                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3371                        TCGv t = gen_dest_gpr(dc, rd);
3372                        /* Read Asr17 for a Leon3 monoprocessor */
3373                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3374                        gen_store_gpr(dc, rd, t);
3375                        break;
3376                    }
3377#endif
3378                    gen_store_gpr(dc, rd, cpu_y);
3379                    break;
3380#ifdef TARGET_SPARC64
3381                case 0x2: /* V9 rdccr */
3382                    update_psr(dc);
3383                    gen_helper_rdccr(cpu_dst, cpu_env);
3384                    gen_store_gpr(dc, rd, cpu_dst);
3385                    break;
3386                case 0x3: /* V9 rdasi */
3387                    tcg_gen_movi_tl(cpu_dst, dc->asi);
3388                    gen_store_gpr(dc, rd, cpu_dst);
3389                    break;
3390                case 0x4: /* V9 rdtick */
3391                    {
3392                        TCGv_ptr r_tickptr;
3393                        TCGv_i32 r_const;
3394
3395                        r_tickptr = tcg_temp_new_ptr();
3396                        r_const = tcg_const_i32(dc->mem_idx);
3397                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3398                                       offsetof(CPUSPARCState, tick));
3399                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3400                                                  r_const);
3401                        tcg_temp_free_ptr(r_tickptr);
3402                        tcg_temp_free_i32(r_const);
3403                        gen_store_gpr(dc, rd, cpu_dst);
3404                    }
3405                    break;
3406                case 0x5: /* V9 rdpc */
3407                    {
3408                        TCGv t = gen_dest_gpr(dc, rd);
3409                        if (unlikely(AM_CHECK(dc))) {
3410                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3411                        } else {
3412                            tcg_gen_movi_tl(t, dc->pc);
3413                        }
3414                        gen_store_gpr(dc, rd, t);
3415                    }
3416                    break;
3417                case 0x6: /* V9 rdfprs */
3418                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3419                    gen_store_gpr(dc, rd, cpu_dst);
3420                    break;
3421                case 0xf: /* V9 membar */
3422                    break; /* no effect */
3423                case 0x13: /* Graphics Status */
3424                    if (gen_trap_ifnofpu(dc)) {
3425                        goto jmp_insn;
3426                    }
3427                    gen_store_gpr(dc, rd, cpu_gsr);
3428                    break;
3429                case 0x16: /* Softint */
3430                    tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3431                                     offsetof(CPUSPARCState, softint));
3432                    gen_store_gpr(dc, rd, cpu_dst);
3433                    break;
3434                case 0x17: /* Tick compare */
3435                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
3436                    break;
3437                case 0x18: /* System tick */
3438                    {
3439                        TCGv_ptr r_tickptr;
3440                        TCGv_i32 r_const;
3441
3442                        r_tickptr = tcg_temp_new_ptr();
3443                        r_const = tcg_const_i32(dc->mem_idx);
3444                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3445                                       offsetof(CPUSPARCState, stick));
3446                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3447                                                  r_const);
3448                        tcg_temp_free_ptr(r_tickptr);
3449                        tcg_temp_free_i32(r_const);
3450                        gen_store_gpr(dc, rd, cpu_dst);
3451                    }
3452                    break;
3453                case 0x19: /* System tick compare */
3454                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
3455                    break;
3456                case 0x1a: /* UltraSPARC-T1 Strand status */
3457                    /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3458                     * this ASR as impl. dep
3459                     */
3460                    CHECK_IU_FEATURE(dc, HYPV);
3461                    {
3462                        TCGv t = gen_dest_gpr(dc, rd);
3463                        tcg_gen_movi_tl(t, 1UL);
3464                        gen_store_gpr(dc, rd, t);
3465                    }
3466                    break;
3467                case 0x10: /* Performance Control */
3468                case 0x11: /* Performance Instrumentation Counter */
3469                case 0x12: /* Dispatch Control */
3470                case 0x14: /* Softint set, WO */
3471                case 0x15: /* Softint clear, WO */
3472#endif
3473                default:
3474                    goto illegal_insn;
3475                }
3476#if !defined(CONFIG_USER_ONLY)
3477            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3478#ifndef TARGET_SPARC64
3479                if (!supervisor(dc)) {
3480                    goto priv_insn;
3481                }
3482                update_psr(dc);
3483                gen_helper_rdpsr(cpu_dst, cpu_env);
3484#else
3485                CHECK_IU_FEATURE(dc, HYPV);
3486                if (!hypervisor(dc))
3487                    goto priv_insn;
3488                rs1 = GET_FIELD(insn, 13, 17);
3489                switch (rs1) {
3490                case 0: // hpstate
3491                    tcg_gen_ld_i64(cpu_dst, cpu_env,
3492                                   offsetof(CPUSPARCState, hpstate));
3493                    break;
3494                case 1: // htstate
3495                    // gen_op_rdhtstate();
3496                    break;
3497                case 3: // hintp
3498                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3499                    break;
3500                case 5: // htba
3501                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
3502                    break;
3503                case 6: // hver
3504                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
3505                    break;
3506                case 31: // hstick_cmpr
3507                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3508                    break;
3509                default:
3510                    goto illegal_insn;
3511                }
3512#endif
3513                gen_store_gpr(dc, rd, cpu_dst);
3514                break;
3515            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3516                if (!supervisor(dc)) {
3517                    goto priv_insn;
3518                }
3519                cpu_tmp0 = get_temp_tl(dc);
3520#ifdef TARGET_SPARC64
3521                rs1 = GET_FIELD(insn, 13, 17);
3522                switch (rs1) {
3523                case 0: // tpc
3524                    {
3525                        TCGv_ptr r_tsptr;
3526
3527                        r_tsptr = tcg_temp_new_ptr();
3528                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3529                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3530                                      offsetof(trap_state, tpc));
3531                        tcg_temp_free_ptr(r_tsptr);
3532                    }
3533                    break;
3534                case 1: // tnpc
3535                    {
3536                        TCGv_ptr r_tsptr;
3537
3538                        r_tsptr = tcg_temp_new_ptr();
3539                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3540                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3541                                      offsetof(trap_state, tnpc));
3542                        tcg_temp_free_ptr(r_tsptr);
3543                    }
3544                    break;
3545                case 2: // tstate
3546                    {
3547                        TCGv_ptr r_tsptr;
3548
3549                        r_tsptr = tcg_temp_new_ptr();
3550                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3551                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3552                                      offsetof(trap_state, tstate));
3553                        tcg_temp_free_ptr(r_tsptr);
3554                    }
3555                    break;
3556                case 3: // tt
3557                    {
3558                        TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3559
3560                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3561                        tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3562                                         offsetof(trap_state, tt));
3563                        tcg_temp_free_ptr(r_tsptr);
3564                    }
3565                    break;
3566                case 4: // tick
3567                    {
3568                        TCGv_ptr r_tickptr;
3569                        TCGv_i32 r_const;
3570
3571                        r_tickptr = tcg_temp_new_ptr();
3572                        r_const = tcg_const_i32(dc->mem_idx);
3573                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3574                                       offsetof(CPUSPARCState, tick));
3575                        gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3576                                                  r_tickptr, r_const);
3577                        tcg_temp_free_ptr(r_tickptr);
3578                        tcg_temp_free_i32(r_const);
3579                    }
3580                    break;
3581                case 5: // tba
3582                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3583                    break;
3584                case 6: // pstate
3585                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3586                                     offsetof(CPUSPARCState, pstate));
3587                    break;
3588                case 7: // tl
3589                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3590                                     offsetof(CPUSPARCState, tl));
3591                    break;
3592                case 8: // pil
3593                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3594                                     offsetof(CPUSPARCState, psrpil));
3595                    break;
3596                case 9: // cwp
3597                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
3598                    break;
3599                case 10: // cansave
3600                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3601                                     offsetof(CPUSPARCState, cansave));
3602                    break;
3603                case 11: // canrestore
3604                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3605                                     offsetof(CPUSPARCState, canrestore));
3606                    break;
3607                case 12: // cleanwin
3608                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3609                                     offsetof(CPUSPARCState, cleanwin));
3610                    break;
3611                case 13: // otherwin
3612                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3613                                     offsetof(CPUSPARCState, otherwin));
3614                    break;
3615                case 14: // wstate
3616                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3617                                     offsetof(CPUSPARCState, wstate));
3618                    break;
3619                case 16: // UA2005 gl
3620                    CHECK_IU_FEATURE(dc, GL);
3621                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3622                                     offsetof(CPUSPARCState, gl));
3623                    break;
3624                case 26: // UA2005 strand status
3625                    CHECK_IU_FEATURE(dc, HYPV);
3626                    if (!hypervisor(dc))
3627                        goto priv_insn;
3628                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3629                    break;
3630                case 31: // ver
3631                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3632                    break;
3633                case 15: // fq
3634                default:
3635                    goto illegal_insn;
3636                }
3637#else
3638                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3639#endif
3640                gen_store_gpr(dc, rd, cpu_tmp0);
3641                break;
3642            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3643#ifdef TARGET_SPARC64
3644                gen_helper_flushw(cpu_env);
3645#else
3646                if (!supervisor(dc))
3647                    goto priv_insn;
3648                gen_store_gpr(dc, rd, cpu_tbr);
3649#endif
3650                break;
3651#endif
3652            } else if (xop == 0x34) {   /* FPU Operations */
3653                if (gen_trap_ifnofpu(dc)) {
3654                    goto jmp_insn;
3655                }
3656                gen_op_clear_ieee_excp_and_FTT();
3657                rs1 = GET_FIELD(insn, 13, 17);
3658                rs2 = GET_FIELD(insn, 27, 31);
3659                xop = GET_FIELD(insn, 18, 26);
3660
3661                switch (xop) {
3662                case 0x1: /* fmovs */
3663                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3664                    gen_store_fpr_F(dc, rd, cpu_src1_32);
3665                    break;
3666                case 0x5: /* fnegs */
3667                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3668                    break;
3669                case 0x9: /* fabss */
3670                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3671                    break;
3672                case 0x29: /* fsqrts */
3673                    CHECK_FPU_FEATURE(dc, FSQRT);
3674                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3675                    break;
3676                case 0x2a: /* fsqrtd */
3677                    CHECK_FPU_FEATURE(dc, FSQRT);
3678                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3679                    break;
3680                case 0x2b: /* fsqrtq */
3681                    CHECK_FPU_FEATURE(dc, FLOAT128);
3682                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3683                    break;
3684                case 0x41: /* fadds */
3685                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3686                    break;
3687                case 0x42: /* faddd */
3688                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3689                    break;
3690                case 0x43: /* faddq */
3691                    CHECK_FPU_FEATURE(dc, FLOAT128);
3692                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3693                    break;
3694                case 0x45: /* fsubs */
3695                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3696                    break;
3697                case 0x46: /* fsubd */
3698                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3699                    break;
3700                case 0x47: /* fsubq */
3701                    CHECK_FPU_FEATURE(dc, FLOAT128);
3702                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3703                    break;
3704                case 0x49: /* fmuls */
3705                    CHECK_FPU_FEATURE(dc, FMUL);
3706                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3707                    break;
3708                case 0x4a: /* fmuld */
3709                    CHECK_FPU_FEATURE(dc, FMUL);
3710                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3711                    break;
3712                case 0x4b: /* fmulq */
3713                    CHECK_FPU_FEATURE(dc, FLOAT128);
3714                    CHECK_FPU_FEATURE(dc, FMUL);
3715                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3716                    break;
3717                case 0x4d: /* fdivs */
3718                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3719                    break;
3720                case 0x4e: /* fdivd */
3721                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3722                    break;
3723                case 0x4f: /* fdivq */
3724                    CHECK_FPU_FEATURE(dc, FLOAT128);
3725                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3726                    break;
3727                case 0x69: /* fsmuld */
3728                    CHECK_FPU_FEATURE(dc, FSMULD);
3729                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3730                    break;
3731                case 0x6e: /* fdmulq */
3732                    CHECK_FPU_FEATURE(dc, FLOAT128);
3733                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3734                    break;
3735                case 0xc4: /* fitos */
3736                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3737                    break;
3738                case 0xc6: /* fdtos */
3739                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3740                    break;
3741                case 0xc7: /* fqtos */
3742                    CHECK_FPU_FEATURE(dc, FLOAT128);
3743                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3744                    break;
3745                case 0xc8: /* fitod */
3746                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3747                    break;
3748                case 0xc9: /* fstod */
3749                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3750                    break;
3751                case 0xcb: /* fqtod */
3752                    CHECK_FPU_FEATURE(dc, FLOAT128);
3753                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3754                    break;
3755                case 0xcc: /* fitoq */
3756                    CHECK_FPU_FEATURE(dc, FLOAT128);
3757                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3758                    break;
3759                case 0xcd: /* fstoq */
3760                    CHECK_FPU_FEATURE(dc, FLOAT128);
3761                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3762                    break;
3763                case 0xce: /* fdtoq */
3764                    CHECK_FPU_FEATURE(dc, FLOAT128);
3765                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3766                    break;
3767                case 0xd1: /* fstoi */
3768                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3769                    break;
3770                case 0xd2: /* fdtoi */
3771                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3772                    break;
3773                case 0xd3: /* fqtoi */
3774                    CHECK_FPU_FEATURE(dc, FLOAT128);
3775                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3776                    break;
3777#ifdef TARGET_SPARC64
3778                case 0x2: /* V9 fmovd */
3779                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3780                    gen_store_fpr_D(dc, rd, cpu_src1_64);
3781                    break;
3782                case 0x3: /* V9 fmovq */
3783                    CHECK_FPU_FEATURE(dc, FLOAT128);
3784                    gen_move_Q(dc, rd, rs2);
3785                    break;
3786                case 0x6: /* V9 fnegd */
3787                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3788                    break;
3789                case 0x7: /* V9 fnegq */
3790                    CHECK_FPU_FEATURE(dc, FLOAT128);
3791                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3792                    break;
3793                case 0xa: /* V9 fabsd */
3794                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3795                    break;
3796                case 0xb: /* V9 fabsq */
3797                    CHECK_FPU_FEATURE(dc, FLOAT128);
3798                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3799                    break;
3800                case 0x81: /* V9 fstox */
3801                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3802                    break;
3803                case 0x82: /* V9 fdtox */
3804                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3805                    break;
3806                case 0x83: /* V9 fqtox */
3807                    CHECK_FPU_FEATURE(dc, FLOAT128);
3808                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3809                    break;
3810                case 0x84: /* V9 fxtos */
3811                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3812                    break;
3813                case 0x88: /* V9 fxtod */
3814                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3815                    break;
3816                case 0x8c: /* V9 fxtoq */
3817                    CHECK_FPU_FEATURE(dc, FLOAT128);
3818                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3819                    break;
3820#endif
3821                default:
3822                    goto illegal_insn;
3823                }
3824            } else if (xop == 0x35) {   /* FPU Operations */
3825#ifdef TARGET_SPARC64
3826                int cond;
3827#endif
3828                if (gen_trap_ifnofpu(dc)) {
3829                    goto jmp_insn;
3830                }
3831                gen_op_clear_ieee_excp_and_FTT();
3832                rs1 = GET_FIELD(insn, 13, 17);
3833                rs2 = GET_FIELD(insn, 27, 31);
3834                xop = GET_FIELD(insn, 18, 26);
3835
3836#ifdef TARGET_SPARC64
                /* FMOVR: conditionally move an FP register (single/double/
                   quad) based on comparing integer register rs1 against the
                   3-bit register condition in insn bits 10..12.  */
3837#define FMOVR(sz)                                                  \
3838                do {                                               \
3839                    DisasCompare cmp;                              \
3840                    cond = GET_FIELD_SP(insn, 10, 12);             \
3841                    cpu_src1 = get_src1(dc, insn);                 \
3842                    gen_compare_reg(&cmp, cond, cpu_src1);         \
3843                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
3844                    free_compare(&cmp);                            \
3845                } while (0)
3846
3847                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3848                    FMOVR(s);
3849                    break;
3850                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3851                    FMOVR(d);
3852                    break;
3853                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3854                    CHECK_FPU_FEATURE(dc, FLOAT128);
3855                    FMOVR(q);
3856                    break;
3857                }
3858#undef FMOVR
3859#endif
3860                switch (xop) {
3861#ifdef TARGET_SPARC64
                    /* First FMOVCC flavor: move on a floating-point
                       condition-code field (%fcc0..%fcc3); the 4-bit FP
                       condition comes from insn bits 14..17.  */
3862#define FMOVCC(fcc, sz)                                                 \
3863                    do {                                                \
3864                        DisasCompare cmp;                               \
3865                        cond = GET_FIELD_SP(insn, 14, 17);              \
3866                        gen_fcompare(&cmp, fcc, cond);                  \
3867                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
3868                        free_compare(&cmp);                             \
3869                    } while (0)
3870
3871                    case 0x001: /* V9 fmovscc %fcc0 */
3872                        FMOVCC(0, s);
3873                        break;
3874                    case 0x002: /* V9 fmovdcc %fcc0 */
3875                        FMOVCC(0, d);
3876                        break;
3877                    case 0x003: /* V9 fmovqcc %fcc0 */
3878                        CHECK_FPU_FEATURE(dc, FLOAT128);
3879                        FMOVCC(0, q);
3880                        break;
3881                    case 0x041: /* V9 fmovscc %fcc1 */
3882                        FMOVCC(1, s);
3883                        break;
3884                    case 0x042: /* V9 fmovdcc %fcc1 */
3885                        FMOVCC(1, d);
3886                        break;
3887                    case 0x043: /* V9 fmovqcc %fcc1 */
3888                        CHECK_FPU_FEATURE(dc, FLOAT128);
3889                        FMOVCC(1, q);
3890                        break;
3891                    case 0x081: /* V9 fmovscc %fcc2 */
3892                        FMOVCC(2, s);
3893                        break;
3894                    case 0x082: /* V9 fmovdcc %fcc2 */
3895                        FMOVCC(2, d);
3896                        break;
3897                    case 0x083: /* V9 fmovqcc %fcc2 */
3898                        CHECK_FPU_FEATURE(dc, FLOAT128);
3899                        FMOVCC(2, q);
3900                        break;
3901                    case 0x0c1: /* V9 fmovscc %fcc3 */
3902                        FMOVCC(3, s);
3903                        break;
3904                    case 0x0c2: /* V9 fmovdcc %fcc3 */
3905                        FMOVCC(3, d);
3906                        break;
3907                    case 0x0c3: /* V9 fmovqcc %fcc3 */
3908                        CHECK_FPU_FEATURE(dc, FLOAT128);
3909                        FMOVCC(3, q);
3910                        break;
3911#undef FMOVCC
                    /* Second FMOVCC flavor: same shape, but conditioned on
                       the integer condition codes (xcc = 0 -> %icc,
                       xcc = 1 -> %xcc).  */
3912#define FMOVCC(xcc, sz)                                                 \
3913                    do {                                                \
3914                        DisasCompare cmp;                               \
3915                        cond = GET_FIELD_SP(insn, 14, 17);              \
3916                        gen_compare(&cmp, xcc, cond, dc);               \
3917                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
3918                        free_compare(&cmp);                             \
3919                    } while (0)
3920
3921                    case 0x101: /* V9 fmovscc %icc */
3922                        FMOVCC(0, s);
3923                        break;
3924                    case 0x102: /* V9 fmovdcc %icc */
3925                        FMOVCC(0, d);
3926                        break;
3927                    case 0x103: /* V9 fmovqcc %icc */
3928                        CHECK_FPU_FEATURE(dc, FLOAT128);
3929                        FMOVCC(0, q);
3930                        break;
3931                    case 0x181: /* V9 fmovscc %xcc */
3932                        FMOVCC(1, s);
3933                        break;
3934                    case 0x182: /* V9 fmovdcc %xcc */
3935                        FMOVCC(1, d);
3936                        break;
3937                    case 0x183: /* V9 fmovqcc %xcc */
3938                        CHECK_FPU_FEATURE(dc, FLOAT128);
3939                        FMOVCC(1, q);
3940                        break;
3941#undef FMOVCC
3942#endif
                    /* FP compares: on V9 the target %fcc field is selected
                       by the low two bits of rd.  The 0x55..0x57 forms are
                       the "compare and exception if unordered" variants.  */
3943                    case 0x51: /* fcmps, V9 %fcc */
3944                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3945                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3946                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3947                        break;
3948                    case 0x52: /* fcmpd, V9 %fcc */
3949                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3950                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3951                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3952                        break;
3953                    case 0x53: /* fcmpq, V9 %fcc */
3954                        CHECK_FPU_FEATURE(dc, FLOAT128);
3955                        gen_op_load_fpr_QT0(QFPREG(rs1));
3956                        gen_op_load_fpr_QT1(QFPREG(rs2));
3957                        gen_op_fcmpq(rd & 3);
3958                        break;
3959                    case 0x55: /* fcmpes, V9 %fcc */
3960                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3961                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3962                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3963                        break;
3964                    case 0x56: /* fcmped, V9 %fcc */
3965                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3966                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3967                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3968                        break;
3969                    case 0x57: /* fcmpeq, V9 %fcc */
3970                        CHECK_FPU_FEATURE(dc, FLOAT128);
3971                        gen_op_load_fpr_QT0(QFPREG(rs1));
3972                        gen_op_load_fpr_QT1(QFPREG(rs2));
3973                        gen_op_fcmpeq(rd & 3);
3974                        break;
3975                    default:
3976                        goto illegal_insn;
3977                }
            /* xop == 0x2: the "or" instruction, with special-cased
               shortcuts when either operand is %g0 (always zero), so that
               clr/mov pseudo-instructions avoid a redundant TCG or.  */
3978            } else if (xop == 0x2) {
3979                TCGv dst = gen_dest_gpr(dc, rd);
3980                rs1 = GET_FIELD(insn, 13, 17);
3981                if (rs1 == 0) {
3982                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3983                    if (IS_IMM) {       /* immediate */
3984                        simm = GET_FIELDs(insn, 19, 31);
3985                        tcg_gen_movi_tl(dst, simm);
3986                        gen_store_gpr(dc, rd, dst);
3987                    } else {            /* register */
3988                        rs2 = GET_FIELD(insn, 27, 31);
3989                        if (rs2 == 0) {
3990                            tcg_gen_movi_tl(dst, 0);
3991                            gen_store_gpr(dc, rd, dst);
3992                        } else {
3993                            cpu_src2 = gen_load_gpr(dc, rs2);
3994                            gen_store_gpr(dc, rd, cpu_src2);
3995                        }
3996                    }
3997                } else {
3998                    cpu_src1 = get_src1(dc, insn);
3999                    if (IS_IMM) {       /* immediate */
4000                        simm = GET_FIELDs(insn, 19, 31);
4001                        tcg_gen_ori_tl(dst, cpu_src1, simm);
4002                        gen_store_gpr(dc, rd, dst);
4003                    } else {            /* register */
4004                        rs2 = GET_FIELD(insn, 27, 31);
4005                        if (rs2 == 0) {
4006                            /* mov shortcut:  or x, %g0, y -> mov x, y */
4007                            gen_store_gpr(dc, rd, cpu_src1);
4008                        } else {
4009                            cpu_src2 = gen_load_gpr(dc, rs2);
4010                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
4011                            gen_store_gpr(dc, rd, dst);
4012                        }
4013                    }
4014                }
4015#ifdef TARGET_SPARC64
            /* V9 shifts: insn bit 12 selects the 64-bit form (sllx/srlx/
               srax, 6-bit shift count) versus the 32-bit form (5-bit
               count).  The 32-bit srl/sra forms first narrow the source
               (zero- or sign-extend from bit 31) before shifting.  */
4016            } else if (xop == 0x25) { /* sll, V9 sllx */
4017                cpu_src1 = get_src1(dc, insn);
4018                if (IS_IMM) {   /* immediate */
4019                    simm = GET_FIELDs(insn, 20, 31);
4020                    if (insn & (1 << 12)) {
4021                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
4022                    } else {
4023                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
4024                    }
4025                } else {                /* register */
4026                    rs2 = GET_FIELD(insn, 27, 31);
4027                    cpu_src2 = gen_load_gpr(dc, rs2);
4028                    cpu_tmp0 = get_temp_tl(dc);
4029                    if (insn & (1 << 12)) {
4030                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4031                    } else {
4032                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4033                    }
4034                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
4035                }
4036                gen_store_gpr(dc, rd, cpu_dst);
4037            } else if (xop == 0x26) { /* srl, V9 srlx */
4038                cpu_src1 = get_src1(dc, insn);
4039                if (IS_IMM) {   /* immediate */
4040                    simm = GET_FIELDs(insn, 20, 31);
4041                    if (insn & (1 << 12)) {
4042                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
4043                    } else {
4044                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4045                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
4046                    }
4047                } else {                /* register */
4048                    rs2 = GET_FIELD(insn, 27, 31);
4049                    cpu_src2 = gen_load_gpr(dc, rs2);
4050                    cpu_tmp0 = get_temp_tl(dc);
4051                    if (insn & (1 << 12)) {
4052                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4053                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
4054                    } else {
4055                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4056                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4057                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
4058                    }
4059                }
4060                gen_store_gpr(dc, rd, cpu_dst);
4061            } else if (xop == 0x27) { /* sra, V9 srax */
4062                cpu_src1 = get_src1(dc, insn);
4063                if (IS_IMM) {   /* immediate */
4064                    simm = GET_FIELDs(insn, 20, 31);
4065                    if (insn & (1 << 12)) {
4066                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
4067                    } else {
4068                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4069                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
4070                    }
4071                } else {                /* register */
4072                    rs2 = GET_FIELD(insn, 27, 31);
4073                    cpu_src2 = gen_load_gpr(dc, rs2);
4074                    cpu_tmp0 = get_temp_tl(dc);
4075                    if (insn & (1 << 12)) {
4076                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4077                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
4078                    } else {
4079                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4080                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4081                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
4082                    }
4083                }
4084                gen_store_gpr(dc, rd, cpu_dst);
4085#endif
            /* Integer ALU group.  For xop < 0x20, bit 4 of xop (xop & 0x10)
               selects the condition-code-setting variant (addcc, andcc, ...);
               the decode below therefore switches on xop & ~0x10.  When the
               result alone determines the flags (logic ops, umul/smul) the
               translator records CC_OP_LOGIC and stashes the result in
               cpu_cc_dst for lazy flag evaluation.  */
4086            } else if (xop < 0x36) {
4087                if (xop < 0x20) {
4088                    cpu_src1 = get_src1(dc, insn);
4089                    cpu_src2 = get_src2(dc, insn);
4090                    switch (xop & ~0x10) {
4091                    case 0x0: /* add */
4092                        if (xop & 0x10) {
4093                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4094                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4095                            dc->cc_op = CC_OP_ADD;
4096                        } else {
4097                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4098                        }
4099                        break;
4100                    case 0x1: /* and */
4101                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
4102                        if (xop & 0x10) {
4103                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4104                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4105                            dc->cc_op = CC_OP_LOGIC;
4106                        }
4107                        break;
4108                    case 0x2: /* or */
4109                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4110                        if (xop & 0x10) {
4111                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4112                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4113                            dc->cc_op = CC_OP_LOGIC;
4114                        }
4115                        break;
4116                    case 0x3: /* xor */
4117                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4118                        if (xop & 0x10) {
4119                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4120                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4121                            dc->cc_op = CC_OP_LOGIC;
4122                        }
4123                        break;
4124                    case 0x4: /* sub */
4125                        if (xop & 0x10) {
4126                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4127                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4128                            dc->cc_op = CC_OP_SUB;
4129                        } else {
4130                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4131                        }
4132                        break;
4133                    case 0x5: /* andn */
4134                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4135                        if (xop & 0x10) {
4136                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4137                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4138                            dc->cc_op = CC_OP_LOGIC;
4139                        }
4140                        break;
4141                    case 0x6: /* orn */
4142                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4143                        if (xop & 0x10) {
4144                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4145                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4146                            dc->cc_op = CC_OP_LOGIC;
4147                        }
4148                        break;
4149                    case 0x7: /* xorn */
4150                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4151                        if (xop & 0x10) {
4152                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4153                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4154                            dc->cc_op = CC_OP_LOGIC;
4155                        }
4156                        break;
4157                    case 0x8: /* addx, V9 addc */
4158                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4159                                        (xop & 0x10));
4160                        break;
4161#ifdef TARGET_SPARC64
4162                    case 0x9: /* V9 mulx */
4163                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4164                        break;
4165#endif
4166                    case 0xa: /* umul */
4167                        CHECK_IU_FEATURE(dc, MUL);
4168                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4169                        if (xop & 0x10) {
4170                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4171                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4172                            dc->cc_op = CC_OP_LOGIC;
4173                        }
4174                        break;
4175                    case 0xb: /* smul */
4176                        CHECK_IU_FEATURE(dc, MUL);
4177                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4178                        if (xop & 0x10) {
4179                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4180                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4181                            dc->cc_op = CC_OP_LOGIC;
4182                        }
4183                        break;
4184                    case 0xc: /* subx, V9 subc */
4185                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4186                                        (xop & 0x10));
4187                        break;
4188#ifdef TARGET_SPARC64
4189                    case 0xd: /* V9 udivx */
4190                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4191                        break;
4192#endif
                    /* NOTE(review): the div-with-cc paths below update only
                       dc->cc_op, with no tcg_gen_movi_i32(cpu_cc_op, ...)
                       as in the other cc cases — presumably the *_cc
                       helpers write the cc state (including cc_op) into env
                       themselves; confirm against the helper
                       implementations before changing this.  */
4193                    case 0xe: /* udiv */
4194                        CHECK_IU_FEATURE(dc, DIV);
4195                        if (xop & 0x10) {
4196                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4197                                               cpu_src2);
4198                            dc->cc_op = CC_OP_DIV;
4199                        } else {
4200                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4201                                            cpu_src2);
4202                        }
4203                        break;
4204                    case 0xf: /* sdiv */
4205                        CHECK_IU_FEATURE(dc, DIV);
4206                        if (xop & 0x10) {
4207                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4208                                               cpu_src2);
4209                            dc->cc_op = CC_OP_DIV;
4210                        } else {
4211                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4212                                            cpu_src2);
4213                        }
4214                        break;
4215                    default:
4216                        goto illegal_insn;
4217                    }
4218                    gen_store_gpr(dc, rd, cpu_dst);
                /* xop in 0x20..0x35: tagged arithmetic, mulscc and the
                   32-bit shift forms (the 64-bit shift forms were decoded
                   earlier under TARGET_SPARC64).  */
4219                } else {
4220                    cpu_src1 = get_src1(dc, insn);
4221                    cpu_src2 = get_src2(dc, insn);
4222                    switch (xop) {
4223                    case 0x20: /* taddcc */
4224                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4225                        gen_store_gpr(dc, rd, cpu_dst);
4226                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4227                        dc->cc_op = CC_OP_TADD;
4228                        break;
4229                    case 0x21: /* tsubcc */
4230                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4231                        gen_store_gpr(dc, rd, cpu_dst);
4232                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4233                        dc->cc_op = CC_OP_TSUB;
4234                        break;
                    /* NOTE(review): for taddcctv/tsubcctv only dc->cc_op is
                       updated here (no cpu_cc_op move) — presumably the
                       trap-on-overflow helpers maintain the env cc state;
                       verify in the helper code.  */
4235                    case 0x22: /* taddcctv */
4236                        gen_helper_taddcctv(cpu_dst, cpu_env,
4237                                            cpu_src1, cpu_src2);
4238                        gen_store_gpr(dc, rd, cpu_dst);
4239                        dc->cc_op = CC_OP_TADDTV;
4240                        break;
4241                    case 0x23: /* tsubcctv */
4242                        gen_helper_tsubcctv(cpu_dst, cpu_env,
4243                                            cpu_src1, cpu_src2);
4244                        gen_store_gpr(dc, rd, cpu_dst);
4245                        dc->cc_op = CC_OP_TSUBTV;
4246                        break;
4247                    case 0x24: /* mulscc */
4248                        update_psr(dc);
4249                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4250                        gen_store_gpr(dc, rd, cpu_dst);
4251                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4252                        dc->cc_op = CC_OP_ADD;
4253                        break;
4254#ifndef TARGET_SPARC64
                    /* Pre-V9 32-bit shifts: shift count is always taken
                       modulo 32.  */
4255                    case 0x25:  /* sll */
4256                        if (IS_IMM) { /* immediate */
4257                            simm = GET_FIELDs(insn, 20, 31);
4258                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4259                        } else { /* register */
4260                            cpu_tmp0 = get_temp_tl(dc);
4261                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4262                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4263                        }
4264                        gen_store_gpr(dc, rd, cpu_dst);
4265                        break;
4266                    case 0x26:  /* srl */
4267                        if (IS_IMM) { /* immediate */
4268                            simm = GET_FIELDs(insn, 20, 31);
4269                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4270                        } else { /* register */
4271                            cpu_tmp0 = get_temp_tl(dc);
4272                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4273                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4274                        }
4275                        gen_store_gpr(dc, rd, cpu_dst);
4276                        break;
4277                    case 0x27:  /* sra */
4278                        if (IS_IMM) { /* immediate */
4279                            simm = GET_FIELDs(insn, 20, 31);
4280                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4281                        } else { /* register */
4282                            cpu_tmp0 = get_temp_tl(dc);
4283                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4284                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4285                        }
4286                        gen_store_gpr(dc, rd, cpu_dst);
4287                        break;
4288#endif
                    /* xop 0x30: WRASR / WRY — rd selects the ancillary
                       state register.  The architectural value written is
                       always rs1 XOR rs2/imm (the SPARC wr semantics),
                       hence the tcg_gen_xor_tl in every case.  */
4289                    case 0x30:
4290                        {
4291                            cpu_tmp0 = get_temp_tl(dc);
4292                            switch(rd) {
4293                            case 0: /* wry */
4294                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                /* %y is only 32 bits architecturally */
4295                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4296                                break;
4297#ifndef TARGET_SPARC64
4298                            case 0x01 ... 0x0f: /* undefined in the
4299                                                   SPARCv8 manual, nop
4300                                                   on the microSPARC
4301                                                   II */
4302                            case 0x10 ... 0x1f: /* implementation-dependent
4303                                                   in the SPARCv8
4304                                                   manual, nop on the
4305                                                   microSPARC II */
4306                                if ((rd == 0x13) && (dc->def->features &
4307                                                     CPU_FEATURE_POWERDOWN)) {
4308                                    /* LEON3 power-down */
4309                                    save_state(dc);
4310                                    gen_helper_power_down(cpu_env);
4311                                }
4312                                break;
4313#else
4314                            case 0x2: /* V9 wrccr */
4315                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4316                                gen_helper_wrccr(cpu_env, cpu_tmp0);
4317                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4318                                dc->cc_op = CC_OP_FLAGS;
4319                                break;
4320                            case 0x3: /* V9 wrasi */
4321                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4322                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4323                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4324                                                offsetof(CPUSPARCState, asi));
4325                                /* End TB to notice changed ASI.  */
4326                                save_state(dc);
4327                                gen_op_next_insn();
4328                                tcg_gen_exit_tb(0);
4329                                dc->is_br = 1;
4330                                break;
4331                            case 0x6: /* V9 wrfprs */
4332                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4333                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                /* End TB: translation of later insns may
                                   depend on the new FPRS value.  */
4334                                dc->fprs_dirty = 0;
4335                                save_state(dc);
4336                                gen_op_next_insn();
4337                                tcg_gen_exit_tb(0);
4338                                dc->is_br = 1;
4339                                break;
4340                            case 0xf: /* V9 sir, nop if user */
4341#if !defined(CONFIG_USER_ONLY)
4342                                if (supervisor(dc)) {
4343                                    ; // XXX
4344                                }
4345#endif
4346                                break;
4347                            case 0x13: /* Graphics Status */
4348                                if (gen_trap_ifnofpu(dc)) {
4349                                    goto jmp_insn;
4350                                }
4351                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4352                                break;
4353                            case 0x14: /* Softint set */
4354                                if (!supervisor(dc))
4355                                    goto illegal_insn;
4356                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4357                                gen_helper_set_softint(cpu_env, cpu_tmp0);
4358                                break;
4359                            case 0x15: /* Softint clear */
4360                                if (!supervisor(dc))
4361                                    goto illegal_insn;
4362                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4363                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
4364                                break;
4365                            case 0x16: /* Softint write */
4366                                if (!supervisor(dc))
4367                                    goto illegal_insn;
4368                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4369                                gen_helper_write_softint(cpu_env, cpu_tmp0);
4370                                break;
4371                            case 0x17: /* Tick compare */
4372#if !defined(CONFIG_USER_ONLY)
4373                                if (!supervisor(dc))
4374                                    goto illegal_insn;
4375#endif
4376                                {
4377                                    TCGv_ptr r_tickptr;
4378
4379                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4380                                                   cpu_src2);
4381                                    r_tickptr = tcg_temp_new_ptr();
4382                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4383                                                   offsetof(CPUSPARCState, tick));
4384                                    gen_helper_tick_set_limit(r_tickptr,
4385                                                              cpu_tick_cmpr);
4386                                    tcg_temp_free_ptr(r_tickptr);
4387                                }
4388                                break;
4389                            case 0x18: /* System tick */
4390#if !defined(CONFIG_USER_ONLY)
4391                                if (!supervisor(dc))
4392                                    goto illegal_insn;
4393#endif
4394                                {
4395                                    TCGv_ptr r_tickptr;
4396
4397                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4398                                                   cpu_src2);
4399                                    r_tickptr = tcg_temp_new_ptr();
4400                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4401                                                   offsetof(CPUSPARCState, stick));
4402                                    gen_helper_tick_set_count(r_tickptr,
4403                                                              cpu_tmp0);
4404                                    tcg_temp_free_ptr(r_tickptr);
4405                                }
4406                                break;
4407                            case 0x19: /* System tick compare */
4408#if !defined(CONFIG_USER_ONLY)
4409                                if (!supervisor(dc))
4410                                    goto illegal_insn;
4411#endif
4412                                {
4413                                    TCGv_ptr r_tickptr;
4414
4415                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4416                                                   cpu_src2);
4417                                    r_tickptr = tcg_temp_new_ptr();
4418                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4419                                                   offsetof(CPUSPARCState, stick));
4420                                    gen_helper_tick_set_limit(r_tickptr,
4421                                                              cpu_stick_cmpr);
4422                                    tcg_temp_free_ptr(r_tickptr);
4423                                }
4424                                break;
4425
4426                            case 0x10: /* Performance Control */
4427                            case 0x11: /* Performance Instrumentation
4428                                          Counter */
4429                            case 0x12: /* Dispatch Control */
4430#endif
4431                            default:
4432                                goto illegal_insn;
4433                            }
4434                        }
4435                        break;
4436#if !defined(CONFIG_USER_ONLY)
4437                    case 0x31: /* wrpsr, V9 saved, restored */
4438                        {
4439                            if (!supervisor(dc))
4440                                goto priv_insn;
4441#ifdef TARGET_SPARC64
                            /* On V9 this opcode instead encodes the
                               register-window bookkeeping instructions
                               SAVED/RESTORED, selected by the rd field. */
4442                            switch (rd) {
4443                            case 0:
4444                                gen_helper_saved(cpu_env);
4445                                break;
4446                            case 1:
4447                                gen_helper_restored(cpu_env);
4448                                break;
4449                            case 2: /* UA2005 allclean */
4450                            case 3: /* UA2005 otherw */
4451                            case 4: /* UA2005 normalw */
4452                            case 5: /* UA2005 invalw */
4453                                // XXX
4454                            default:
4455                                goto illegal_insn;
4456                            }
4457#else
                            /* V8 wrpsr: new PSR = rs1 ^ (rs2 or simm13).
                               PSR holds ICC and CWP, so force the lazy
                               condition-code state to CC_OP_FLAGS and end
                               the TB so translation resumes with the
                               updated CPU state. */
4458                            cpu_tmp0 = get_temp_tl(dc);
4459                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4460                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
4461                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4462                            dc->cc_op = CC_OP_FLAGS;
4463                            save_state(dc);
4464                            gen_op_next_insn();
4465                            tcg_gen_exit_tb(0);
4466                            dc->is_br = 1;
4467#endif
4468                        }
4469                        break;
4470                    case 0x32: /* wrwim, V9 wrpr */
                        /* Privileged-register write: value = rs1 ^ (rs2 or
                           simm13) per the SPARC WR semantics.  On V9 the rd
                           field selects which privileged register (wrpr);
                           on V8 this is wrwim. */
4471                        {
4472                            if (!supervisor(dc))
4473                                goto priv_insn;
4474                            cpu_tmp0 = get_temp_tl(dc);
4475                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4476#ifdef TARGET_SPARC64
4477                            switch (rd) {
                            /* tpc/tnpc/tstate/tt live in the trap_state
                               entry for the current trap level; the helper
                               computes its address into r_tsptr. */
4478                            case 0: // tpc
4479                                {
4480                                    TCGv_ptr r_tsptr;
4481
4482                                    r_tsptr = tcg_temp_new_ptr();
4483                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4484                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4485                                                  offsetof(trap_state, tpc));
4486                                    tcg_temp_free_ptr(r_tsptr);
4487                                }
4488                                break;
4489                            case 1: // tnpc
4490                                {
4491                                    TCGv_ptr r_tsptr;
4492
4493                                    r_tsptr = tcg_temp_new_ptr();
4494                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4495                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4496                                                  offsetof(trap_state, tnpc));
4497                                    tcg_temp_free_ptr(r_tsptr);
4498                                }
4499                                break;
4500                            case 2: // tstate
4501                                {
4502                                    TCGv_ptr r_tsptr;
4503
4504                                    r_tsptr = tcg_temp_new_ptr();
4505                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4506                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4507                                                  offsetof(trap_state,
4508                                                           tstate));
4509                                    tcg_temp_free_ptr(r_tsptr);
4510                                }
4511                                break;
4512                            case 3: // tt
4513                                {
4514                                    TCGv_ptr r_tsptr;
4515
4516                                    r_tsptr = tcg_temp_new_ptr();
4517                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is a 32-bit field: truncating
                                       store. */
4518                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4519                                                    offsetof(trap_state, tt));
4520                                    tcg_temp_free_ptr(r_tsptr);
4521                                }
4522                                break;
4523                            case 4: // tick
4524                                {
4525                                    TCGv_ptr r_tickptr;
4526
                                    /* Reset the tick timer's current count
                                       via its helper rather than a raw
                                       store, so the timer state stays
                                       consistent. */
4527                                    r_tickptr = tcg_temp_new_ptr();
4528                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4529                                                   offsetof(CPUSPARCState, tick));
4530                                    gen_helper_tick_set_count(r_tickptr,
4531                                                              cpu_tmp0);
4532                                    tcg_temp_free_ptr(r_tickptr);
4533                                }
4534                                break;
4535                            case 5: // tba
4536                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4537                                break;
                            /* pstate/tl writes can change state that the
                               translator depends on; save state and mark
                               npc dynamic so this TB ends here. */
4538                            case 6: // pstate
4539                                save_state(dc);
4540                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
4541                                dc->npc = DYNAMIC_PC;
4542                                break;
4543                            case 7: // tl
4544                                save_state(dc);
4545                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4546                                               offsetof(CPUSPARCState, tl));
4547                                dc->npc = DYNAMIC_PC;
4548                                break;
4549                            case 8: // pil
4550                                gen_helper_wrpil(cpu_env, cpu_tmp0);
4551                                break;
4552                            case 9: // cwp
4553                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
4554                                break;
                            /* Window-management registers are plain 32-bit
                               fields in CPUSPARCState. */
4555                            case 10: // cansave
4556                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4557                                                offsetof(CPUSPARCState,
4558                                                         cansave));
4559                                break;
4560                            case 11: // canrestore
4561                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4562                                                offsetof(CPUSPARCState,
4563                                                         canrestore));
4564                                break;
4565                            case 12: // cleanwin
4566                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4567                                                offsetof(CPUSPARCState,
4568                                                         cleanwin));
4569                                break;
4570                            case 13: // otherwin
4571                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4572                                                offsetof(CPUSPARCState,
4573                                                         otherwin));
4574                                break;
4575                            case 14: // wstate
4576                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4577                                                offsetof(CPUSPARCState,
4578                                                         wstate));
4579                                break;
4580                            case 16: // UA2005 gl
4581                                CHECK_IU_FEATURE(dc, GL);
4582                                gen_helper_wrgl(cpu_env, cpu_tmp0);
4583                                break;
4584                            case 26: // UA2005 strand status
4585                                CHECK_IU_FEATURE(dc, HYPV);
4586                                if (!hypervisor(dc))
4587                                    goto priv_insn;
4588                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4589                                break;
4590                            default:
4591                                goto illegal_insn;
4592                            }
4593#else
                            /* V8 wrwim: mask the new WIM down to the
                               implemented register windows. */
4594                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4595                            if (dc->def->nwindows != 32) {
4596                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
4597                                                (1 << dc->def->nwindows) - 1);
4598                            }
4599#endif
4600                        }
4601                        break;
4602                    case 0x33: /* wrtbr, UA2005 wrhpr */
4603                        {
4604#ifndef TARGET_SPARC64
                            /* V8 wrtbr: TBR = rs1 ^ (rs2 or simm13),
                               supervisor only. */
4605                            if (!supervisor(dc))
4606                                goto priv_insn;
4607                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4608#else
                            /* UA2005 wrhpr: hyperprivileged register write,
                               dispatched on rd. */
4609                            CHECK_IU_FEATURE(dc, HYPV);
4610                            if (!hypervisor(dc))
4611                                goto priv_insn;
4612                            cpu_tmp0 = get_temp_tl(dc);
4613                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4614                            switch (rd) {
4615                            case 0: // hpstate
4616                                tcg_gen_st_i64(cpu_tmp0, cpu_env,
4617                                               offsetof(CPUSPARCState,
4618                                                        hpstate));
                                /* hpstate affects execution mode: end the
                                   TB and resume at the next insn. */
4619                                save_state(dc);
4620                                gen_op_next_insn();
4621                                tcg_gen_exit_tb(0);
4622                                dc->is_br = 1;
4623                                break;
4624                            case 1: // htstate
4625                                // XXX gen_op_wrhtstate();
4626                                break;
4627                            case 3: // hintp
4628                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4629                                break;
4630                            case 5: // htba
4631                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4632                                break;
4633                            case 31: // hstick_cmpr
4634                                {
4635                                    TCGv_ptr r_tickptr;
4636
                                    /* Update %hstick_cmpr and reprogram the
                                       hstick timer's limit from it. */
4637                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4638                                    r_tickptr = tcg_temp_new_ptr();
4639                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4640                                                   offsetof(CPUSPARCState, hstick));
4641                                    gen_helper_tick_set_limit(r_tickptr,
4642                                                              cpu_hstick_cmpr);
4643                                    tcg_temp_free_ptr(r_tickptr);
4644                                }
4645                                break;
                            /* hver is read-only; writes are illegal. */
4646                            case 6: // hver readonly
4647                            default:
4648                                goto illegal_insn;
4649                            }
4650#endif
4651                        }
4652                        break;
4653#endif
4654#ifdef TARGET_SPARC64
4655                    case 0x2c: /* V9 movcc */
4656                        {
4657                            int cc = GET_FIELD_SP(insn, 11, 12);
4658                            int cond = GET_FIELD_SP(insn, 14, 17);
4659                            DisasCompare cmp;
4660                            TCGv dst;
4661
                            /* Bit 18 selects integer condition codes
                               (cc = 0 -> icc, cc = 2 -> xcc); clear means
                               one of the floating-point %fcc fields. */
4662                            if (insn & (1 << 18)) {
4663                                if (cc == 0) {
4664                                    gen_compare(&cmp, 0, cond, dc);
4665                                } else if (cc == 2) {
4666                                    gen_compare(&cmp, 1, cond, dc);
4667                                } else {
4668                                    goto illegal_insn;
4669                                }
4670                            } else {
4671                                gen_fcompare(&cmp, cc, cond);
4672                            }
4673
4674                            /* The get_src2 above loaded the normal 13-bit
4675                               immediate field, not the 11-bit field we have
4676                               in movcc.  But it did handle the reg case.  */
4677                            if (IS_IMM) {
4678                                simm = GET_FIELD_SPs(insn, 0, 10);
4679                                tcg_gen_movi_tl(cpu_src2, simm);
4680                            }
4681
                            /* Load the old rd value first so movcond can
                               keep it when the condition is false. */
4682                            dst = gen_load_gpr(dc, rd);
4683                            tcg_gen_movcond_tl(cmp.cond, dst,
4684                                               cmp.c1, cmp.c2,
4685                                               cpu_src2, dst);
4686                            free_compare(&cmp);
4687                            gen_store_gpr(dc, rd, dst);
4688                            break;
4689                        }
4690                    case 0x2d: /* V9 sdivx */
4691                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4692                        gen_store_gpr(dc, rd, cpu_dst);
4693                        break;
4694                    case 0x2e: /* V9 popc */
                        /* Population count of rs2 (rs1 is unused). */
4695                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4696                        gen_store_gpr(dc, rd, cpu_dst);
4697                        break;
4698                    case 0x2f: /* V9 movr */
                        /* Conditional move on the value of register rs1
                           (zero / nonzero tests etc.). */
4699                        {
4700                            int cond = GET_FIELD_SP(insn, 10, 12);
4701                            DisasCompare cmp;
4702                            TCGv dst;
4703
4704                            gen_compare_reg(&cmp, cond, cpu_src1);
4705
4706                            /* The get_src2 above loaded the normal 13-bit
4707                               immediate field, not the 10-bit field we have
4708                               in movr.  But it did handle the reg case.  */
4709                            if (IS_IMM) {
4710                                simm = GET_FIELD_SPs(insn, 0, 9);
4711                                tcg_gen_movi_tl(cpu_src2, simm);
4712                            }
4713
4714                            dst = gen_load_gpr(dc, rd);
4715                            tcg_gen_movcond_tl(cmp.cond, dst,
4716                                               cmp.c1, cmp.c2,
4717                                               cpu_src2, dst);
4718                            free_compare(&cmp);
4719                            gen_store_gpr(dc, rd, dst);
4720                            break;
4721                        }
4722#endif
4723                    default:
4724                        goto illegal_insn;
4725                    }
4726                }
4727            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4728#ifdef TARGET_SPARC64
                /* IMPDEP1 / VIS ops, dispatched on the opf field.  All of
                   them require the FPU to be enabled. */
4729                int opf = GET_FIELD_SP(insn, 5, 13);
4730                rs1 = GET_FIELD(insn, 13, 17);
4731                rs2 = GET_FIELD(insn, 27, 31);
4732                if (gen_trap_ifnofpu(dc)) {
4733                    goto jmp_insn;
4734                }
4735
4736                switch (opf) {
                /* gen_edge args after the sources: element width in bits,
                   then 1 if the variant sets condition codes ("cc"), then
                   1 for the little-endian variant ("l"). */
4737                case 0x000: /* VIS I edge8cc */
4738                    CHECK_FPU_FEATURE(dc, VIS1);
4739                    cpu_src1 = gen_load_gpr(dc, rs1);
4740                    cpu_src2 = gen_load_gpr(dc, rs2);
4741                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4742                    gen_store_gpr(dc, rd, cpu_dst);
4743                    break;
4744                case 0x001: /* VIS II edge8n */
4745                    CHECK_FPU_FEATURE(dc, VIS2);
4746                    cpu_src1 = gen_load_gpr(dc, rs1);
4747                    cpu_src2 = gen_load_gpr(dc, rs2);
4748                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4749                    gen_store_gpr(dc, rd, cpu_dst);
4750                    break;
4751                case 0x002: /* VIS I edge8lcc */
4752                    CHECK_FPU_FEATURE(dc, VIS1);
4753                    cpu_src1 = gen_load_gpr(dc, rs1);
4754                    cpu_src2 = gen_load_gpr(dc, rs2);
4755                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4756                    gen_store_gpr(dc, rd, cpu_dst);
4757                    break;
4758                case 0x003: /* VIS II edge8ln */
4759                    CHECK_FPU_FEATURE(dc, VIS2);
4760                    cpu_src1 = gen_load_gpr(dc, rs1);
4761                    cpu_src2 = gen_load_gpr(dc, rs2);
4762                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4763                    gen_store_gpr(dc, rd, cpu_dst);
4764                    break;
4765                case 0x004: /* VIS I edge16cc */
4766                    CHECK_FPU_FEATURE(dc, VIS1);
4767                    cpu_src1 = gen_load_gpr(dc, rs1);
4768                    cpu_src2 = gen_load_gpr(dc, rs2);
4769                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4770                    gen_store_gpr(dc, rd, cpu_dst);
4771                    break;
4772                case 0x005: /* VIS II edge16n */
4773                    CHECK_FPU_FEATURE(dc, VIS2);
4774                    cpu_src1 = gen_load_gpr(dc, rs1);
4775                    cpu_src2 = gen_load_gpr(dc, rs2);
4776                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4777                    gen_store_gpr(dc, rd, cpu_dst);
4778                    break;
4779                case 0x006: /* VIS I edge16lcc */
4780                    CHECK_FPU_FEATURE(dc, VIS1);
4781                    cpu_src1 = gen_load_gpr(dc, rs1);
4782                    cpu_src2 = gen_load_gpr(dc, rs2);
4783                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4784                    gen_store_gpr(dc, rd, cpu_dst);
4785                    break;
4786                case 0x007: /* VIS II edge16ln */
4787                    CHECK_FPU_FEATURE(dc, VIS2);
4788                    cpu_src1 = gen_load_gpr(dc, rs1);
4789                    cpu_src2 = gen_load_gpr(dc, rs2);
4790                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4791                    gen_store_gpr(dc, rd, cpu_dst);
4792                    break;
4793                case 0x008: /* VIS I edge32cc */
4794                    CHECK_FPU_FEATURE(dc, VIS1);
4795                    cpu_src1 = gen_load_gpr(dc, rs1);
4796                    cpu_src2 = gen_load_gpr(dc, rs2);
4797                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4798                    gen_store_gpr(dc, rd, cpu_dst);
4799                    break;
4800                case 0x009: /* VIS II edge32n */
4801                    CHECK_FPU_FEATURE(dc, VIS2);
4802                    cpu_src1 = gen_load_gpr(dc, rs1);
4803                    cpu_src2 = gen_load_gpr(dc, rs2);
4804                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4805                    gen_store_gpr(dc, rd, cpu_dst);
4806                    break;
4807                case 0x00a: /* VIS I edge32lcc */
4808                    CHECK_FPU_FEATURE(dc, VIS1);
4809                    cpu_src1 = gen_load_gpr(dc, rs1);
4810                    cpu_src2 = gen_load_gpr(dc, rs2);
4811                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4812                    gen_store_gpr(dc, rd, cpu_dst);
4813                    break;
4814                case 0x00b: /* VIS II edge32ln */
4815                    CHECK_FPU_FEATURE(dc, VIS2);
4816                    cpu_src1 = gen_load_gpr(dc, rs1);
4817                    cpu_src2 = gen_load_gpr(dc, rs2);
4818                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4819                    gen_store_gpr(dc, rd, cpu_dst);
4820                    break;
4821                case 0x010: /* VIS I array8 */
4822                    CHECK_FPU_FEATURE(dc, VIS1);
4823                    cpu_src1 = gen_load_gpr(dc, rs1);
4824                    cpu_src2 = gen_load_gpr(dc, rs2);
4825                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4826                    gen_store_gpr(dc, rd, cpu_dst);
4827                    break;
                /* array16/array32 reuse the array8 address computation and
                   scale the result by the element size (<< 1, << 2). */
4828                case 0x012: /* VIS I array16 */
4829                    CHECK_FPU_FEATURE(dc, VIS1);
4830                    cpu_src1 = gen_load_gpr(dc, rs1);
4831                    cpu_src2 = gen_load_gpr(dc, rs2);
4832                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4833                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4834                    gen_store_gpr(dc, rd, cpu_dst);
4835                    break;
4836                case 0x014: /* VIS I array32 */
4837                    CHECK_FPU_FEATURE(dc, VIS1);
4838                    cpu_src1 = gen_load_gpr(dc, rs1);
4839                    cpu_src2 = gen_load_gpr(dc, rs2);
4840                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4841                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4842                    gen_store_gpr(dc, rd, cpu_dst);
4843                    break;
                /* alignaddr/alignaddrl: last arg selects the little-endian
                   variant. */
4844                case 0x018: /* VIS I alignaddr */
4845                    CHECK_FPU_FEATURE(dc, VIS1);
4846                    cpu_src1 = gen_load_gpr(dc, rs1);
4847                    cpu_src2 = gen_load_gpr(dc, rs2);
4848                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4849                    gen_store_gpr(dc, rd, cpu_dst);
4850                    break;
4851                case 0x01a: /* VIS I alignaddrl */
4852                    CHECK_FPU_FEATURE(dc, VIS1);
4853                    cpu_src1 = gen_load_gpr(dc, rs1);
4854                    cpu_src2 = gen_load_gpr(dc, rs2);
4855                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4856                    gen_store_gpr(dc, rd, cpu_dst);
4857                    break;
4858                case 0x019: /* VIS II bmask */
                    /* rd = rs1 + rs2; the sum is also deposited into the
                       upper 32 bits of %gsr (the bmask field). */
4859                    CHECK_FPU_FEATURE(dc, VIS2);
4860                    cpu_src1 = gen_load_gpr(dc, rs1);
4861                    cpu_src2 = gen_load_gpr(dc, rs2);
4862                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4863                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4864                    gen_store_gpr(dc, rd, cpu_dst);
4865                    break;
                /* VIS partitioned compares: compare the 16- or 32-bit lanes
                   of two 64-bit FP registers; the per-lane result mask goes
                   to an integer destination register. */
4866                case 0x020: /* VIS I fcmple16 */
4867                    CHECK_FPU_FEATURE(dc, VIS1);
4868                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4869                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4870                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4871                    gen_store_gpr(dc, rd, cpu_dst);
4872                    break;
4873                case 0x022: /* VIS I fcmpne16 */
4874                    CHECK_FPU_FEATURE(dc, VIS1);
4875                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4876                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4877                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4878                    gen_store_gpr(dc, rd, cpu_dst);
4879                    break;
4880                case 0x024: /* VIS I fcmple32 */
4881                    CHECK_FPU_FEATURE(dc, VIS1);
4882                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4883                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4884                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4885                    gen_store_gpr(dc, rd, cpu_dst);
4886                    break;
4887                case 0x026: /* VIS I fcmpne32 */
4888                    CHECK_FPU_FEATURE(dc, VIS1);
4889                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4890                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4891                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4892                    gen_store_gpr(dc, rd, cpu_dst);
4893                    break;
4894                case 0x028: /* VIS I fcmpgt16 */
4895                    CHECK_FPU_FEATURE(dc, VIS1);
4896                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4897                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4898                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4899                    gen_store_gpr(dc, rd, cpu_dst);
4900                    break;
4901                case 0x02a: /* VIS I fcmpeq16 */
4902                    CHECK_FPU_FEATURE(dc, VIS1);
4903                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4904                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4905                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4906                    gen_store_gpr(dc, rd, cpu_dst);
4907                    break;
4908                case 0x02c: /* VIS I fcmpgt32 */
4909                    CHECK_FPU_FEATURE(dc, VIS1);
4910                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4911                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4912                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4913                    gen_store_gpr(dc, rd, cpu_dst);
4914                    break;
4915                case 0x02e: /* VIS I fcmpeq32 */
4916                    CHECK_FPU_FEATURE(dc, VIS1);
4917                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4918                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4919                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4920                    gen_store_gpr(dc, rd, cpu_dst);
4921                    break;
                /* The gen_*_fop_* wrappers load/store FP regs around a TCG
                   op or helper; in their names D appears to denote a 64-bit
                   and F a 32-bit FP operand, "ne" no-env, "gsr" a %gsr
                   input (naming convention — confirm against the wrapper
                   definitions earlier in this file). */
4922                case 0x031: /* VIS I fmul8x16 */
4923                    CHECK_FPU_FEATURE(dc, VIS1);
4924                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4925                    break;
4926                case 0x033: /* VIS I fmul8x16au */
4927                    CHECK_FPU_FEATURE(dc, VIS1);
4928                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4929                    break;
4930                case 0x035: /* VIS I fmul8x16al */
4931                    CHECK_FPU_FEATURE(dc, VIS1);
4932                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4933                    break;
4934                case 0x036: /* VIS I fmul8sux16 */
4935                    CHECK_FPU_FEATURE(dc, VIS1);
4936                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4937                    break;
4938                case 0x037: /* VIS I fmul8ulx16 */
4939                    CHECK_FPU_FEATURE(dc, VIS1);
4940                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4941                    break;
4942                case 0x038: /* VIS I fmuld8sux16 */
4943                    CHECK_FPU_FEATURE(dc, VIS1);
4944                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4945                    break;
4946                case 0x039: /* VIS I fmuld8ulx16 */
4947                    CHECK_FPU_FEATURE(dc, VIS1);
4948                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4949                    break;
4950                case 0x03a: /* VIS I fpack32 */
4951                    CHECK_FPU_FEATURE(dc, VIS1);
4952                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4953                    break;
                /* fpack16/fpackfix take a 64-bit source (rs2 only) plus
                   %gsr and produce a 32-bit result. */
4954                case 0x03b: /* VIS I fpack16 */
4955                    CHECK_FPU_FEATURE(dc, VIS1);
4956                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4957                    cpu_dst_32 = gen_dest_fpr_F(dc);
4958                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4959                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4960                    break;
4961                case 0x03d: /* VIS I fpackfix */
4962                    CHECK_FPU_FEATURE(dc, VIS1);
4963                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4964                    cpu_dst_32 = gen_dest_fpr_F(dc);
4965                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4966                    gen_store_fpr_F(dc, rd, cpu_dst_32);
4967                    break;
4968                case 0x03e: /* VIS I pdist */
4969                    CHECK_FPU_FEATURE(dc, VIS1);
4970                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4971                    break;
4972                case 0x048: /* VIS I faligndata */
4973                    CHECK_FPU_FEATURE(dc, VIS1);
4974                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4975                    break;
4976                case 0x04b: /* VIS I fpmerge */
4977                    CHECK_FPU_FEATURE(dc, VIS1);
4978                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4979                    break;
4980                case 0x04c: /* VIS II bshuffle */
4981                    CHECK_FPU_FEATURE(dc, VIS2);
4982                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4983                    break;
4984                case 0x04d: /* VIS I fexpand */
4985                    CHECK_FPU_FEATURE(dc, VIS1);
4986                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4987                    break;
4988                case 0x050: /* VIS I fpadd16 */
4989                    CHECK_FPU_FEATURE(dc, VIS1);
4990                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4991                    break;
4992                case 0x051: /* VIS I fpadd16s */
4993                    CHECK_FPU_FEATURE(dc, VIS1);
4994                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4995                    break;
4996                case 0x052: /* VIS I fpadd32 */
4997                    CHECK_FPU_FEATURE(dc, VIS1);
4998                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4999                    break;
                /* 32-bit single-lane add/sub reduce to plain i32 ops. */
5000                case 0x053: /* VIS I fpadd32s */
5001                    CHECK_FPU_FEATURE(dc, VIS1);
5002                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5003                    break;
5004                case 0x054: /* VIS I fpsub16 */
5005                    CHECK_FPU_FEATURE(dc, VIS1);
5006                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5007                    break;
5008                case 0x055: /* VIS I fpsub16s */
5009                    CHECK_FPU_FEATURE(dc, VIS1);
5010                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5011                    break;
5012                case 0x056: /* VIS I fpsub32 */
5013                    CHECK_FPU_FEATURE(dc, VIS1);
5014                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5015                    break;
5016                case 0x057: /* VIS I fpsub32s */
5017                    CHECK_FPU_FEATURE(dc, VIS1);
5018                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5019                    break;
                /* fzero/fzeros write a constant 0 — no sources. */
5020                case 0x060: /* VIS I fzero */
5021                    CHECK_FPU_FEATURE(dc, VIS1);
5022                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5023                    tcg_gen_movi_i64(cpu_dst_64, 0);
5024                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5025                    break;
5026                case 0x061: /* VIS I fzeros */
5027                    CHECK_FPU_FEATURE(dc, VIS1);
5028                    cpu_dst_32 = gen_dest_fpr_F(dc);
5029                    tcg_gen_movi_i32(cpu_dst_32, 0);
5030                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5031                    break;
                /* VIS logical ops map straight onto TCG bitwise ops;
                   fandnot1 swaps the source order to reuse andc. */
5032                case 0x062: /* VIS I fnor */
5033                    CHECK_FPU_FEATURE(dc, VIS1);
5034                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5035                    break;
5036                case 0x063: /* VIS I fnors */
5037                    CHECK_FPU_FEATURE(dc, VIS1);
5038                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5039                    break;
5040                case 0x064: /* VIS I fandnot2 */
5041                    CHECK_FPU_FEATURE(dc, VIS1);
5042                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5043                    break;
5044                case 0x065: /* VIS I fandnot2s */
5045                    CHECK_FPU_FEATURE(dc, VIS1);
5046                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5047                    break;
5048                case 0x066: /* VIS I fnot2 */
5049                    CHECK_FPU_FEATURE(dc, VIS1);
5050                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5051                    break;
5052                case 0x067: /* VIS I fnot2s */
5053                    CHECK_FPU_FEATURE(dc, VIS1);
5054                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5055                    break;
5056                case 0x068: /* VIS I fandnot1 */
5057                    CHECK_FPU_FEATURE(dc, VIS1);
5058                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5059                    break;
5060                case 0x069: /* VIS I fandnot1s */
5061                    CHECK_FPU_FEATURE(dc, VIS1);
5062                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5063                    break;
5064                case 0x06a: /* VIS I fnot1 */
5065                    CHECK_FPU_FEATURE(dc, VIS1);
5066                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5067                    break;
5068                case 0x06b: /* VIS I fnot1s */
5069                    CHECK_FPU_FEATURE(dc, VIS1);
5070                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5071                    break;
5072                case 0x06c: /* VIS I fxor */
5073                    CHECK_FPU_FEATURE(dc, VIS1);
5074                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5075                    break;
5076                case 0x06d: /* VIS I fxors */
5077                    CHECK_FPU_FEATURE(dc, VIS1);
5078                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5079                    break;
5080                case 0x06e: /* VIS I fnand */
5081                    CHECK_FPU_FEATURE(dc, VIS1);
5082                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5083                    break;
5084                case 0x06f: /* VIS I fnands */
5085                    CHECK_FPU_FEATURE(dc, VIS1);
5086                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5087                    break;
5088                case 0x070: /* VIS I fand */
5089                    CHECK_FPU_FEATURE(dc, VIS1);
5090                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5091                    break;
5092                case 0x071: /* VIS I fands */
5093                    CHECK_FPU_FEATURE(dc, VIS1);
5094                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5095                    break;
5096                case 0x072: /* VIS I fxnor */
5097                    CHECK_FPU_FEATURE(dc, VIS1);
5098                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5099                    break;
5100                case 0x073: /* VIS I fxnors */
5101                    CHECK_FPU_FEATURE(dc, VIS1);
5102                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5103                    break;
5104                case 0x074: /* VIS I fsrc1 */
5105                    CHECK_FPU_FEATURE(dc, VIS1);
5106                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5107                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5108                    break;
5109                case 0x075: /* VIS I fsrc1s */
5110                    CHECK_FPU_FEATURE(dc, VIS1);
5111                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5112                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5113                    break;
5114                case 0x076: /* VIS I fornot2 */
5115                    CHECK_FPU_FEATURE(dc, VIS1);
5116                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5117                    break;
5118                case 0x077: /* VIS I fornot2s */
5119                    CHECK_FPU_FEATURE(dc, VIS1);
5120                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5121                    break;
5122                case 0x078: /* VIS I fsrc2 */
5123                    CHECK_FPU_FEATURE(dc, VIS1);
5124                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5125                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5126                    break;
5127                case 0x079: /* VIS I fsrc2s */
5128                    CHECK_FPU_FEATURE(dc, VIS1);
5129                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5130                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5131                    break;
5132                case 0x07a: /* VIS I fornot1 */
5133                    CHECK_FPU_FEATURE(dc, VIS1);
5134                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5135                    break;
5136                case 0x07b: /* VIS I fornot1s */
5137                    CHECK_FPU_FEATURE(dc, VIS1);
5138                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5139                    break;
5140                case 0x07c: /* VIS I for */
5141                    CHECK_FPU_FEATURE(dc, VIS1);
5142                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5143                    break;
5144                case 0x07d: /* VIS I fors */
5145                    CHECK_FPU_FEATURE(dc, VIS1);
5146                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5147                    break;
5148                case 0x07e: /* VIS I fone */
5149                    CHECK_FPU_FEATURE(dc, VIS1);
5150                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5151                    tcg_gen_movi_i64(cpu_dst_64, -1);
5152                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5153                    break;
5154                case 0x07f: /* VIS I fones */
5155                    CHECK_FPU_FEATURE(dc, VIS1);
5156                    cpu_dst_32 = gen_dest_fpr_F(dc);
5157                    tcg_gen_movi_i32(cpu_dst_32, -1);
5158                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5159                    break;
5160                case 0x080: /* VIS I shutdown */
5161                case 0x081: /* VIS II siam */
5162                    // XXX
5163                    goto illegal_insn;
5164                default:
5165                    goto illegal_insn;
5166                }
5167#else
5168                goto ncp_insn;
5169#endif
5170            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5171#ifdef TARGET_SPARC64
5172                goto illegal_insn;
5173#else
5174                goto ncp_insn;
5175#endif
5176#ifdef TARGET_SPARC64
5177            } else if (xop == 0x39) { /* V9 return */
5178                save_state(dc);
5179                cpu_src1 = get_src1(dc, insn);
5180                cpu_tmp0 = get_temp_tl(dc);
5181                if (IS_IMM) {   /* immediate */
5182                    simm = GET_FIELDs(insn, 19, 31);
5183                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5184                } else {                /* register */
5185                    rs2 = GET_FIELD(insn, 27, 31);
5186                    if (rs2) {
5187                        cpu_src2 = gen_load_gpr(dc, rs2);
5188                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5189                    } else {
5190                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5191                    }
5192                }
5193                gen_helper_restore(cpu_env);
5194                gen_mov_pc_npc(dc);
5195                gen_check_align(cpu_tmp0, 3);
5196                tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5197                dc->npc = DYNAMIC_PC;
5198                goto jmp_insn;
5199#endif
5200            } else {
5201                cpu_src1 = get_src1(dc, insn);
5202                cpu_tmp0 = get_temp_tl(dc);
5203                if (IS_IMM) {   /* immediate */
5204                    simm = GET_FIELDs(insn, 19, 31);
5205                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5206                } else {                /* register */
5207                    rs2 = GET_FIELD(insn, 27, 31);
5208                    if (rs2) {
5209                        cpu_src2 = gen_load_gpr(dc, rs2);
5210                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5211                    } else {
5212                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5213                    }
5214                }
5215                switch (xop) {
5216                case 0x38:      /* jmpl */
5217                    {
5218                        TCGv t = gen_dest_gpr(dc, rd);
5219                        tcg_gen_movi_tl(t, dc->pc);
5220                        gen_store_gpr(dc, rd, t);
5221
5222                        gen_mov_pc_npc(dc);
5223                        gen_check_align(cpu_tmp0, 3);
5224                        gen_address_mask(dc, cpu_tmp0);
5225                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5226                        dc->npc = DYNAMIC_PC;
5227                    }
5228                    goto jmp_insn;
5229#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5230                case 0x39:      /* rett, V9 return */
5231                    {
5232                        if (!supervisor(dc))
5233                            goto priv_insn;
5234                        gen_mov_pc_npc(dc);
5235                        gen_check_align(cpu_tmp0, 3);
5236                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5237                        dc->npc = DYNAMIC_PC;
5238                        gen_helper_rett(cpu_env);
5239                    }
5240                    goto jmp_insn;
5241#endif
5242                case 0x3b: /* flush */
5243                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5244                        goto unimp_flush;
5245                    /* nop */
5246                    break;
5247                case 0x3c:      /* save */
5248                    gen_helper_save(cpu_env);
5249                    gen_store_gpr(dc, rd, cpu_tmp0);
5250                    break;
5251                case 0x3d:      /* restore */
5252                    gen_helper_restore(cpu_env);
5253                    gen_store_gpr(dc, rd, cpu_tmp0);
5254                    break;
5255#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5256                case 0x3e:      /* V9 done/retry */
5257                    {
5258                        switch (rd) {
5259                        case 0:
5260                            if (!supervisor(dc))
5261                                goto priv_insn;
5262                            dc->npc = DYNAMIC_PC;
5263                            dc->pc = DYNAMIC_PC;
5264                            gen_helper_done(cpu_env);
5265                            goto jmp_insn;
5266                        case 1:
5267                            if (!supervisor(dc))
5268                                goto priv_insn;
5269                            dc->npc = DYNAMIC_PC;
5270                            dc->pc = DYNAMIC_PC;
5271                            gen_helper_retry(cpu_env);
5272                            goto jmp_insn;
5273                        default:
5274                            goto illegal_insn;
5275                        }
5276                    }
5277                    break;
5278#endif
5279                default:
5280                    goto illegal_insn;
5281                }
5282            }
5283            break;
5284        }
5285        break;
5286    case 3:                     /* load/store instructions */
5287        {
5288            unsigned int xop = GET_FIELD(insn, 7, 12);
5289            /* ??? gen_address_mask prevents us from using a source
5290               register directly.  Always generate a temporary.  */
5291            TCGv cpu_addr = get_temp_tl(dc);
5292
5293            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5294            if (xop == 0x3c || xop == 0x3e) {
5295                /* V9 casa/casxa : no offset */
5296            } else if (IS_IMM) {     /* immediate */
5297                simm = GET_FIELDs(insn, 19, 31);
5298                if (simm != 0) {
5299                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5300                }
5301            } else {            /* register */
5302                rs2 = GET_FIELD(insn, 27, 31);
5303                if (rs2 != 0) {
5304                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5305                }
5306            }
5307            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5308                (xop > 0x17 && xop <= 0x1d ) ||
5309                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5310                TCGv cpu_val = gen_dest_gpr(dc, rd);
5311
5312                switch (xop) {
5313                case 0x0:       /* ld, V9 lduw, load unsigned word */
5314                    gen_address_mask(dc, cpu_addr);
5315                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5316                    break;
5317                case 0x1:       /* ldub, load unsigned byte */
5318                    gen_address_mask(dc, cpu_addr);
5319                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5320                    break;
5321                case 0x2:       /* lduh, load unsigned halfword */
5322                    gen_address_mask(dc, cpu_addr);
5323                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5324                    break;
5325                case 0x3:       /* ldd, load double word */
5326                    if (rd & 1)
5327                        goto illegal_insn;
5328                    else {
5329                        TCGv_i64 t64;
5330
5331                        gen_address_mask(dc, cpu_addr);
5332                        t64 = tcg_temp_new_i64();
5333                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5334                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5335                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5336                        gen_store_gpr(dc, rd + 1, cpu_val);
5337                        tcg_gen_shri_i64(t64, t64, 32);
5338                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5339                        tcg_temp_free_i64(t64);
5340                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5341                    }
5342                    break;
5343                case 0x9:       /* ldsb, load signed byte */
5344                    gen_address_mask(dc, cpu_addr);
5345                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5346                    break;
5347                case 0xa:       /* ldsh, load signed halfword */
5348                    gen_address_mask(dc, cpu_addr);
5349                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5350                    break;
5351                case 0xd:       /* ldstub */
5352                    gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5353                    break;
5354                case 0x0f:
5355                    /* swap, swap register with memory. Also atomically */
5356                    CHECK_IU_FEATURE(dc, SWAP);
5357                    cpu_src1 = gen_load_gpr(dc, rd);
5358                    gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5359                             dc->mem_idx, MO_TEUL);
5360                    break;
5361#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5362                case 0x10:      /* lda, V9 lduwa, load word alternate */
5363                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5364                    break;
5365                case 0x11:      /* lduba, load unsigned byte alternate */
5366                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5367                    break;
5368                case 0x12:      /* lduha, load unsigned halfword alternate */
5369                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5370                    break;
5371                case 0x13:      /* ldda, load double word alternate */
5372                    if (rd & 1) {
5373                        goto illegal_insn;
5374                    }
5375                    gen_ldda_asi(dc, cpu_addr, insn, rd);
5376                    goto skip_move;
5377                case 0x19:      /* ldsba, load signed byte alternate */
5378                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5379                    break;
5380                case 0x1a:      /* ldsha, load signed halfword alternate */
5381                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5382                    break;
5383                case 0x1d:      /* ldstuba -- XXX: should be atomically */
5384                    gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5385                    break;
5386                case 0x1f:      /* swapa, swap reg with alt. memory. Also
5387                                   atomically */
5388                    CHECK_IU_FEATURE(dc, SWAP);
5389                    cpu_src1 = gen_load_gpr(dc, rd);
5390                    gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5391                    break;
5392
5393#ifndef TARGET_SPARC64
5394                case 0x30: /* ldc */
5395                case 0x31: /* ldcsr */
5396                case 0x33: /* lddc */
5397                    goto ncp_insn;
5398#endif
5399#endif
5400#ifdef TARGET_SPARC64
5401                case 0x08: /* V9 ldsw */
5402                    gen_address_mask(dc, cpu_addr);
5403                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5404                    break;
5405                case 0x0b: /* V9 ldx */
5406                    gen_address_mask(dc, cpu_addr);
5407                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5408                    break;
5409                case 0x18: /* V9 ldswa */
5410                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5411                    break;
5412                case 0x1b: /* V9 ldxa */
5413                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5414                    break;
5415                case 0x2d: /* V9 prefetch, no effect */
5416                    goto skip_move;
5417                case 0x30: /* V9 ldfa */
5418                    if (gen_trap_ifnofpu(dc)) {
5419                        goto jmp_insn;
5420                    }
5421                    gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5422                    gen_update_fprs_dirty(dc, rd);
5423                    goto skip_move;
5424                case 0x33: /* V9 lddfa */
5425                    if (gen_trap_ifnofpu(dc)) {
5426                        goto jmp_insn;
5427                    }
5428                    gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5429                    gen_update_fprs_dirty(dc, DFPREG(rd));
5430                    goto skip_move;
5431                case 0x3d: /* V9 prefetcha, no effect */
5432                    goto skip_move;
5433                case 0x32: /* V9 ldqfa */
5434                    CHECK_FPU_FEATURE(dc, FLOAT128);
5435                    if (gen_trap_ifnofpu(dc)) {
5436                        goto jmp_insn;
5437                    }
5438                    gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5439                    gen_update_fprs_dirty(dc, QFPREG(rd));
5440                    goto skip_move;
5441#endif
5442                default:
5443                    goto illegal_insn;
5444                }
5445                gen_store_gpr(dc, rd, cpu_val);
5446#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5447            skip_move: ;
5448#endif
5449            } else if (xop >= 0x20 && xop < 0x24) {
5450                if (gen_trap_ifnofpu(dc)) {
5451                    goto jmp_insn;
5452                }
5453                switch (xop) {
5454                case 0x20:      /* ldf, load fpreg */
5455                    gen_address_mask(dc, cpu_addr);
5456                    cpu_dst_32 = gen_dest_fpr_F(dc);
5457                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5458                                        dc->mem_idx, MO_TEUL);
5459                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5460                    break;
5461                case 0x21:      /* ldfsr, V9 ldxfsr */
5462#ifdef TARGET_SPARC64
5463                    gen_address_mask(dc, cpu_addr);
5464                    if (rd == 1) {
5465                        TCGv_i64 t64 = tcg_temp_new_i64();
5466                        tcg_gen_qemu_ld_i64(t64, cpu_addr,
5467                                            dc->mem_idx, MO_TEQ);
5468                        gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5469                        tcg_temp_free_i64(t64);
5470                        break;
5471                    }
5472#endif
5473                    cpu_dst_32 = get_temp_i32(dc);
5474                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5475                                        dc->mem_idx, MO_TEUL);
5476                    gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5477                    break;
5478                case 0x22:      /* ldqf, load quad fpreg */
5479                    CHECK_FPU_FEATURE(dc, FLOAT128);
5480                    gen_address_mask(dc, cpu_addr);
5481                    cpu_src1_64 = tcg_temp_new_i64();
5482                    tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5483                                        MO_TEQ | MO_ALIGN_4);
5484                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5485                    cpu_src2_64 = tcg_temp_new_i64();
5486                    tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5487                                        MO_TEQ | MO_ALIGN_4);
5488                    gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5489                    tcg_temp_free_i64(cpu_src1_64);
5490                    tcg_temp_free_i64(cpu_src2_64);
5491                    break;
5492                case 0x23:      /* lddf, load double fpreg */
5493                    gen_address_mask(dc, cpu_addr);
5494                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5495                    tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5496                                        MO_TEQ | MO_ALIGN_4);
5497                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5498                    break;
5499                default:
5500                    goto illegal_insn;
5501                }
5502            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5503                       xop == 0xe || xop == 0x1e) {
5504                TCGv cpu_val = gen_load_gpr(dc, rd);
5505
5506                switch (xop) {
5507                case 0x4: /* st, store word */
5508                    gen_address_mask(dc, cpu_addr);
5509                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5510                    break;
5511                case 0x5: /* stb, store byte */
5512                    gen_address_mask(dc, cpu_addr);
5513                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5514                    break;
5515                case 0x6: /* sth, store halfword */
5516                    gen_address_mask(dc, cpu_addr);
5517                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5518                    break;
5519                case 0x7: /* std, store double word */
5520                    if (rd & 1)
5521                        goto illegal_insn;
5522                    else {
5523                        TCGv_i64 t64;
5524                        TCGv lo;
5525
5526                        gen_address_mask(dc, cpu_addr);
5527                        lo = gen_load_gpr(dc, rd + 1);
5528                        t64 = tcg_temp_new_i64();
5529                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5530                        tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5531                        tcg_temp_free_i64(t64);
5532                    }
5533                    break;
5534#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5535                case 0x14: /* sta, V9 stwa, store word alternate */
5536                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5537                    break;
5538                case 0x15: /* stba, store byte alternate */
5539                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5540                    break;
5541                case 0x16: /* stha, store halfword alternate */
5542                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5543                    break;
5544                case 0x17: /* stda, store double word alternate */
5545                    if (rd & 1) {
5546                        goto illegal_insn;
5547                    }
5548                    gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5549                    break;
5550#endif
5551#ifdef TARGET_SPARC64
5552                case 0x0e: /* V9 stx */
5553                    gen_address_mask(dc, cpu_addr);
5554                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5555                    break;
5556                case 0x1e: /* V9 stxa */
5557                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5558                    break;
5559#endif
5560                default:
5561                    goto illegal_insn;
5562                }
5563            } else if (xop > 0x23 && xop < 0x28) {
5564                if (gen_trap_ifnofpu(dc)) {
5565                    goto jmp_insn;
5566                }
5567                switch (xop) {
5568                case 0x24: /* stf, store fpreg */
5569                    gen_address_mask(dc, cpu_addr);
5570                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
5571                    tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5572                                        dc->mem_idx, MO_TEUL);
5573                    break;
5574                case 0x25: /* stfsr, V9 stxfsr */
5575                    {
5576#ifdef TARGET_SPARC64
5577                        gen_address_mask(dc, cpu_addr);
5578                        if (rd == 1) {
5579                            tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5580                            break;
5581                        }
5582#endif
5583                        tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5584                    }
5585                    break;
5586                case 0x26:
5587#ifdef TARGET_SPARC64
5588                    /* V9 stqf, store quad fpreg */
5589                    CHECK_FPU_FEATURE(dc, FLOAT128);
5590                    gen_address_mask(dc, cpu_addr);
5591                    /* ??? While stqf only requires 4-byte alignment, it is
5592                       legal for the cpu to signal the unaligned exception.
5593                       The OS trap handler is then required to fix it up.
5594                       For qemu, this avoids having to probe the second page
5595                       before performing the first write.  */
5596                    cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5597                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5598                                        dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5599                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5600                    cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5601                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5602                                        dc->mem_idx, MO_TEQ);
5603                    break;
5604#else /* !TARGET_SPARC64 */
5605                    /* stdfq, store floating point queue */
5606#if defined(CONFIG_USER_ONLY)
5607                    goto illegal_insn;
5608#else
5609                    if (!supervisor(dc))
5610                        goto priv_insn;
5611                    if (gen_trap_ifnofpu(dc)) {
5612                        goto jmp_insn;
5613                    }
5614                    goto nfq_insn;
5615#endif
5616#endif
5617                case 0x27: /* stdf, store double fpreg */
5618                    gen_address_mask(dc, cpu_addr);
5619                    cpu_src1_64 = gen_load_fpr_D(dc, rd);
5620                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5621                                        MO_TEQ | MO_ALIGN_4);
5622                    break;
5623                default:
5624                    goto illegal_insn;
5625                }
5626            } else if (xop > 0x33 && xop < 0x3f) {
5627                switch (xop) {
5628#ifdef TARGET_SPARC64
5629                case 0x34: /* V9 stfa */
5630                    if (gen_trap_ifnofpu(dc)) {
5631                        goto jmp_insn;
5632                    }
5633                    gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5634                    break;
5635                case 0x36: /* V9 stqfa */
5636                    {
5637                        CHECK_FPU_FEATURE(dc, FLOAT128);
5638                        if (gen_trap_ifnofpu(dc)) {
5639                            goto jmp_insn;
5640                        }
5641                        gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5642                    }
5643                    break;
5644                case 0x37: /* V9 stdfa */
5645                    if (gen_trap_ifnofpu(dc)) {
5646                        goto jmp_insn;
5647                    }
5648                    gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5649                    break;
5650                case 0x3e: /* V9 casxa */
5651                    rs2 = GET_FIELD(insn, 27, 31);
5652                    cpu_src2 = gen_load_gpr(dc, rs2);
5653                    gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5654                    break;
5655#else
5656                case 0x34: /* stc */
5657                case 0x35: /* stcsr */
5658                case 0x36: /* stdcq */
5659                case 0x37: /* stdc */
5660                    goto ncp_insn;
5661#endif
5662#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5663                case 0x3c: /* V9 or LEON3 casa */
5664#ifndef TARGET_SPARC64
5665                    CHECK_IU_FEATURE(dc, CASA);
5666#endif
5667                    rs2 = GET_FIELD(insn, 27, 31);
5668                    cpu_src2 = gen_load_gpr(dc, rs2);
5669                    gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5670                    break;
5671#endif
5672                default:
5673                    goto illegal_insn;
5674                }
5675            } else {
5676                goto illegal_insn;
5677            }
5678        }
5679        break;
5680    }
5681    /* default case for non jump instructions */
5682    if (dc->npc == DYNAMIC_PC) {
5683        dc->pc = DYNAMIC_PC;
5684        gen_op_next_insn();
5685    } else if (dc->npc == JUMP_PC) {
5686        /* we can do a static jump */
5687        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5688        dc->is_br = 1;
5689    } else {
5690        dc->pc = dc->npc;
5691        dc->npc = dc->npc + 4;
5692    }
5693 jmp_insn:
5694    goto egress;
5695 illegal_insn:
5696    gen_exception(dc, TT_ILL_INSN);
5697    goto egress;
5698 unimp_flush:
5699    gen_exception(dc, TT_UNIMP_FLUSH);
5700    goto egress;
5701#if !defined(CONFIG_USER_ONLY)
5702 priv_insn:
5703    gen_exception(dc, TT_PRIV_INSN);
5704    goto egress;
5705#endif
5706 nfpu_insn:
5707    gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5708    goto egress;
5709#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5710 nfq_insn:
5711    gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5712    goto egress;
5713#endif
5714#ifndef TARGET_SPARC64
5715 ncp_insn:
5716    gen_exception(dc, TT_NCP_INSN);
5717    goto egress;
5718#endif
5719 egress:
5720    if (dc->n_t32 != 0) {
5721        int i;
5722        for (i = dc->n_t32 - 1; i >= 0; --i) {
5723            tcg_temp_free_i32(dc->t32[i]);
5724        }
5725        dc->n_t32 = 0;
5726    }
5727    if (dc->n_ttl != 0) {
5728        int i;
5729        for (i = dc->n_ttl - 1; i >= 0; --i) {
5730            tcg_temp_free(dc->ttl[i]);
5731        }
5732        dc->n_ttl = 0;
5733    }
5734}
5735
/*
 * Translate a block of guest SPARC code starting at tb->pc into one
 * TranslationBlock's worth of TCG ops.
 *
 * Translation stops when the decoder signals a branch (dc->is_br), the
 * PC becomes non-sequential, a guest page boundary is crossed, single
 * stepping is active, or the op buffer / instruction budget runs out.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock * tb)
{
    CPUSPARCState *env = cs->env_ptr;
    target_ulong pc_start, last_pc;
    DisasContext dc1, *dc = &dc1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    /* Seed the disassembly context from the TB's flags. */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* On SPARC the next-PC travels in tb->cs_base. */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
    dc->def = &env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (cs->singlestep_enabled || singlestep);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    dc->asi = (tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif

    /* Bound the number of guest insns per TB. */
    num_insns = 0;
    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    do {
        /* Record (pc, npc) for this insn so restore_state_to_opc() can
           recover both after an exception.  When the NPC is one of two
           condition-dependent values, encode the taken target tagged
           with JUMP_PC; the not-taken target is implicitly pc + 4
           (asserted below).  */
        if (dc->npc & JUMP_PC) {
            assert(dc->jump_pc[1] == dc->pc + 4);
            tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
        } else {
            tcg_gen_insn_start(dc->pc, dc->npc);
        }
        num_insns++;
        last_pc = dc->pc;

        /* Breakpoint hit: emit a debug trap and end the TB.  Flush
           translator state first unless we're on the very first insn. */
        if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
            if (dc->pc != pc_start) {
                save_state(dc);
            }
            gen_helper_debug(cpu_env);
            tcg_gen_exit_tb(0);
            dc->is_br = 1;
            goto exit_gen_loop;
        }

        /* For icount, open the I/O window before the final insn. */
        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
            gen_io_start();
        }

        insn = cpu_ldl_code(env, dc->pc);

        disas_sparc_insn(dc, insn);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while (!tcg_op_buf_full() &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb_cflags(tb) & CF_LAST_IO) {
        gen_io_end();
    }
    /* Emit the TB epilogue unless the decoder already ended the block. */
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* Dynamic PC/NPC: store what we know and return to the
               main loop. */
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_tb_end(tb, num_insns);

    /* Guest bytes consumed and insns translated, for TB bookkeeping. */
    tb->size = last_pc + 4 - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    /* Optionally log the guest disassembly of the translated range. */
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, last_pc + 4 - pc_start);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}
5858
5859void sparc_tcg_init(void)
5860{
5861    static const char gregnames[32][4] = {
5862        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5863        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5864        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5865        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5866    };
5867    static const char fregnames[32][4] = {
5868        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5869        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5870        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5871        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5872    };
5873
5874    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5875#ifdef TARGET_SPARC64
5876        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5877        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5878#else
5879        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5880#endif
5881        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5882        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5883    };
5884
5885    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5886#ifdef TARGET_SPARC64
5887        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5888        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5889        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5890        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5891          "hstick_cmpr" },
5892        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5893        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5894        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5895        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5896        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5897#endif
5898        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5899        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5900        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5901        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5902        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5903        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5904        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5905        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5906#ifndef CONFIG_USER_ONLY
5907        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5908#endif
5909    };
5910
5911    unsigned int i;
5912
5913    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5914                                         offsetof(CPUSPARCState, regwptr),
5915                                         "regwptr");
5916
5917    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5918        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5919    }
5920
5921    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5922        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5923    }
5924
5925    TCGV_UNUSED(cpu_regs[0]);
5926    for (i = 1; i < 8; ++i) {
5927        cpu_regs[i] = tcg_global_mem_new(cpu_env,
5928                                         offsetof(CPUSPARCState, gregs[i]),
5929                                         gregnames[i]);
5930    }
5931
5932    for (i = 8; i < 32; ++i) {
5933        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5934                                         (i - 8) * sizeof(target_ulong),
5935                                         gregnames[i]);
5936    }
5937
5938    for (i = 0; i < TARGET_DPREGS; i++) {
5939        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5940                                            offsetof(CPUSPARCState, fpr[i]),
5941                                            fregnames[i]);
5942    }
5943}
5944
5945void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5946                          target_ulong *data)
5947{
5948    target_ulong pc = data[0];
5949    target_ulong npc = data[1];
5950
5951    env->pc = pc;
5952    if (npc == DYNAMIC_PC) {
5953        /* dynamic NPC: already stored */
5954    } else if (npc & JUMP_PC) {
5955        /* jump PC: use 'cond' and the jump targets of the translation */
5956        if (env->cond) {
5957            env->npc = npc & ~3;
5958        } else {
5959            env->npc = pc + 4;
5960        }
5961    } else {
5962        env->npc = npc;
5963    }
5964}
5965