/* qemu/target/sparc/translate.c */
   1/*
   2   SPARC translation
   3
   4   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   5   Copyright (C) 2003-2005 Fabrice Bellard
   6
   7   This library is free software; you can redistribute it and/or
   8   modify it under the terms of the GNU Lesser General Public
   9   License as published by the Free Software Foundation; either
  10   version 2.1 of the License, or (at your option) any later version.
  11
  12   This library is distributed in the hope that it will be useful,
  13   but WITHOUT ANY WARRANTY; without even the implied warranty of
  14   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  15   Lesser General Public License for more details.
  16
  17   You should have received a copy of the GNU Lesser General Public
  18   License along with this library; if not, see <http://www.gnu.org/licenses/>.
  19 */
  20
  21#include "qemu/osdep.h"
  22
  23#include "cpu.h"
  24#include "disas/disas.h"
  25#include "exec/helper-proto.h"
  26#include "exec/exec-all.h"
  27#include "tcg/tcg-op.h"
  28#include "exec/cpu_ldst.h"
  29
  30#include "exec/helper-gen.h"
  31
  32#include "exec/translator.h"
  33#include "exec/log.h"
  34#include "asi.h"
  35
  36
  37#define DEBUG_DISAS
  38
  39#define DYNAMIC_PC  1 /* dynamic pc value */
  40#define JUMP_PC     2 /* dynamic pc value which takes only two values
  41                         according to jump_pc[T2] */
  42
  43#define DISAS_EXIT  DISAS_TARGET_0
  44
/* global register indexes */
static TCGv_ptr cpu_regwptr;
/* Inputs and result of the last flags-setting operation (lazy flags). */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;      /* current CC_OP_* value (mirrors dc->cc_op) */
static TCGv_i32 cpu_psr;        /* PSR condition-code bits */
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];       /* canonical TCG globals for the visible GPRs */
static TCGv cpu_y;              /* Y register (multiply step / mul high word) */
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;           /* condition value of a pending branch */
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers: two single-precision values packed per i64 */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
  67
  68#include "exec/gen-icount.h"
  69
/* Per-translation-block disassembly state. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;        /* MMU index used for memory accesses */
    bool fpu_enabled;   /* FPU is usable in the current context */
    bool address_mask_32bit; /* sparc64: mask addresses to 32 bits (see AM_CHECK) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;    /* translating supervisor-mode code */
#ifdef TARGET_SPARC64
    bool hypervisor;    /* translating hypervisor-mode code */
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
    /* Scratch temporaries handed out by get_temp_i32()/get_temp_tl();
       tracked here so they can be released after the instruction. */
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;       /* number of live entries in t32[] */
    int n_ttl;       /* number of live entries in ttl[] */
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;         /* current ASI value (managed elsewhere in this file) */
#endif
} DisasContext;
  96
/* A condition reduced to a host comparison: cond(c1, c2).
   is_bool: presumably c1 already holds a 0/1 value (confirm at users).
   g1/g2: c1/c2 are TCG globals, so free_compare() must not free them. */
typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
 103
/* Extract instruction bits FROM..TO using non-native bit order
   (bit 0 is the most significant bit). */
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

/* Extract using the bit order of the manuals, i.e. bit 0 is 2^0. */
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map a double/quad FP register number to its cpu_fpr[] encoding;
   V9 encodes the high register-number bit in bit 0. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
 125
 126static int sign_extend(int x, int len)
 127{
 128    len = 32 - len;
 129    return (x << len) >> len;
 130}
 131
 132#define IS_IMM (insn & (1<<13))
 133
/* Allocate a 32-bit scratch temporary and record it in the context's
   t32[] list so it can be released after the current instruction. */
static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}

/* As above, for a target_ulong-sized temporary tracked in ttl[]. */
static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}
 149
/* Mark the FP register half containing RD dirty in FPRS (sparc64 only;
   a no-op for sparc32).  Value 1 covers regs below 32, value 2 the rest. */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
 162
 163/* floating point registers moves */
/* floating point registers moves */
/* Return a 32-bit value for single-precision FP register SRC.  Singles
   are packed two per i64 in cpu_fpr[]: odd numbers in the low half,
   even numbers in the high half.  On 32-bit hosts the live half of the
   i64 is returned directly; otherwise it is copied into a scratch. */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    TCGv_i32 ret = get_temp_i32(dc);
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
#endif
}
 182
/* Store V into single-precision FP register DST, writing only the
   half of the backing i64 that DST occupies, and mark FPRS dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* Reinterpret the i32 temp as i64 so it can feed deposit;
       even registers land in bits 32..63, odd ones in bits 0..31. */
    TCGv_i64 t = (TCGv_i64)v;
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
 198
/* Return a scratch i32 to receive a single-precision FP result. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
 203
 204static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
 205{
 206    src = DFPREG(src);
 207    return cpu_fpr[src / 2];
 208}
 209
/* Store V into double-precision FP register DST and mark FPRS dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}

/* Return the i64 global that will receive double-precision result DST.
   The caller is expected to mark FPRS dirty when it stores. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
 221
/* Copy quad FP register SRC (two i64 halves) into env->qt0, the
   scratch quad operand used by quad-precision helpers. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* As above, into the second scratch quad operand env->qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy the quad result left in env->qt0 back into FP register DST. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
 245
/* Store the quad value (V1 = upper i64, V2 = lower i64) into quad FP
   register DST and mark FPRS dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
 255
 256#ifdef TARGET_SPARC64
/* Return the upper i64 half of quad FP register SRC. */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the lower i64 half of quad FP register SRC. */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad FP register RS to RD and mark FPRS dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
 278#endif
 279
 280/* moves */
 281#ifdef CONFIG_USER_ONLY
 282#define supervisor(dc) 0
 283#ifdef TARGET_SPARC64
 284#define hypervisor(dc) 0
 285#endif
 286#else
 287#ifdef TARGET_SPARC64
 288#define hypervisor(dc) (dc->hypervisor)
 289#define supervisor(dc) (dc->supervisor | dc->hypervisor)
 290#else
 291#define supervisor(dc) (dc->supervisor)
 292#endif
 293#endif
 294
 295#ifdef TARGET_SPARC64
 296#ifndef TARGET_ABI32
 297#define AM_CHECK(dc) ((dc)->address_mask_32bit)
 298#else
 299#define AM_CHECK(dc) (1)
 300#endif
 301#endif
 302
 303static inline void gen_address_mask(DisasContext *dc, TCGv addr)
 304{
 305#ifdef TARGET_SPARC64
 306    if (AM_CHECK(dc))
 307        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
 308#endif
 309}
 310
 311static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
 312{
 313    if (reg > 0) {
 314        assert(reg < 32);
 315        return cpu_regs[reg];
 316    } else {
 317        TCGv t = get_temp_tl(dc);
 318        tcg_gen_movi_tl(t, 0);
 319        return t;
 320    }
 321}
 322
 323static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
 324{
 325    if (reg > 0) {
 326        assert(reg < 32);
 327        tcg_gen_mov_tl(cpu_regs[reg], v);
 328    }
 329}
 330
 331static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
 332{
 333    if (reg > 0) {
 334        assert(reg < 32);
 335        return cpu_regs[reg];
 336    } else {
 337        return get_temp_tl(dc);
 338    }
 339}
 340
 341static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
 342{
 343    return translator_use_goto_tb(&s->base, pc) &&
 344           translator_use_goto_tb(&s->base, npc);
 345}
 346
/* End the TB jumping to PC/NPC, chaining directly to the next TB as
   slot TB_NUM when allowed, otherwise exiting to the main loop. */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(NULL, 0);
    }
}
 363
// XXX suboptimal
/* Each helper below extracts one PSR condition-code bit from SRC
   into REG as a 0/1 value of target width. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
 388
/* dst = src1 + src2, recording operands and result in cpu_cc_* so the
   flags can be computed lazily later (CC_OP_ADD set by the caller). */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 396
/* Recover the 32-bit carry-out of a previous CC_OP_ADD-style operation
   from cpu_cc_dst/cpu_cc_src.  Returns a fresh i32 temp holding 0 or 1;
   the caller owns (and frees) it. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
 422
/* Recover the 32-bit borrow-out of a previous CC_OP_SUB-style operation
   from cpu_cc_src/cpu_cc_src2.  Returns a fresh i32 temp holding 0 or 1;
   the caller owns (and frees) it. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
 448
/* Generate ADDX: dst = src1 + src2 + icc.C, optionally recording the
   operation for lazy flags (CC_OP_ADDX).  The carry-in is derived as
   cheaply as dc->cc_op allows, falling back to the helper. */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to target size when needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
 519
/* dst = src1 - src2, recording operands and result in cpu_cc_* so the
   flags can be computed lazily later (CC_OP_SUB set by the caller). */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 527
/* Generate SUBX: dst = src1 - src2 - icc.C, optionally recording the
   operation for lazy flags (CC_OP_SUBX).  Mirrors gen_op_addx_int. */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to target size when needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
 598
/* Generate MULScc (multiply step): one iteration of the SPARC V8
   shift-and-add multiply, updating Y and the cpu_cc_* lazy-flag state.
   The caller is responsible for setting the matching CC_OP. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 640
/* 32x32 -> 64 multiply: low 32 bits of the product go to DST (full
   64-bit product on 64-bit targets), high 32 bits go to Y.  SIGN_EXT
   selects signed vs unsigned extension of the 32-bit operands. */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
 668
/* UMUL: unsigned 32x32 multiply via gen_op_multiply. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply via gen_op_multiply. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
 680
 681// 1
 682static inline void gen_op_eval_ba(TCGv dst)
 683{
 684    tcg_gen_movi_tl(dst, 1);
 685}
 686
 687// Z
 688static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
 689{
 690    gen_mov_reg_Z(dst, src);
 691}
 692
 693// Z | (N ^ V)
 694static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
 695{
 696    TCGv t0 = tcg_temp_new();
 697    gen_mov_reg_N(t0, src);
 698    gen_mov_reg_V(dst, src);
 699    tcg_gen_xor_tl(dst, dst, t0);
 700    gen_mov_reg_Z(t0, src);
 701    tcg_gen_or_tl(dst, dst, t0);
 702    tcg_temp_free(t0);
 703}
 704
 705// N ^ V
 706static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
 707{
 708    TCGv t0 = tcg_temp_new();
 709    gen_mov_reg_V(t0, src);
 710    gen_mov_reg_N(dst, src);
 711    tcg_gen_xor_tl(dst, dst, t0);
 712    tcg_temp_free(t0);
 713}
 714
 715// C | Z
 716static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
 717{
 718    TCGv t0 = tcg_temp_new();
 719    gen_mov_reg_Z(t0, src);
 720    gen_mov_reg_C(dst, src);
 721    tcg_gen_or_tl(dst, dst, t0);
 722    tcg_temp_free(t0);
 723}
 724
 725// C
 726static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
 727{
 728    gen_mov_reg_C(dst, src);
 729}
 730
 731// V
 732static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
 733{
 734    gen_mov_reg_V(dst, src);
 735}
 736
 737// 0
 738static inline void gen_op_eval_bn(TCGv dst)
 739{
 740    tcg_gen_movi_tl(dst, 0);
 741}
 742
 743// N
 744static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
 745{
 746    gen_mov_reg_N(dst, src);
 747}
 748
 749// !Z
 750static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
 751{
 752    gen_mov_reg_Z(dst, src);
 753    tcg_gen_xori_tl(dst, dst, 0x1);
 754}
 755
 756// !(Z | (N ^ V))
 757static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
 758{
 759    gen_op_eval_ble(dst, src);
 760    tcg_gen_xori_tl(dst, dst, 0x1);
 761}
 762
 763// !(N ^ V)
 764static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
 765{
 766    gen_op_eval_bl(dst, src);
 767    tcg_gen_xori_tl(dst, dst, 0x1);
 768}
 769
 770// !(C | Z)
 771static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
 772{
 773    gen_op_eval_bleu(dst, src);
 774    tcg_gen_xori_tl(dst, dst, 0x1);
 775}
 776
 777// !C
 778static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
 779{
 780    gen_mov_reg_C(dst, src);
 781    tcg_gen_xori_tl(dst, dst, 0x1);
 782}
 783
 784// !N
 785static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
 786{
 787    gen_mov_reg_N(dst, src);
 788    tcg_gen_xori_tl(dst, dst, 0x1);
 789}
 790
 791// !V
 792static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
 793{
 794    gen_mov_reg_V(dst, src);
 795    tcg_gen_xori_tl(dst, dst, 0x1);
 796}
 797
 798/*
 799  FPSR bit field FCC1 | FCC0:
 800   0 =
 801   1 <
 802   2 >
 803   3 unordered
 804*/
 805static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
 806                                    unsigned int fcc_offset)
 807{
 808    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
 809    tcg_gen_andi_tl(reg, reg, 0x1);
 810}
 811
 812static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
 813                                    unsigned int fcc_offset)
 814{
 815    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
 816    tcg_gen_andi_tl(reg, reg, 0x1);
 817}
 818
 819// !0: FCC0 | FCC1
 820static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
 821                                    unsigned int fcc_offset)
 822{
 823    TCGv t0 = tcg_temp_new();
 824    gen_mov_reg_FCC0(dst, src, fcc_offset);
 825    gen_mov_reg_FCC1(t0, src, fcc_offset);
 826    tcg_gen_or_tl(dst, dst, t0);
 827    tcg_temp_free(t0);
 828}
 829
 830// 1 or 2: FCC0 ^ FCC1
 831static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
 832                                    unsigned int fcc_offset)
 833{
 834    TCGv t0 = tcg_temp_new();
 835    gen_mov_reg_FCC0(dst, src, fcc_offset);
 836    gen_mov_reg_FCC1(t0, src, fcc_offset);
 837    tcg_gen_xor_tl(dst, dst, t0);
 838    tcg_temp_free(t0);
 839}
 840
 841// 1 or 3: FCC0
 842static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
 843                                    unsigned int fcc_offset)
 844{
 845    gen_mov_reg_FCC0(dst, src, fcc_offset);
 846}
 847
 848// 1: FCC0 & !FCC1
 849static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
 850                                    unsigned int fcc_offset)
 851{
 852    TCGv t0 = tcg_temp_new();
 853    gen_mov_reg_FCC0(dst, src, fcc_offset);
 854    gen_mov_reg_FCC1(t0, src, fcc_offset);
 855    tcg_gen_andc_tl(dst, dst, t0);
 856    tcg_temp_free(t0);
 857}
 858
 859// 2 or 3: FCC1
 860static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
 861                                    unsigned int fcc_offset)
 862{
 863    gen_mov_reg_FCC1(dst, src, fcc_offset);
 864}
 865
 866// 2: !FCC0 & FCC1
 867static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
 868                                    unsigned int fcc_offset)
 869{
 870    TCGv t0 = tcg_temp_new();
 871    gen_mov_reg_FCC0(dst, src, fcc_offset);
 872    gen_mov_reg_FCC1(t0, src, fcc_offset);
 873    tcg_gen_andc_tl(dst, t0, dst);
 874    tcg_temp_free(t0);
 875}
 876
 877// 3: FCC0 & FCC1
 878static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
 879                                    unsigned int fcc_offset)
 880{
 881    TCGv t0 = tcg_temp_new();
 882    gen_mov_reg_FCC0(dst, src, fcc_offset);
 883    gen_mov_reg_FCC1(t0, src, fcc_offset);
 884    tcg_gen_and_tl(dst, dst, t0);
 885    tcg_temp_free(t0);
 886}
 887
 888// 0: !(FCC0 | FCC1)
 889static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
 890                                    unsigned int fcc_offset)
 891{
 892    TCGv t0 = tcg_temp_new();
 893    gen_mov_reg_FCC0(dst, src, fcc_offset);
 894    gen_mov_reg_FCC1(t0, src, fcc_offset);
 895    tcg_gen_or_tl(dst, dst, t0);
 896    tcg_gen_xori_tl(dst, dst, 0x1);
 897    tcg_temp_free(t0);
 898}
 899
 900// 0 or 3: !(FCC0 ^ FCC1)
 901static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
 902                                    unsigned int fcc_offset)
 903{
 904    TCGv t0 = tcg_temp_new();
 905    gen_mov_reg_FCC0(dst, src, fcc_offset);
 906    gen_mov_reg_FCC1(t0, src, fcc_offset);
 907    tcg_gen_xor_tl(dst, dst, t0);
 908    tcg_gen_xori_tl(dst, dst, 0x1);
 909    tcg_temp_free(t0);
 910}
 911
 912// 0 or 2: !FCC0
 913static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
 914                                    unsigned int fcc_offset)
 915{
 916    gen_mov_reg_FCC0(dst, src, fcc_offset);
 917    tcg_gen_xori_tl(dst, dst, 0x1);
 918}
 919
 920// !1: !(FCC0 & !FCC1)
 921static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
 922                                    unsigned int fcc_offset)
 923{
 924    TCGv t0 = tcg_temp_new();
 925    gen_mov_reg_FCC0(dst, src, fcc_offset);
 926    gen_mov_reg_FCC1(t0, src, fcc_offset);
 927    tcg_gen_andc_tl(dst, dst, t0);
 928    tcg_gen_xori_tl(dst, dst, 0x1);
 929    tcg_temp_free(t0);
 930}
 931
 932// 0 or 1: !FCC1
 933static inline void gen_op_eval_fble(TCGv dst, TCGv src,
 934                                    unsigned int fcc_offset)
 935{
 936    gen_mov_reg_FCC1(dst, src, fcc_offset);
 937    tcg_gen_xori_tl(dst, dst, 0x1);
 938}
 939
 940// !2: !(!FCC0 & FCC1)
 941static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
 942                                    unsigned int fcc_offset)
 943{
 944    TCGv t0 = tcg_temp_new();
 945    gen_mov_reg_FCC0(dst, src, fcc_offset);
 946    gen_mov_reg_FCC1(t0, src, fcc_offset);
 947    tcg_gen_andc_tl(dst, t0, dst);
 948    tcg_gen_xori_tl(dst, dst, 0x1);
 949    tcg_temp_free(t0);
 950}
 951
 952// !3: !(FCC0 & FCC1)
 953static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
 954                                    unsigned int fcc_offset)
 955{
 956    TCGv t0 = tcg_temp_new();
 957    gen_mov_reg_FCC0(dst, src, fcc_offset);
 958    gen_mov_reg_FCC1(t0, src, fcc_offset);
 959    tcg_gen_and_tl(dst, dst, t0);
 960    tcg_gen_xori_tl(dst, dst, 0x1);
 961    tcg_temp_free(t0);
 962}
 963
/* Emit a two-way TB exit: go to PC1 when R_COND is non-zero, else PC2;
   each side continues at its own delay-slot address (pc + 4). */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
 976
/* Conditional branch with the annul bit set: when cpu_cond is true,
   execute the delay slot (npc) then go to PC1; when false, skip the
   delay slot entirely (npc + 4).  Ends the TB. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
 991
/* Conditional branch without annulment: defer the choice of next-PC.
   When npc is static, record both candidates in jump_pc[] and mark
   npc as JUMP_PC so translation of the delay slot can continue; when
   npc is already dynamic, resolve the choice with a movcond now. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;          /* taken target */
        dc->jump_pc[1] = npc + 4;      /* fall-through */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1016
/* Materialize a pending JUMP_PC: select cpu_npc from jump_pc[0]/[1]
   according to the previously computed cpu_cond. */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1029
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the deferred branch so cpu_cond can be reused. */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1039
/* Make cpu_npc hold the correct next-PC value: resolve a deferred
   JUMP_PC, or store a static npc; nothing to do if already dynamic. */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1049
/* Force the lazily-tracked condition codes into env (CC_OP_FLAGS)
   so the PSR value is up to date. */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1057
/* Write the current pc and npc back to the CPU state (e.g. before
   raising an exception or calling a helper that may trap). */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1063
/* Raise exception WHICH at the current instruction: synchronize
   pc/npc, call the raise helper, and end the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->base.is_jmp = DISAS_NORETURN;
}
1074
/* Emit a runtime alignment check of ADDR against MASK; the helper
   raises an exception on misalignment (see helper_check_align). */
static void gen_check_align(TCGv addr, int mask)
{
    TCGv_i32 r_mask = tcg_const_i32(mask);
    gen_helper_check_align(cpu_env, addr, r_mask);
    tcg_temp_free_i32(r_mask);
}
1081
/* Advance pc to npc, resolving a deferred JUMP_PC first; tracks
   whether the resulting pc is static or dynamic. */
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1095
/* Fall through to the next instruction: pc <- npc, npc <- npc + 4.
   cpu_pc must be written before cpu_npc is advanced.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1101
/* Release the comparison operands that gen_compare/gen_fcompare
   allocated; g1/g2 mark operands that alias global TCG values and
   therefore must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1111
/* Fill in *cmp to describe integer condition 'cond' evaluated against
   the %icc (xcc=false) or %xcc (xcc=true) condition codes.  Where the
   lazily-tracked cc_op allows it, the comparison is expressed directly
   on the cached cc_src/cc_dst operands instead of computing the flags;
   otherwise the flags are materialized and a boolean is produced.  */
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    /* TCG condition equivalent to each SPARC condition when the flags
       came from a subcc: compare cc_src against cc_src2 directly.
       -1 entries cannot be expressed this way and are handled in the
       CC_OP_SUB case below.  */
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    /* TCG condition on cc_dst (the logic-op result) for each SPARC
       condition when the flags came from a logic op: C and V are then
       known to be zero, which is why e.g. ltu collapses to NEVER.  */
    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        /* Common tail: compare cc_dst against constant zero.  */
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            /* For %icc only the low 32 bits are significant.  */
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            /* N is just the sign of the subtraction result.  */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            /* V cannot be recovered from the operands alone.  */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        /* No shortcut available: compute the flags for real, then
           evaluate the condition on them below.  */
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
1285
/* Fill in *cmp for floating-point condition 'cond' on fcc field 'cc'
   of the FSR.  Always produces a boolean result in a fresh temp.  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Extra shift applied by the eval helpers to reach the selected
       fcc field: fcc0 sits at bit 10, fcc1/2/3 at bits 32/34/36,
       hence the "N - 10" adjustments relative to fcc0.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1365
1366static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1367                     DisasContext *dc)
1368{
1369    DisasCompare cmp;
1370    gen_compare(&cmp, cc, cond, dc);
1371
1372    /* The interface is to return a boolean in r_dst.  */
1373    if (cmp.is_bool) {
1374        tcg_gen_mov_tl(r_dst, cmp.c1);
1375    } else {
1376        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1377    }
1378
1379    free_compare(&cmp);
1380}
1381
1382static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1383{
1384    DisasCompare cmp;
1385    gen_fcompare(&cmp, cc, cond);
1386
1387    /* The interface is to return a boolean in r_dst.  */
1388    if (cmp.is_bool) {
1389        tcg_gen_mov_tl(r_dst, cmp.c1);
1390    } else {
1391        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1392    }
1393
1394    free_compare(&cmp);
1395}
1396
1397#ifdef TARGET_SPARC64
/* TCG conditions for the register-compare branch/move encodings.
   Each entry is the INVERSE of the architectural condition (users
   apply tcg_invert_cond to get the real one).  Slots 0 and 4 are
   reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1409
/* Fill in *cmp to compare register r_src against zero according to
   register-branch condition 'cond'.  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;   /* c1 aliases the caller's register; don't free */
    cmp->g2 = false;  /* c2 is our own constant temp */
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1419
/* Evaluate register condition 'cond' on r_src and leave a boolean
   (0/1) value in r_dst.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1430#endif
1431
/* Translate an integer conditional branch.  'offset' is the already
   sign-extended and scaled displacement, 'cc' selects icc vs xcc,
   and bit 29 of insn is the annul bit.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: the delay slot is not executed.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* dc->pc may have inherited a dynamic npc; keep cpu_pc live.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate into cpu_cond and emit the
           two-way continuation.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1471
/* Translate a floating-point conditional branch; same structure as
   do_branch but the condition comes from fcc field 'cc'.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: the delay slot is not executed.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* dc->pc may have inherited a dynamic npc; keep cpu_pc live.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate into cpu_cond and emit the
           two-way continuation.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1511
1512#ifdef TARGET_SPARC64
/* Translate a branch-on-register-condition (BPr).  Unlike Bicc, the
   condition tests r_reg directly; there is no unconditional form.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1530
/* Single-precision FP compare; the helper stores the result into the
   FSR, in the fcc field selected by fccno.  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1548
/* Double-precision FP compare into fcc[fccno] of the FSR.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1566
/* Quad-precision FP compare into fcc[fccno]; the operands are taken
   from the QT0/QT1 temporaries loaded by the caller.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1584
/* Single-precision FCMPEs ("signaling" compare variant; exact trap
   semantics live in the helper) into fcc[fccno].  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1602
/* Double-precision FCMPEd variant into fcc[fccno].  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1620
/* Quad-precision FCMPEq variant into fcc[fccno]; operands come from
   the QT0/QT1 temporaries loaded by the caller.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1638
1639#else
1640
/* Pre-v9 there is only one fcc field, so fccno is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1645
/* Pre-v9 double compare; fccno is ignored (single fcc field).  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1650
/* Pre-v9 quad compare on QT0/QT1; fccno is ignored.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1655
/* Pre-v9 FCMPEs variant; fccno is ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1660
/* Pre-v9 FCMPEd variant; fccno is ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1665
/* Pre-v9 FCMPEq variant on QT0/QT1; fccno is ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1670#endif
1671
/* Replace the FSR ftt field with fsr_flags, then raise an FP trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1678
/* If the FPU is disabled, raise an fp-disabled trap and return 1 so
   the caller abandons the instruction; return 0 otherwise.  In
   user-only builds the FPU is always considered enabled.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1689
/* Clear the FSR ftt and current-exception (cexc) fields before an
   FP operation that will set them afresh.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1694
1695static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1696                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1697{
1698    TCGv_i32 dst, src;
1699
1700    src = gen_load_fpr_F(dc, rs);
1701    dst = gen_dest_fpr_F(dc);
1702
1703    gen(dst, cpu_env, src);
1704    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1705
1706    gen_store_fpr_F(dc, rd, dst);
1707}
1708
1709static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1710                                 void (*gen)(TCGv_i32, TCGv_i32))
1711{
1712    TCGv_i32 dst, src;
1713
1714    src = gen_load_fpr_F(dc, rs);
1715    dst = gen_dest_fpr_F(dc);
1716
1717    gen(dst, src);
1718
1719    gen_store_fpr_F(dc, rd, dst);
1720}
1721
1722static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1723                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1724{
1725    TCGv_i32 dst, src1, src2;
1726
1727    src1 = gen_load_fpr_F(dc, rs1);
1728    src2 = gen_load_fpr_F(dc, rs2);
1729    dst = gen_dest_fpr_F(dc);
1730
1731    gen(dst, cpu_env, src1, src2);
1732    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1733
1734    gen_store_fpr_F(dc, rd, dst);
1735}
1736
1737#ifdef TARGET_SPARC64
1738static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1739                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1740{
1741    TCGv_i32 dst, src1, src2;
1742
1743    src1 = gen_load_fpr_F(dc, rs1);
1744    src2 = gen_load_fpr_F(dc, rs2);
1745    dst = gen_dest_fpr_F(dc);
1746
1747    gen(dst, src1, src2);
1748
1749    gen_store_fpr_F(dc, rd, dst);
1750}
1751#endif
1752
1753static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1754                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1755{
1756    TCGv_i64 dst, src;
1757
1758    src = gen_load_fpr_D(dc, rs);
1759    dst = gen_dest_fpr_D(dc, rd);
1760
1761    gen(dst, cpu_env, src);
1762    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1763
1764    gen_store_fpr_D(dc, rd, dst);
1765}
1766
1767#ifdef TARGET_SPARC64
1768static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1769                                 void (*gen)(TCGv_i64, TCGv_i64))
1770{
1771    TCGv_i64 dst, src;
1772
1773    src = gen_load_fpr_D(dc, rs);
1774    dst = gen_dest_fpr_D(dc, rd);
1775
1776    gen(dst, src);
1777
1778    gen_store_fpr_D(dc, rd, dst);
1779}
1780#endif
1781
1782static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1783                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1784{
1785    TCGv_i64 dst, src1, src2;
1786
1787    src1 = gen_load_fpr_D(dc, rs1);
1788    src2 = gen_load_fpr_D(dc, rs2);
1789    dst = gen_dest_fpr_D(dc, rd);
1790
1791    gen(dst, cpu_env, src1, src2);
1792    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1793
1794    gen_store_fpr_D(dc, rd, dst);
1795}
1796
1797#ifdef TARGET_SPARC64
1798static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1799                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1800{
1801    TCGv_i64 dst, src1, src2;
1802
1803    src1 = gen_load_fpr_D(dc, rs1);
1804    src2 = gen_load_fpr_D(dc, rs2);
1805    dst = gen_dest_fpr_D(dc, rd);
1806
1807    gen(dst, src1, src2);
1808
1809    gen_store_fpr_D(dc, rd, dst);
1810}
1811
1812static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1813                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1814{
1815    TCGv_i64 dst, src1, src2;
1816
1817    src1 = gen_load_fpr_D(dc, rs1);
1818    src2 = gen_load_fpr_D(dc, rs2);
1819    dst = gen_dest_fpr_D(dc, rd);
1820
1821    gen(dst, cpu_gsr, src1, src2);
1822
1823    gen_store_fpr_D(dc, rd, dst);
1824}
1825
1826static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1827                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1828{
1829    TCGv_i64 dst, src0, src1, src2;
1830
1831    src1 = gen_load_fpr_D(dc, rs1);
1832    src2 = gen_load_fpr_D(dc, rs2);
1833    src0 = gen_load_fpr_D(dc, rd);
1834    dst = gen_dest_fpr_D(dc, rd);
1835
1836    gen(dst, src0, src1, src2);
1837
1838    gen_store_fpr_D(dc, rd, dst);
1839}
1840#endif
1841
/* Quad-precision unary op with IEEE exception checking: the operand
   is staged through QT1 and the result through QT0.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1853
1854#ifdef TARGET_SPARC64
/* Quad-precision unary op that cannot raise IEEE exceptions;
   operand via QT1, result via QT0.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1865#endif
1866
/* Quad-precision binary op with IEEE exception checking; operands
   staged through QT0/QT1, result returned in QT0.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1879
1880static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1881                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1882{
1883    TCGv_i64 dst;
1884    TCGv_i32 src1, src2;
1885
1886    src1 = gen_load_fpr_F(dc, rs1);
1887    src2 = gen_load_fpr_F(dc, rs2);
1888    dst = gen_dest_fpr_D(dc, rd);
1889
1890    gen(dst, cpu_env, src1, src2);
1891    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1892
1893    gen_store_fpr_D(dc, rd, dst);
1894}
1895
1896static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1897                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1898{
1899    TCGv_i64 src1, src2;
1900
1901    src1 = gen_load_fpr_D(dc, rs1);
1902    src2 = gen_load_fpr_D(dc, rs2);
1903
1904    gen(cpu_env, src1, src2);
1905    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1906
1907    gen_op_store_QT0_fpr(QFPREG(rd));
1908    gen_update_fprs_dirty(dc, QFPREG(rd));
1909}
1910
1911#ifdef TARGET_SPARC64
1912static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1913                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1914{
1915    TCGv_i64 dst;
1916    TCGv_i32 src;
1917
1918    src = gen_load_fpr_F(dc, rs);
1919    dst = gen_dest_fpr_D(dc, rd);
1920
1921    gen(dst, cpu_env, src);
1922    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1923
1924    gen_store_fpr_D(dc, rd, dst);
1925}
1926#endif
1927
1928static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1929                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1930{
1931    TCGv_i64 dst;
1932    TCGv_i32 src;
1933
1934    src = gen_load_fpr_F(dc, rs);
1935    dst = gen_dest_fpr_D(dc, rd);
1936
1937    gen(dst, cpu_env, src);
1938
1939    gen_store_fpr_D(dc, rd, dst);
1940}
1941
1942static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1943                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1944{
1945    TCGv_i32 dst;
1946    TCGv_i64 src;
1947
1948    src = gen_load_fpr_D(dc, rs);
1949    dst = gen_dest_fpr_F(dc);
1950
1951    gen(dst, cpu_env, src);
1952    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1953
1954    gen_store_fpr_F(dc, rd, dst);
1955}
1956
1957static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1958                              void (*gen)(TCGv_i32, TCGv_ptr))
1959{
1960    TCGv_i32 dst;
1961
1962    gen_op_load_fpr_QT1(QFPREG(rs));
1963    dst = gen_dest_fpr_F(dc);
1964
1965    gen(dst, cpu_env);
1966    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1967
1968    gen_store_fpr_F(dc, rd, dst);
1969}
1970
1971static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1972                              void (*gen)(TCGv_i64, TCGv_ptr))
1973{
1974    TCGv_i64 dst;
1975
1976    gen_op_load_fpr_QT1(QFPREG(rs));
1977    dst = gen_dest_fpr_D(dc, rd);
1978
1979    gen(dst, cpu_env);
1980    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1981
1982    gen_store_fpr_D(dc, rd, dst);
1983}
1984
1985static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1986                                 void (*gen)(TCGv_ptr, TCGv_i32))
1987{
1988    TCGv_i32 src;
1989
1990    src = gen_load_fpr_F(dc, rs);
1991
1992    gen(cpu_env, src);
1993
1994    gen_op_store_QT0_fpr(QFPREG(rd));
1995    gen_update_fprs_dirty(dc, QFPREG(rd));
1996}
1997
1998static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1999                                 void (*gen)(TCGv_ptr, TCGv_i64))
2000{
2001    TCGv_i64 src;
2002
2003    src = gen_load_fpr_D(dc, rs);
2004
2005    gen(cpu_env, src);
2006
2007    gen_op_store_QT0_fpr(QFPREG(rd));
2008    gen_update_fprs_dirty(dc, QFPREG(rd));
2009}
2010
/* SWAP: atomically exchange src with the memory value at addr,
   leaving the old memory value in dst.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2017
2018static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
2019{
2020    TCGv m1 = tcg_const_tl(0xff);
2021    gen_address_mask(dc, addr);
2022    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
2023    tcg_temp_free(m1);
2024}
2025
2026/* asi moves */
2027#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How a decoded ASI access should be performed.  */
typedef enum {
    GET_ASI_HELPER,  /* no inline path; fall back to the asi helpers */
    GET_ASI_EXCP,    /* an exception was raised; abandon the access */
    GET_ASI_DIRECT,  /* plain access using the selected mem_idx */
    GET_ASI_DTWINX,  /* twin/quad (ASI_TWINX_* / *_QUAD_LDD_*) access */
    GET_ASI_BLOCK,   /* ASI_BLK_* block transfer (handled by callers) */
    GET_ASI_SHORT,   /* ASI_FL8/FL16_* short FP access (handled by callers) */
    GET_ASI_BCOPY,   /* sparc32 ASI_M_BCOPY block copy via sta */
    GET_ASI_BFILL,   /* sparc32 ASI_M_BFILL block fill via stda */
} ASIType;
2038
/* Result of decoding an ASI for a load/store: dispatch type, the
   resolved ASI number, the mmu index, and the memory operation.  */
typedef struct {
    ASIType type;
    int asi;       /* resolved ASI number */
    int mem_idx;   /* mmu index to use for the access */
    MemOp memop;   /* size/sign/endianness of the access */
} DisasASI;
2045
2046static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
2047{
2048    int asi = GET_FIELD(insn, 19, 26);
2049    ASIType type = GET_ASI_HELPER;
2050    int mem_idx = dc->mem_idx;
2051
2052#ifndef TARGET_SPARC64
2053    /* Before v9, all asis are immediate and privileged.  */
2054    if (IS_IMM) {
2055        gen_exception(dc, TT_ILL_INSN);
2056        type = GET_ASI_EXCP;
2057    } else if (supervisor(dc)
2058               /* Note that LEON accepts ASI_USERDATA in user mode, for
2059                  use with CASA.  Also note that previous versions of
2060                  QEMU allowed (and old versions of gcc emitted) ASI_P
2061                  for LEON, which is incorrect.  */
2062               || (asi == ASI_USERDATA
2063                   && (dc->def->features & CPU_FEATURE_CASA))) {
2064        switch (asi) {
2065        case ASI_USERDATA:   /* User data access */
2066            mem_idx = MMU_USER_IDX;
2067            type = GET_ASI_DIRECT;
2068            break;
2069        case ASI_KERNELDATA: /* Supervisor data access */
2070            mem_idx = MMU_KERNEL_IDX;
2071            type = GET_ASI_DIRECT;
2072            break;
2073        case ASI_M_BYPASS:    /* MMU passthrough */
2074        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2075            mem_idx = MMU_PHYS_IDX;
2076            type = GET_ASI_DIRECT;
2077            break;
2078        case ASI_M_BCOPY: /* Block copy, sta access */
2079            mem_idx = MMU_KERNEL_IDX;
2080            type = GET_ASI_BCOPY;
2081            break;
2082        case ASI_M_BFILL: /* Block fill, stda access */
2083            mem_idx = MMU_KERNEL_IDX;
2084            type = GET_ASI_BFILL;
2085            break;
2086        }
2087
2088        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
2089         * permissions check in get_physical_address(..).
2090         */
2091        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
2092    } else {
2093        gen_exception(dc, TT_PRIV_INSN);
2094        type = GET_ASI_EXCP;
2095    }
2096#else
2097    if (IS_IMM) {
2098        asi = dc->asi;
2099    }
2100    /* With v9, all asis below 0x80 are privileged.  */
2101    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2102       down that bit into DisasContext.  For the moment that's ok,
2103       since the direct implementations below doesn't have any ASIs
2104       in the restricted [0x30, 0x7f] range, and the check will be
2105       done properly in the helper.  */
2106    if (!supervisor(dc) && asi < 0x80) {
2107        gen_exception(dc, TT_PRIV_ACT);
2108        type = GET_ASI_EXCP;
2109    } else {
2110        switch (asi) {
2111        case ASI_REAL:      /* Bypass */
2112        case ASI_REAL_IO:   /* Bypass, non-cacheable */
2113        case ASI_REAL_L:    /* Bypass LE */
2114        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2115        case ASI_TWINX_REAL:   /* Real address, twinx */
2116        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2117        case ASI_QUAD_LDD_PHYS:
2118        case ASI_QUAD_LDD_PHYS_L:
2119            mem_idx = MMU_PHYS_IDX;
2120            break;
2121        case ASI_N:  /* Nucleus */
2122        case ASI_NL: /* Nucleus LE */
2123        case ASI_TWINX_N:
2124        case ASI_TWINX_NL:
2125        case ASI_NUCLEUS_QUAD_LDD:
2126        case ASI_NUCLEUS_QUAD_LDD_L:
2127            if (hypervisor(dc)) {
2128                mem_idx = MMU_PHYS_IDX;
2129            } else {
2130                mem_idx = MMU_NUCLEUS_IDX;
2131            }
2132            break;
2133        case ASI_AIUP:  /* As if user primary */
2134        case ASI_AIUPL: /* As if user primary LE */
2135        case ASI_TWINX_AIUP:
2136        case ASI_TWINX_AIUP_L:
2137        case ASI_BLK_AIUP_4V:
2138        case ASI_BLK_AIUP_L_4V:
2139        case ASI_BLK_AIUP:
2140        case ASI_BLK_AIUPL:
2141            mem_idx = MMU_USER_IDX;
2142            break;
2143        case ASI_AIUS:  /* As if user secondary */
2144        case ASI_AIUSL: /* As if user secondary LE */
2145        case ASI_TWINX_AIUS:
2146        case ASI_TWINX_AIUS_L:
2147        case ASI_BLK_AIUS_4V:
2148        case ASI_BLK_AIUS_L_4V:
2149        case ASI_BLK_AIUS:
2150        case ASI_BLK_AIUSL:
2151            mem_idx = MMU_USER_SECONDARY_IDX;
2152            break;
2153        case ASI_S:  /* Secondary */
2154        case ASI_SL: /* Secondary LE */
2155        case ASI_TWINX_S:
2156        case ASI_TWINX_SL:
2157        case ASI_BLK_COMMIT_S:
2158        case ASI_BLK_S:
2159        case ASI_BLK_SL:
2160        case ASI_FL8_S:
2161        case ASI_FL8_SL:
2162        case ASI_FL16_S:
2163        case ASI_FL16_SL:
2164            if (mem_idx == MMU_USER_IDX) {
2165                mem_idx = MMU_USER_SECONDARY_IDX;
2166            } else if (mem_idx == MMU_KERNEL_IDX) {
2167                mem_idx = MMU_KERNEL_SECONDARY_IDX;
2168            }
2169            break;
2170        case ASI_P:  /* Primary */
2171        case ASI_PL: /* Primary LE */
2172        case ASI_TWINX_P:
2173        case ASI_TWINX_PL:
2174        case ASI_BLK_COMMIT_P:
2175        case ASI_BLK_P:
2176        case ASI_BLK_PL:
2177        case ASI_FL8_P:
2178        case ASI_FL8_PL:
2179        case ASI_FL16_P:
2180        case ASI_FL16_PL:
2181            break;
2182        }
2183        switch (asi) {
2184        case ASI_REAL:
2185        case ASI_REAL_IO:
2186        case ASI_REAL_L:
2187        case ASI_REAL_IO_L:
2188        case ASI_N:
2189        case ASI_NL:
2190        case ASI_AIUP:
2191        case ASI_AIUPL:
2192        case ASI_AIUS:
2193        case ASI_AIUSL:
2194        case ASI_S:
2195        case ASI_SL:
2196        case ASI_P:
2197        case ASI_PL:
2198            type = GET_ASI_DIRECT;
2199            break;
2200        case ASI_TWINX_REAL:
2201        case ASI_TWINX_REAL_L:
2202        case ASI_TWINX_N:
2203        case ASI_TWINX_NL:
2204        case ASI_TWINX_AIUP:
2205        case ASI_TWINX_AIUP_L:
2206        case ASI_TWINX_AIUS:
2207        case ASI_TWINX_AIUS_L:
2208        case ASI_TWINX_P:
2209        case ASI_TWINX_PL:
2210        case ASI_TWINX_S:
2211        case ASI_TWINX_SL:
2212        case ASI_QUAD_LDD_PHYS:
2213        case ASI_QUAD_LDD_PHYS_L:
2214        case ASI_NUCLEUS_QUAD_LDD:
2215        case ASI_NUCLEUS_QUAD_LDD_L:
2216            type = GET_ASI_DTWINX;
2217            break;
2218        case ASI_BLK_COMMIT_P:
2219        case ASI_BLK_COMMIT_S:
2220        case ASI_BLK_AIUP_4V:
2221        case ASI_BLK_AIUP_L_4V:
2222        case ASI_BLK_AIUP:
2223        case ASI_BLK_AIUPL:
2224        case ASI_BLK_AIUS_4V:
2225        case ASI_BLK_AIUS_L_4V:
2226        case ASI_BLK_AIUS:
2227        case ASI_BLK_AIUSL:
2228        case ASI_BLK_S:
2229        case ASI_BLK_SL:
2230        case ASI_BLK_P:
2231        case ASI_BLK_PL:
2232            type = GET_ASI_BLOCK;
2233            break;
2234        case ASI_FL8_S:
2235        case ASI_FL8_SL:
2236        case ASI_FL8_P:
2237        case ASI_FL8_PL:
2238            memop = MO_UB;
2239            type = GET_ASI_SHORT;
2240            break;
2241        case ASI_FL16_S:
2242        case ASI_FL16_SL:
2243        case ASI_FL16_P:
2244        case ASI_FL16_PL:
2245            memop = MO_TEUW;
2246            type = GET_ASI_SHORT;
2247            break;
2248        }
2249        /* The little-endian asis all have bit 3 set.  */
2250        if (asi & 8) {
2251            memop ^= MO_BSWAP;
2252        }
2253    }
2254#endif
2255
2256    return (DisasASI){ type, asi, mem_idx, memop };
2257}
2258
/*
 * Emit an integer load from an alternate address space (LDA et al).
 * DST receives the loaded value, ADDR is the effective address, INSN is
 * the raw instruction word (used to decode the ASI), and MEMOP gives the
 * size/sign/endianness of the access.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps to a plain MMU index; emit an inline load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; truncate into the 32-bit reg. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2296
/*
 * Emit an integer store to an alternate address space (STA et al).
 * SRC holds the value to store, ADDR the effective address, INSN the raw
 * instruction word (for ASI decode), MEMOP the size/endianness.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; zero-extend the reg.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2380
2381static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2382                         TCGv addr, int insn)
2383{
2384    DisasASI da = get_asi(dc, insn, MO_TEUL);
2385
2386    switch (da.type) {
2387    case GET_ASI_EXCP:
2388        break;
2389    case GET_ASI_DIRECT:
2390        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2391        break;
2392    default:
2393        /* ??? Should be DAE_invalid_asi.  */
2394        gen_exception(dc, TT_DATA_ACCESS);
2395        break;
2396    }
2397}
2398
2399static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2400                        int insn, int rd)
2401{
2402    DisasASI da = get_asi(dc, insn, MO_TEUL);
2403    TCGv oldv;
2404
2405    switch (da.type) {
2406    case GET_ASI_EXCP:
2407        return;
2408    case GET_ASI_DIRECT:
2409        oldv = tcg_temp_new();
2410        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2411                                  da.mem_idx, da.memop);
2412        gen_store_gpr(dc, rd, oldv);
2413        tcg_temp_free(oldv);
2414        break;
2415    default:
2416        /* ??? Should be DAE_invalid_asi.  */
2417        gen_exception(dc, TT_DATA_ACCESS);
2418        break;
2419    }
2420}
2421
/*
 * Emit an LDSTUBA: atomically load the byte at ADDR (in the alternate
 * space selected by INSN) into DST and store 0xff back to memory.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper sequence below is not atomic; bail out to a
               serialized context instead of emitting a racy ld+st pair.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2461#endif
2462
2463#ifdef TARGET_SPARC64
/*
 * Emit an FP load from an alternate address space (ldfa/lddfa/ldqfa).
 * SIZE is the access width in bytes (4, 8 or 16); RD is the FP register
 * number as encoded in the instruction.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Stage the first half in a temp: cpu_fpr[rd/2] is only
               written after both loads have been issued.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the inline case, delay the write to cpu_fpr[rd/2]
                   until both halves have been loaded.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2577
/*
 * Emit an FP store to an alternate address space (stfa/stdfa/stqfa).
 * SIZE is the access width in bytes (4, 8 or 16); RD is the FP register
 * number as encoded in the instruction.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2660
/*
 * Emit a V9 LDDA from an alternate address space: the result is split
 * across the register pair rd (hi) / rd+1 (lo).  The TWINX ASIs instead
 * load two full 64-bit words into the pair.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;

    case GET_ASI_DTWINX:
        /* 128-bit twin load: two 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2726
/*
 * Emit a V9 STDA to an alternate address space: store the register pair
 * rd (HI) / rd+1 (lo).  The TWINX ASIs store two full 64-bit words.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DTWINX:
        /* 128-bit twin store: two 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2786
2787static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2788                         int insn, int rd)
2789{
2790    DisasASI da = get_asi(dc, insn, MO_TEUQ);
2791    TCGv oldv;
2792
2793    switch (da.type) {
2794    case GET_ASI_EXCP:
2795        return;
2796    case GET_ASI_DIRECT:
2797        oldv = tcg_temp_new();
2798        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2799                                  da.mem_idx, da.memop);
2800        gen_store_gpr(dc, rd, oldv);
2801        tcg_temp_free(oldv);
2802        break;
2803    default:
2804        /* ??? Should be DAE_invalid_asi.  */
2805        gen_exception(dc, TT_DATA_ACCESS);
2806        break;
2807    }
2808}
2809
2810#elif !defined(CONFIG_USER_ONLY)
/*
 * Emit a v8 LDDA: load a 64-bit doubleword from the alternate space and
 * split it across the register pair rd (hi) / rd+1 (lo).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi raised an exception; free the temp before leaving.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2848
/*
 * Emit a v8 STDA: store the register pair rd (HI) / rd+1 (lo) as one
 * 64-bit doubleword to the alternate space selected by INSN.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        /* Unusual ASI: fall back to the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2901#endif
2902
2903static TCGv get_src1(DisasContext *dc, unsigned int insn)
2904{
2905    unsigned int rs1 = GET_FIELD(insn, 13, 17);
2906    return gen_load_gpr(dc, rs1);
2907}
2908
2909static TCGv get_src2(DisasContext *dc, unsigned int insn)
2910{
2911    if (IS_IMM) { /* immediate */
2912        target_long simm = GET_FIELDs(insn, 19, 31);
2913        TCGv t = get_temp_tl(dc);
2914        tcg_gen_movi_tl(t, simm);
2915        return t;
2916    } else {      /* register */
2917        unsigned int rs2 = GET_FIELD(insn, 27, 31);
2918        return gen_load_gpr(dc, rs2);
2919    }
2920}
2921
2922#ifdef TARGET_SPARC64
/*
 * Conditional move of a single-precision FP register:
 * rd = (cmp holds ? rs : rd).
 */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds the boolean comparison result.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0 ? s1 : s2); rd is preserved when the test fails.  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2951
2952static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2953{
2954    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2955    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2956                        gen_load_fpr_D(dc, rs),
2957                        gen_load_fpr_D(dc, rd));
2958    gen_store_fpr_D(dc, rd, dst);
2959}
2960
2961static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2962{
2963    int qd = QFPREG(rd);
2964    int qs = QFPREG(rs);
2965
2966    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2967                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2968    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2969                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2970
2971    gen_update_fprs_dirty(dc, qd);
2972}
2973
2974#ifndef CONFIG_USER_ONLY
/*
 * Compute a pointer to the trap state for the current trap level:
 * r_tsptr = &env->ts[env->tl & MAXTL_MASK].
 */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2999#endif
3000
/*
 * Emit code for the VIS EDGE instructions.  DST receives the edge mask
 * computed from addresses S1 and S2; WIDTH is the element width in bits
 * (8, 16 or 32), CC requests subcc-style condition-code updates, and
 * LEFT selects the left-edge table.  NOTE: S1 and S2 are clobbered
 * below (masked with the current address mask).
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    /* The CC forms set the integer condition codes as for subcc(s1, s2). */
    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1 = (tabl >> ((s1 & imask) << shift)) & omask, and likewise
       lo2 from tabr/s2.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the addresses with the low bits dropped, applying the
       32-bit address mask when it is in effect.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3103
3104static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3105{
3106    TCGv tmp = tcg_temp_new();
3107
3108    tcg_gen_add_tl(tmp, s1, s2);
3109    tcg_gen_andi_tl(dst, tmp, -8);
3110    if (left) {
3111        tcg_gen_neg_tl(tmp, tmp);
3112    }
3113    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3114
3115    tcg_temp_free(tmp);
3116}
3117
3118static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3119{
3120    TCGv t1, t2, shift;
3121
3122    t1 = tcg_temp_new();
3123    t2 = tcg_temp_new();
3124    shift = tcg_temp_new();
3125
3126    tcg_gen_andi_tl(shift, gsr, 7);
3127    tcg_gen_shli_tl(shift, shift, 3);
3128    tcg_gen_shl_tl(t1, s1, shift);
3129
3130    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3131       shift of (up to 63) followed by a constant shift of 1.  */
3132    tcg_gen_xori_tl(shift, shift, 63);
3133    tcg_gen_shr_tl(t2, s2, shift);
3134    tcg_gen_shri_tl(t2, t2, 1);
3135
3136    tcg_gen_or_tl(dst, t1, t2);
3137
3138    tcg_temp_free(t1);
3139    tcg_temp_free(t2);
3140    tcg_temp_free(shift);
3141}
3142#endif
3143
/* Bail out to the disassembler's "illegal_insn" label unless the CPU
   model advertises the given integer-unit feature bit.  Only usable
   inside a function that defines that label (disas_sparc_insn).  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise, but bail out to the "nfpu_insn" label (the FPU-missing
   path) when the required FPU feature is absent.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3150
3151/* before an instruction, dc->pc must be static */
3152static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3153{
3154    unsigned int opc, rs1, rs2, rd;
3155    TCGv cpu_src1, cpu_src2;
3156    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3157    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3158    target_long simm;
3159
3160    opc = GET_FIELD(insn, 0, 1);
3161    rd = GET_FIELD(insn, 2, 6);
3162
3163    switch (opc) {
3164    case 0:                     /* branches/sethi */
3165        {
3166            unsigned int xop = GET_FIELD(insn, 7, 9);
3167            int32_t target;
3168            switch (xop) {
3169#ifdef TARGET_SPARC64
3170            case 0x1:           /* V9 BPcc */
3171                {
3172                    int cc;
3173
3174                    target = GET_FIELD_SP(insn, 0, 18);
3175                    target = sign_extend(target, 19);
3176                    target <<= 2;
3177                    cc = GET_FIELD_SP(insn, 20, 21);
3178                    if (cc == 0)
3179                        do_branch(dc, target, insn, 0);
3180                    else if (cc == 2)
3181                        do_branch(dc, target, insn, 1);
3182                    else
3183                        goto illegal_insn;
3184                    goto jmp_insn;
3185                }
3186            case 0x3:           /* V9 BPr */
3187                {
3188                    target = GET_FIELD_SP(insn, 0, 13) |
3189                        (GET_FIELD_SP(insn, 20, 21) << 14);
3190                    target = sign_extend(target, 16);
3191                    target <<= 2;
3192                    cpu_src1 = get_src1(dc, insn);
3193                    do_branch_reg(dc, target, insn, cpu_src1);
3194                    goto jmp_insn;
3195                }
3196            case 0x5:           /* V9 FBPcc */
3197                {
3198                    int cc = GET_FIELD_SP(insn, 20, 21);
3199                    if (gen_trap_ifnofpu(dc)) {
3200                        goto jmp_insn;
3201                    }
3202                    target = GET_FIELD_SP(insn, 0, 18);
3203                    target = sign_extend(target, 19);
3204                    target <<= 2;
3205                    do_fbranch(dc, target, insn, cc);
3206                    goto jmp_insn;
3207                }
3208#else
3209            case 0x7:           /* CBN+x */
3210                {
3211                    goto ncp_insn;
3212                }
3213#endif
3214            case 0x2:           /* BN+x */
3215                {
3216                    target = GET_FIELD(insn, 10, 31);
3217                    target = sign_extend(target, 22);
3218                    target <<= 2;
3219                    do_branch(dc, target, insn, 0);
3220                    goto jmp_insn;
3221                }
3222            case 0x6:           /* FBN+x */
3223                {
3224                    if (gen_trap_ifnofpu(dc)) {
3225                        goto jmp_insn;
3226                    }
3227                    target = GET_FIELD(insn, 10, 31);
3228                    target = sign_extend(target, 22);
3229                    target <<= 2;
3230                    do_fbranch(dc, target, insn, 0);
3231                    goto jmp_insn;
3232                }
3233            case 0x4:           /* SETHI */
3234                /* Special-case %g0 because that's the canonical nop.  */
3235                if (rd) {
3236                    uint32_t value = GET_FIELD(insn, 10, 31);
3237                    TCGv t = gen_dest_gpr(dc, rd);
3238                    tcg_gen_movi_tl(t, value << 10);
3239                    gen_store_gpr(dc, rd, t);
3240                }
3241                break;
3242            case 0x0:           /* UNIMPL */
3243            default:
3244                goto illegal_insn;
3245            }
3246            break;
3247        }
3248        break;
3249    case 1:                     /*CALL*/
3250        {
3251            target_long target = GET_FIELDs(insn, 2, 31) << 2;
3252            TCGv o7 = gen_dest_gpr(dc, 15);
3253
3254            tcg_gen_movi_tl(o7, dc->pc);
3255            gen_store_gpr(dc, 15, o7);
3256            target += dc->pc;
3257            gen_mov_pc_npc(dc);
3258#ifdef TARGET_SPARC64
3259            if (unlikely(AM_CHECK(dc))) {
3260                target &= 0xffffffffULL;
3261            }
3262#endif
3263            dc->npc = target;
3264        }
3265        goto jmp_insn;
3266    case 2:                     /* FPU & Logical Operations */
3267        {
3268            unsigned int xop = GET_FIELD(insn, 7, 12);
3269            TCGv cpu_dst = get_temp_tl(dc);
3270            TCGv cpu_tmp0;
3271
3272            if (xop == 0x3a) {  /* generate trap */
3273                int cond = GET_FIELD(insn, 3, 6);
3274                TCGv_i32 trap;
3275                TCGLabel *l1 = NULL;
3276                int mask;
3277
3278                if (cond == 0) {
3279                    /* Trap never.  */
3280                    break;
3281                }
3282
3283                save_state(dc);
3284
3285                if (cond != 8) {
3286                    /* Conditional trap.  */
3287                    DisasCompare cmp;
3288#ifdef TARGET_SPARC64
3289                    /* V9 icc/xcc */
3290                    int cc = GET_FIELD_SP(insn, 11, 12);
3291                    if (cc == 0) {
3292                        gen_compare(&cmp, 0, cond, dc);
3293                    } else if (cc == 2) {
3294                        gen_compare(&cmp, 1, cond, dc);
3295                    } else {
3296                        goto illegal_insn;
3297                    }
3298#else
3299                    gen_compare(&cmp, 0, cond, dc);
3300#endif
3301                    l1 = gen_new_label();
3302                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3303                                      cmp.c1, cmp.c2, l1);
3304                    free_compare(&cmp);
3305                }
3306
3307                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3308                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3309
3310                /* Don't use the normal temporaries, as they may well have
3311                   gone out of scope with the branch above.  While we're
3312                   doing that we might as well pre-truncate to 32-bit.  */
3313                trap = tcg_temp_new_i32();
3314
3315                rs1 = GET_FIELD_SP(insn, 14, 18);
3316                if (IS_IMM) {
3317                    rs2 = GET_FIELD_SP(insn, 0, 7);
3318                    if (rs1 == 0) {
3319                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3320                        /* Signal that the trap value is fully constant.  */
3321                        mask = 0;
3322                    } else {
3323                        TCGv t1 = gen_load_gpr(dc, rs1);
3324                        tcg_gen_trunc_tl_i32(trap, t1);
3325                        tcg_gen_addi_i32(trap, trap, rs2);
3326                    }
3327                } else {
3328                    TCGv t1, t2;
3329                    rs2 = GET_FIELD_SP(insn, 0, 4);
3330                    t1 = gen_load_gpr(dc, rs1);
3331                    t2 = gen_load_gpr(dc, rs2);
3332                    tcg_gen_add_tl(t1, t1, t2);
3333                    tcg_gen_trunc_tl_i32(trap, t1);
3334                }
3335                if (mask != 0) {
3336                    tcg_gen_andi_i32(trap, trap, mask);
3337                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
3338                }
3339
3340                gen_helper_raise_exception(cpu_env, trap);
3341                tcg_temp_free_i32(trap);
3342
3343                if (cond == 8) {
3344                    /* An unconditional trap ends the TB.  */
3345                    dc->base.is_jmp = DISAS_NORETURN;
3346                    goto jmp_insn;
3347                } else {
3348                    /* A conditional trap falls through to the next insn.  */
3349                    gen_set_label(l1);
3350                    break;
3351                }
3352            } else if (xop == 0x28) {
3353                rs1 = GET_FIELD(insn, 13, 17);
3354                switch(rs1) {
3355                case 0: /* rdy */
3356#ifndef TARGET_SPARC64
3357                case 0x01 ... 0x0e: /* undefined in the SPARCv8
3358                                       manual, rdy on the microSPARC
3359                                       II */
3360                case 0x0f:          /* stbar in the SPARCv8 manual,
3361                                       rdy on the microSPARC II */
3362                case 0x10 ... 0x1f: /* implementation-dependent in the
3363                                       SPARCv8 manual, rdy on the
3364                                       microSPARC II */
3365                    /* Read Asr17 */
3366                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3367                        TCGv t = gen_dest_gpr(dc, rd);
3368                        /* Read Asr17 for a Leon3 monoprocessor */
3369                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3370                        gen_store_gpr(dc, rd, t);
3371                        break;
3372                    }
3373#endif
3374                    gen_store_gpr(dc, rd, cpu_y);
3375                    break;
3376#ifdef TARGET_SPARC64
3377                case 0x2: /* V9 rdccr */
3378                    update_psr(dc);
3379                    gen_helper_rdccr(cpu_dst, cpu_env);
3380                    gen_store_gpr(dc, rd, cpu_dst);
3381                    break;
3382                case 0x3: /* V9 rdasi */
3383                    tcg_gen_movi_tl(cpu_dst, dc->asi);
3384                    gen_store_gpr(dc, rd, cpu_dst);
3385                    break;
3386                case 0x4: /* V9 rdtick */
3387                    {
3388                        TCGv_ptr r_tickptr;
3389                        TCGv_i32 r_const;
3390
3391                        r_tickptr = tcg_temp_new_ptr();
3392                        r_const = tcg_const_i32(dc->mem_idx);
3393                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3394                                       offsetof(CPUSPARCState, tick));
3395                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3396                            gen_io_start();
3397                        }
3398                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3399                                                  r_const);
3400                        tcg_temp_free_ptr(r_tickptr);
3401                        tcg_temp_free_i32(r_const);
3402                        gen_store_gpr(dc, rd, cpu_dst);
3403                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3404                            /* I/O operations in icount mode must end the TB */
3405                            dc->base.is_jmp = DISAS_EXIT;
3406                        }
3407                    }
3408                    break;
3409                case 0x5: /* V9 rdpc */
3410                    {
3411                        TCGv t = gen_dest_gpr(dc, rd);
3412                        if (unlikely(AM_CHECK(dc))) {
3413                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3414                        } else {
3415                            tcg_gen_movi_tl(t, dc->pc);
3416                        }
3417                        gen_store_gpr(dc, rd, t);
3418                    }
3419                    break;
3420                case 0x6: /* V9 rdfprs */
3421                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3422                    gen_store_gpr(dc, rd, cpu_dst);
3423                    break;
3424                case 0xf: /* V9 membar */
3425                    break; /* no effect */
3426                case 0x13: /* Graphics Status */
3427                    if (gen_trap_ifnofpu(dc)) {
3428                        goto jmp_insn;
3429                    }
3430                    gen_store_gpr(dc, rd, cpu_gsr);
3431                    break;
3432                case 0x16: /* Softint */
3433                    tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3434                                     offsetof(CPUSPARCState, softint));
3435                    gen_store_gpr(dc, rd, cpu_dst);
3436                    break;
3437                case 0x17: /* Tick compare */
3438                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
3439                    break;
3440                case 0x18: /* System tick */
3441                    {
3442                        TCGv_ptr r_tickptr;
3443                        TCGv_i32 r_const;
3444
3445                        r_tickptr = tcg_temp_new_ptr();
3446                        r_const = tcg_const_i32(dc->mem_idx);
3447                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3448                                       offsetof(CPUSPARCState, stick));
3449                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3450                            gen_io_start();
3451                        }
3452                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3453                                                  r_const);
3454                        tcg_temp_free_ptr(r_tickptr);
3455                        tcg_temp_free_i32(r_const);
3456                        gen_store_gpr(dc, rd, cpu_dst);
3457                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3458                            /* I/O operations in icount mode must end the TB */
3459                            dc->base.is_jmp = DISAS_EXIT;
3460                        }
3461                    }
3462                    break;
3463                case 0x19: /* System tick compare */
3464                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
3465                    break;
3466                case 0x1a: /* UltraSPARC-T1 Strand status */
3467                    /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3468                     * this ASR as impl. dep
3469                     */
3470                    CHECK_IU_FEATURE(dc, HYPV);
3471                    {
3472                        TCGv t = gen_dest_gpr(dc, rd);
3473                        tcg_gen_movi_tl(t, 1UL);
3474                        gen_store_gpr(dc, rd, t);
3475                    }
3476                    break;
3477                case 0x10: /* Performance Control */
3478                case 0x11: /* Performance Instrumentation Counter */
3479                case 0x12: /* Dispatch Control */
3480                case 0x14: /* Softint set, WO */
3481                case 0x15: /* Softint clear, WO */
3482#endif
3483                default:
3484                    goto illegal_insn;
3485                }
3486#if !defined(CONFIG_USER_ONLY)
3487            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3488#ifndef TARGET_SPARC64
3489                if (!supervisor(dc)) {
3490                    goto priv_insn;
3491                }
3492                update_psr(dc);
3493                gen_helper_rdpsr(cpu_dst, cpu_env);
3494#else
3495                CHECK_IU_FEATURE(dc, HYPV);
3496                if (!hypervisor(dc))
3497                    goto priv_insn;
3498                rs1 = GET_FIELD(insn, 13, 17);
3499                switch (rs1) {
3500                case 0: // hpstate
3501                    tcg_gen_ld_i64(cpu_dst, cpu_env,
3502                                   offsetof(CPUSPARCState, hpstate));
3503                    break;
3504                case 1: // htstate
3505                    // gen_op_rdhtstate();
3506                    break;
3507                case 3: // hintp
3508                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3509                    break;
3510                case 5: // htba
3511                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
3512                    break;
3513                case 6: // hver
3514                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
3515                    break;
3516                case 31: // hstick_cmpr
3517                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3518                    break;
3519                default:
3520                    goto illegal_insn;
3521                }
3522#endif
3523                gen_store_gpr(dc, rd, cpu_dst);
3524                break;
3525            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3526                if (!supervisor(dc)) {
3527                    goto priv_insn;
3528                }
3529                cpu_tmp0 = get_temp_tl(dc);
3530#ifdef TARGET_SPARC64
3531                rs1 = GET_FIELD(insn, 13, 17);
3532                switch (rs1) {
3533                case 0: // tpc
3534                    {
3535                        TCGv_ptr r_tsptr;
3536
3537                        r_tsptr = tcg_temp_new_ptr();
3538                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3539                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3540                                      offsetof(trap_state, tpc));
3541                        tcg_temp_free_ptr(r_tsptr);
3542                    }
3543                    break;
3544                case 1: // tnpc
3545                    {
3546                        TCGv_ptr r_tsptr;
3547
3548                        r_tsptr = tcg_temp_new_ptr();
3549                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3550                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3551                                      offsetof(trap_state, tnpc));
3552                        tcg_temp_free_ptr(r_tsptr);
3553                    }
3554                    break;
3555                case 2: // tstate
3556                    {
3557                        TCGv_ptr r_tsptr;
3558
3559                        r_tsptr = tcg_temp_new_ptr();
3560                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3561                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3562                                      offsetof(trap_state, tstate));
3563                        tcg_temp_free_ptr(r_tsptr);
3564                    }
3565                    break;
3566                case 3: // tt
3567                    {
3568                        TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3569
3570                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3571                        tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3572                                         offsetof(trap_state, tt));
3573                        tcg_temp_free_ptr(r_tsptr);
3574                    }
3575                    break;
3576                case 4: // tick
3577                    {
3578                        TCGv_ptr r_tickptr;
3579                        TCGv_i32 r_const;
3580
3581                        r_tickptr = tcg_temp_new_ptr();
3582                        r_const = tcg_const_i32(dc->mem_idx);
3583                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3584                                       offsetof(CPUSPARCState, tick));
3585                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3586                            gen_io_start();
3587                        }
3588                        gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3589                                                  r_tickptr, r_const);
3590                        tcg_temp_free_ptr(r_tickptr);
3591                        tcg_temp_free_i32(r_const);
3592                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3593                            /* I/O operations in icount mode must end the TB */
3594                            dc->base.is_jmp = DISAS_EXIT;
3595                        }
3596                    }
3597                    break;
3598                case 5: // tba
3599                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3600                    break;
3601                case 6: // pstate
3602                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3603                                     offsetof(CPUSPARCState, pstate));
3604                    break;
3605                case 7: // tl
3606                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3607                                     offsetof(CPUSPARCState, tl));
3608                    break;
3609                case 8: // pil
3610                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3611                                     offsetof(CPUSPARCState, psrpil));
3612                    break;
3613                case 9: // cwp
3614                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
3615                    break;
3616                case 10: // cansave
3617                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3618                                     offsetof(CPUSPARCState, cansave));
3619                    break;
3620                case 11: // canrestore
3621                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3622                                     offsetof(CPUSPARCState, canrestore));
3623                    break;
3624                case 12: // cleanwin
3625                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3626                                     offsetof(CPUSPARCState, cleanwin));
3627                    break;
3628                case 13: // otherwin
3629                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3630                                     offsetof(CPUSPARCState, otherwin));
3631                    break;
3632                case 14: // wstate
3633                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3634                                     offsetof(CPUSPARCState, wstate));
3635                    break;
3636                case 16: // UA2005 gl
3637                    CHECK_IU_FEATURE(dc, GL);
3638                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3639                                     offsetof(CPUSPARCState, gl));
3640                    break;
3641                case 26: // UA2005 strand status
3642                    CHECK_IU_FEATURE(dc, HYPV);
3643                    if (!hypervisor(dc))
3644                        goto priv_insn;
3645                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3646                    break;
3647                case 31: // ver
3648                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3649                    break;
3650                case 15: // fq
3651                default:
3652                    goto illegal_insn;
3653                }
3654#else
3655                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3656#endif
3657                gen_store_gpr(dc, rd, cpu_tmp0);
3658                break;
3659#endif
3660#if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3661            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3662#ifdef TARGET_SPARC64
3663                gen_helper_flushw(cpu_env);
3664#else
3665                if (!supervisor(dc))
3666                    goto priv_insn;
3667                gen_store_gpr(dc, rd, cpu_tbr);
3668#endif
3669                break;
3670#endif
3671            } else if (xop == 0x34) {   /* FPU Operations */
3672                if (gen_trap_ifnofpu(dc)) {
3673                    goto jmp_insn;
3674                }
3675                gen_op_clear_ieee_excp_and_FTT();
3676                rs1 = GET_FIELD(insn, 13, 17);
3677                rs2 = GET_FIELD(insn, 27, 31);
3678                xop = GET_FIELD(insn, 18, 26);
3679
3680                switch (xop) {
3681                case 0x1: /* fmovs */
3682                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3683                    gen_store_fpr_F(dc, rd, cpu_src1_32);
3684                    break;
3685                case 0x5: /* fnegs */
3686                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3687                    break;
3688                case 0x9: /* fabss */
3689                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3690                    break;
3691                case 0x29: /* fsqrts */
3692                    CHECK_FPU_FEATURE(dc, FSQRT);
3693                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3694                    break;
3695                case 0x2a: /* fsqrtd */
3696                    CHECK_FPU_FEATURE(dc, FSQRT);
3697                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3698                    break;
3699                case 0x2b: /* fsqrtq */
3700                    CHECK_FPU_FEATURE(dc, FLOAT128);
3701                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3702                    break;
3703                case 0x41: /* fadds */
3704                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3705                    break;
3706                case 0x42: /* faddd */
3707                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3708                    break;
3709                case 0x43: /* faddq */
3710                    CHECK_FPU_FEATURE(dc, FLOAT128);
3711                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3712                    break;
3713                case 0x45: /* fsubs */
3714                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3715                    break;
3716                case 0x46: /* fsubd */
3717                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3718                    break;
3719                case 0x47: /* fsubq */
3720                    CHECK_FPU_FEATURE(dc, FLOAT128);
3721                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3722                    break;
3723                case 0x49: /* fmuls */
3724                    CHECK_FPU_FEATURE(dc, FMUL);
3725                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3726                    break;
3727                case 0x4a: /* fmuld */
3728                    CHECK_FPU_FEATURE(dc, FMUL);
3729                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3730                    break;
3731                case 0x4b: /* fmulq */
3732                    CHECK_FPU_FEATURE(dc, FLOAT128);
3733                    CHECK_FPU_FEATURE(dc, FMUL);
3734                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3735                    break;
3736                case 0x4d: /* fdivs */
3737                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3738                    break;
3739                case 0x4e: /* fdivd */
3740                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3741                    break;
3742                case 0x4f: /* fdivq */
3743                    CHECK_FPU_FEATURE(dc, FLOAT128);
3744                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3745                    break;
3746                case 0x69: /* fsmuld */
3747                    CHECK_FPU_FEATURE(dc, FSMULD);
3748                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3749                    break;
3750                case 0x6e: /* fdmulq */
3751                    CHECK_FPU_FEATURE(dc, FLOAT128);
3752                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3753                    break;
3754                case 0xc4: /* fitos */
3755                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3756                    break;
3757                case 0xc6: /* fdtos */
3758                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3759                    break;
3760                case 0xc7: /* fqtos */
3761                    CHECK_FPU_FEATURE(dc, FLOAT128);
3762                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3763                    break;
3764                case 0xc8: /* fitod */
3765                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3766                    break;
3767                case 0xc9: /* fstod */
3768                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3769                    break;
3770                case 0xcb: /* fqtod */
3771                    CHECK_FPU_FEATURE(dc, FLOAT128);
3772                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3773                    break;
3774                case 0xcc: /* fitoq */
3775                    CHECK_FPU_FEATURE(dc, FLOAT128);
3776                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3777                    break;
                /* FP format conversions and V9 double/quad moves.
                   Quad (128-bit) cases are gated on the FLOAT128 CPU
                   feature before any code is emitted. */
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
                    break;
                case 0xd1: /* fstoi */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
                    break;
                case 0xd2: /* fdtoi */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd: plain 64-bit register copy */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_move_Q(dc, rd, rs2);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
                    break;
                case 0xa: /* V9 fabsd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
                    break;
                case 0x81: /* V9 fstox */
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
                    break;
                case 0x82: /* V9 fdtox */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
                    break;
                case 0x84: /* V9 fxtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations (FPop2) */
#ifdef TARGET_SPARC64
                int cond;
#endif
                /* Any FP op traps if the FPU is disabled/missing. */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);

#ifdef TARGET_SPARC64
                /* V9 FMOVr: conditional FP move on an integer register
                   condition.  Builds a DisasCompare from the 3-bit cond
                   field and the rs1 value, then moves rs2 -> rd when the
                   condition holds. */
#define FMOVR(sz)                                                  \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 10, 12);             \
                    cpu_src1 = get_src1(dc, insn);                 \
                    gen_compare_reg(&cmp, cond, cpu_src1);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                    free_compare(&cmp);                            \
                } while (0)

                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
                    FMOVR(s);
                    break;
                } else if ((xop & 0x11f) == 0x006) { /* V9 fmovdr */
                    FMOVR(d);
                    break;
                } else if ((xop & 0x11f) == 0x007) { /* V9 fmovqr */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVR(q);
                    break;
                }
#undef FMOVR
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
                /* V9 FMOVcc on a floating-point condition code field:
                   fcc selects %fcc0..%fcc3, cond is the 4-bit FP cond. */
#define FMOVCC(fcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcompare(&cmp, fcc, cond);                  \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                        free_compare(&cmp);                             \
                    } while (0)

                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVCC(0, s);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVCC(0, d);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVCC(1, s);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVCC(1, d);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVCC(2, s);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVCC(2, d);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(2, q);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVCC(3, s);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVCC(3, d);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(3, q);
                        break;
#undef FMOVCC
                /* V9 FMOVcc on an integer condition code field:
                   xcc selects %icc (0) or %xcc (1). */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                        free_compare(&cmp);                             \
                    } while (0)

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVCC(0, s);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVCC(0, d);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVCC(1, s);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVCC(1, d);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
#undef FMOVCC
#endif
                    /* FP compares: on V9, rd & 3 selects the destination
                       %fcc field.  Quad compares go through the QT0/QT1
                       staging registers. */
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) { /* or (also the clr/mov synthetics) */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            /* plain register-to-register move */
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            /* V9 shifts: insn bit 12 selects the 64-bit form (sllx/srlx/
               srax, shift count masked to 6 bits) versus the 32-bit form
               (count masked to 5 bits, with the source narrowed first for
               the right shifts). */
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit srl: zero-extend the source first */
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit sra: sign-extend the source first */
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    /* Basic ALU ops.  Bit 4 of xop (0x10) selects the
                       condition-code-setting variant, so we decode on
                       xop & ~0x10 and test the bit per case. */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x1: /* and */
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            /* Logic ops only need the result for flags. */
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x5: /* andn */
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            /* NOTE(review): no tcg_gen_movi_i32(cpu_cc_op,...)
                               here, unlike the add/sub cases — presumably the
                               _cc helper updates the env cc state itself;
                               confirm against helper_udiv_cc. */
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
4238                } else {
4239                    cpu_src1 = get_src1(dc, insn);
4240                    cpu_src2 = get_src2(dc, insn);
4241                    switch (xop) {
4242                    case 0x20: /* taddcc */
4243                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4244                        gen_store_gpr(dc, rd, cpu_dst);
4245                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4246                        dc->cc_op = CC_OP_TADD;
4247                        break;
4248                    case 0x21: /* tsubcc */
4249                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4250                        gen_store_gpr(dc, rd, cpu_dst);
4251                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4252                        dc->cc_op = CC_OP_TSUB;
4253                        break;
4254                    case 0x22: /* taddcctv */
4255                        gen_helper_taddcctv(cpu_dst, cpu_env,
4256                                            cpu_src1, cpu_src2);
4257                        gen_store_gpr(dc, rd, cpu_dst);
4258                        dc->cc_op = CC_OP_TADDTV;
4259                        break;
4260                    case 0x23: /* tsubcctv */
4261                        gen_helper_tsubcctv(cpu_dst, cpu_env,
4262                                            cpu_src1, cpu_src2);
4263                        gen_store_gpr(dc, rd, cpu_dst);
4264                        dc->cc_op = CC_OP_TSUBTV;
4265                        break;
4266                    case 0x24: /* mulscc */
4267                        update_psr(dc);
4268                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4269                        gen_store_gpr(dc, rd, cpu_dst);
4270                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4271                        dc->cc_op = CC_OP_ADD;
4272                        break;
4273#ifndef TARGET_SPARC64
4274                    case 0x25:  /* sll */
4275                        if (IS_IMM) { /* immediate */
4276                            simm = GET_FIELDs(insn, 20, 31);
4277                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4278                        } else { /* register */
4279                            cpu_tmp0 = get_temp_tl(dc);
4280                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4281                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4282                        }
4283                        gen_store_gpr(dc, rd, cpu_dst);
4284                        break;
4285                    case 0x26:  /* srl */
4286                        if (IS_IMM) { /* immediate */
4287                            simm = GET_FIELDs(insn, 20, 31);
4288                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4289                        } else { /* register */
4290                            cpu_tmp0 = get_temp_tl(dc);
4291                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4292                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4293                        }
4294                        gen_store_gpr(dc, rd, cpu_dst);
4295                        break;
4296                    case 0x27:  /* sra */
4297                        if (IS_IMM) { /* immediate */
4298                            simm = GET_FIELDs(insn, 20, 31);
4299                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4300                        } else { /* register */
4301                            cpu_tmp0 = get_temp_tl(dc);
4302                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4303                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4304                        }
4305                        gen_store_gpr(dc, rd, cpu_dst);
4306                        break;
4307#endif
4308                    case 0x30:
4309                        {
4310                            cpu_tmp0 = get_temp_tl(dc);
4311                            switch(rd) {
4312                            case 0: /* wry */
4313                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4314                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4315                                break;
4316#ifndef TARGET_SPARC64
4317                            case 0x01 ... 0x0f: /* undefined in the
4318                                                   SPARCv8 manual, nop
4319                                                   on the microSPARC
4320                                                   II */
4321                            case 0x10 ... 0x1f: /* implementation-dependent
4322                                                   in the SPARCv8
4323                                                   manual, nop on the
4324                                                   microSPARC II */
4325                                if ((rd == 0x13) && (dc->def->features &
4326                                                     CPU_FEATURE_POWERDOWN)) {
4327                                    /* LEON3 power-down */
4328                                    save_state(dc);
4329                                    gen_helper_power_down(cpu_env);
4330                                }
4331                                break;
4332#else
4333                            case 0x2: /* V9 wrccr */
4334                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4335                                gen_helper_wrccr(cpu_env, cpu_tmp0);
4336                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4337                                dc->cc_op = CC_OP_FLAGS;
4338                                break;
4339                            case 0x3: /* V9 wrasi */
4340                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4341                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4342                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4343                                                offsetof(CPUSPARCState, asi));
4344                                /* End TB to notice changed ASI.  */
4345                                save_state(dc);
4346                                gen_op_next_insn();
4347                                tcg_gen_exit_tb(NULL, 0);
4348                                dc->base.is_jmp = DISAS_NORETURN;
4349                                break;
4350                            case 0x6: /* V9 wrfprs */
4351                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4352                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4353                                dc->fprs_dirty = 0;
4354                                save_state(dc);
4355                                gen_op_next_insn();
4356                                tcg_gen_exit_tb(NULL, 0);
4357                                dc->base.is_jmp = DISAS_NORETURN;
4358                                break;
4359                            case 0xf: /* V9 sir, nop if user */
4360#if !defined(CONFIG_USER_ONLY)
4361                                if (supervisor(dc)) {
4362                                    ; // XXX
4363                                }
4364#endif
4365                                break;
4366                            case 0x13: /* Graphics Status */
4367                                if (gen_trap_ifnofpu(dc)) {
4368                                    goto jmp_insn;
4369                                }
4370                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4371                                break;
4372                            case 0x14: /* Softint set */
4373                                if (!supervisor(dc))
4374                                    goto illegal_insn;
4375                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4376                                gen_helper_set_softint(cpu_env, cpu_tmp0);
4377                                break;
4378                            case 0x15: /* Softint clear */
4379                                if (!supervisor(dc))
4380                                    goto illegal_insn;
4381                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4382                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
4383                                break;
4384                            case 0x16: /* Softint write */
4385                                if (!supervisor(dc))
4386                                    goto illegal_insn;
4387                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4388                                gen_helper_write_softint(cpu_env, cpu_tmp0);
4389                                break;
                                /*
                                 * WR to ASRs 0x17-0x19 drive the CPU timers.
                                 * As for every WR form here, the value
                                 * written is rs1 ^ operand2 (visible in the
                                 * xor below; SPARC WR xors its two sources).
                                 */
4390                            case 0x17: /* Tick compare */
4391#if !defined(CONFIG_USER_ONLY)
4392                                if (!supervisor(dc))
4393                                    goto illegal_insn;
4394#endif
4395                                {
4396                                    TCGv_ptr r_tickptr;
4397
4398                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4399                                                   cpu_src2);
4400                                    r_tickptr = tcg_temp_new_ptr();
4401                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4402                                                   offsetof(CPUSPARCState, tick));
                                    /* Timer access is an I/O op under icount:
                                       open the I/O window around the helper. */
4403                                    if (tb_cflags(dc->base.tb) &
4404                                           CF_USE_ICOUNT) {
4405                                        gen_io_start();
4406                                    }
4407                                    gen_helper_tick_set_limit(r_tickptr,
4408                                                              cpu_tick_cmpr);
4409                                    tcg_temp_free_ptr(r_tickptr);
4410                                    /* End TB to handle timer interrupt */
4411                                    dc->base.is_jmp = DISAS_EXIT;
4412                                }
4413                                break;
                                /* WR %stick: sets the system-tick *counter*
                                   itself (tick_set_count), unlike 0x19 below
                                   which sets the compare value. */
4414                            case 0x18: /* System tick */
4415#if !defined(CONFIG_USER_ONLY)
4416                                if (!supervisor(dc))
4417                                    goto illegal_insn;
4418#endif
4419                                {
4420                                    TCGv_ptr r_tickptr;
4421
4422                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4423                                                   cpu_src2);
4424                                    r_tickptr = tcg_temp_new_ptr();
4425                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4426                                                   offsetof(CPUSPARCState, stick));
4427                                    if (tb_cflags(dc->base.tb) &
4428                                           CF_USE_ICOUNT) {
4429                                        gen_io_start();
4430                                    }
4431                                    gen_helper_tick_set_count(r_tickptr,
4432                                                              cpu_tmp0);
4433                                    tcg_temp_free_ptr(r_tickptr);
4434                                    /* End TB to handle timer interrupt */
4435                                    dc->base.is_jmp = DISAS_EXIT;
4436                                }
4437                                break;
                                /* WR %stick_cmpr: system-tick compare
                                   (limit) register. */
4438                            case 0x19: /* System tick compare */
4439#if !defined(CONFIG_USER_ONLY)
4440                                if (!supervisor(dc))
4441                                    goto illegal_insn;
4442#endif
4443                                {
4444                                    TCGv_ptr r_tickptr;
4445
4446                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4447                                                   cpu_src2);
4448                                    r_tickptr = tcg_temp_new_ptr();
4449                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4450                                                   offsetof(CPUSPARCState, stick));
4451                                    if (tb_cflags(dc->base.tb) &
4452                                           CF_USE_ICOUNT) {
4453                                        gen_io_start();
4454                                    }
4455                                    gen_helper_tick_set_limit(r_tickptr,
4456                                                              cpu_stick_cmpr);
4457                                    tcg_temp_free_ptr(r_tickptr);
4458                                    /* End TB to handle timer interrupt */
4459                                    dc->base.is_jmp = DISAS_EXIT;
4460                                }
4461                                break;
4462
                                /* Performance-monitor ASRs: not implemented,
                                   fall through to illegal_insn. */
4463                            case 0x10: /* Performance Control */
4464                            case 0x11: /* Performance Instrumentation
4465                                          Counter */
4466                            case 0x12: /* Dispatch Control */
4467#endif
4468                            default:
4469                                goto illegal_insn;
4470                            }
4471                        }
4472                        break;
4473#if !defined(CONFIG_USER_ONLY)
4474                    case 0x31: /* wrpsr, V9 saved, restored */
4475                        {
4476                            if (!supervisor(dc))
4477                                goto priv_insn;
4478#ifdef TARGET_SPARC64
                            /* V9 reuses this opcode for the window-management
                               ops SAVED/RESTORED; rd selects which. */
4479                            switch (rd) {
4480                            case 0:
4481                                gen_helper_saved(cpu_env);
4482                                break;
4483                            case 1:
4484                                gen_helper_restored(cpu_env);
4485                                break;
4486                            case 2: /* UA2005 allclean */
4487                            case 3: /* UA2005 otherw */
4488                            case 4: /* UA2005 normalw */
4489                            case 5: /* UA2005 invalw */
4490                                // XXX
4491                            default:
4492                                goto illegal_insn;
4493                            }
4494#else
                            /* Pre-V9 WRPSR: write rs1 ^ operand2 to PSR via
                               helper.  PSR holds the live icc flags, so the
                               lazy cc state is switched to CC_OP_FLAGS, and
                               the TB is ended (PSR contents can affect how
                               subsequent code must be translated). */
4495                            cpu_tmp0 = get_temp_tl(dc);
4496                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4497                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
4498                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4499                            dc->cc_op = CC_OP_FLAGS;
4500                            save_state(dc);
4501                            gen_op_next_insn();
4502                            tcg_gen_exit_tb(NULL, 0);
4503                            dc->base.is_jmp = DISAS_NORETURN;
4504#endif
4505                        }
4506                        break;
4507                    case 0x32: /* wrwim, V9 wrpr */
4508                        {
4509                            if (!supervisor(dc))
4510                                goto priv_insn;
                            /* Value to write = rs1 ^ operand2 (WR/WRPR
                               semantics, same as the other WR cases). */
4511                            cpu_tmp0 = get_temp_tl(dc);
4512                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4513#ifdef TARGET_SPARC64
                            /* V9 WRPR: rd selects the privileged register.
                               rd 0-3 (tpc/tnpc/tstate/tt) live in the
                               current trap_state entry, fetched via
                               gen_load_trap_state_at_tl(). */
4514                            switch (rd) {
4515                            case 0: // tpc
4516                                {
4517                                    TCGv_ptr r_tsptr;
4518
4519                                    r_tsptr = tcg_temp_new_ptr();
4520                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4521                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4522                                                  offsetof(trap_state, tpc));
4523                                    tcg_temp_free_ptr(r_tsptr);
4524                                }
4525                                break;
4526                            case 1: // tnpc
4527                                {
4528                                    TCGv_ptr r_tsptr;
4529
4530                                    r_tsptr = tcg_temp_new_ptr();
4531                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4532                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4533                                                  offsetof(trap_state, tnpc));
4534                                    tcg_temp_free_ptr(r_tsptr);
4535                                }
4536                                break;
4537                            case 2: // tstate
4538                                {
4539                                    TCGv_ptr r_tsptr;
4540
4541                                    r_tsptr = tcg_temp_new_ptr();
4542                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4543                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4544                                                  offsetof(trap_state,
4545                                                           tstate));
4546                                    tcg_temp_free_ptr(r_tsptr);
4547                                }
4548                                break;
4549                            case 3: // tt
4550                                {
4551                                    TCGv_ptr r_tsptr;
4552
                                    /* tt is a 32-bit field: st32, not st_tl. */
4553                                    r_tsptr = tcg_temp_new_ptr();
4554                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4555                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4556                                                    offsetof(trap_state, tt));
4557                                    tcg_temp_free_ptr(r_tsptr);
4558                                }
4559                                break;
4560                            case 4: // tick
4561                                {
4562                                    TCGv_ptr r_tickptr;
4563
                                    /* Writing %tick touches the timer backend:
                                       I/O under icount, hence gen_io_start(). */
4564                                    r_tickptr = tcg_temp_new_ptr();
4565                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4566                                                   offsetof(CPUSPARCState, tick));
4567                                    if (tb_cflags(dc->base.tb) &
4568                                           CF_USE_ICOUNT) {
4569                                        gen_io_start();
4570                                    }
4571                                    gen_helper_tick_set_count(r_tickptr,
4572                                                              cpu_tmp0);
4573                                    tcg_temp_free_ptr(r_tickptr);
4574                                    /* End TB to handle timer interrupt */
4575                                    dc->base.is_jmp = DISAS_EXIT;
4576                                }
4577                                break;
4578                            case 5: // tba
4579                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4580                                break;
4581                            case 6: // pstate
                                /* NOTE: helper may change state relevant to
                                   translation, so state is saved first and
                                   npc is forced dynamic afterwards. */
4582                                save_state(dc);
4583                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4584                                    gen_io_start();
4585                                }
4586                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
4587                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4588                                    /* I/O ops in icount mode must end the TB */
4589                                    dc->base.is_jmp = DISAS_EXIT;
4590                                }
4591                                dc->npc = DYNAMIC_PC;
4592                                break;
4593                            case 7: // tl
4594                                save_state(dc);
4595                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4596                                               offsetof(CPUSPARCState, tl));
4597                                dc->npc = DYNAMIC_PC;
4598                                break;
4599                            case 8: // pil
4600                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4601                                    gen_io_start();
4602                                }
4603                                gen_helper_wrpil(cpu_env, cpu_tmp0);
4604                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4605                                    /* I/O ops in icount mode must end the TB */
4606                                    dc->base.is_jmp = DISAS_EXIT;
4607                                }
4608                                break;
4609                            case 9: // cwp
4610                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
4611                                break;
                            /* rd 10-14: plain 32-bit register-window
                               bookkeeping fields stored directly in env. */
4612                            case 10: // cansave
4613                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4614                                                offsetof(CPUSPARCState,
4615                                                         cansave));
4616                                break;
4617                            case 11: // canrestore
4618                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4619                                                offsetof(CPUSPARCState,
4620                                                         canrestore));
4621                                break;
4622                            case 12: // cleanwin
4623                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4624                                                offsetof(CPUSPARCState,
4625                                                         cleanwin));
4626                                break;
4627                            case 13: // otherwin
4628                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4629                                                offsetof(CPUSPARCState,
4630                                                         otherwin));
4631                                break;
4632                            case 14: // wstate
4633                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4634                                                offsetof(CPUSPARCState,
4635                                                         wstate));
4636                                break;
4637                            case 16: // UA2005 gl
4638                                CHECK_IU_FEATURE(dc, GL);
4639                                gen_helper_wrgl(cpu_env, cpu_tmp0);
4640                                break;
4641                            case 26: // UA2005 strand status
                                /* Hyperprivileged: needs HYPV feature and
                                   hypervisor mode. */
4642                                CHECK_IU_FEATURE(dc, HYPV);
4643                                if (!hypervisor(dc))
4644                                    goto priv_insn;
4645                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4646                                break;
4647                            default:
4648                                goto illegal_insn;
4649                            }
4650#else
                            /* Pre-V9 WRWIM: mask the value down to the bits
                               covered by the implemented register windows. */
4651                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4652                            if (dc->def->nwindows != 32) {
4653                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
4654                                                (1 << dc->def->nwindows) - 1);
4655                            }
4656#endif
4657                        }
4658                        break;
4659                    case 0x33: /* wrtbr, UA2005 wrhpr */
4660                        {
4661#ifndef TARGET_SPARC64
                            /* Pre-V9 WRTBR: privileged; value = rs1 ^ op2. */
4662                            if (!supervisor(dc))
4663                                goto priv_insn;
4664                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4665#else
                            /* UA2005 WRHPR: hyperprivileged registers,
                               selected by rd. */
4666                            CHECK_IU_FEATURE(dc, HYPV);
4667                            if (!hypervisor(dc))
4668                                goto priv_insn;
4669                            cpu_tmp0 = get_temp_tl(dc);
4670                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4671                            switch (rd) {
4672                            case 0: // hpstate
                                /* hpstate changes can affect translation:
                                   save state and exit the TB. */
4673                                tcg_gen_st_i64(cpu_tmp0, cpu_env,
4674                                               offsetof(CPUSPARCState,
4675                                                        hpstate));
4676                                save_state(dc);
4677                                gen_op_next_insn();
4678                                tcg_gen_exit_tb(NULL, 0);
4679                                dc->base.is_jmp = DISAS_NORETURN;
4680                                break;
4681                            case 1: // htstate
4682                                // XXX gen_op_wrhtstate();
4683                                break;
4684                            case 3: // hintp
4685                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4686                                break;
4687                            case 5: // htba
4688                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4689                                break;
4690                            case 31: // hstick_cmpr
4691                                {
4692                                    TCGv_ptr r_tickptr;
4693
                                    /* Same timer/icount pattern as the
                                       %tick_cmpr / %stick_cmpr writes. */
4694                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4695                                    r_tickptr = tcg_temp_new_ptr();
4696                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4697                                                   offsetof(CPUSPARCState, hstick));
4698                                    if (tb_cflags(dc->base.tb) &
4699                                           CF_USE_ICOUNT) {
4700                                        gen_io_start();
4701                                    }
4702                                    gen_helper_tick_set_limit(r_tickptr,
4703                                                              cpu_hstick_cmpr);
4704                                    tcg_temp_free_ptr(r_tickptr);
4705                                    /* End TB to handle timer interrupt */
4706                                    dc->base.is_jmp = DISAS_EXIT;
4707                                }
4708                                break;
4709                            case 6: // hver readonly
4710                            default:
4711                                goto illegal_insn;
4712                            }
4713#endif
4714                        }
4715                        break;
4716#endif
4717#ifdef TARGET_SPARC64
4718                    case 0x2c: /* V9 movcc */
4719                        {
4720                            int cc = GET_FIELD_SP(insn, 11, 12);
4721                            int cond = GET_FIELD_SP(insn, 14, 17);
4722                            DisasCompare cmp;
4723                            TCGv dst;
4724
                            /* Bit 18 selects integer condition codes
                               (cc 0 = %icc, 2 = %xcc; other values are
                               illegal) vs a floating-point %fcc field. */
4725                            if (insn & (1 << 18)) {
4726                                if (cc == 0) {
4727                                    gen_compare(&cmp, 0, cond, dc);
4728                                } else if (cc == 2) {
4729                                    gen_compare(&cmp, 1, cond, dc);
4730                                } else {
4731                                    goto illegal_insn;
4732                                }
4733                            } else {
4734                                gen_fcompare(&cmp, cc, cond);
4735                            }
4736
4737                            /* The get_src2 above loaded the normal 13-bit
4738                               immediate field, not the 11-bit field we have
4739                               in movcc.  But it did handle the reg case.  */
4740                            if (IS_IMM) {
4741                                simm = GET_FIELD_SPs(insn, 0, 10);
4742                                tcg_gen_movi_tl(cpu_src2, simm);
4743                            }
4744
                            /* rd is loaded first so the "condition false"
                               arm of movcond keeps the old value. */
4745                            dst = gen_load_gpr(dc, rd);
4746                            tcg_gen_movcond_tl(cmp.cond, dst,
4747                                               cmp.c1, cmp.c2,
4748                                               cpu_src2, dst);
4749                            free_compare(&cmp);
4750                            gen_store_gpr(dc, rd, dst);
4751                            break;
4752                        }
                    /* sdivx: 64-bit signed divide via helper; env is passed
                       so the helper can access CPU state (e.g. raise traps). */
4753                    case 0x2d: /* V9 sdivx */
4754                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4755                        gen_store_gpr(dc, rd, cpu_dst);
4756                        break;
                    /* popc: population count of the second source only
                       (rs1 is not used by this instruction). */
4757                    case 0x2e: /* V9 popc */
4758                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4759                        gen_store_gpr(dc, rd, cpu_dst);
4760                        break;
                    /* movr: conditional move keyed on a register condition
                       over rs1 (cond field in bits 10-12), not on cc flags. */
4761                    case 0x2f: /* V9 movr */
4762                        {
4763                            int cond = GET_FIELD_SP(insn, 10, 12);
4764                            DisasCompare cmp;
4765                            TCGv dst;
4766
4767                            gen_compare_reg(&cmp, cond, cpu_src1);
4768
4769                            /* The get_src2 above loaded the normal 13-bit
4770                               immediate field, not the 10-bit field we have
4771                               in movr.  But it did handle the reg case.  */
4772                            if (IS_IMM) {
4773                                simm = GET_FIELD_SPs(insn, 0, 9);
4774                                tcg_gen_movi_tl(cpu_src2, simm);
4775                            }
4776
                            /* Load rd first: movcond's false arm preserves
                               the old register value. */
4777                            dst = gen_load_gpr(dc, rd);
4778                            tcg_gen_movcond_tl(cmp.cond, dst,
4779                                               cmp.c1, cmp.c2,
4780                                               cpu_src2, dst);
4781                            free_compare(&cmp);
4782                            gen_store_gpr(dc, rd, dst);
4783                            break;
4784                        }
4785#endif
4786                    default:
4787                        goto illegal_insn;
4788                    }
4789                }
4790            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4791#ifdef TARGET_SPARC64
                /* VIS decode: opf (bits 5-13) selects the operation; rs1/rs2
                   are re-extracted here.  All VIS ops trap first if the FPU
                   is disabled. */
4792                int opf = GET_FIELD_SP(insn, 5, 13);
4793                rs1 = GET_FIELD(insn, 13, 17);
4794                rs2 = GET_FIELD(insn, 27, 31);
4795                if (gen_trap_ifnofpu(dc)) {
4796                    goto jmp_insn;
4797                }
4798
4799                switch (opf) {
                /* edge instructions: gen_edge(dc, dst, s1, s2, width, cc, l)
                   where, matching the mnemonic suffixes, width is the element
                   size in bits, cc=1 for the cc-setting forms and l=1 for
                   the little-endian ('l') variants. */
4800                case 0x000: /* VIS I edge8cc */
4801                    CHECK_FPU_FEATURE(dc, VIS1);
4802                    cpu_src1 = gen_load_gpr(dc, rs1);
4803                    cpu_src2 = gen_load_gpr(dc, rs2);
4804                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4805                    gen_store_gpr(dc, rd, cpu_dst);
4806                    break;
4807                case 0x001: /* VIS II edge8n */
4808                    CHECK_FPU_FEATURE(dc, VIS2);
4809                    cpu_src1 = gen_load_gpr(dc, rs1);
4810                    cpu_src2 = gen_load_gpr(dc, rs2);
4811                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4812                    gen_store_gpr(dc, rd, cpu_dst);
4813                    break;
4814                case 0x002: /* VIS I edge8lcc */
4815                    CHECK_FPU_FEATURE(dc, VIS1);
4816                    cpu_src1 = gen_load_gpr(dc, rs1);
4817                    cpu_src2 = gen_load_gpr(dc, rs2);
4818                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4819                    gen_store_gpr(dc, rd, cpu_dst);
4820                    break;
4821                case 0x003: /* VIS II edge8ln */
4822                    CHECK_FPU_FEATURE(dc, VIS2);
4823                    cpu_src1 = gen_load_gpr(dc, rs1);
4824                    cpu_src2 = gen_load_gpr(dc, rs2);
4825                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4826                    gen_store_gpr(dc, rd, cpu_dst);
4827                    break;
4828                case 0x004: /* VIS I edge16cc */
4829                    CHECK_FPU_FEATURE(dc, VIS1);
4830                    cpu_src1 = gen_load_gpr(dc, rs1);
4831                    cpu_src2 = gen_load_gpr(dc, rs2);
4832                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4833                    gen_store_gpr(dc, rd, cpu_dst);
4834                    break;
4835                case 0x005: /* VIS II edge16n */
4836                    CHECK_FPU_FEATURE(dc, VIS2);
4837                    cpu_src1 = gen_load_gpr(dc, rs1);
4838                    cpu_src2 = gen_load_gpr(dc, rs2);
4839                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4840                    gen_store_gpr(dc, rd, cpu_dst);
4841                    break;
4842                case 0x006: /* VIS I edge16lcc */
4843                    CHECK_FPU_FEATURE(dc, VIS1);
4844                    cpu_src1 = gen_load_gpr(dc, rs1);
4845                    cpu_src2 = gen_load_gpr(dc, rs2);
4846                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4847                    gen_store_gpr(dc, rd, cpu_dst);
4848                    break;
4849                case 0x007: /* VIS II edge16ln */
4850                    CHECK_FPU_FEATURE(dc, VIS2);
4851                    cpu_src1 = gen_load_gpr(dc, rs1);
4852                    cpu_src2 = gen_load_gpr(dc, rs2);
4853                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4854                    gen_store_gpr(dc, rd, cpu_dst);
4855                    break;
4856                case 0x008: /* VIS I edge32cc */
4857                    CHECK_FPU_FEATURE(dc, VIS1);
4858                    cpu_src1 = gen_load_gpr(dc, rs1);
4859                    cpu_src2 = gen_load_gpr(dc, rs2);
4860                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4861                    gen_store_gpr(dc, rd, cpu_dst);
4862                    break;
4863                case 0x009: /* VIS II edge32n */
4864                    CHECK_FPU_FEATURE(dc, VIS2);
4865                    cpu_src1 = gen_load_gpr(dc, rs1);
4866                    cpu_src2 = gen_load_gpr(dc, rs2);
4867                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4868                    gen_store_gpr(dc, rd, cpu_dst);
4869                    break;
4870                case 0x00a: /* VIS I edge32lcc */
4871                    CHECK_FPU_FEATURE(dc, VIS1);
4872                    cpu_src1 = gen_load_gpr(dc, rs1);
4873                    cpu_src2 = gen_load_gpr(dc, rs2);
4874                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4875                    gen_store_gpr(dc, rd, cpu_dst);
4876                    break;
4877                case 0x00b: /* VIS II edge32ln */
4878                    CHECK_FPU_FEATURE(dc, VIS2);
4879                    cpu_src1 = gen_load_gpr(dc, rs1);
4880                    cpu_src2 = gen_load_gpr(dc, rs2);
4881                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4882                    gen_store_gpr(dc, rd, cpu_dst);
4883                    break;
                /* arrayN: array16/array32 reuse the array8 helper and scale
                   its result by the element size (shift left by 1 / 2). */
4884                case 0x010: /* VIS I array8 */
4885                    CHECK_FPU_FEATURE(dc, VIS1);
4886                    cpu_src1 = gen_load_gpr(dc, rs1);
4887                    cpu_src2 = gen_load_gpr(dc, rs2);
4888                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4889                    gen_store_gpr(dc, rd, cpu_dst);
4890                    break;
4891                case 0x012: /* VIS I array16 */
4892                    CHECK_FPU_FEATURE(dc, VIS1);
4893                    cpu_src1 = gen_load_gpr(dc, rs1);
4894                    cpu_src2 = gen_load_gpr(dc, rs2);
4895                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4896                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4897                    gen_store_gpr(dc, rd, cpu_dst);
4898                    break;
4899                case 0x014: /* VIS I array32 */
4900                    CHECK_FPU_FEATURE(dc, VIS1);
4901                    cpu_src1 = gen_load_gpr(dc, rs1);
4902                    cpu_src2 = gen_load_gpr(dc, rs2);
4903                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4904                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4905                    gen_store_gpr(dc, rd, cpu_dst);
4906                    break;
4907                case 0x018: /* VIS I alignaddr */
4908                    CHECK_FPU_FEATURE(dc, VIS1);
4909                    cpu_src1 = gen_load_gpr(dc, rs1);
4910                    cpu_src2 = gen_load_gpr(dc, rs2);
4911                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4912                    gen_store_gpr(dc, rd, cpu_dst);
4913                    break;
4914                case 0x01a: /* VIS I alignaddrl */
4915                    CHECK_FPU_FEATURE(dc, VIS1);
4916                    cpu_src1 = gen_load_gpr(dc, rs1);
4917                    cpu_src2 = gen_load_gpr(dc, rs2);
4918                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4919                    gen_store_gpr(dc, rd, cpu_dst);
4920                    break;
                /* bmask: the gpr sum is also deposited into the upper 32
                   bits of GSR (the mask field). */
4921                case 0x019: /* VIS II bmask */
4922                    CHECK_FPU_FEATURE(dc, VIS2);
4923                    cpu_src1 = gen_load_gpr(dc, rs1);
4924                    cpu_src2 = gen_load_gpr(dc, rs2);
4925                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4926                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4927                    gen_store_gpr(dc, rd, cpu_dst);
4928                    break;
                /* fcmp{le,ne,gt,eq}{16,32}: SIMD compares on 64-bit fpr
                   sources; the result is written to an integer gpr. */
4929                case 0x020: /* VIS I fcmple16 */
4930                    CHECK_FPU_FEATURE(dc, VIS1);
4931                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4932                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4933                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4934                    gen_store_gpr(dc, rd, cpu_dst);
4935                    break;
4936                case 0x022: /* VIS I fcmpne16 */
4937                    CHECK_FPU_FEATURE(dc, VIS1);
4938                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4939                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4940                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4941                    gen_store_gpr(dc, rd, cpu_dst);
4942                    break;
4943                case 0x024: /* VIS I fcmple32 */
4944                    CHECK_FPU_FEATURE(dc, VIS1);
4945                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4946                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4947                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4948                    gen_store_gpr(dc, rd, cpu_dst);
4949                    break;
4950                case 0x026: /* VIS I fcmpne32 */
4951                    CHECK_FPU_FEATURE(dc, VIS1);
4952                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4953                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4954                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4955                    gen_store_gpr(dc, rd, cpu_dst);
4956                    break;
4957                case 0x028: /* VIS I fcmpgt16 */
4958                    CHECK_FPU_FEATURE(dc, VIS1);
4959                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4960                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4961                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4962                    gen_store_gpr(dc, rd, cpu_dst);
4963                    break;
4964                case 0x02a: /* VIS I fcmpeq16 */
4965                    CHECK_FPU_FEATURE(dc, VIS1);
4966                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4967                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4968                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4969                    gen_store_gpr(dc, rd, cpu_dst);
4970                    break;
4971                case 0x02c: /* VIS I fcmpgt32 */
4972                    CHECK_FPU_FEATURE(dc, VIS1);
4973                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4974                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4975                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4976                    gen_store_gpr(dc, rd, cpu_dst);
4977                    break;
4978                case 0x02e: /* VIS I fcmpeq32 */
4979                    CHECK_FPU_FEATURE(dc, VIS1);
4980                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4981                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4982                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4983                    gen_store_gpr(dc, rd, cpu_dst);
4984                    break;
                /* fmul8x16 family: partitioned multiplies, all of the form
                   Drd = helper(Drs1, Drs2) via gen_ne_fop_DDD. */
4985                case 0x031: /* VIS I fmul8x16 */
4986                    CHECK_FPU_FEATURE(dc, VIS1);
4987                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4988                    break;
4989                case 0x033: /* VIS I fmul8x16au */
4990                    CHECK_FPU_FEATURE(dc, VIS1);
4991                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4992                    break;
4993                case 0x035: /* VIS I fmul8x16al */
4994                    CHECK_FPU_FEATURE(dc, VIS1);
4995                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4996                    break;
4997                case 0x036: /* VIS I fmul8sux16 */
4998                    CHECK_FPU_FEATURE(dc, VIS1);
4999                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
5000                    break;
5001                case 0x037: /* VIS I fmul8ulx16 */
5002                    CHECK_FPU_FEATURE(dc, VIS1);
5003                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
5004                    break;
5005                case 0x038: /* VIS I fmuld8sux16 */
5006                    CHECK_FPU_FEATURE(dc, VIS1);
5007                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
5008                    break;
5009                case 0x039: /* VIS I fmuld8ulx16 */
5010                    CHECK_FPU_FEATURE(dc, VIS1);
5011                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
5012                    break;
5013                case 0x03a: /* VIS I fpack32 */
5014                    CHECK_FPU_FEATURE(dc, VIS1);
5015                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
5016                    break;
                /* fpack16/fpackfix: pack a 64-bit source down to a 32-bit
                   fpr result; GSR is passed to the helper (scale field). */
5017                case 0x03b: /* VIS I fpack16 */
5018                    CHECK_FPU_FEATURE(dc, VIS1);
5019                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5020                    cpu_dst_32 = gen_dest_fpr_F(dc);
5021                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
5022                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5023                    break;
5024                case 0x03d: /* VIS I fpackfix */
5025                    CHECK_FPU_FEATURE(dc, VIS1);
5026                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5027                    cpu_dst_32 = gen_dest_fpr_F(dc);
5028                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
5029                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5030                    break;
                /* pdist: DDDD form -- the destination register is also a
                   source operand of the helper. */
5031                case 0x03e: /* VIS I pdist */
5032                    CHECK_FPU_FEATURE(dc, VIS1);
5033                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
5034                    break;
                /* faligndata/bshuffle take GSR as an implicit input
                   (gen_gsr_fop_DDD); fpmerge does not. */
5035                case 0x048: /* VIS I faligndata */
5036                    CHECK_FPU_FEATURE(dc, VIS1);
5037                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
5038                    break;
5039                case 0x04b: /* VIS I fpmerge */
5040                    CHECK_FPU_FEATURE(dc, VIS1);
5041                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
5042                    break;
5043                case 0x04c: /* VIS II bshuffle */
5044                    CHECK_FPU_FEATURE(dc, VIS2);
5045                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
5046                    break;
5047                case 0x04d: /* VIS I fexpand */
5048                    CHECK_FPU_FEATURE(dc, VIS1);
5049                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
5050                    break;
5051                case 0x050: /* VIS I fpadd16 */
5052                    CHECK_FPU_FEATURE(dc, VIS1);
5053                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
5054                    break;
5055                case 0x051: /* VIS I fpadd16s */
5056                    CHECK_FPU_FEATURE(dc, VIS1);
5057                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
5058                    break;
5059                case 0x052: /* VIS I fpadd32 */
5060                    CHECK_FPU_FEATURE(dc, VIS1);
5061                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
5062                    break;
5063                case 0x053: /* VIS I fpadd32s */
5064                    CHECK_FPU_FEATURE(dc, VIS1);
5065                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5066                    break;
5067                case 0x054: /* VIS I fpsub16 */
5068                    CHECK_FPU_FEATURE(dc, VIS1);
5069                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5070                    break;
5071                case 0x055: /* VIS I fpsub16s */
5072                    CHECK_FPU_FEATURE(dc, VIS1);
5073                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5074                    break;
5075                case 0x056: /* VIS I fpsub32 */
5076                    CHECK_FPU_FEATURE(dc, VIS1);
5077                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5078                    break;
5079                case 0x057: /* VIS I fpsub32s */
5080                    CHECK_FPU_FEATURE(dc, VIS1);
5081                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5082                    break;
5083                case 0x060: /* VIS I fzero */
5084                    CHECK_FPU_FEATURE(dc, VIS1);
5085                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5086                    tcg_gen_movi_i64(cpu_dst_64, 0);
5087                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5088                    break;
5089                case 0x061: /* VIS I fzeros */
5090                    CHECK_FPU_FEATURE(dc, VIS1);
5091                    cpu_dst_32 = gen_dest_fpr_F(dc);
5092                    tcg_gen_movi_i32(cpu_dst_32, 0);
5093                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5094                    break;
5095                case 0x062: /* VIS I fnor */
5096                    CHECK_FPU_FEATURE(dc, VIS1);
5097                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5098                    break;
5099                case 0x063: /* VIS I fnors */
5100                    CHECK_FPU_FEATURE(dc, VIS1);
5101                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5102                    break;
5103                case 0x064: /* VIS I fandnot2 */
5104                    CHECK_FPU_FEATURE(dc, VIS1);
5105                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5106                    break;
5107                case 0x065: /* VIS I fandnot2s */
5108                    CHECK_FPU_FEATURE(dc, VIS1);
5109                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5110                    break;
5111                case 0x066: /* VIS I fnot2 */
5112                    CHECK_FPU_FEATURE(dc, VIS1);
5113                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5114                    break;
5115                case 0x067: /* VIS I fnot2s */
5116                    CHECK_FPU_FEATURE(dc, VIS1);
5117                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5118                    break;
5119                case 0x068: /* VIS I fandnot1 */
5120                    CHECK_FPU_FEATURE(dc, VIS1);
5121                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5122                    break;
5123                case 0x069: /* VIS I fandnot1s */
5124                    CHECK_FPU_FEATURE(dc, VIS1);
5125                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5126                    break;
5127                case 0x06a: /* VIS I fnot1 */
5128                    CHECK_FPU_FEATURE(dc, VIS1);
5129                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5130                    break;
5131                case 0x06b: /* VIS I fnot1s */
5132                    CHECK_FPU_FEATURE(dc, VIS1);
5133                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5134                    break;
5135                case 0x06c: /* VIS I fxor */
5136                    CHECK_FPU_FEATURE(dc, VIS1);
5137                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5138                    break;
5139                case 0x06d: /* VIS I fxors */
5140                    CHECK_FPU_FEATURE(dc, VIS1);
5141                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5142                    break;
5143                case 0x06e: /* VIS I fnand */
5144                    CHECK_FPU_FEATURE(dc, VIS1);
5145                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5146                    break;
5147                case 0x06f: /* VIS I fnands */
5148                    CHECK_FPU_FEATURE(dc, VIS1);
5149                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5150                    break;
5151                case 0x070: /* VIS I fand */
5152                    CHECK_FPU_FEATURE(dc, VIS1);
5153                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5154                    break;
5155                case 0x071: /* VIS I fands */
5156                    CHECK_FPU_FEATURE(dc, VIS1);
5157                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5158                    break;
5159                case 0x072: /* VIS I fxnor */
5160                    CHECK_FPU_FEATURE(dc, VIS1);
5161                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5162                    break;
5163                case 0x073: /* VIS I fxnors */
5164                    CHECK_FPU_FEATURE(dc, VIS1);
5165                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5166                    break;
5167                case 0x074: /* VIS I fsrc1 */
5168                    CHECK_FPU_FEATURE(dc, VIS1);
5169                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5170                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5171                    break;
5172                case 0x075: /* VIS I fsrc1s */
5173                    CHECK_FPU_FEATURE(dc, VIS1);
5174                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5175                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5176                    break;
5177                case 0x076: /* VIS I fornot2 */
5178                    CHECK_FPU_FEATURE(dc, VIS1);
5179                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5180                    break;
5181                case 0x077: /* VIS I fornot2s */
5182                    CHECK_FPU_FEATURE(dc, VIS1);
5183                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5184                    break;
5185                case 0x078: /* VIS I fsrc2 */
5186                    CHECK_FPU_FEATURE(dc, VIS1);
5187                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5188                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5189                    break;
5190                case 0x079: /* VIS I fsrc2s */
5191                    CHECK_FPU_FEATURE(dc, VIS1);
5192                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5193                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5194                    break;
5195                case 0x07a: /* VIS I fornot1 */
5196                    CHECK_FPU_FEATURE(dc, VIS1);
5197                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5198                    break;
5199                case 0x07b: /* VIS I fornot1s */
5200                    CHECK_FPU_FEATURE(dc, VIS1);
5201                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5202                    break;
5203                case 0x07c: /* VIS I for */
5204                    CHECK_FPU_FEATURE(dc, VIS1);
5205                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5206                    break;
5207                case 0x07d: /* VIS I fors */
5208                    CHECK_FPU_FEATURE(dc, VIS1);
5209                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5210                    break;
5211                case 0x07e: /* VIS I fone */
5212                    CHECK_FPU_FEATURE(dc, VIS1);
5213                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5214                    tcg_gen_movi_i64(cpu_dst_64, -1);
5215                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5216                    break;
5217                case 0x07f: /* VIS I fones */
5218                    CHECK_FPU_FEATURE(dc, VIS1);
5219                    cpu_dst_32 = gen_dest_fpr_F(dc);
5220                    tcg_gen_movi_i32(cpu_dst_32, -1);
5221                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5222                    break;
5223                case 0x080: /* VIS I shutdown */
5224                case 0x081: /* VIS II siam */
5225                    // XXX
5226                    goto illegal_insn;
5227                default:
5228                    goto illegal_insn;
5229                }
5230#else
5231                goto ncp_insn;
5232#endif
5233            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5234#ifdef TARGET_SPARC64
5235                goto illegal_insn;
5236#else
5237                goto ncp_insn;
5238#endif
5239#ifdef TARGET_SPARC64
5240            } else if (xop == 0x39) { /* V9 return */
5241                save_state(dc);
5242                cpu_src1 = get_src1(dc, insn);
5243                cpu_tmp0 = get_temp_tl(dc);
5244                if (IS_IMM) {   /* immediate */
5245                    simm = GET_FIELDs(insn, 19, 31);
5246                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5247                } else {                /* register */
5248                    rs2 = GET_FIELD(insn, 27, 31);
5249                    if (rs2) {
5250                        cpu_src2 = gen_load_gpr(dc, rs2);
5251                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5252                    } else {
5253                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5254                    }
5255                }
5256                gen_helper_restore(cpu_env);
5257                gen_mov_pc_npc(dc);
5258                gen_check_align(cpu_tmp0, 3);
5259                tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5260                dc->npc = DYNAMIC_PC;
5261                goto jmp_insn;
5262#endif
5263            } else {
5264                cpu_src1 = get_src1(dc, insn);
5265                cpu_tmp0 = get_temp_tl(dc);
5266                if (IS_IMM) {   /* immediate */
5267                    simm = GET_FIELDs(insn, 19, 31);
5268                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5269                } else {                /* register */
5270                    rs2 = GET_FIELD(insn, 27, 31);
5271                    if (rs2) {
5272                        cpu_src2 = gen_load_gpr(dc, rs2);
5273                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5274                    } else {
5275                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5276                    }
5277                }
5278                switch (xop) {
5279                case 0x38:      /* jmpl */
5280                    {
5281                        TCGv t = gen_dest_gpr(dc, rd);
5282                        tcg_gen_movi_tl(t, dc->pc);
5283                        gen_store_gpr(dc, rd, t);
5284
5285                        gen_mov_pc_npc(dc);
5286                        gen_check_align(cpu_tmp0, 3);
5287                        gen_address_mask(dc, cpu_tmp0);
5288                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5289                        dc->npc = DYNAMIC_PC;
5290                    }
5291                    goto jmp_insn;
5292#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5293                case 0x39:      /* rett, V9 return */
5294                    {
5295                        if (!supervisor(dc))
5296                            goto priv_insn;
5297                        gen_mov_pc_npc(dc);
5298                        gen_check_align(cpu_tmp0, 3);
5299                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5300                        dc->npc = DYNAMIC_PC;
5301                        gen_helper_rett(cpu_env);
5302                    }
5303                    goto jmp_insn;
5304#endif
5305                case 0x3b: /* flush */
5306                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5307                        goto unimp_flush;
5308                    /* nop */
5309                    break;
5310                case 0x3c:      /* save */
5311                    gen_helper_save(cpu_env);
5312                    gen_store_gpr(dc, rd, cpu_tmp0);
5313                    break;
5314                case 0x3d:      /* restore */
5315                    gen_helper_restore(cpu_env);
5316                    gen_store_gpr(dc, rd, cpu_tmp0);
5317                    break;
5318#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5319                case 0x3e:      /* V9 done/retry */
5320                    {
5321                        switch (rd) {
5322                        case 0:
5323                            if (!supervisor(dc))
5324                                goto priv_insn;
5325                            dc->npc = DYNAMIC_PC;
5326                            dc->pc = DYNAMIC_PC;
5327                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5328                                gen_io_start();
5329                            }
5330                            gen_helper_done(cpu_env);
5331                            goto jmp_insn;
5332                        case 1:
5333                            if (!supervisor(dc))
5334                                goto priv_insn;
5335                            dc->npc = DYNAMIC_PC;
5336                            dc->pc = DYNAMIC_PC;
5337                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5338                                gen_io_start();
5339                            }
5340                            gen_helper_retry(cpu_env);
5341                            goto jmp_insn;
5342                        default:
5343                            goto illegal_insn;
5344                        }
5345                    }
5346                    break;
5347#endif
5348                default:
5349                    goto illegal_insn;
5350                }
5351            }
5352            break;
5353        }
5354        break;
5355    case 3:                     /* load/store instructions */
5356        {
5357            unsigned int xop = GET_FIELD(insn, 7, 12);
5358            /* ??? gen_address_mask prevents us from using a source
5359               register directly.  Always generate a temporary.  */
5360            TCGv cpu_addr = get_temp_tl(dc);
5361
5362            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5363            if (xop == 0x3c || xop == 0x3e) {
5364                /* V9 casa/casxa : no offset */
5365            } else if (IS_IMM) {     /* immediate */
5366                simm = GET_FIELDs(insn, 19, 31);
5367                if (simm != 0) {
5368                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5369                }
5370            } else {            /* register */
5371                rs2 = GET_FIELD(insn, 27, 31);
5372                if (rs2 != 0) {
5373                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5374                }
5375            }
5376            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5377                (xop > 0x17 && xop <= 0x1d ) ||
5378                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5379                TCGv cpu_val = gen_dest_gpr(dc, rd);
5380
5381                switch (xop) {
5382                case 0x0:       /* ld, V9 lduw, load unsigned word */
5383                    gen_address_mask(dc, cpu_addr);
5384                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5385                    break;
5386                case 0x1:       /* ldub, load unsigned byte */
5387                    gen_address_mask(dc, cpu_addr);
5388                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5389                    break;
5390                case 0x2:       /* lduh, load unsigned halfword */
5391                    gen_address_mask(dc, cpu_addr);
5392                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5393                    break;
5394                case 0x3:       /* ldd, load double word */
5395                    if (rd & 1)
5396                        goto illegal_insn;
5397                    else {
5398                        TCGv_i64 t64;
5399
5400                        gen_address_mask(dc, cpu_addr);
5401                        t64 = tcg_temp_new_i64();
5402                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5403                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5404                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5405                        gen_store_gpr(dc, rd + 1, cpu_val);
5406                        tcg_gen_shri_i64(t64, t64, 32);
5407                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5408                        tcg_temp_free_i64(t64);
5409                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5410                    }
5411                    break;
5412                case 0x9:       /* ldsb, load signed byte */
5413                    gen_address_mask(dc, cpu_addr);
5414                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5415                    break;
5416                case 0xa:       /* ldsh, load signed halfword */
5417                    gen_address_mask(dc, cpu_addr);
5418                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5419                    break;
5420                case 0xd:       /* ldstub */
5421                    gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5422                    break;
5423                case 0x0f:
5424                    /* swap, swap register with memory. Also atomically */
5425                    CHECK_IU_FEATURE(dc, SWAP);
5426                    cpu_src1 = gen_load_gpr(dc, rd);
5427                    gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5428                             dc->mem_idx, MO_TEUL);
5429                    break;
5430#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5431                case 0x10:      /* lda, V9 lduwa, load word alternate */
5432                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5433                    break;
5434                case 0x11:      /* lduba, load unsigned byte alternate */
5435                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5436                    break;
5437                case 0x12:      /* lduha, load unsigned halfword alternate */
5438                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5439                    break;
5440                case 0x13:      /* ldda, load double word alternate */
5441                    if (rd & 1) {
5442                        goto illegal_insn;
5443                    }
5444                    gen_ldda_asi(dc, cpu_addr, insn, rd);
5445                    goto skip_move;
5446                case 0x19:      /* ldsba, load signed byte alternate */
5447                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5448                    break;
5449                case 0x1a:      /* ldsha, load signed halfword alternate */
5450                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5451                    break;
5452                case 0x1d:      /* ldstuba -- XXX: should be atomically */
5453                    gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5454                    break;
5455                case 0x1f:      /* swapa, swap reg with alt. memory. Also
5456                                   atomically */
5457                    CHECK_IU_FEATURE(dc, SWAP);
5458                    cpu_src1 = gen_load_gpr(dc, rd);
5459                    gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5460                    break;
5461
5462#ifndef TARGET_SPARC64
5463                case 0x30: /* ldc */
5464                case 0x31: /* ldcsr */
5465                case 0x33: /* lddc */
5466                    goto ncp_insn;
5467#endif
5468#endif
5469#ifdef TARGET_SPARC64
5470                case 0x08: /* V9 ldsw */
5471                    gen_address_mask(dc, cpu_addr);
5472                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5473                    break;
5474                case 0x0b: /* V9 ldx */
5475                    gen_address_mask(dc, cpu_addr);
5476                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5477                    break;
5478                case 0x18: /* V9 ldswa */
5479                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5480                    break;
5481                case 0x1b: /* V9 ldxa */
5482                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5483                    break;
5484                case 0x2d: /* V9 prefetch, no effect */
5485                    goto skip_move;
5486                case 0x30: /* V9 ldfa */
5487                    if (gen_trap_ifnofpu(dc)) {
5488                        goto jmp_insn;
5489                    }
5490                    gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5491                    gen_update_fprs_dirty(dc, rd);
5492                    goto skip_move;
5493                case 0x33: /* V9 lddfa */
5494                    if (gen_trap_ifnofpu(dc)) {
5495                        goto jmp_insn;
5496                    }
5497                    gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5498                    gen_update_fprs_dirty(dc, DFPREG(rd));
5499                    goto skip_move;
5500                case 0x3d: /* V9 prefetcha, no effect */
5501                    goto skip_move;
5502                case 0x32: /* V9 ldqfa */
5503                    CHECK_FPU_FEATURE(dc, FLOAT128);
5504                    if (gen_trap_ifnofpu(dc)) {
5505                        goto jmp_insn;
5506                    }
5507                    gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5508                    gen_update_fprs_dirty(dc, QFPREG(rd));
5509                    goto skip_move;
5510#endif
5511                default:
5512                    goto illegal_insn;
5513                }
5514                gen_store_gpr(dc, rd, cpu_val);
5515#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5516            skip_move: ;
5517#endif
5518            } else if (xop >= 0x20 && xop < 0x24) {
5519                if (gen_trap_ifnofpu(dc)) {
5520                    goto jmp_insn;
5521                }
5522                switch (xop) {
5523                case 0x20:      /* ldf, load fpreg */
5524                    gen_address_mask(dc, cpu_addr);
5525                    cpu_dst_32 = gen_dest_fpr_F(dc);
5526                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5527                                        dc->mem_idx, MO_TEUL);
5528                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5529                    break;
5530                case 0x21:      /* ldfsr, V9 ldxfsr */
5531#ifdef TARGET_SPARC64
5532                    gen_address_mask(dc, cpu_addr);
5533                    if (rd == 1) {
5534                        TCGv_i64 t64 = tcg_temp_new_i64();
5535                        tcg_gen_qemu_ld_i64(t64, cpu_addr,
5536                                            dc->mem_idx, MO_TEUQ);
5537                        gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5538                        tcg_temp_free_i64(t64);
5539                        break;
5540                    }
5541#endif
5542                    cpu_dst_32 = get_temp_i32(dc);
5543                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5544                                        dc->mem_idx, MO_TEUL);
5545                    gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5546                    break;
5547                case 0x22:      /* ldqf, load quad fpreg */
5548                    CHECK_FPU_FEATURE(dc, FLOAT128);
5549                    gen_address_mask(dc, cpu_addr);
5550                    cpu_src1_64 = tcg_temp_new_i64();
5551                    tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5552                                        MO_TEUQ | MO_ALIGN_4);
5553                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5554                    cpu_src2_64 = tcg_temp_new_i64();
5555                    tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5556                                        MO_TEUQ | MO_ALIGN_4);
5557                    gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5558                    tcg_temp_free_i64(cpu_src1_64);
5559                    tcg_temp_free_i64(cpu_src2_64);
5560                    break;
5561                case 0x23:      /* lddf, load double fpreg */
5562                    gen_address_mask(dc, cpu_addr);
5563                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5564                    tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5565                                        MO_TEUQ | MO_ALIGN_4);
5566                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5567                    break;
5568                default:
5569                    goto illegal_insn;
5570                }
5571            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5572                       xop == 0xe || xop == 0x1e) {
5573                TCGv cpu_val = gen_load_gpr(dc, rd);
5574
5575                switch (xop) {
5576                case 0x4: /* st, store word */
5577                    gen_address_mask(dc, cpu_addr);
5578                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5579                    break;
5580                case 0x5: /* stb, store byte */
5581                    gen_address_mask(dc, cpu_addr);
5582                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5583                    break;
5584                case 0x6: /* sth, store halfword */
5585                    gen_address_mask(dc, cpu_addr);
5586                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5587                    break;
5588                case 0x7: /* std, store double word */
5589                    if (rd & 1)
5590                        goto illegal_insn;
5591                    else {
5592                        TCGv_i64 t64;
5593                        TCGv lo;
5594
5595                        gen_address_mask(dc, cpu_addr);
5596                        lo = gen_load_gpr(dc, rd + 1);
5597                        t64 = tcg_temp_new_i64();
5598                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5599                        tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5600                        tcg_temp_free_i64(t64);
5601                    }
5602                    break;
5603#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5604                case 0x14: /* sta, V9 stwa, store word alternate */
5605                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5606                    break;
5607                case 0x15: /* stba, store byte alternate */
5608                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5609                    break;
5610                case 0x16: /* stha, store halfword alternate */
5611                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5612                    break;
5613                case 0x17: /* stda, store double word alternate */
5614                    if (rd & 1) {
5615                        goto illegal_insn;
5616                    }
5617                    gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5618                    break;
5619#endif
5620#ifdef TARGET_SPARC64
5621                case 0x0e: /* V9 stx */
5622                    gen_address_mask(dc, cpu_addr);
5623                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5624                    break;
5625                case 0x1e: /* V9 stxa */
5626                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5627                    break;
5628#endif
5629                default:
5630                    goto illegal_insn;
5631                }
5632            } else if (xop > 0x23 && xop < 0x28) {
5633                if (gen_trap_ifnofpu(dc)) {
5634                    goto jmp_insn;
5635                }
5636                switch (xop) {
5637                case 0x24: /* stf, store fpreg */
5638                    gen_address_mask(dc, cpu_addr);
5639                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
5640                    tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5641                                        dc->mem_idx, MO_TEUL);
5642                    break;
5643                case 0x25: /* stfsr, V9 stxfsr */
5644                    {
5645#ifdef TARGET_SPARC64
5646                        gen_address_mask(dc, cpu_addr);
5647                        if (rd == 1) {
5648                            tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5649                            break;
5650                        }
5651#endif
5652                        tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5653                    }
5654                    break;
5655                case 0x26:
5656#ifdef TARGET_SPARC64
5657                    /* V9 stqf, store quad fpreg */
5658                    CHECK_FPU_FEATURE(dc, FLOAT128);
5659                    gen_address_mask(dc, cpu_addr);
5660                    /* ??? While stqf only requires 4-byte alignment, it is
5661                       legal for the cpu to signal the unaligned exception.
5662                       The OS trap handler is then required to fix it up.
5663                       For qemu, this avoids having to probe the second page
5664                       before performing the first write.  */
5665                    cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5666                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5667                                        dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5668                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5669                    cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
                    /* Store the SECOND half of the quad register pair.
                       (Previously stored cpu_src1_64 again, writing the
                       first half twice and losing the second half.)  */
5670                    tcg_gen_qemu_st_i64(cpu_src2_64, cpu_addr,
5671                                        dc->mem_idx, MO_TEUQ);
5672                    break;
5673#else /* !TARGET_SPARC64 */
5674                    /* stdfq, store floating point queue */
5675#if defined(CONFIG_USER_ONLY)
5676                    goto illegal_insn;
5677#else
5678                    if (!supervisor(dc))
5679                        goto priv_insn;
5680                    if (gen_trap_ifnofpu(dc)) {
5681                        goto jmp_insn;
5682                    }
5683                    goto nfq_insn;
5684#endif
5685#endif
5686                case 0x27: /* stdf, store double fpreg */
5687                    gen_address_mask(dc, cpu_addr);
5688                    cpu_src1_64 = gen_load_fpr_D(dc, rd);
5689                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5690                                        MO_TEUQ | MO_ALIGN_4);
5691                    break;
5692                default:
5693                    goto illegal_insn;
5694                }
5695            } else if (xop > 0x33 && xop < 0x3f) {
5696                switch (xop) {
5697#ifdef TARGET_SPARC64
5698                case 0x34: /* V9 stfa */
5699                    if (gen_trap_ifnofpu(dc)) {
5700                        goto jmp_insn;
5701                    }
5702                    gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5703                    break;
5704                case 0x36: /* V9 stqfa */
5705                    {
5706                        CHECK_FPU_FEATURE(dc, FLOAT128);
5707                        if (gen_trap_ifnofpu(dc)) {
5708                            goto jmp_insn;
5709                        }
5710                        gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5711                    }
5712                    break;
5713                case 0x37: /* V9 stdfa */
5714                    if (gen_trap_ifnofpu(dc)) {
5715                        goto jmp_insn;
5716                    }
5717                    gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5718                    break;
5719                case 0x3e: /* V9 casxa */
5720                    rs2 = GET_FIELD(insn, 27, 31);
5721                    cpu_src2 = gen_load_gpr(dc, rs2);
5722                    gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5723                    break;
5724#else
5725                case 0x34: /* stc */
5726                case 0x35: /* stcsr */
5727                case 0x36: /* stdcq */
5728                case 0x37: /* stdc */
5729                    goto ncp_insn;
5730#endif
5731#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5732                case 0x3c: /* V9 or LEON3 casa */
5733#ifndef TARGET_SPARC64
5734                    CHECK_IU_FEATURE(dc, CASA);
5735#endif
5736                    rs2 = GET_FIELD(insn, 27, 31);
5737                    cpu_src2 = gen_load_gpr(dc, rs2);
5738                    gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5739                    break;
5740#endif
5741                default:
5742                    goto illegal_insn;
5743                }
5744            } else {
5745                goto illegal_insn;
5746            }
5747        }
5748        break;
5749    }
5750    /* default case for non jump instructions */
5751    if (dc->npc == DYNAMIC_PC) {
5752        dc->pc = DYNAMIC_PC;
5753        gen_op_next_insn();
5754    } else if (dc->npc == JUMP_PC) {
5755        /* we can do a static jump */
5756        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5757        dc->base.is_jmp = DISAS_NORETURN;
5758    } else {
5759        dc->pc = dc->npc;
5760        dc->npc = dc->npc + 4;
5761    }
5762 jmp_insn:
5763    goto egress;
5764 illegal_insn:
5765    gen_exception(dc, TT_ILL_INSN);
5766    goto egress;
5767 unimp_flush:
5768    gen_exception(dc, TT_UNIMP_FLUSH);
5769    goto egress;
5770#if !defined(CONFIG_USER_ONLY)
5771 priv_insn:
5772    gen_exception(dc, TT_PRIV_INSN);
5773    goto egress;
5774#endif
5775 nfpu_insn:
5776    gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5777    goto egress;
5778#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5779 nfq_insn:
5780    gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5781    goto egress;
5782#endif
5783#ifndef TARGET_SPARC64
5784 ncp_insn:
5785    gen_exception(dc, TT_NCP_INSN);
5786    goto egress;
5787#endif
5788 egress:
5789    if (dc->n_t32 != 0) {
5790        int i;
5791        for (i = dc->n_t32 - 1; i >= 0; --i) {
5792            tcg_temp_free_i32(dc->t32[i]);
5793        }
5794        dc->n_t32 = 0;
5795    }
5796    if (dc->n_ttl != 0) {
5797        int i;
5798        for (i = dc->n_ttl - 1; i >= 0; --i) {
5799            tcg_temp_free(dc->ttl[i]);
5800        }
5801        dc->n_ttl = 0;
5802    }
5803}
5804
static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUSPARCState *env = cs->env_ptr;
    int bound;

    dc->pc = dc->base.pc_first;
    /* The next-PC (delay-slot state) is smuggled in through the TB's cs_base. */
    dc->npc = (target_ulong)dc->base.tb->cs_base;
    /* Condition codes start out unknown; computed lazily per insn. */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
    dc->def = &env->def;
    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    /* Default ASI for alternate-space accesses, cached in the TB flags. */
    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif
    /*
     * if we reach a page boundary, we stop generation so that the
     * PC of a TT_TFAULT exception is always in the right page
     */
    /* -(pc | TARGET_PAGE_MASK) is the byte count to the end of the page;
       divide by 4 to get the number of remaining 4-byte insn slots. */
    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
    dc->base.max_insns = MIN(dc->base.max_insns, bound);
}
5835
/* TranslatorOps hook run once before translating a TB; nothing to do here. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5839
/*
 * Record (pc, npc) for this insn so the state can be reconstructed on an
 * exception.  When npc is the JUMP_PC pseudo-value (a conditional delay
 * slot), the taken target is recorded instead, tagged with the JUMP_PC
 * low bit; sparc_restore_state_to_opc() decodes this using env->cond.
 */
static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    if (dc->npc & JUMP_PC) {
        /* The not-taken target is always the following insn. */
        assert(dc->jump_pc[1] == dc->pc + 4);
        tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
    } else {
        tcg_gen_insn_start(dc->pc, dc->npc);
    }
}
5851
5852static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5853{
5854    DisasContext *dc = container_of(dcbase, DisasContext, base);
5855    CPUSPARCState *env = cs->env_ptr;
5856    unsigned int insn;
5857
5858    insn = translator_ldl(env, &dc->base, dc->pc);
5859    dc->base.pc_next += 4;
5860    disas_sparc_insn(dc, insn);
5861
5862    if (dc->base.is_jmp == DISAS_NORETURN) {
5863        return;
5864    }
5865    if (dc->pc != dc->base.pc_next) {
5866        dc->base.is_jmp = DISAS_TOO_MANY;
5867    }
5868}
5869
5870static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5871{
5872    DisasContext *dc = container_of(dcbase, DisasContext, base);
5873
5874    switch (dc->base.is_jmp) {
5875    case DISAS_NEXT:
5876    case DISAS_TOO_MANY:
5877        if (dc->pc != DYNAMIC_PC &&
5878            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5879            /* static PC and NPC: we can use direct chaining */
5880            gen_goto_tb(dc, 0, dc->pc, dc->npc);
5881        } else {
5882            if (dc->pc != DYNAMIC_PC) {
5883                tcg_gen_movi_tl(cpu_pc, dc->pc);
5884            }
5885            save_npc(dc);
5886            tcg_gen_exit_tb(NULL, 0);
5887        }
5888        break;
5889
5890    case DISAS_NORETURN:
5891       break;
5892
5893    case DISAS_EXIT:
5894        /* Exit TB */
5895        save_state(dc);
5896        tcg_gen_exit_tb(NULL, 0);
5897        break;
5898
5899    default:
5900        g_assert_not_reached();
5901    }
5902}
5903
5904static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5905                               CPUState *cpu, FILE *logfile)
5906{
5907    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5908    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5909}
5910
/* Hooks consumed by the generic translator_loop(). */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5919
5920void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns,
5921                           target_ulong pc, void *host_pc)
5922{
5923    DisasContext dc = {};
5924
5925    translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5926}
5927
/*
 * One-time creation of the TCG global variables backing the SPARC CPU
 * state (integer/FP registers, condition codes, pc/npc, control regs).
 */
void sparc_tcg_init(void)
{
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* Even-numbered names only: each i64 global holds a float pair. */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals: TCGv_i32 pointer, env offset, debug name. */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-sized globals, same layout as r32 above. */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    /* Pointer to the current register window, kept in env. */
    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 is hardwired to zero; represented as NULL, handled by callers. */
    cpu_regs[0] = NULL;
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* Windowed registers (%o, %l, %i) live behind regwptr, not env. */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
6013
6014void sparc_restore_state_to_opc(CPUState *cs,
6015                                const TranslationBlock *tb,
6016                                const uint64_t *data)
6017{
6018    SPARCCPU *cpu = SPARC_CPU(cs);
6019    CPUSPARCState *env = &cpu->env;
6020    target_ulong pc = data[0];
6021    target_ulong npc = data[1];
6022
6023    env->pc = pc;
6024    if (npc == DYNAMIC_PC) {
6025        /* dynamic NPC: already stored */
6026    } else if (npc & JUMP_PC) {
6027        /* jump PC: use 'cond' and the jump targets of the translation */
6028        if (env->cond) {
6029            env->npc = npc & ~3;
6030        } else {
6031            env->npc = pc + 4;
6032        }
6033    } else {
6034        env->npc = npc;
6035    }
6036}
6037