/* qemu/target/sparc/translate.c */
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
  20
#include "qemu/osdep.h"

#include "cpu.h"
#include "disas/disas.h"
#include "exec/helper-proto.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/translator.h"
#include "exec/log.h"
#include "asi.h"
  36
  37
  38#define DEBUG_DISAS
  39
  40#define DYNAMIC_PC  1 /* dynamic pc value */
  41#define JUMP_PC     2 /* dynamic pc value which takes only two values
  42                         according to jump_pc[T2] */
  43
  44#define DISAS_EXIT  DISAS_TARGET_0
  45
  46/* global register indexes */
  47static TCGv_ptr cpu_regwptr;
  48static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
  49static TCGv_i32 cpu_cc_op;
  50static TCGv_i32 cpu_psr;
  51static TCGv cpu_fsr, cpu_pc, cpu_npc;
  52static TCGv cpu_regs[32];
  53static TCGv cpu_y;
  54#ifndef CONFIG_USER_ONLY
  55static TCGv cpu_tbr;
  56#endif
  57static TCGv cpu_cond;
  58#ifdef TARGET_SPARC64
  59static TCGv_i32 cpu_xcc, cpu_fprs;
  60static TCGv cpu_gsr;
  61static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
  62static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
  63#else
  64static TCGv cpu_wim;
  65#endif
  66/* Floating point registers */
  67static TCGv_i64 cpu_fpr[TARGET_DPREGS];
  68
  69#include "exec/gen-icount.h"
  70
  71typedef struct DisasContext {
  72    DisasContextBase base;
  73    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
  74    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
  75    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
  76    int mem_idx;
  77    bool fpu_enabled;
  78    bool address_mask_32bit;
  79#ifndef CONFIG_USER_ONLY
  80    bool supervisor;
  81#ifdef TARGET_SPARC64
  82    bool hypervisor;
  83#endif
  84#endif
  85
  86    uint32_t cc_op;  /* current CC operation */
  87    sparc_def_t *def;
  88    TCGv_i32 t32[3];
  89    TCGv ttl[5];
  90    int n_t32;
  91    int n_ttl;
  92#ifdef TARGET_SPARC64
  93    int fprs_dirty;
  94    int asi;
  95#endif
  96} DisasContext;
  97
  98typedef struct {
  99    TCGCond cond;
 100    bool is_bool;
 101    bool g1, g2;
 102    TCGv c1, c2;
 103} DisasCompare;
 104
 105// This function uses non-native bit order
 106#define GET_FIELD(X, FROM, TO)                                  \
 107    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
 108
 109// This function uses the order in the manuals, i.e. bit 0 is 2^0
 110#define GET_FIELD_SP(X, FROM, TO)               \
 111    GET_FIELD(X, 31 - (TO), 31 - (FROM))
 112
 113#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
 114#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
 115
 116#ifdef TARGET_SPARC64
 117#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
 118#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
 119#else
 120#define DFPREG(r) (r & 0x1e)
 121#define QFPREG(r) (r & 0x1c)
 122#endif
 123
 124#define UA2005_HTRAP_MASK 0xff
 125#define V8_TRAP_MASK 0x7f
 126
 127static int sign_extend(int x, int len)
 128{
 129    len = 32 - len;
 130    return (x << len) >> len;
 131}
 132
 133#define IS_IMM (insn & (1<<13))
 134
 135static inline TCGv_i32 get_temp_i32(DisasContext *dc)
 136{
 137    TCGv_i32 t;
 138    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
 139    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
 140    return t;
 141}
 142
 143static inline TCGv get_temp_tl(DisasContext *dc)
 144{
 145    TCGv t;
 146    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
 147    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
 148    return t;
 149}
 150
 151static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
 152{
 153#if defined(TARGET_SPARC64)
 154    int bit = (rd < 32) ? 1 : 2;
 155    /* If we know we've already set this bit within the TB,
 156       we can avoid setting it again.  */
 157    if (!(dc->fprs_dirty & bit)) {
 158        dc->fprs_dirty |= bit;
 159        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
 160    }
 161#endif
 162}
 163
 164/* floating point registers moves */
 165static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
 166{
 167#if TCG_TARGET_REG_BITS == 32
 168    if (src & 1) {
 169        return TCGV_LOW(cpu_fpr[src / 2]);
 170    } else {
 171        return TCGV_HIGH(cpu_fpr[src / 2]);
 172    }
 173#else
 174    TCGv_i32 ret = get_temp_i32(dc);
 175    if (src & 1) {
 176        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
 177    } else {
 178        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
 179    }
 180    return ret;
 181#endif
 182}
 183
 184static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
 185{
 186#if TCG_TARGET_REG_BITS == 32
 187    if (dst & 1) {
 188        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
 189    } else {
 190        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
 191    }
 192#else
 193    TCGv_i64 t = (TCGv_i64)v;
 194    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
 195                        (dst & 1 ? 0 : 32), 32);
 196#endif
 197    gen_update_fprs_dirty(dc, dst);
 198}
 199
 200static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
 201{
 202    return get_temp_i32(dc);
 203}
 204
 205static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
 206{
 207    src = DFPREG(src);
 208    return cpu_fpr[src / 2];
 209}
 210
 211static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
 212{
 213    dst = DFPREG(dst);
 214    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
 215    gen_update_fprs_dirty(dc, dst);
 216}
 217
 218static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
 219{
 220    return cpu_fpr[DFPREG(dst) / 2];
 221}
 222
 223static void gen_op_load_fpr_QT0(unsigned int src)
 224{
 225    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
 226                   offsetof(CPU_QuadU, ll.upper));
 227    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
 228                   offsetof(CPU_QuadU, ll.lower));
 229}
 230
 231static void gen_op_load_fpr_QT1(unsigned int src)
 232{
 233    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
 234                   offsetof(CPU_QuadU, ll.upper));
 235    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
 236                   offsetof(CPU_QuadU, ll.lower));
 237}
 238
 239static void gen_op_store_QT0_fpr(unsigned int dst)
 240{
 241    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
 242                   offsetof(CPU_QuadU, ll.upper));
 243    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
 244                   offsetof(CPU_QuadU, ll.lower));
 245}
 246
 247static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
 248                            TCGv_i64 v1, TCGv_i64 v2)
 249{
 250    dst = QFPREG(dst);
 251
 252    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
 253    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
 254    gen_update_fprs_dirty(dc, dst);
 255}
 256
 257#ifdef TARGET_SPARC64
 258static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
 259{
 260    src = QFPREG(src);
 261    return cpu_fpr[src / 2];
 262}
 263
 264static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
 265{
 266    src = QFPREG(src);
 267    return cpu_fpr[src / 2 + 1];
 268}
 269
 270static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
 271{
 272    rd = QFPREG(rd);
 273    rs = QFPREG(rs);
 274
 275    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
 276    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
 277    gen_update_fprs_dirty(dc, rd);
 278}
 279#endif
 280
 281/* moves */
 282#ifdef CONFIG_USER_ONLY
 283#define supervisor(dc) 0
 284#ifdef TARGET_SPARC64
 285#define hypervisor(dc) 0
 286#endif
 287#else
 288#ifdef TARGET_SPARC64
 289#define hypervisor(dc) (dc->hypervisor)
 290#define supervisor(dc) (dc->supervisor | dc->hypervisor)
 291#else
 292#define supervisor(dc) (dc->supervisor)
 293#endif
 294#endif
 295
 296#ifdef TARGET_SPARC64
 297#ifndef TARGET_ABI32
 298#define AM_CHECK(dc) ((dc)->address_mask_32bit)
 299#else
 300#define AM_CHECK(dc) (1)
 301#endif
 302#endif
 303
 304static inline void gen_address_mask(DisasContext *dc, TCGv addr)
 305{
 306#ifdef TARGET_SPARC64
 307    if (AM_CHECK(dc))
 308        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
 309#endif
 310}
 311
 312static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
 313{
 314    if (reg > 0) {
 315        assert(reg < 32);
 316        return cpu_regs[reg];
 317    } else {
 318        TCGv t = get_temp_tl(dc);
 319        tcg_gen_movi_tl(t, 0);
 320        return t;
 321    }
 322}
 323
 324static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
 325{
 326    if (reg > 0) {
 327        assert(reg < 32);
 328        tcg_gen_mov_tl(cpu_regs[reg], v);
 329    }
 330}
 331
 332static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
 333{
 334    if (reg > 0) {
 335        assert(reg < 32);
 336        return cpu_regs[reg];
 337    } else {
 338        return get_temp_tl(dc);
 339    }
 340}
 341
 342static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
 343                               target_ulong npc)
 344{
 345    if (unlikely(s->base.singlestep_enabled || singlestep)) {
 346        return false;
 347    }
 348
 349#ifndef CONFIG_USER_ONLY
 350    return (pc & TARGET_PAGE_MASK) == (s->base.tb->pc & TARGET_PAGE_MASK) &&
 351           (npc & TARGET_PAGE_MASK) == (s->base.tb->pc & TARGET_PAGE_MASK);
 352#else
 353    return true;
 354#endif
 355}
 356
 357static inline void gen_goto_tb(DisasContext *s, int tb_num,
 358                               target_ulong pc, target_ulong npc)
 359{
 360    if (use_goto_tb(s, pc, npc))  {
 361        /* jump to same page: we can use a direct jump */
 362        tcg_gen_goto_tb(tb_num);
 363        tcg_gen_movi_tl(cpu_pc, pc);
 364        tcg_gen_movi_tl(cpu_npc, npc);
 365        tcg_gen_exit_tb(s->base.tb, tb_num);
 366    } else {
 367        /* jump to another page: currently not optimized */
 368        tcg_gen_movi_tl(cpu_pc, pc);
 369        tcg_gen_movi_tl(cpu_npc, npc);
 370        tcg_gen_exit_tb(NULL, 0);
 371    }
 372}
 373
 374// XXX suboptimal
 375static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
 376{
 377    tcg_gen_extu_i32_tl(reg, src);
 378    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
 379}
 380
 381static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
 382{
 383    tcg_gen_extu_i32_tl(reg, src);
 384    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
 385}
 386
 387static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
 388{
 389    tcg_gen_extu_i32_tl(reg, src);
 390    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
 391}
 392
 393static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
 394{
 395    tcg_gen_extu_i32_tl(reg, src);
 396    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
 397}
 398
 399static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
 400{
 401    tcg_gen_mov_tl(cpu_cc_src, src1);
 402    tcg_gen_mov_tl(cpu_cc_src2, src2);
 403    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
 404    tcg_gen_mov_tl(dst, cpu_cc_dst);
 405}
 406
 407static TCGv_i32 gen_add32_carry32(void)
 408{
 409    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
 410
 411    /* Carry is computed from a previous add: (dst < src)  */
 412#if TARGET_LONG_BITS == 64
 413    cc_src1_32 = tcg_temp_new_i32();
 414    cc_src2_32 = tcg_temp_new_i32();
 415    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
 416    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
 417#else
 418    cc_src1_32 = cpu_cc_dst;
 419    cc_src2_32 = cpu_cc_src;
 420#endif
 421
 422    carry_32 = tcg_temp_new_i32();
 423    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
 424
 425#if TARGET_LONG_BITS == 64
 426    tcg_temp_free_i32(cc_src1_32);
 427    tcg_temp_free_i32(cc_src2_32);
 428#endif
 429
 430    return carry_32;
 431}
 432
 433static TCGv_i32 gen_sub32_carry32(void)
 434{
 435    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
 436
 437    /* Carry is computed from a previous borrow: (src1 < src2)  */
 438#if TARGET_LONG_BITS == 64
 439    cc_src1_32 = tcg_temp_new_i32();
 440    cc_src2_32 = tcg_temp_new_i32();
 441    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
 442    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
 443#else
 444    cc_src1_32 = cpu_cc_src;
 445    cc_src2_32 = cpu_cc_src2;
 446#endif
 447
 448    carry_32 = tcg_temp_new_i32();
 449    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
 450
 451#if TARGET_LONG_BITS == 64
 452    tcg_temp_free_i32(cc_src1_32);
 453    tcg_temp_free_i32(cc_src2_32);
 454#endif
 455
 456    return carry_32;
 457}
 458
 459static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
 460                            TCGv src2, int update_cc)
 461{
 462    TCGv_i32 carry_32;
 463    TCGv carry;
 464
 465    switch (dc->cc_op) {
 466    case CC_OP_DIV:
 467    case CC_OP_LOGIC:
 468        /* Carry is known to be zero.  Fall back to plain ADD.  */
 469        if (update_cc) {
 470            gen_op_add_cc(dst, src1, src2);
 471        } else {
 472            tcg_gen_add_tl(dst, src1, src2);
 473        }
 474        return;
 475
 476    case CC_OP_ADD:
 477    case CC_OP_TADD:
 478    case CC_OP_TADDTV:
 479        if (TARGET_LONG_BITS == 32) {
 480            /* We can re-use the host's hardware carry generation by using
 481               an ADD2 opcode.  We discard the low part of the output.
 482               Ideally we'd combine this operation with the add that
 483               generated the carry in the first place.  */
 484            carry = tcg_temp_new();
 485            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
 486            tcg_temp_free(carry);
 487            goto add_done;
 488        }
 489        carry_32 = gen_add32_carry32();
 490        break;
 491
 492    case CC_OP_SUB:
 493    case CC_OP_TSUB:
 494    case CC_OP_TSUBTV:
 495        carry_32 = gen_sub32_carry32();
 496        break;
 497
 498    default:
 499        /* We need external help to produce the carry.  */
 500        carry_32 = tcg_temp_new_i32();
 501        gen_helper_compute_C_icc(carry_32, cpu_env);
 502        break;
 503    }
 504
 505#if TARGET_LONG_BITS == 64
 506    carry = tcg_temp_new();
 507    tcg_gen_extu_i32_i64(carry, carry_32);
 508#else
 509    carry = carry_32;
 510#endif
 511
 512    tcg_gen_add_tl(dst, src1, src2);
 513    tcg_gen_add_tl(dst, dst, carry);
 514
 515    tcg_temp_free_i32(carry_32);
 516#if TARGET_LONG_BITS == 64
 517    tcg_temp_free(carry);
 518#endif
 519
 520 add_done:
 521    if (update_cc) {
 522        tcg_gen_mov_tl(cpu_cc_src, src1);
 523        tcg_gen_mov_tl(cpu_cc_src2, src2);
 524        tcg_gen_mov_tl(cpu_cc_dst, dst);
 525        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
 526        dc->cc_op = CC_OP_ADDX;
 527    }
 528}
 529
 530static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
 531{
 532    tcg_gen_mov_tl(cpu_cc_src, src1);
 533    tcg_gen_mov_tl(cpu_cc_src2, src2);
 534    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
 535    tcg_gen_mov_tl(dst, cpu_cc_dst);
 536}
 537
 538static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
 539                            TCGv src2, int update_cc)
 540{
 541    TCGv_i32 carry_32;
 542    TCGv carry;
 543
 544    switch (dc->cc_op) {
 545    case CC_OP_DIV:
 546    case CC_OP_LOGIC:
 547        /* Carry is known to be zero.  Fall back to plain SUB.  */
 548        if (update_cc) {
 549            gen_op_sub_cc(dst, src1, src2);
 550        } else {
 551            tcg_gen_sub_tl(dst, src1, src2);
 552        }
 553        return;
 554
 555    case CC_OP_ADD:
 556    case CC_OP_TADD:
 557    case CC_OP_TADDTV:
 558        carry_32 = gen_add32_carry32();
 559        break;
 560
 561    case CC_OP_SUB:
 562    case CC_OP_TSUB:
 563    case CC_OP_TSUBTV:
 564        if (TARGET_LONG_BITS == 32) {
 565            /* We can re-use the host's hardware carry generation by using
 566               a SUB2 opcode.  We discard the low part of the output.
 567               Ideally we'd combine this operation with the add that
 568               generated the carry in the first place.  */
 569            carry = tcg_temp_new();
 570            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
 571            tcg_temp_free(carry);
 572            goto sub_done;
 573        }
 574        carry_32 = gen_sub32_carry32();
 575        break;
 576
 577    default:
 578        /* We need external help to produce the carry.  */
 579        carry_32 = tcg_temp_new_i32();
 580        gen_helper_compute_C_icc(carry_32, cpu_env);
 581        break;
 582    }
 583
 584#if TARGET_LONG_BITS == 64
 585    carry = tcg_temp_new();
 586    tcg_gen_extu_i32_i64(carry, carry_32);
 587#else
 588    carry = carry_32;
 589#endif
 590
 591    tcg_gen_sub_tl(dst, src1, src2);
 592    tcg_gen_sub_tl(dst, dst, carry);
 593
 594    tcg_temp_free_i32(carry_32);
 595#if TARGET_LONG_BITS == 64
 596    tcg_temp_free(carry);
 597#endif
 598
 599 sub_done:
 600    if (update_cc) {
 601        tcg_gen_mov_tl(cpu_cc_src, src1);
 602        tcg_gen_mov_tl(cpu_cc_src2, src2);
 603        tcg_gen_mov_tl(cpu_cc_dst, dst);
 604        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
 605        dc->cc_op = CC_OP_SUBX;
 606    }
 607}
 608
 609static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
 610{
 611    TCGv r_temp, zero, t0;
 612
 613    r_temp = tcg_temp_new();
 614    t0 = tcg_temp_new();
 615
 616    /* old op:
 617    if (!(env->y & 1))
 618        T1 = 0;
 619    */
 620    zero = tcg_const_tl(0);
 621    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
 622    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
 623    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
 624    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
 625                       zero, cpu_cc_src2);
 626    tcg_temp_free(zero);
 627
 628    // b2 = T0 & 1;
 629    // env->y = (b2 << 31) | (env->y >> 1);
 630    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
 631    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);
 632
 633    // b1 = N ^ V;
 634    gen_mov_reg_N(t0, cpu_psr);
 635    gen_mov_reg_V(r_temp, cpu_psr);
 636    tcg_gen_xor_tl(t0, t0, r_temp);
 637    tcg_temp_free(r_temp);
 638
 639    // T0 = (b1 << 31) | (T0 >> 1);
 640    // src1 = T0;
 641    tcg_gen_shli_tl(t0, t0, 31);
 642    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
 643    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
 644    tcg_temp_free(t0);
 645
 646    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
 647
 648    tcg_gen_mov_tl(dst, cpu_cc_dst);
 649}
 650
 651static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
 652{
 653#if TARGET_LONG_BITS == 32
 654    if (sign_ext) {
 655        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
 656    } else {
 657        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
 658    }
 659#else
 660    TCGv t0 = tcg_temp_new_i64();
 661    TCGv t1 = tcg_temp_new_i64();
 662
 663    if (sign_ext) {
 664        tcg_gen_ext32s_i64(t0, src1);
 665        tcg_gen_ext32s_i64(t1, src2);
 666    } else {
 667        tcg_gen_ext32u_i64(t0, src1);
 668        tcg_gen_ext32u_i64(t1, src2);
 669    }
 670
 671    tcg_gen_mul_i64(dst, t0, t1);
 672    tcg_temp_free(t0);
 673    tcg_temp_free(t1);
 674
 675    tcg_gen_shri_i64(cpu_y, dst, 32);
 676#endif
 677}
 678
 679static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
 680{
 681    /* zero-extend truncated operands before multiplication */
 682    gen_op_multiply(dst, src1, src2, 0);
 683}
 684
 685static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
 686{
 687    /* sign-extend truncated operands before multiplication */
 688    gen_op_multiply(dst, src1, src2, 1);
 689}
 690
 691// 1
 692static inline void gen_op_eval_ba(TCGv dst)
 693{
 694    tcg_gen_movi_tl(dst, 1);
 695}
 696
 697// Z
 698static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
 699{
 700    gen_mov_reg_Z(dst, src);
 701}
 702
 703// Z | (N ^ V)
 704static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
 705{
 706    TCGv t0 = tcg_temp_new();
 707    gen_mov_reg_N(t0, src);
 708    gen_mov_reg_V(dst, src);
 709    tcg_gen_xor_tl(dst, dst, t0);
 710    gen_mov_reg_Z(t0, src);
 711    tcg_gen_or_tl(dst, dst, t0);
 712    tcg_temp_free(t0);
 713}
 714
 715// N ^ V
 716static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
 717{
 718    TCGv t0 = tcg_temp_new();
 719    gen_mov_reg_V(t0, src);
 720    gen_mov_reg_N(dst, src);
 721    tcg_gen_xor_tl(dst, dst, t0);
 722    tcg_temp_free(t0);
 723}
 724
 725// C | Z
 726static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
 727{
 728    TCGv t0 = tcg_temp_new();
 729    gen_mov_reg_Z(t0, src);
 730    gen_mov_reg_C(dst, src);
 731    tcg_gen_or_tl(dst, dst, t0);
 732    tcg_temp_free(t0);
 733}
 734
 735// C
 736static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
 737{
 738    gen_mov_reg_C(dst, src);
 739}
 740
 741// V
 742static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
 743{
 744    gen_mov_reg_V(dst, src);
 745}
 746
 747// 0
 748static inline void gen_op_eval_bn(TCGv dst)
 749{
 750    tcg_gen_movi_tl(dst, 0);
 751}
 752
 753// N
 754static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
 755{
 756    gen_mov_reg_N(dst, src);
 757}
 758
 759// !Z
 760static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
 761{
 762    gen_mov_reg_Z(dst, src);
 763    tcg_gen_xori_tl(dst, dst, 0x1);
 764}
 765
 766// !(Z | (N ^ V))
 767static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
 768{
 769    gen_op_eval_ble(dst, src);
 770    tcg_gen_xori_tl(dst, dst, 0x1);
 771}
 772
 773// !(N ^ V)
 774static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
 775{
 776    gen_op_eval_bl(dst, src);
 777    tcg_gen_xori_tl(dst, dst, 0x1);
 778}
 779
 780// !(C | Z)
 781static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
 782{
 783    gen_op_eval_bleu(dst, src);
 784    tcg_gen_xori_tl(dst, dst, 0x1);
 785}
 786
 787// !C
 788static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
 789{
 790    gen_mov_reg_C(dst, src);
 791    tcg_gen_xori_tl(dst, dst, 0x1);
 792}
 793
 794// !N
 795static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
 796{
 797    gen_mov_reg_N(dst, src);
 798    tcg_gen_xori_tl(dst, dst, 0x1);
 799}
 800
 801// !V
 802static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
 803{
 804    gen_mov_reg_V(dst, src);
 805    tcg_gen_xori_tl(dst, dst, 0x1);
 806}
 807
 808/*
 809  FPSR bit field FCC1 | FCC0:
 810   0 =
 811   1 <
 812   2 >
 813   3 unordered
 814*/
 815static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
 816                                    unsigned int fcc_offset)
 817{
 818    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
 819    tcg_gen_andi_tl(reg, reg, 0x1);
 820}
 821
 822static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
 823                                    unsigned int fcc_offset)
 824{
 825    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
 826    tcg_gen_andi_tl(reg, reg, 0x1);
 827}
 828
 829// !0: FCC0 | FCC1
 830static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
 831                                    unsigned int fcc_offset)
 832{
 833    TCGv t0 = tcg_temp_new();
 834    gen_mov_reg_FCC0(dst, src, fcc_offset);
 835    gen_mov_reg_FCC1(t0, src, fcc_offset);
 836    tcg_gen_or_tl(dst, dst, t0);
 837    tcg_temp_free(t0);
 838}
 839
 840// 1 or 2: FCC0 ^ FCC1
 841static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
 842                                    unsigned int fcc_offset)
 843{
 844    TCGv t0 = tcg_temp_new();
 845    gen_mov_reg_FCC0(dst, src, fcc_offset);
 846    gen_mov_reg_FCC1(t0, src, fcc_offset);
 847    tcg_gen_xor_tl(dst, dst, t0);
 848    tcg_temp_free(t0);
 849}
 850
 851// 1 or 3: FCC0
 852static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
 853                                    unsigned int fcc_offset)
 854{
 855    gen_mov_reg_FCC0(dst, src, fcc_offset);
 856}
 857
 858// 1: FCC0 & !FCC1
 859static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
 860                                    unsigned int fcc_offset)
 861{
 862    TCGv t0 = tcg_temp_new();
 863    gen_mov_reg_FCC0(dst, src, fcc_offset);
 864    gen_mov_reg_FCC1(t0, src, fcc_offset);
 865    tcg_gen_andc_tl(dst, dst, t0);
 866    tcg_temp_free(t0);
 867}
 868
 869// 2 or 3: FCC1
 870static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
 871                                    unsigned int fcc_offset)
 872{
 873    gen_mov_reg_FCC1(dst, src, fcc_offset);
 874}
 875
 876// 2: !FCC0 & FCC1
 877static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
 878                                    unsigned int fcc_offset)
 879{
 880    TCGv t0 = tcg_temp_new();
 881    gen_mov_reg_FCC0(dst, src, fcc_offset);
 882    gen_mov_reg_FCC1(t0, src, fcc_offset);
 883    tcg_gen_andc_tl(dst, t0, dst);
 884    tcg_temp_free(t0);
 885}
 886
 887// 3: FCC0 & FCC1
 888static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
 889                                    unsigned int fcc_offset)
 890{
 891    TCGv t0 = tcg_temp_new();
 892    gen_mov_reg_FCC0(dst, src, fcc_offset);
 893    gen_mov_reg_FCC1(t0, src, fcc_offset);
 894    tcg_gen_and_tl(dst, dst, t0);
 895    tcg_temp_free(t0);
 896}
 897
 898// 0: !(FCC0 | FCC1)
 899static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
 900                                    unsigned int fcc_offset)
 901{
 902    TCGv t0 = tcg_temp_new();
 903    gen_mov_reg_FCC0(dst, src, fcc_offset);
 904    gen_mov_reg_FCC1(t0, src, fcc_offset);
 905    tcg_gen_or_tl(dst, dst, t0);
 906    tcg_gen_xori_tl(dst, dst, 0x1);
 907    tcg_temp_free(t0);
 908}
 909
 910// 0 or 3: !(FCC0 ^ FCC1)
 911static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
 912                                    unsigned int fcc_offset)
 913{
 914    TCGv t0 = tcg_temp_new();
 915    gen_mov_reg_FCC0(dst, src, fcc_offset);
 916    gen_mov_reg_FCC1(t0, src, fcc_offset);
 917    tcg_gen_xor_tl(dst, dst, t0);
 918    tcg_gen_xori_tl(dst, dst, 0x1);
 919    tcg_temp_free(t0);
 920}
 921
 922// 0 or 2: !FCC0
 923static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
 924                                    unsigned int fcc_offset)
 925{
 926    gen_mov_reg_FCC0(dst, src, fcc_offset);
 927    tcg_gen_xori_tl(dst, dst, 0x1);
 928}
 929
 930// !1: !(FCC0 & !FCC1)
 931static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
 932                                    unsigned int fcc_offset)
 933{
 934    TCGv t0 = tcg_temp_new();
 935    gen_mov_reg_FCC0(dst, src, fcc_offset);
 936    gen_mov_reg_FCC1(t0, src, fcc_offset);
 937    tcg_gen_andc_tl(dst, dst, t0);
 938    tcg_gen_xori_tl(dst, dst, 0x1);
 939    tcg_temp_free(t0);
 940}
 941
 942// 0 or 1: !FCC1
 943static inline void gen_op_eval_fble(TCGv dst, TCGv src,
 944                                    unsigned int fcc_offset)
 945{
 946    gen_mov_reg_FCC1(dst, src, fcc_offset);
 947    tcg_gen_xori_tl(dst, dst, 0x1);
 948}
 949
 950// !2: !(!FCC0 & FCC1)
 951static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
 952                                    unsigned int fcc_offset)
 953{
 954    TCGv t0 = tcg_temp_new();
 955    gen_mov_reg_FCC0(dst, src, fcc_offset);
 956    gen_mov_reg_FCC1(t0, src, fcc_offset);
 957    tcg_gen_andc_tl(dst, t0, dst);
 958    tcg_gen_xori_tl(dst, dst, 0x1);
 959    tcg_temp_free(t0);
 960}
 961
 962// !3: !(FCC0 & FCC1)
 963static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
 964                                    unsigned int fcc_offset)
 965{
 966    TCGv t0 = tcg_temp_new();
 967    gen_mov_reg_FCC0(dst, src, fcc_offset);
 968    gen_mov_reg_FCC1(t0, src, fcc_offset);
 969    tcg_gen_and_tl(dst, dst, t0);
 970    tcg_gen_xori_tl(dst, dst, 0x1);
 971    tcg_temp_free(t0);
 972}
 973
 974static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
 975                               target_ulong pc2, TCGv r_cond)
 976{
 977    TCGLabel *l1 = gen_new_label();
 978
 979    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
 980
 981    gen_goto_tb(dc, 0, pc1, pc1 + 4);
 982
 983    gen_set_label(l1);
 984    gen_goto_tb(dc, 1, pc2, pc2 + 4);
 985}
 986
 987static void gen_branch_a(DisasContext *dc, target_ulong pc1)
 988{
 989    TCGLabel *l1 = gen_new_label();
 990    target_ulong npc = dc->npc;
 991
 992    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);
 993
 994    gen_goto_tb(dc, 0, npc, pc1);
 995
 996    gen_set_label(l1);
 997    gen_goto_tb(dc, 1, npc + 4, npc + 8);
 998
 999    dc->base.is_jmp = DISAS_NORETURN;
1000}
1001
1002static void gen_branch_n(DisasContext *dc, target_ulong pc1)
1003{
1004    target_ulong npc = dc->npc;
1005
1006    if (likely(npc != DYNAMIC_PC)) {
1007        dc->pc = npc;
1008        dc->jump_pc[0] = pc1;
1009        dc->jump_pc[1] = npc + 4;
1010        dc->npc = JUMP_PC;
1011    } else {
1012        TCGv t, z;
1013
1014        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1015
1016        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1017        t = tcg_const_tl(pc1);
1018        z = tcg_const_tl(0);
1019        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
1020        tcg_temp_free(t);
1021        tcg_temp_free(z);
1022
1023        dc->pc = DYNAMIC_PC;
1024    }
1025}
1026
1027static inline void gen_generic_branch(DisasContext *dc)
1028{
1029    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
1030    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1031    TCGv zero = tcg_const_tl(0);
1032
1033    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1034
1035    tcg_temp_free(npc0);
1036    tcg_temp_free(npc1);
1037    tcg_temp_free(zero);
1038}
1039
1040/* call this function before using the condition register as it may
1041   have been set for a jump */
1042static inline void flush_cond(DisasContext *dc)
1043{
1044    if (dc->npc == JUMP_PC) {
1045        gen_generic_branch(dc);
1046        dc->npc = DYNAMIC_PC;
1047    }
1048}
1049
1050static inline void save_npc(DisasContext *dc)
1051{
1052    if (dc->npc == JUMP_PC) {
1053        gen_generic_branch(dc);
1054        dc->npc = DYNAMIC_PC;
1055    } else if (dc->npc != DYNAMIC_PC) {
1056        tcg_gen_movi_tl(cpu_npc, dc->npc);
1057    }
1058}
1059
1060static inline void update_psr(DisasContext *dc)
1061{
1062    if (dc->cc_op != CC_OP_FLAGS) {
1063        dc->cc_op = CC_OP_FLAGS;
1064        gen_helper_compute_psr(cpu_env);
1065    }
1066}
1067
1068static inline void save_state(DisasContext *dc)
1069{
1070    tcg_gen_movi_tl(cpu_pc, dc->pc);
1071    save_npc(dc);
1072}
1073
1074static void gen_exception(DisasContext *dc, int which)
1075{
1076    TCGv_i32 t;
1077
1078    save_state(dc);
1079    t = tcg_const_i32(which);
1080    gen_helper_raise_exception(cpu_env, t);
1081    tcg_temp_free_i32(t);
1082    dc->base.is_jmp = DISAS_NORETURN;
1083}
1084
1085static void gen_check_align(TCGv addr, int mask)
1086{
1087    TCGv_i32 r_mask = tcg_const_i32(mask);
1088    gen_helper_check_align(cpu_env, addr, r_mask);
1089    tcg_temp_free_i32(r_mask);
1090}
1091
1092static inline void gen_mov_pc_npc(DisasContext *dc)
1093{
1094    if (dc->npc == JUMP_PC) {
1095        gen_generic_branch(dc);
1096        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1097        dc->pc = DYNAMIC_PC;
1098    } else if (dc->npc == DYNAMIC_PC) {
1099        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1100        dc->pc = DYNAMIC_PC;
1101    } else {
1102        dc->pc = dc->npc;
1103    }
1104}
1105
/* Advance the CPU's pc/npc by one instruction: pc = npc, npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1111
/* Release the temporaries held by CMP.  Operands flagged as globals
   (g1/g2) are owned elsewhere and are left alone.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1121
1122static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1123                        DisasContext *dc)
1124{
1125    static int subcc_cond[16] = {
1126        TCG_COND_NEVER,
1127        TCG_COND_EQ,
1128        TCG_COND_LE,
1129        TCG_COND_LT,
1130        TCG_COND_LEU,
1131        TCG_COND_LTU,
1132        -1, /* neg */
1133        -1, /* overflow */
1134        TCG_COND_ALWAYS,
1135        TCG_COND_NE,
1136        TCG_COND_GT,
1137        TCG_COND_GE,
1138        TCG_COND_GTU,
1139        TCG_COND_GEU,
1140        -1, /* pos */
1141        -1, /* no overflow */
1142    };
1143
1144    static int logic_cond[16] = {
1145        TCG_COND_NEVER,
1146        TCG_COND_EQ,     /* eq:  Z */
1147        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1148        TCG_COND_LT,     /* lt:  N ^ V -> N */
1149        TCG_COND_EQ,     /* leu: C | Z -> Z */
1150        TCG_COND_NEVER,  /* ltu: C -> 0 */
1151        TCG_COND_LT,     /* neg: N */
1152        TCG_COND_NEVER,  /* vs:  V -> 0 */
1153        TCG_COND_ALWAYS,
1154        TCG_COND_NE,     /* ne:  !Z */
1155        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1156        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1157        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1158        TCG_COND_ALWAYS, /* geu: !C -> 1 */
1159        TCG_COND_GE,     /* pos: !N */
1160        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1161    };
1162
1163    TCGv_i32 r_src;
1164    TCGv r_dst;
1165
1166#ifdef TARGET_SPARC64
1167    if (xcc) {
1168        r_src = cpu_xcc;
1169    } else {
1170        r_src = cpu_psr;
1171    }
1172#else
1173    r_src = cpu_psr;
1174#endif
1175
1176    switch (dc->cc_op) {
1177    case CC_OP_LOGIC:
1178        cmp->cond = logic_cond[cond];
1179    do_compare_dst_0:
1180        cmp->is_bool = false;
1181        cmp->g2 = false;
1182        cmp->c2 = tcg_const_tl(0);
1183#ifdef TARGET_SPARC64
1184        if (!xcc) {
1185            cmp->g1 = false;
1186            cmp->c1 = tcg_temp_new();
1187            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1188            break;
1189        }
1190#endif
1191        cmp->g1 = true;
1192        cmp->c1 = cpu_cc_dst;
1193        break;
1194
1195    case CC_OP_SUB:
1196        switch (cond) {
1197        case 6:  /* neg */
1198        case 14: /* pos */
1199            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1200            goto do_compare_dst_0;
1201
1202        case 7: /* overflow */
1203        case 15: /* !overflow */
1204            goto do_dynamic;
1205
1206        default:
1207            cmp->cond = subcc_cond[cond];
1208            cmp->is_bool = false;
1209#ifdef TARGET_SPARC64
1210            if (!xcc) {
1211                /* Note that sign-extension works for unsigned compares as
1212                   long as both operands are sign-extended.  */
1213                cmp->g1 = cmp->g2 = false;
1214                cmp->c1 = tcg_temp_new();
1215                cmp->c2 = tcg_temp_new();
1216                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1217                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1218                break;
1219            }
1220#endif
1221            cmp->g1 = cmp->g2 = true;
1222            cmp->c1 = cpu_cc_src;
1223            cmp->c2 = cpu_cc_src2;
1224            break;
1225        }
1226        break;
1227
1228    default:
1229    do_dynamic:
1230        gen_helper_compute_psr(cpu_env);
1231        dc->cc_op = CC_OP_FLAGS;
1232        /* FALLTHRU */
1233
1234    case CC_OP_FLAGS:
1235        /* We're going to generate a boolean result.  */
1236        cmp->cond = TCG_COND_NE;
1237        cmp->is_bool = true;
1238        cmp->g1 = cmp->g2 = false;
1239        cmp->c1 = r_dst = tcg_temp_new();
1240        cmp->c2 = tcg_const_tl(0);
1241
1242        switch (cond) {
1243        case 0x0:
1244            gen_op_eval_bn(r_dst);
1245            break;
1246        case 0x1:
1247            gen_op_eval_be(r_dst, r_src);
1248            break;
1249        case 0x2:
1250            gen_op_eval_ble(r_dst, r_src);
1251            break;
1252        case 0x3:
1253            gen_op_eval_bl(r_dst, r_src);
1254            break;
1255        case 0x4:
1256            gen_op_eval_bleu(r_dst, r_src);
1257            break;
1258        case 0x5:
1259            gen_op_eval_bcs(r_dst, r_src);
1260            break;
1261        case 0x6:
1262            gen_op_eval_bneg(r_dst, r_src);
1263            break;
1264        case 0x7:
1265            gen_op_eval_bvs(r_dst, r_src);
1266            break;
1267        case 0x8:
1268            gen_op_eval_ba(r_dst);
1269            break;
1270        case 0x9:
1271            gen_op_eval_bne(r_dst, r_src);
1272            break;
1273        case 0xa:
1274            gen_op_eval_bg(r_dst, r_src);
1275            break;
1276        case 0xb:
1277            gen_op_eval_bge(r_dst, r_src);
1278            break;
1279        case 0xc:
1280            gen_op_eval_bgu(r_dst, r_src);
1281            break;
1282        case 0xd:
1283            gen_op_eval_bcc(r_dst, r_src);
1284            break;
1285        case 0xe:
1286            gen_op_eval_bpos(r_dst, r_src);
1287            break;
1288        case 0xf:
1289            gen_op_eval_bvc(r_dst, r_src);
1290            break;
1291        }
1292        break;
1293    }
1294}
1295
/* Set up *CMP to test floating-point condition COND against the
   %fccN field (N = CC) of the FSR.  The result is always produced as
   a boolean in a fresh temporary; release with free_compare().  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offset of the selected fcc field, relative to fcc0's
       position in the FSR (the eval helpers add that base).  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1375
1376static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1377                     DisasContext *dc)
1378{
1379    DisasCompare cmp;
1380    gen_compare(&cmp, cc, cond, dc);
1381
1382    /* The interface is to return a boolean in r_dst.  */
1383    if (cmp.is_bool) {
1384        tcg_gen_mov_tl(r_dst, cmp.c1);
1385    } else {
1386        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1387    }
1388
1389    free_compare(&cmp);
1390}
1391
1392static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1393{
1394    DisasCompare cmp;
1395    gen_fcompare(&cmp, cc, cond);
1396
1397    /* The interface is to return a boolean in r_dst.  */
1398    if (cmp.is_bool) {
1399        tcg_gen_mov_tl(r_dst, cmp.c1);
1400    } else {
1401        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1402    }
1403
1404    free_compare(&cmp);
1405}
1406
1407#ifdef TARGET_SPARC64
/* Map the 3-bit branch-on-register condition field to the TCG
   condition for the *opposite* test; users invert it via
   tcg_invert_cond().  -1 entries are reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1419
/* Set up *CMP for a branch-on-register test: compare R_SRC against
   zero with condition COND.  R_SRC is owned by the caller (g1 set),
   so free_compare() releases only the zero constant.  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    /* The table stores the inverted condition; undo that here.  */
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1429
1430static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1431{
1432    DisasCompare cmp;
1433    gen_compare_reg(&cmp, cond, r_src);
1434
1435    /* The interface is to return a boolean in r_dst.  */
1436    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1437
1438    free_compare(&cmp);
1439}
1440#endif
1441
/* Translate a conditional branch on the integer condition codes
   (Bicc/BPcc).  OFFSET is the sign-extended displacement, CC selects
   icc vs xcc, and the annul bit ('a', insn bit 29) controls whether
   the delay slot is executed.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* Wrap the target to 32 bits under address masking.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken; if annulled, skip the delay slot too */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: jump directly, skipping the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            /* execute the delay slot, then jump */
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional: evaluate the condition into cpu_cond and let
           gen_branch_a/gen_branch_n handle annul semantics */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1481
/* Translate a conditional branch on the floating-point condition
   codes (FBfcc/FBPfcc).  CC selects the %fccN field; the annul bit
   ('a', insn bit 29) controls delay-slot execution.  Mirrors
   do_branch() above.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* Wrap the target to 32 bits under address masking.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken; if annulled, skip the delay slot too */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: jump directly, skipping the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            /* execute the delay slot, then jump */
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional: evaluate the fp condition into cpu_cond */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1521
1522#ifdef TARGET_SPARC64
/* Translate a branch-on-integer-register insn (BPr): test R_REG
   against zero and branch; 'a' (insn bit 29) is the annul bit.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* Wrap the target to 32 bits under address masking.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1540
/* Floating-point compare dispatchers (sparc64): each selects the
   helper for the %fccN field named by FCCNO; the helpers write the
   comparison result into cpu_fsr.  The "e" variants are the
   signaling (exception on unordered) forms.  */

/* FCMPs: single-precision compare into %fccN.  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPd: double-precision compare into %fccN.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPq: quad-precision compare into %fccN; operands come from the
   QT0/QT1 staging registers in env.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}

/* FCMPEs: signaling single-precision compare into %fccN.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPEd: signaling double-precision compare into %fccN.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPEq: signaling quad-precision compare into %fccN.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1648
1649#else
1650
/* Pre-v9 variants: only one fcc field exists, so FCCNO is ignored
   and the plain helpers are always used.  */

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1680#endif
1681
/* Raise a floating-point exception with the FSR ftt field replaced
   by FSR_FLAGS.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Clear the old ftt field, then set the new one and trap.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1688
/* If the FPU is disabled, raise TT_NFPU_INSN and return nonzero so
   the caller can abandon translation of the insn.  The check
   compiles away in user-only builds, where the FPU is always on.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1699
/* Clear the FSR ftt and current-exception (cexc) fields.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1704
/* Templates for translating FP operations.  Letters name operand
   widths: F = 32-bit, D = 64-bit.  The plain gen_fop_* forms call
   check_ieee_exceptions after the op; the gen_ne_fop_* forms do
   not.  */

/* dst(F) = gen(src(F)), with IEEE exception check.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* dst(F) = gen(src(F)), no exception check, no env.  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

/* dst(F) = gen(src1(F), src2(F)), with IEEE exception check.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* dst(F) = gen(src1(F), src2(F)), no exception check, no env.  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

/* dst(D) = gen(src(D)), with IEEE exception check.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* dst(D) = gen(src(D)), no exception check, no env.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* dst(D) = gen(src1(D), src2(D)), with IEEE exception check.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
/* dst(D) = gen(src1(D), src2(D)), no exception check, no env.  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* dst(D) = gen(%gsr, src1(D), src2(D)); the op reads the GSR.  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* dst(D) = gen(rd(D), src1(D), src2(D)); rd is also a source.  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
1850#endif
1851
/* Templates involving quad (Q, 128-bit) and mixed-width operands.
   Quad values are staged through the QT0/QT1 registers in env; after
   a quad result is stored, the fprs-dirty state is updated.  */

/* QT0 = gen(QT1), with IEEE exception check.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

#ifdef TARGET_SPARC64
/* QT0 = gen(QT1), no exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
#endif

/* QT0 = gen(QT0, QT1), with IEEE exception check.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

/* dst(D) = gen(src1(F), src2(F)), with IEEE exception check.  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

/* QT0 = gen(src1(D), src2(D)), with IEEE exception check.  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

#ifdef TARGET_SPARC64
/* dst(D) = gen(src(F)), with IEEE exception check.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

/* dst(D) = gen(src(F)); takes env but skips the exception check.  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

/* dst(F) = gen(src(D)), with IEEE exception check.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* dst(F) = gen(QT1), with IEEE exception check.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* dst(D) = gen(QT1), with IEEE exception check.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

/* QT0 = gen(src(F)), no exception check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

/* QT0 = gen(src(D)), no exception check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2020
/* SWAP: atomically exchange SRC with the value at ADDR; the old
   memory value lands in DST.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2027
/* LDSTUB: atomically load the byte at ADDR into DST and set the
   memory byte to 0xff.  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
    tcg_temp_free(m1);
}
2035
2036/* asi moves */
2037#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How an ASI access should be implemented by the translator.  */
typedef enum {
    GET_ASI_HELPER,  /* no inline form; defer to the ASI helpers */
    GET_ASI_EXCP,    /* an exception was generated; emit nothing more */
    GET_ASI_DIRECT,  /* plain load/store using mem_idx */
    GET_ASI_DTWINX,  /* twin/quad 128-bit load/store */
    GET_ASI_BLOCK,   /* block transfer ASI */
    GET_ASI_SHORT,   /* short floating-point access ASI */
    GET_ASI_BCOPY,   /* sparc32 block copy (sta) */
    GET_ASI_BFILL,   /* sparc32 block fill (stda) */
} ASIType;

/* Decoded ASI information for one memory-access instruction.  */
typedef struct {
    ASIType type;    /* implementation strategy (above) */
    int asi;         /* resolved ASI number */
    int mem_idx;     /* MMU index for direct accesses */
    TCGMemOp memop;  /* size/endianness of the access */
} DisasASI;
2055
2056static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
2057{
2058    int asi = GET_FIELD(insn, 19, 26);
2059    ASIType type = GET_ASI_HELPER;
2060    int mem_idx = dc->mem_idx;
2061
2062#ifndef TARGET_SPARC64
2063    /* Before v9, all asis are immediate and privileged.  */
2064    if (IS_IMM) {
2065        gen_exception(dc, TT_ILL_INSN);
2066        type = GET_ASI_EXCP;
2067    } else if (supervisor(dc)
2068               /* Note that LEON accepts ASI_USERDATA in user mode, for
2069                  use with CASA.  Also note that previous versions of
2070                  QEMU allowed (and old versions of gcc emitted) ASI_P
2071                  for LEON, which is incorrect.  */
2072               || (asi == ASI_USERDATA
2073                   && (dc->def->features & CPU_FEATURE_CASA))) {
2074        switch (asi) {
2075        case ASI_USERDATA:   /* User data access */
2076            mem_idx = MMU_USER_IDX;
2077            type = GET_ASI_DIRECT;
2078            break;
2079        case ASI_KERNELDATA: /* Supervisor data access */
2080            mem_idx = MMU_KERNEL_IDX;
2081            type = GET_ASI_DIRECT;
2082            break;
2083        case ASI_M_BYPASS:    /* MMU passthrough */
2084        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2085            mem_idx = MMU_PHYS_IDX;
2086            type = GET_ASI_DIRECT;
2087            break;
2088        case ASI_M_BCOPY: /* Block copy, sta access */
2089            mem_idx = MMU_KERNEL_IDX;
2090            type = GET_ASI_BCOPY;
2091            break;
2092        case ASI_M_BFILL: /* Block fill, stda access */
2093            mem_idx = MMU_KERNEL_IDX;
2094            type = GET_ASI_BFILL;
2095            break;
2096        }
2097
2098        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
2099         * permissions check in get_physical_address(..).
2100         */
2101        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
2102    } else {
2103        gen_exception(dc, TT_PRIV_INSN);
2104        type = GET_ASI_EXCP;
2105    }
2106#else
2107    if (IS_IMM) {
2108        asi = dc->asi;
2109    }
2110    /* With v9, all asis below 0x80 are privileged.  */
2111    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2112       down that bit into DisasContext.  For the moment that's ok,
2113       since the direct implementations below doesn't have any ASIs
2114       in the restricted [0x30, 0x7f] range, and the check will be
2115       done properly in the helper.  */
2116    if (!supervisor(dc) && asi < 0x80) {
2117        gen_exception(dc, TT_PRIV_ACT);
2118        type = GET_ASI_EXCP;
2119    } else {
2120        switch (asi) {
2121        case ASI_REAL:      /* Bypass */
2122        case ASI_REAL_IO:   /* Bypass, non-cacheable */
2123        case ASI_REAL_L:    /* Bypass LE */
2124        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2125        case ASI_TWINX_REAL:   /* Real address, twinx */
2126        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2127        case ASI_QUAD_LDD_PHYS:
2128        case ASI_QUAD_LDD_PHYS_L:
2129            mem_idx = MMU_PHYS_IDX;
2130            break;
2131        case ASI_N:  /* Nucleus */
2132        case ASI_NL: /* Nucleus LE */
2133        case ASI_TWINX_N:
2134        case ASI_TWINX_NL:
2135        case ASI_NUCLEUS_QUAD_LDD:
2136        case ASI_NUCLEUS_QUAD_LDD_L:
2137            if (hypervisor(dc)) {
2138                mem_idx = MMU_PHYS_IDX;
2139            } else {
2140                mem_idx = MMU_NUCLEUS_IDX;
2141            }
2142            break;
2143        case ASI_AIUP:  /* As if user primary */
2144        case ASI_AIUPL: /* As if user primary LE */
2145        case ASI_TWINX_AIUP:
2146        case ASI_TWINX_AIUP_L:
2147        case ASI_BLK_AIUP_4V:
2148        case ASI_BLK_AIUP_L_4V:
2149        case ASI_BLK_AIUP:
2150        case ASI_BLK_AIUPL:
2151            mem_idx = MMU_USER_IDX;
2152            break;
2153        case ASI_AIUS:  /* As if user secondary */
2154        case ASI_AIUSL: /* As if user secondary LE */
2155        case ASI_TWINX_AIUS:
2156        case ASI_TWINX_AIUS_L:
2157        case ASI_BLK_AIUS_4V:
2158        case ASI_BLK_AIUS_L_4V:
2159        case ASI_BLK_AIUS:
2160        case ASI_BLK_AIUSL:
2161            mem_idx = MMU_USER_SECONDARY_IDX;
2162            break;
2163        case ASI_S:  /* Secondary */
2164        case ASI_SL: /* Secondary LE */
2165        case ASI_TWINX_S:
2166        case ASI_TWINX_SL:
2167        case ASI_BLK_COMMIT_S:
2168        case ASI_BLK_S:
2169        case ASI_BLK_SL:
2170        case ASI_FL8_S:
2171        case ASI_FL8_SL:
2172        case ASI_FL16_S:
2173        case ASI_FL16_SL:
2174            if (mem_idx == MMU_USER_IDX) {
2175                mem_idx = MMU_USER_SECONDARY_IDX;
2176            } else if (mem_idx == MMU_KERNEL_IDX) {
2177                mem_idx = MMU_KERNEL_SECONDARY_IDX;
2178            }
2179            break;
2180        case ASI_P:  /* Primary */
2181        case ASI_PL: /* Primary LE */
2182        case ASI_TWINX_P:
2183        case ASI_TWINX_PL:
2184        case ASI_BLK_COMMIT_P:
2185        case ASI_BLK_P:
2186        case ASI_BLK_PL:
2187        case ASI_FL8_P:
2188        case ASI_FL8_PL:
2189        case ASI_FL16_P:
2190        case ASI_FL16_PL:
2191            break;
2192        }
2193        switch (asi) {
2194        case ASI_REAL:
2195        case ASI_REAL_IO:
2196        case ASI_REAL_L:
2197        case ASI_REAL_IO_L:
2198        case ASI_N:
2199        case ASI_NL:
2200        case ASI_AIUP:
2201        case ASI_AIUPL:
2202        case ASI_AIUS:
2203        case ASI_AIUSL:
2204        case ASI_S:
2205        case ASI_SL:
2206        case ASI_P:
2207        case ASI_PL:
2208            type = GET_ASI_DIRECT;
2209            break;
2210        case ASI_TWINX_REAL:
2211        case ASI_TWINX_REAL_L:
2212        case ASI_TWINX_N:
2213        case ASI_TWINX_NL:
2214        case ASI_TWINX_AIUP:
2215        case ASI_TWINX_AIUP_L:
2216        case ASI_TWINX_AIUS:
2217        case ASI_TWINX_AIUS_L:
2218        case ASI_TWINX_P:
2219        case ASI_TWINX_PL:
2220        case ASI_TWINX_S:
2221        case ASI_TWINX_SL:
2222        case ASI_QUAD_LDD_PHYS:
2223        case ASI_QUAD_LDD_PHYS_L:
2224        case ASI_NUCLEUS_QUAD_LDD:
2225        case ASI_NUCLEUS_QUAD_LDD_L:
2226            type = GET_ASI_DTWINX;
2227            break;
2228        case ASI_BLK_COMMIT_P:
2229        case ASI_BLK_COMMIT_S:
2230        case ASI_BLK_AIUP_4V:
2231        case ASI_BLK_AIUP_L_4V:
2232        case ASI_BLK_AIUP:
2233        case ASI_BLK_AIUPL:
2234        case ASI_BLK_AIUS_4V:
2235        case ASI_BLK_AIUS_L_4V:
2236        case ASI_BLK_AIUS:
2237        case ASI_BLK_AIUSL:
2238        case ASI_BLK_S:
2239        case ASI_BLK_SL:
2240        case ASI_BLK_P:
2241        case ASI_BLK_PL:
2242            type = GET_ASI_BLOCK;
2243            break;
2244        case ASI_FL8_S:
2245        case ASI_FL8_SL:
2246        case ASI_FL8_P:
2247        case ASI_FL8_PL:
2248            memop = MO_UB;
2249            type = GET_ASI_SHORT;
2250            break;
2251        case ASI_FL16_S:
2252        case ASI_FL16_SL:
2253        case ASI_FL16_P:
2254        case ASI_FL16_PL:
2255            memop = MO_TEUW;
2256            type = GET_ASI_SHORT;
2257            break;
2258        }
2259        /* The little-endian asis all have bit 3 set.  */
2260        if (asi & 8) {
2261            memop ^= MO_BSWAP;
2262        }
2263    }
2264#endif
2265
2266    return (DisasASI){ type, asi, mem_idx, memop };
2267}
2268
/* Generate code for an integer load through an alternate address space
   (LDA/LDUBA/...).  MEMOP gives the access size/sign.  Direct ASIs are
   emitted as an inline qemu_ld; anything else falls back to the ld_asi
   helper.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already queued the privilege exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            /* The helper may raise an exception; make pc/npc visible.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; narrow to target size.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2306
/* Generate code for an integer store through an alternate address space
   (STA/STBA/...).  MEMOP gives the access size.  Direct ASIs are emitted
   as an inline qemu_st; anything else falls back to the st_asi helper.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already queued the privilege exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
        /* fall through */
#endif
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            /* The helper may raise an exception; make pc/npc visible.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; zero-extend the source.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2390
2391static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2392                         TCGv addr, int insn)
2393{
2394    DisasASI da = get_asi(dc, insn, MO_TEUL);
2395
2396    switch (da.type) {
2397    case GET_ASI_EXCP:
2398        break;
2399    case GET_ASI_DIRECT:
2400        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2401        break;
2402    default:
2403        /* ??? Should be DAE_invalid_asi.  */
2404        gen_exception(dc, TT_DATA_ACCESS);
2405        break;
2406    }
2407}
2408
2409static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2410                        int insn, int rd)
2411{
2412    DisasASI da = get_asi(dc, insn, MO_TEUL);
2413    TCGv oldv;
2414
2415    switch (da.type) {
2416    case GET_ASI_EXCP:
2417        return;
2418    case GET_ASI_DIRECT:
2419        oldv = tcg_temp_new();
2420        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2421                                  da.mem_idx, da.memop);
2422        gen_store_gpr(dc, rd, oldv);
2423        tcg_temp_free(oldv);
2424        break;
2425    default:
2426        /* ??? Should be DAE_invalid_asi.  */
2427        gen_exception(dc, TT_DATA_ACCESS);
2428        break;
2429    }
2430}
2431
/* Generate code for LDSTUBA: atomically load the byte at ADDR into DST
   and store 0xff back.  Direct ASIs use the inline atomic expansion;
   other ASIs fall back to a non-atomic helper-based load/store pair.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The ld/st helper sequence below is not atomic; punt to
               the serialized slow path when other vCPUs might race.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* Helpers may fault; make pc/npc visible first.  */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2471#endif
2472
2473#ifdef TARGET_SPARC64
/* Generate code for a floating-point load with an alternate ASI
   (ldfa/lddfa/ldqfa).  SIZE is the access width in bytes (4, 8 or 16);
   RD is the FP destination register number.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already queued by get_asi.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the first half into a temp so that a fault on the
               second access leaves the destination pair unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the direct case, buffer the first half in a temp
                   so a fault on the second access leaves rd unmodified.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2587
/* Generate code for a floating-point store with an alternate ASI
   (stfa/stdfa/stqfa).  SIZE is the access width in bytes (4, 8 or 16);
   RD is the FP source register number.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already queued by get_asi.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2670
/* Generate code for sparc64 LDDA/LDTXA: load into the GPR pair rd/rd+1
   from an alternate address space.  TWINX ASIs load two full 64-bit
   words; the others load one 64-bit word split into 32-bit halves.
   (On TARGET_SPARC64, TCGv is TCGv_i64, so the i64 declarations below
   match gen_dest_gpr's return type.)  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already queued by get_asi.  */
        return;

    case GET_ASI_DTWINX:
        /* 128-bit atomic-pair load; first access checks alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2736
/* Generate code for sparc64 STDA/STTXA: store the GPR pair rd/rd+1 to
   an alternate address space.  TWINX ASIs store two full 64-bit words;
   the others pack the two 32-bit halves into one 64-bit store.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already queued by get_asi.  */
        break;

    case GET_ASI_DTWINX:
        /* 128-bit pair store; first access checks alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2796
2797static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2798                         int insn, int rd)
2799{
2800    DisasASI da = get_asi(dc, insn, MO_TEQ);
2801    TCGv oldv;
2802
2803    switch (da.type) {
2804    case GET_ASI_EXCP:
2805        return;
2806    case GET_ASI_DIRECT:
2807        oldv = tcg_temp_new();
2808        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2809                                  da.mem_idx, da.memop);
2810        gen_store_gpr(dc, rd, oldv);
2811        tcg_temp_free(oldv);
2812        break;
2813    default:
2814        /* ??? Should be DAE_invalid_asi.  */
2815        gen_exception(dc, TT_DATA_ACCESS);
2816        break;
2817    }
2818}
2819
2820#elif !defined(CONFIG_USER_ONLY)
/* Generate code for 32-bit SPARC LDDA: load one 64-bit doubleword from
   an alternate address space and split it into the even/odd GPR pair
   rd (high half) and rd+1 (low half).  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already queued by get_asi.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            /* The helper may fault; make pc/npc visible first.  */
            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* hi receives the most-significant 32 bits, lo the least.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2858
/* Generate code for 32-bit SPARC STDA: store the GPR pair rd (high
   half) / rd+1 (low half) as one 64-bit doubleword to an alternate
   address space.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already queued by get_asi.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            /* The helper may fault; make pc/npc visible first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2911#endif
2912
2913static TCGv get_src1(DisasContext *dc, unsigned int insn)
2914{
2915    unsigned int rs1 = GET_FIELD(insn, 13, 17);
2916    return gen_load_gpr(dc, rs1);
2917}
2918
2919static TCGv get_src2(DisasContext *dc, unsigned int insn)
2920{
2921    if (IS_IMM) { /* immediate */
2922        target_long simm = GET_FIELDs(insn, 19, 31);
2923        TCGv t = get_temp_tl(dc);
2924        tcg_gen_movi_tl(t, simm);
2925        return t;
2926    } else {      /* register */
2927        unsigned int rs2 = GET_FIELD(insn, 27, 31);
2928        return gen_load_gpr(dc, rs2);
2929    }
2930}
2931
2932#ifdef TARGET_SPARC64
/* Conditional move of a single-precision FP register:
   frd = cmp ? frs : frd.  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds a 0/1 boolean; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Evaluate the comparison in 64 bits, then narrow.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2961
2962static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2963{
2964    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2965    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2966                        gen_load_fpr_D(dc, rs),
2967                        gen_load_fpr_D(dc, rd));
2968    gen_store_fpr_D(dc, rd, dst);
2969}
2970
2971static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2972{
2973    int qd = QFPREG(rd);
2974    int qs = QFPREG(rs);
2975
2976    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2977                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2978    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2979                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2980
2981    gen_update_fprs_dirty(dc, qd);
2982}
2983
2984#ifndef CONFIG_USER_ONLY
2985static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
2986{
2987    TCGv_i32 r_tl = tcg_temp_new_i32();
2988
2989    /* load env->tl into r_tl */
2990    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2991
2992    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2993    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2994
2995    /* calculate offset to current trap state from env->ts, reuse r_tl */
2996    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2997    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2998
2999    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
3000    {
3001        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
3002        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
3003        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
3004        tcg_temp_free_ptr(r_tl_tmp);
3005    }
3006
3007    tcg_temp_free_i32(r_tl);
3008}
3009#endif
3010
/* Generate code for the VIS EDGE instructions: DST receives the edge
   mask for element WIDTH (8, 16 or 32 bits) computed from addresses S1
   and S2.  If CC is set, the condition codes are updated as for a
   subcc of S1 - S2.  LEFT selects the left- vs right-edge tables.  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Mask the addresses down to the aligned doubleword (32 bits under
       AM) for the equality test below.  NOTE(review): this masks S1 and
       S2 in place; if a caller ever passes the canonical GPR TCGvs
       rather than disposable copies, their values would be clobbered —
       verify against the call sites.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3113
3114static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3115{
3116    TCGv tmp = tcg_temp_new();
3117
3118    tcg_gen_add_tl(tmp, s1, s2);
3119    tcg_gen_andi_tl(dst, tmp, -8);
3120    if (left) {
3121        tcg_gen_neg_tl(tmp, tmp);
3122    }
3123    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3124
3125    tcg_temp_free(tmp);
3126}
3127
3128static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3129{
3130    TCGv t1, t2, shift;
3131
3132    t1 = tcg_temp_new();
3133    t2 = tcg_temp_new();
3134    shift = tcg_temp_new();
3135
3136    tcg_gen_andi_tl(shift, gsr, 7);
3137    tcg_gen_shli_tl(shift, shift, 3);
3138    tcg_gen_shl_tl(t1, s1, shift);
3139
3140    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3141       shift of (up to 63) followed by a constant shift of 1.  */
3142    tcg_gen_xori_tl(shift, shift, 63);
3143    tcg_gen_shr_tl(t2, s2, shift);
3144    tcg_gen_shri_tl(t2, t2, 1);
3145
3146    tcg_gen_or_tl(dst, t1, t2);
3147
3148    tcg_temp_free(t1);
3149    tcg_temp_free(t2);
3150    tcg_temp_free(shift);
3151}
3152#endif
3153
/* Bail out of disas_sparc_insn via its "illegal_insn" label when the
   emulated CPU model lacks the given integer-unit feature bit.
   Wrapped in do/while(0) so the macro expands to a single statement
   and cannot mis-bind a following "else" at a call site.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                               \
    do {                                                            \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) {     \
            goto illegal_insn;                                      \
        }                                                           \
    } while (0)
/* Likewise, but branches to the "nfpu_insn" label (FPU disabled /
   unimplemented FPU feature trap) when the FPU feature is absent.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                              \
    do {                                                            \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) {     \
            goto nfpu_insn;                                         \
        }                                                           \
    } while (0)
3160
3161/* before an instruction, dc->pc must be static */
3162static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3163{
3164    unsigned int opc, rs1, rs2, rd;
3165    TCGv cpu_src1, cpu_src2;
3166    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3167    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3168    target_long simm;
3169
3170    opc = GET_FIELD(insn, 0, 1);
3171    rd = GET_FIELD(insn, 2, 6);
3172
3173    switch (opc) {
3174    case 0:                     /* branches/sethi */
3175        {
3176            unsigned int xop = GET_FIELD(insn, 7, 9);
3177            int32_t target;
3178            switch (xop) {
3179#ifdef TARGET_SPARC64
3180            case 0x1:           /* V9 BPcc */
3181                {
3182                    int cc;
3183
3184                    target = GET_FIELD_SP(insn, 0, 18);
3185                    target = sign_extend(target, 19);
3186                    target <<= 2;
3187                    cc = GET_FIELD_SP(insn, 20, 21);
3188                    if (cc == 0)
3189                        do_branch(dc, target, insn, 0);
3190                    else if (cc == 2)
3191                        do_branch(dc, target, insn, 1);
3192                    else
3193                        goto illegal_insn;
3194                    goto jmp_insn;
3195                }
3196            case 0x3:           /* V9 BPr */
3197                {
3198                    target = GET_FIELD_SP(insn, 0, 13) |
3199                        (GET_FIELD_SP(insn, 20, 21) << 14);
3200                    target = sign_extend(target, 16);
3201                    target <<= 2;
3202                    cpu_src1 = get_src1(dc, insn);
3203                    do_branch_reg(dc, target, insn, cpu_src1);
3204                    goto jmp_insn;
3205                }
3206            case 0x5:           /* V9 FBPcc */
3207                {
3208                    int cc = GET_FIELD_SP(insn, 20, 21);
3209                    if (gen_trap_ifnofpu(dc)) {
3210                        goto jmp_insn;
3211                    }
3212                    target = GET_FIELD_SP(insn, 0, 18);
3213                    target = sign_extend(target, 19);
3214                    target <<= 2;
3215                    do_fbranch(dc, target, insn, cc);
3216                    goto jmp_insn;
3217                }
3218#else
3219            case 0x7:           /* CBN+x */
3220                {
3221                    goto ncp_insn;
3222                }
3223#endif
3224            case 0x2:           /* BN+x */
3225                {
3226                    target = GET_FIELD(insn, 10, 31);
3227                    target = sign_extend(target, 22);
3228                    target <<= 2;
3229                    do_branch(dc, target, insn, 0);
3230                    goto jmp_insn;
3231                }
3232            case 0x6:           /* FBN+x */
3233                {
3234                    if (gen_trap_ifnofpu(dc)) {
3235                        goto jmp_insn;
3236                    }
3237                    target = GET_FIELD(insn, 10, 31);
3238                    target = sign_extend(target, 22);
3239                    target <<= 2;
3240                    do_fbranch(dc, target, insn, 0);
3241                    goto jmp_insn;
3242                }
3243            case 0x4:           /* SETHI */
3244                /* Special-case %g0 because that's the canonical nop.  */
3245                if (rd) {
3246                    uint32_t value = GET_FIELD(insn, 10, 31);
3247                    TCGv t = gen_dest_gpr(dc, rd);
3248                    tcg_gen_movi_tl(t, value << 10);
3249                    gen_store_gpr(dc, rd, t);
3250                }
3251                break;
3252            case 0x0:           /* UNIMPL */
3253            default:
3254                goto illegal_insn;
3255            }
3256            break;
3257        }
3258        break;
3259    case 1:                     /*CALL*/
3260        {
3261            target_long target = GET_FIELDs(insn, 2, 31) << 2;
3262            TCGv o7 = gen_dest_gpr(dc, 15);
3263
3264            tcg_gen_movi_tl(o7, dc->pc);
3265            gen_store_gpr(dc, 15, o7);
3266            target += dc->pc;
3267            gen_mov_pc_npc(dc);
3268#ifdef TARGET_SPARC64
3269            if (unlikely(AM_CHECK(dc))) {
3270                target &= 0xffffffffULL;
3271            }
3272#endif
3273            dc->npc = target;
3274        }
3275        goto jmp_insn;
3276    case 2:                     /* FPU & Logical Operations */
3277        {
3278            unsigned int xop = GET_FIELD(insn, 7, 12);
3279            TCGv cpu_dst = get_temp_tl(dc);
3280            TCGv cpu_tmp0;
3281
3282            if (xop == 0x3a) {  /* generate trap */
3283                int cond = GET_FIELD(insn, 3, 6);
3284                TCGv_i32 trap;
3285                TCGLabel *l1 = NULL;
3286                int mask;
3287
3288                if (cond == 0) {
3289                    /* Trap never.  */
3290                    break;
3291                }
3292
3293                save_state(dc);
3294
3295                if (cond != 8) {
3296                    /* Conditional trap.  */
3297                    DisasCompare cmp;
3298#ifdef TARGET_SPARC64
3299                    /* V9 icc/xcc */
3300                    int cc = GET_FIELD_SP(insn, 11, 12);
3301                    if (cc == 0) {
3302                        gen_compare(&cmp, 0, cond, dc);
3303                    } else if (cc == 2) {
3304                        gen_compare(&cmp, 1, cond, dc);
3305                    } else {
3306                        goto illegal_insn;
3307                    }
3308#else
3309                    gen_compare(&cmp, 0, cond, dc);
3310#endif
3311                    l1 = gen_new_label();
3312                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3313                                      cmp.c1, cmp.c2, l1);
3314                    free_compare(&cmp);
3315                }
3316
3317                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3318                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3319
3320                /* Don't use the normal temporaries, as they may well have
3321                   gone out of scope with the branch above.  While we're
3322                   doing that we might as well pre-truncate to 32-bit.  */
3323                trap = tcg_temp_new_i32();
3324
3325                rs1 = GET_FIELD_SP(insn, 14, 18);
3326                if (IS_IMM) {
3327                    rs2 = GET_FIELD_SP(insn, 0, 7);
3328                    if (rs1 == 0) {
3329                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3330                        /* Signal that the trap value is fully constant.  */
3331                        mask = 0;
3332                    } else {
3333                        TCGv t1 = gen_load_gpr(dc, rs1);
3334                        tcg_gen_trunc_tl_i32(trap, t1);
3335                        tcg_gen_addi_i32(trap, trap, rs2);
3336                    }
3337                } else {
3338                    TCGv t1, t2;
3339                    rs2 = GET_FIELD_SP(insn, 0, 4);
3340                    t1 = gen_load_gpr(dc, rs1);
3341                    t2 = gen_load_gpr(dc, rs2);
3342                    tcg_gen_add_tl(t1, t1, t2);
3343                    tcg_gen_trunc_tl_i32(trap, t1);
3344                }
3345                if (mask != 0) {
3346                    tcg_gen_andi_i32(trap, trap, mask);
3347                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
3348                }
3349
3350                gen_helper_raise_exception(cpu_env, trap);
3351                tcg_temp_free_i32(trap);
3352
3353                if (cond == 8) {
3354                    /* An unconditional trap ends the TB.  */
3355                    dc->base.is_jmp = DISAS_NORETURN;
3356                    goto jmp_insn;
3357                } else {
3358                    /* A conditional trap falls through to the next insn.  */
3359                    gen_set_label(l1);
3360                    break;
3361                }
3362            } else if (xop == 0x28) {
3363                rs1 = GET_FIELD(insn, 13, 17);
3364                switch(rs1) {
3365                case 0: /* rdy */
3366#ifndef TARGET_SPARC64
3367                case 0x01 ... 0x0e: /* undefined in the SPARCv8
3368                                       manual, rdy on the microSPARC
3369                                       II */
3370                case 0x0f:          /* stbar in the SPARCv8 manual,
3371                                       rdy on the microSPARC II */
3372                case 0x10 ... 0x1f: /* implementation-dependent in the
3373                                       SPARCv8 manual, rdy on the
3374                                       microSPARC II */
3375                    /* Read Asr17 */
3376                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3377                        TCGv t = gen_dest_gpr(dc, rd);
3378                        /* Read Asr17 for a Leon3 monoprocessor */
3379                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3380                        gen_store_gpr(dc, rd, t);
3381                        break;
3382                    }
3383#endif
3384                    gen_store_gpr(dc, rd, cpu_y);
3385                    break;
3386#ifdef TARGET_SPARC64
3387                case 0x2: /* V9 rdccr */
3388                    update_psr(dc);
3389                    gen_helper_rdccr(cpu_dst, cpu_env);
3390                    gen_store_gpr(dc, rd, cpu_dst);
3391                    break;
3392                case 0x3: /* V9 rdasi */
3393                    tcg_gen_movi_tl(cpu_dst, dc->asi);
3394                    gen_store_gpr(dc, rd, cpu_dst);
3395                    break;
3396                case 0x4: /* V9 rdtick */
3397                    {
3398                        TCGv_ptr r_tickptr;
3399                        TCGv_i32 r_const;
3400
3401                        r_tickptr = tcg_temp_new_ptr();
3402                        r_const = tcg_const_i32(dc->mem_idx);
3403                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3404                                       offsetof(CPUSPARCState, tick));
3405                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3406                            gen_io_start();
3407                        }
3408                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3409                                                  r_const);
3410                        tcg_temp_free_ptr(r_tickptr);
3411                        tcg_temp_free_i32(r_const);
3412                        gen_store_gpr(dc, rd, cpu_dst);
3413                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3414                            gen_io_end();
3415                        }
3416                    }
3417                    break;
3418                case 0x5: /* V9 rdpc */
3419                    {
3420                        TCGv t = gen_dest_gpr(dc, rd);
3421                        if (unlikely(AM_CHECK(dc))) {
3422                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3423                        } else {
3424                            tcg_gen_movi_tl(t, dc->pc);
3425                        }
3426                        gen_store_gpr(dc, rd, t);
3427                    }
3428                    break;
3429                case 0x6: /* V9 rdfprs */
3430                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3431                    gen_store_gpr(dc, rd, cpu_dst);
3432                    break;
3433                case 0xf: /* V9 membar */
3434                    break; /* no effect */
3435                case 0x13: /* Graphics Status */
3436                    if (gen_trap_ifnofpu(dc)) {
3437                        goto jmp_insn;
3438                    }
3439                    gen_store_gpr(dc, rd, cpu_gsr);
3440                    break;
3441                case 0x16: /* Softint */
3442                    tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3443                                     offsetof(CPUSPARCState, softint));
3444                    gen_store_gpr(dc, rd, cpu_dst);
3445                    break;
3446                case 0x17: /* Tick compare */
3447                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
3448                    break;
3449                case 0x18: /* System tick */
3450                    {
3451                        TCGv_ptr r_tickptr;
3452                        TCGv_i32 r_const;
3453
3454                        r_tickptr = tcg_temp_new_ptr();
3455                        r_const = tcg_const_i32(dc->mem_idx);
3456                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3457                                       offsetof(CPUSPARCState, stick));
3458                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3459                            gen_io_start();
3460                        }
3461                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3462                                                  r_const);
3463                        tcg_temp_free_ptr(r_tickptr);
3464                        tcg_temp_free_i32(r_const);
3465                        gen_store_gpr(dc, rd, cpu_dst);
3466                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3467                            gen_io_end();
3468                        }
3469                    }
3470                    break;
3471                case 0x19: /* System tick compare */
3472                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
3473                    break;
3474                case 0x1a: /* UltraSPARC-T1 Strand status */
3475                    /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3476                     * this ASR as impl. dep
3477                     */
3478                    CHECK_IU_FEATURE(dc, HYPV);
3479                    {
3480                        TCGv t = gen_dest_gpr(dc, rd);
3481                        tcg_gen_movi_tl(t, 1UL);
3482                        gen_store_gpr(dc, rd, t);
3483                    }
3484                    break;
3485                case 0x10: /* Performance Control */
3486                case 0x11: /* Performance Instrumentation Counter */
3487                case 0x12: /* Dispatch Control */
3488                case 0x14: /* Softint set, WO */
3489                case 0x15: /* Softint clear, WO */
3490#endif
3491                default:
3492                    goto illegal_insn;
3493                }
3494#if !defined(CONFIG_USER_ONLY)
3495            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3496#ifndef TARGET_SPARC64
3497                if (!supervisor(dc)) {
3498                    goto priv_insn;
3499                }
3500                update_psr(dc);
3501                gen_helper_rdpsr(cpu_dst, cpu_env);
3502#else
3503                CHECK_IU_FEATURE(dc, HYPV);
3504                if (!hypervisor(dc))
3505                    goto priv_insn;
3506                rs1 = GET_FIELD(insn, 13, 17);
3507                switch (rs1) {
3508                case 0: // hpstate
3509                    tcg_gen_ld_i64(cpu_dst, cpu_env,
3510                                   offsetof(CPUSPARCState, hpstate));
3511                    break;
3512                case 1: // htstate
3513                    // gen_op_rdhtstate();
3514                    break;
3515                case 3: // hintp
3516                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3517                    break;
3518                case 5: // htba
3519                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
3520                    break;
3521                case 6: // hver
3522                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
3523                    break;
3524                case 31: // hstick_cmpr
3525                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3526                    break;
3527                default:
3528                    goto illegal_insn;
3529                }
3530#endif
3531                gen_store_gpr(dc, rd, cpu_dst);
3532                break;
3533            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3534                if (!supervisor(dc)) {
3535                    goto priv_insn;
3536                }
3537                cpu_tmp0 = get_temp_tl(dc);
3538#ifdef TARGET_SPARC64
3539                rs1 = GET_FIELD(insn, 13, 17);
3540                switch (rs1) {
3541                case 0: // tpc
3542                    {
3543                        TCGv_ptr r_tsptr;
3544
3545                        r_tsptr = tcg_temp_new_ptr();
3546                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3547                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3548                                      offsetof(trap_state, tpc));
3549                        tcg_temp_free_ptr(r_tsptr);
3550                    }
3551                    break;
3552                case 1: // tnpc
3553                    {
3554                        TCGv_ptr r_tsptr;
3555
3556                        r_tsptr = tcg_temp_new_ptr();
3557                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3558                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3559                                      offsetof(trap_state, tnpc));
3560                        tcg_temp_free_ptr(r_tsptr);
3561                    }
3562                    break;
3563                case 2: // tstate
3564                    {
3565                        TCGv_ptr r_tsptr;
3566
3567                        r_tsptr = tcg_temp_new_ptr();
3568                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3569                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3570                                      offsetof(trap_state, tstate));
3571                        tcg_temp_free_ptr(r_tsptr);
3572                    }
3573                    break;
3574                case 3: // tt
3575                    {
3576                        TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3577
3578                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3579                        tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3580                                         offsetof(trap_state, tt));
3581                        tcg_temp_free_ptr(r_tsptr);
3582                    }
3583                    break;
3584                case 4: // tick
3585                    {
3586                        TCGv_ptr r_tickptr;
3587                        TCGv_i32 r_const;
3588
3589                        r_tickptr = tcg_temp_new_ptr();
3590                        r_const = tcg_const_i32(dc->mem_idx);
3591                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3592                                       offsetof(CPUSPARCState, tick));
3593                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3594                            gen_io_start();
3595                        }
3596                        gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3597                                                  r_tickptr, r_const);
3598                        tcg_temp_free_ptr(r_tickptr);
3599                        tcg_temp_free_i32(r_const);
3600                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3601                            gen_io_end();
3602                        }
3603                    }
3604                    break;
3605                case 5: // tba
3606                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3607                    break;
3608                case 6: // pstate
3609                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3610                                     offsetof(CPUSPARCState, pstate));
3611                    break;
3612                case 7: // tl
3613                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3614                                     offsetof(CPUSPARCState, tl));
3615                    break;
3616                case 8: // pil
3617                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3618                                     offsetof(CPUSPARCState, psrpil));
3619                    break;
3620                case 9: // cwp
3621                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
3622                    break;
3623                case 10: // cansave
3624                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3625                                     offsetof(CPUSPARCState, cansave));
3626                    break;
3627                case 11: // canrestore
3628                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3629                                     offsetof(CPUSPARCState, canrestore));
3630                    break;
3631                case 12: // cleanwin
3632                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3633                                     offsetof(CPUSPARCState, cleanwin));
3634                    break;
3635                case 13: // otherwin
3636                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3637                                     offsetof(CPUSPARCState, otherwin));
3638                    break;
3639                case 14: // wstate
3640                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3641                                     offsetof(CPUSPARCState, wstate));
3642                    break;
3643                case 16: // UA2005 gl
3644                    CHECK_IU_FEATURE(dc, GL);
3645                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3646                                     offsetof(CPUSPARCState, gl));
3647                    break;
3648                case 26: // UA2005 strand status
3649                    CHECK_IU_FEATURE(dc, HYPV);
3650                    if (!hypervisor(dc))
3651                        goto priv_insn;
3652                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3653                    break;
3654                case 31: // ver
3655                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3656                    break;
3657                case 15: // fq
3658                default:
3659                    goto illegal_insn;
3660                }
3661#else
3662                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3663#endif
3664                gen_store_gpr(dc, rd, cpu_tmp0);
3665                break;
3666            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3667#ifdef TARGET_SPARC64
3668                gen_helper_flushw(cpu_env);
3669#else
3670                if (!supervisor(dc))
3671                    goto priv_insn;
3672                gen_store_gpr(dc, rd, cpu_tbr);
3673#endif
3674                break;
3675#endif
3676            } else if (xop == 0x34) {   /* FPU Operations */
3677                if (gen_trap_ifnofpu(dc)) {
3678                    goto jmp_insn;
3679                }
3680                gen_op_clear_ieee_excp_and_FTT();
3681                rs1 = GET_FIELD(insn, 13, 17);
3682                rs2 = GET_FIELD(insn, 27, 31);
3683                xop = GET_FIELD(insn, 18, 26);
3684
3685                switch (xop) {
3686                case 0x1: /* fmovs */
3687                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3688                    gen_store_fpr_F(dc, rd, cpu_src1_32);
3689                    break;
3690                case 0x5: /* fnegs */
3691                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3692                    break;
3693                case 0x9: /* fabss */
3694                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3695                    break;
3696                case 0x29: /* fsqrts */
3697                    CHECK_FPU_FEATURE(dc, FSQRT);
3698                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3699                    break;
3700                case 0x2a: /* fsqrtd */
3701                    CHECK_FPU_FEATURE(dc, FSQRT);
3702                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3703                    break;
3704                case 0x2b: /* fsqrtq */
3705                    CHECK_FPU_FEATURE(dc, FLOAT128);
3706                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3707                    break;
3708                case 0x41: /* fadds */
3709                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3710                    break;
3711                case 0x42: /* faddd */
3712                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3713                    break;
3714                case 0x43: /* faddq */
3715                    CHECK_FPU_FEATURE(dc, FLOAT128);
3716                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3717                    break;
3718                case 0x45: /* fsubs */
3719                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3720                    break;
3721                case 0x46: /* fsubd */
3722                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3723                    break;
3724                case 0x47: /* fsubq */
3725                    CHECK_FPU_FEATURE(dc, FLOAT128);
3726                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3727                    break;
3728                case 0x49: /* fmuls */
3729                    CHECK_FPU_FEATURE(dc, FMUL);
3730                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3731                    break;
3732                case 0x4a: /* fmuld */
3733                    CHECK_FPU_FEATURE(dc, FMUL);
3734                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3735                    break;
3736                case 0x4b: /* fmulq */
3737                    CHECK_FPU_FEATURE(dc, FLOAT128);
3738                    CHECK_FPU_FEATURE(dc, FMUL);
3739                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3740                    break;
3741                case 0x4d: /* fdivs */
3742                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3743                    break;
3744                case 0x4e: /* fdivd */
3745                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3746                    break;
3747                case 0x4f: /* fdivq */
3748                    CHECK_FPU_FEATURE(dc, FLOAT128);
3749                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3750                    break;
3751                case 0x69: /* fsmuld */
3752                    CHECK_FPU_FEATURE(dc, FSMULD);
3753                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3754                    break;
3755                case 0x6e: /* fdmulq */
3756                    CHECK_FPU_FEATURE(dc, FLOAT128);
3757                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3758                    break;
3759                case 0xc4: /* fitos */
3760                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3761                    break;
3762                case 0xc6: /* fdtos */
3763                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3764                    break;
3765                case 0xc7: /* fqtos */
3766                    CHECK_FPU_FEATURE(dc, FLOAT128);
3767                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3768                    break;
3769                case 0xc8: /* fitod */
3770                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3771                    break;
3772                case 0xc9: /* fstod */
3773                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3774                    break;
3775                case 0xcb: /* fqtod */
3776                    CHECK_FPU_FEATURE(dc, FLOAT128);
3777                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3778                    break;
3779                case 0xcc: /* fitoq */
3780                    CHECK_FPU_FEATURE(dc, FLOAT128);
3781                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3782                    break;
3783                case 0xcd: /* fstoq */
3784                    CHECK_FPU_FEATURE(dc, FLOAT128);
3785                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3786                    break;
3787                case 0xce: /* fdtoq */
3788                    CHECK_FPU_FEATURE(dc, FLOAT128);
3789                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3790                    break;
3791                case 0xd1: /* fstoi */
3792                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3793                    break;
3794                case 0xd2: /* fdtoi */
3795                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3796                    break;
3797                case 0xd3: /* fqtoi */
3798                    CHECK_FPU_FEATURE(dc, FLOAT128);
3799                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3800                    break;
#ifdef TARGET_SPARC64
                /*
                 * V9-only FPop1 encodings: 64-/128-bit register moves,
                 * negate/abs, and conversions involving 64-bit integers.
                 * Helper-name suffixes give operand widths (F = 32-bit,
                 * D = 64-bit, Q = 128-bit), destination first; e.g.
                 * gen_fop_DF = double result from float source.
                 * NOTE(review): the gen_ne_fop_* wrappers appear to be the
                 * "no IEEE exception" variants -- confirm against their
                 * definitions earlier in this file.
                 */
                case 0x2: /* V9 fmovd */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_move_Q(dc, rd, rs2);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
                    break;
                case 0xa: /* V9 fabsd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
                    break;
                case 0x81: /* V9 fstox */
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
                    break;
                case 0x82: /* V9 fdtox */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
                    break;
                case 0x84: /* V9 fxtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;   /* used only by the FMOVR/FMOVCC macros below */
#endif
                /* Trap now if the FPU is disabled; any pending IEEE state
                   is cleared before emitting the operation. */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);

#ifdef TARGET_SPARC64
                /*
                 * V9 fmovr{s,d,q}: conditionally move an FP register based
                 * on comparing integer register rs1 against zero.  The
                 * register condition (rcond) sits in insn bits 10..12,
                 * which land in bits 5..7 of xop; masking xop with 0x11f
                 * drops exactly those bits so one comparison matches every
                 * rcond variant, and the macro re-extracts rcond itself.
                 */
#define FMOVR(sz)                                                  \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 10, 12);             \
                    cpu_src1 = get_src1(dc, insn);                 \
                    gen_compare_reg(&cmp, cond, cpu_src1);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                    free_compare(&cmp);                            \
                } while (0)

                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
                    FMOVR(s);
                    break;
                } else if ((xop & 0x11f) == 0x006) { /* V9 fmovdr */
                    FMOVR(d);
                    break;
                } else if ((xop & 0x11f) == 0x007) { /* V9 fmovqr */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVR(q);
                    break;
                }
#undef FMOVR
#endif
3884                switch (xop) {
3885#ifdef TARGET_SPARC64
3886#define FMOVCC(fcc, sz)                                                 \
3887                    do {                                                \
3888                        DisasCompare cmp;                               \
3889                        cond = GET_FIELD_SP(insn, 14, 17);              \
3890                        gen_fcompare(&cmp, fcc, cond);                  \
3891                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
3892                        free_compare(&cmp);                             \
3893                    } while (0)
3894
3895                    case 0x001: /* V9 fmovscc %fcc0 */
3896                        FMOVCC(0, s);
3897                        break;
3898                    case 0x002: /* V9 fmovdcc %fcc0 */
3899                        FMOVCC(0, d);
3900                        break;
3901                    case 0x003: /* V9 fmovqcc %fcc0 */
3902                        CHECK_FPU_FEATURE(dc, FLOAT128);
3903                        FMOVCC(0, q);
3904                        break;
3905                    case 0x041: /* V9 fmovscc %fcc1 */
3906                        FMOVCC(1, s);
3907                        break;
3908                    case 0x042: /* V9 fmovdcc %fcc1 */
3909                        FMOVCC(1, d);
3910                        break;
3911                    case 0x043: /* V9 fmovqcc %fcc1 */
3912                        CHECK_FPU_FEATURE(dc, FLOAT128);
3913                        FMOVCC(1, q);
3914                        break;
3915                    case 0x081: /* V9 fmovscc %fcc2 */
3916                        FMOVCC(2, s);
3917                        break;
3918                    case 0x082: /* V9 fmovdcc %fcc2 */
3919                        FMOVCC(2, d);
3920                        break;
3921                    case 0x083: /* V9 fmovqcc %fcc2 */
3922                        CHECK_FPU_FEATURE(dc, FLOAT128);
3923                        FMOVCC(2, q);
3924                        break;
3925                    case 0x0c1: /* V9 fmovscc %fcc3 */
3926                        FMOVCC(3, s);
3927                        break;
3928                    case 0x0c2: /* V9 fmovdcc %fcc3 */
3929                        FMOVCC(3, d);
3930                        break;
3931                    case 0x0c3: /* V9 fmovqcc %fcc3 */
3932                        CHECK_FPU_FEATURE(dc, FLOAT128);
3933                        FMOVCC(3, q);
3934                        break;
3935#undef FMOVCC
3936#define FMOVCC(xcc, sz)                                                 \
3937                    do {                                                \
3938                        DisasCompare cmp;                               \
3939                        cond = GET_FIELD_SP(insn, 14, 17);              \
3940                        gen_compare(&cmp, xcc, cond, dc);               \
3941                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
3942                        free_compare(&cmp);                             \
3943                    } while (0)
3944
3945                    case 0x101: /* V9 fmovscc %icc */
3946                        FMOVCC(0, s);
3947                        break;
3948                    case 0x102: /* V9 fmovdcc %icc */
3949                        FMOVCC(0, d);
3950                        break;
3951                    case 0x103: /* V9 fmovqcc %icc */
3952                        CHECK_FPU_FEATURE(dc, FLOAT128);
3953                        FMOVCC(0, q);
3954                        break;
3955                    case 0x181: /* V9 fmovscc %xcc */
3956                        FMOVCC(1, s);
3957                        break;
3958                    case 0x182: /* V9 fmovdcc %xcc */
3959                        FMOVCC(1, d);
3960                        break;
3961                    case 0x183: /* V9 fmovqcc %xcc */
3962                        CHECK_FPU_FEATURE(dc, FLOAT128);
3963                        FMOVCC(1, q);
3964                        break;
3965#undef FMOVCC
3966#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        /* FP compares write the condition field selected
                           by rd & 3 (%fcc0..%fcc3 on V9). */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        /* 128-bit operands are staged through the QT0/QT1
                           global temporaries rather than passed as TCG
                           values. */
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        /* fcmpe*: signalling compares.  NOTE(review): the
                           unordered-operand exception behaviour lives in
                           the gen_op_fcmpe* helpers, not here. */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                /* or: also the canonical encoding of the clr/mov
                   pseudo-ops, so %g0 operands are special-cased to avoid
                   emitting a redundant OR. */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
4039#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                /* Insn bit 12 is the V9 "x" bit: set -> 64-bit shift with
                   a 6-bit count (sllx), clear -> 32-bit shift with a 5-bit
                   count (sll).  For left shifts no pre-masking of the
                   source is needed. */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                /* For the 32-bit form the source is masked to its low 32
                   bits first so bits 63..32 cannot leak into the result. */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                /* For the 32-bit form the source is sign-extended from bit
                   31 first so copies of the sign bit are shifted in. */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
4109#endif
4110            } else if (xop < 0x36) {
4111                if (xop < 0x20) {
4112                    cpu_src1 = get_src1(dc, insn);
4113                    cpu_src2 = get_src2(dc, insn);
4114                    switch (xop & ~0x10) {
4115                    case 0x0: /* add */
4116                        if (xop & 0x10) {
4117                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4118                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4119                            dc->cc_op = CC_OP_ADD;
4120                        } else {
4121                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4122                        }
4123                        break;
4124                    case 0x1: /* and */
4125                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
4126                        if (xop & 0x10) {
4127                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4128                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4129                            dc->cc_op = CC_OP_LOGIC;
4130                        }
4131                        break;
4132                    case 0x2: /* or */
4133                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4134                        if (xop & 0x10) {
4135                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4136                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4137                            dc->cc_op = CC_OP_LOGIC;
4138                        }
4139                        break;
4140                    case 0x3: /* xor */
4141                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4142                        if (xop & 0x10) {
4143                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4144                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4145                            dc->cc_op = CC_OP_LOGIC;
4146                        }
4147                        break;
4148                    case 0x4: /* sub */
4149                        if (xop & 0x10) {
4150                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4151                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4152                            dc->cc_op = CC_OP_SUB;
4153                        } else {
4154                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4155                        }
4156                        break;
4157                    case 0x5: /* andn */
4158                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4159                        if (xop & 0x10) {
4160                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4161                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4162                            dc->cc_op = CC_OP_LOGIC;
4163                        }
4164                        break;
4165                    case 0x6: /* orn */
4166                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4167                        if (xop & 0x10) {
4168                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4169                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4170                            dc->cc_op = CC_OP_LOGIC;
4171                        }
4172                        break;
4173                    case 0x7: /* xorn */
4174                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4175                        if (xop & 0x10) {
4176                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4177                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4178                            dc->cc_op = CC_OP_LOGIC;
4179                        }
4180                        break;
4181                    case 0x8: /* addx, V9 addc */
4182                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4183                                        (xop & 0x10));
4184                        break;
4185#ifdef TARGET_SPARC64
4186                    case 0x9: /* V9 mulx */
4187                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4188                        break;
4189#endif
4190                    case 0xa: /* umul */
4191                        CHECK_IU_FEATURE(dc, MUL);
4192                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4193                        if (xop & 0x10) {
4194                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4195                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4196                            dc->cc_op = CC_OP_LOGIC;
4197                        }
4198                        break;
4199                    case 0xb: /* smul */
4200                        CHECK_IU_FEATURE(dc, MUL);
4201                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4202                        if (xop & 0x10) {
4203                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4204                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4205                            dc->cc_op = CC_OP_LOGIC;
4206                        }
4207                        break;
                    case 0xc: /* subx, V9 subc */
                        /* Subtract with carry; xop bit 4 selects the
                           flag-setting (cc) variant, handled inside
                           gen_op_subx_int. */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        /* Goes through a helper (needs cpu_env) so a
                           zero divisor can raise the trap. */
                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            /* NOTE(review): unlike the other cc cases in
                               this switch, no tcg move of cpu_cc_op to
                               CC_OP_DIV is emitted here; presumably the
                               _cc helper updates env->cc_op itself --
                               confirm against helper_udiv_cc. */
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            /* NOTE(review): same cc_op caveat as udiv
                               above. */
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
4243                } else {
4244                    cpu_src1 = get_src1(dc, insn);
4245                    cpu_src2 = get_src2(dc, insn);
4246                    switch (xop) {
4247                    case 0x20: /* taddcc */
4248                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4249                        gen_store_gpr(dc, rd, cpu_dst);
4250                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4251                        dc->cc_op = CC_OP_TADD;
4252                        break;
4253                    case 0x21: /* tsubcc */
4254                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4255                        gen_store_gpr(dc, rd, cpu_dst);
4256                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4257                        dc->cc_op = CC_OP_TSUB;
4258                        break;
4259                    case 0x22: /* taddcctv */
4260                        gen_helper_taddcctv(cpu_dst, cpu_env,
4261                                            cpu_src1, cpu_src2);
4262                        gen_store_gpr(dc, rd, cpu_dst);
4263                        dc->cc_op = CC_OP_TADDTV;
4264                        break;
4265                    case 0x23: /* tsubcctv */
4266                        gen_helper_tsubcctv(cpu_dst, cpu_env,
4267                                            cpu_src1, cpu_src2);
4268                        gen_store_gpr(dc, rd, cpu_dst);
4269                        dc->cc_op = CC_OP_TSUBTV;
4270                        break;
4271                    case 0x24: /* mulscc */
4272                        update_psr(dc);
4273                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4274                        gen_store_gpr(dc, rd, cpu_dst);
4275                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4276                        dc->cc_op = CC_OP_ADD;
4277                        break;
4278#ifndef TARGET_SPARC64
4279                    case 0x25:  /* sll */
4280                        if (IS_IMM) { /* immediate */
4281                            simm = GET_FIELDs(insn, 20, 31);
4282                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4283                        } else { /* register */
4284                            cpu_tmp0 = get_temp_tl(dc);
4285                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4286                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4287                        }
4288                        gen_store_gpr(dc, rd, cpu_dst);
4289                        break;
4290                    case 0x26:  /* srl */
4291                        if (IS_IMM) { /* immediate */
4292                            simm = GET_FIELDs(insn, 20, 31);
4293                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4294                        } else { /* register */
4295                            cpu_tmp0 = get_temp_tl(dc);
4296                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4297                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4298                        }
4299                        gen_store_gpr(dc, rd, cpu_dst);
4300                        break;
4301                    case 0x27:  /* sra */
4302                        if (IS_IMM) { /* immediate */
4303                            simm = GET_FIELDs(insn, 20, 31);
4304                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4305                        } else { /* register */
4306                            cpu_tmp0 = get_temp_tl(dc);
4307                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4308                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4309                        }
4310                        gen_store_gpr(dc, rd, cpu_dst);
4311                        break;
4312#endif
4313                    case 0x30:
4314                        {
4315                            cpu_tmp0 = get_temp_tl(dc);
4316                            switch(rd) {
4317                            case 0: /* wry */
4318                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4319                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4320                                break;
4321#ifndef TARGET_SPARC64
4322                            case 0x01 ... 0x0f: /* undefined in the
4323                                                   SPARCv8 manual, nop
4324                                                   on the microSPARC
4325                                                   II */
4326                            case 0x10 ... 0x1f: /* implementation-dependent
4327                                                   in the SPARCv8
4328                                                   manual, nop on the
4329                                                   microSPARC II */
4330                                if ((rd == 0x13) && (dc->def->features &
4331                                                     CPU_FEATURE_POWERDOWN)) {
4332                                    /* LEON3 power-down */
4333                                    save_state(dc);
4334                                    gen_helper_power_down(cpu_env);
4335                                }
4336                                break;
4337#else
4338                            case 0x2: /* V9 wrccr */
4339                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4340                                gen_helper_wrccr(cpu_env, cpu_tmp0);
4341                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4342                                dc->cc_op = CC_OP_FLAGS;
4343                                break;
4344                            case 0x3: /* V9 wrasi */
4345                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4346                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4347                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4348                                                offsetof(CPUSPARCState, asi));
4349                                /* End TB to notice changed ASI.  */
4350                                save_state(dc);
4351                                gen_op_next_insn();
4352                                tcg_gen_exit_tb(NULL, 0);
4353                                dc->base.is_jmp = DISAS_NORETURN;
4354                                break;
4355                            case 0x6: /* V9 wrfprs */
4356                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4357                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4358                                dc->fprs_dirty = 0;
4359                                save_state(dc);
4360                                gen_op_next_insn();
4361                                tcg_gen_exit_tb(NULL, 0);
4362                                dc->base.is_jmp = DISAS_NORETURN;
4363                                break;
4364                            case 0xf: /* V9 sir, nop if user */
4365#if !defined(CONFIG_USER_ONLY)
4366                                if (supervisor(dc)) {
4367                                    ; // XXX
4368                                }
4369#endif
4370                                break;
4371                            case 0x13: /* Graphics Status */
4372                                if (gen_trap_ifnofpu(dc)) {
4373                                    goto jmp_insn;
4374                                }
4375                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4376                                break;
4377                            case 0x14: /* Softint set */
4378                                if (!supervisor(dc))
4379                                    goto illegal_insn;
4380                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4381                                gen_helper_set_softint(cpu_env, cpu_tmp0);
4382                                break;
4383                            case 0x15: /* Softint clear */
4384                                if (!supervisor(dc))
4385                                    goto illegal_insn;
4386                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4387                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
4388                                break;
4389                            case 0x16: /* Softint write */
4390                                if (!supervisor(dc))
4391                                    goto illegal_insn;
4392                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4393                                gen_helper_write_softint(cpu_env, cpu_tmp0);
4394                                break;
4395                            case 0x17: /* Tick compare */
4396#if !defined(CONFIG_USER_ONLY)
4397                                if (!supervisor(dc))
4398                                    goto illegal_insn;
4399#endif
4400                                {
4401                                    TCGv_ptr r_tickptr;
4402
4403                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4404                                                   cpu_src2);
4405                                    r_tickptr = tcg_temp_new_ptr();
4406                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4407                                                   offsetof(CPUSPARCState, tick));
4408                                    if (tb_cflags(dc->base.tb) &
4409                                           CF_USE_ICOUNT) {
4410                                        gen_io_start();
4411                                    }
4412                                    gen_helper_tick_set_limit(r_tickptr,
4413                                                              cpu_tick_cmpr);
4414                                    tcg_temp_free_ptr(r_tickptr);
4415                                    if (tb_cflags(dc->base.tb) &
4416                                           CF_USE_ICOUNT) {
4417                                        gen_io_end();
4418                                    }
4419                                    /* End TB to handle timer interrupt */
4420                                    dc->base.is_jmp = DISAS_EXIT;
4421                                }
4422                                break;
4423                            case 0x18: /* System tick */
4424#if !defined(CONFIG_USER_ONLY)
                                /* writing %stick is privileged */
4425                                if (!supervisor(dc))
4426                                    goto illegal_insn;
4427#endif
4428                                {
4429                                    TCGv_ptr r_tickptr;

                                    /* value written is rs1 ^ operand2;
                                       NOTE(review): cpu_tmp0 is presumably
                                       obtained via get_temp_tl() earlier in
                                       the wr path (not visible here) */
4431                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4432                                                   cpu_src2);
4433                                    r_tickptr = tcg_temp_new_ptr();
4434                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4435                                                   offsetof(CPUSPARCState, stick));
                                    /* sets the counter value (not the limit);
                                       icount bracketing as for the other
                                       timer helpers */
4436                                    if (tb_cflags(dc->base.tb) &
4437                                           CF_USE_ICOUNT) {
4438                                        gen_io_start();
4439                                    }
4440                                    gen_helper_tick_set_count(r_tickptr,
4441                                                              cpu_tmp0);
4442                                    tcg_temp_free_ptr(r_tickptr);
4443                                    if (tb_cflags(dc->base.tb) &
4444                                           CF_USE_ICOUNT) {
4445                                        gen_io_end();
4446                                    }
4447                                    /* End TB to handle timer interrupt */
4448                                    dc->base.is_jmp = DISAS_EXIT;
4449                                }
4450                                break;
4451                            case 0x19: /* System tick compare */
4452#if !defined(CONFIG_USER_ONLY)
                                /* writing %stick_cmpr is privileged */
4453                                if (!supervisor(dc))
4454                                    goto illegal_insn;
4455#endif
4456                                {
4457                                    TCGv_ptr r_tickptr;

                                    /* WR stores rs1 ^ operand2 into the
                                       system-tick compare register */
4459                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4460                                                   cpu_src2);
4461                                    r_tickptr = tcg_temp_new_ptr();
4462                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4463                                                   offsetof(CPUSPARCState, stick));
                                    /* timer reprogramming: icount bracketing
                                       required around the helper */
4464                                    if (tb_cflags(dc->base.tb) &
4465                                           CF_USE_ICOUNT) {
4466                                        gen_io_start();
4467                                    }
4468                                    gen_helper_tick_set_limit(r_tickptr,
4469                                                              cpu_stick_cmpr);
4470                                    tcg_temp_free_ptr(r_tickptr);
4471                                    if (tb_cflags(dc->base.tb) &
4472                                           CF_USE_ICOUNT) {
4473                                        gen_io_end();
4474                                    }
4475                                    /* End TB to handle timer interrupt */
4476                                    dc->base.is_jmp = DISAS_EXIT;
4477                                }
4478                                break;
4479
4480                            case 0x10: /* Performance Control */
4481                            case 0x11: /* Performance Instrumentation
4482                                          Counter */
4483                            case 0x12: /* Dispatch Control */
4484#endif
4485                            default:
4486                                goto illegal_insn;
4487                            }
4488                        }
4489                        break;
4490#if !defined(CONFIG_USER_ONLY)
4491                    case 0x31: /* wrpsr, V9 saved, restored */
4492                        {
                            /* privileged on both ISAs */
4493                            if (!supervisor(dc))
4494                                goto priv_insn;
4495#ifdef TARGET_SPARC64
                            /* on V9 this opcode is SAVED/RESTORED,
                               selected by rd */
4496                            switch (rd) {
4497                            case 0:
4498                                gen_helper_saved(cpu_env);
4499                                break;
4500                            case 1:
4501                                gen_helper_restored(cpu_env);
4502                                break;
4503                            case 2: /* UA2005 allclean */
4504                            case 3: /* UA2005 otherw */
4505                            case 4: /* UA2005 normalw */
4506                            case 5: /* UA2005 invalw */
4507                                // XXX
4508                            default:
4509                                goto illegal_insn;
4510                            }
4511#else
                            /* 32-bit SPARC: write PSR with rs1 ^ operand2 */
4512                            cpu_tmp0 = get_temp_tl(dc);
4513                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4514                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            /* PSR now holds live condition codes, so the
                               lazy-cc state must be CC_OP_FLAGS */
4515                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4516                            dc->cc_op = CC_OP_FLAGS;
                            /* end the TB: wrpsr changes CPU state that the
                               translator depends on */
4517                            save_state(dc);
4518                            gen_op_next_insn();
4519                            tcg_gen_exit_tb(NULL, 0);
4520                            dc->base.is_jmp = DISAS_NORETURN;
4521#endif
4522                        }
4523                        break;
4524                    case 0x32: /* wrwim, V9 wrpr */
4525                        {
                            /* privileged: WRWIM (32-bit) or WRPR (V9) */
4526                            if (!supervisor(dc))
4527                                goto priv_insn;
                            /* value written is rs1 ^ operand2 per the WR
                               instruction definition */
4528                            cpu_tmp0 = get_temp_tl(dc);
4529                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4530#ifdef TARGET_SPARC64
                            /* rd selects which privileged register to write */
4531                            switch (rd) {
4532                            case 0: // tpc
4533                                {
4534                                    TCGv_ptr r_tsptr;

4536                                    r_tsptr = tcg_temp_new_ptr();
                                    /* trap-state fields live in the
                                       per-trap-level trap_state entry */
4537                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4538                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4539                                                  offsetof(trap_state, tpc));
4540                                    tcg_temp_free_ptr(r_tsptr);
4541                                }
4542                                break;
4543                            case 1: // tnpc
4544                                {
4545                                    TCGv_ptr r_tsptr;

4547                                    r_tsptr = tcg_temp_new_ptr();
4548                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4549                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4550                                                  offsetof(trap_state, tnpc));
4551                                    tcg_temp_free_ptr(r_tsptr);
4552                                }
4553                                break;
4554                            case 2: // tstate
4555                                {
4556                                    TCGv_ptr r_tsptr;

4558                                    r_tsptr = tcg_temp_new_ptr();
4559                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4560                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4561                                                  offsetof(trap_state,
4562                                                           tstate));
4563                                    tcg_temp_free_ptr(r_tsptr);
4564                                }
4565                                break;
4566                            case 3: // tt
4567                                {
4568                                    TCGv_ptr r_tsptr;

4570                                    r_tsptr = tcg_temp_new_ptr();
4571                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is a 32-bit field: truncating store */
4572                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4573                                                    offsetof(trap_state, tt));
4574                                    tcg_temp_free_ptr(r_tsptr);
4575                                }
4576                                break;
4577                            case 4: // tick
4578                                {
4579                                    TCGv_ptr r_tickptr;

4581                                    r_tickptr = tcg_temp_new_ptr();
4582                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4583                                                   offsetof(CPUSPARCState, tick));
                                    /* timer access: icount bracketing */
4584                                    if (tb_cflags(dc->base.tb) &
4585                                           CF_USE_ICOUNT) {
4586                                        gen_io_start();
4587                                    }
4588                                    gen_helper_tick_set_count(r_tickptr,
4589                                                              cpu_tmp0);
4590                                    tcg_temp_free_ptr(r_tickptr);
4591                                    if (tb_cflags(dc->base.tb) &
4592                                           CF_USE_ICOUNT) {
4593                                        gen_io_end();
4594                                    }
4595                                    /* End TB to handle timer interrupt */
4596                                    dc->base.is_jmp = DISAS_EXIT;
4597                                }
4598                                break;
4599                            case 5: // tba
4600                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4601                                break;
4602                            case 6: // pstate
                                /* NOTE(review): npc is forced dynamic,
                                   presumably because wrpstate can change
                                   state affecting translation — confirm */
4603                                save_state(dc);
4604                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4605                                    gen_io_start();
4606                                }
4607                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
4608                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4609                                    gen_io_end();
4610                                }
4611                                dc->npc = DYNAMIC_PC;
4612                                break;
4613                            case 7: // tl
                                /* changing the trap level redirects the
                                   trap_state accesses above; end with a
                                   dynamic npc as for pstate */
4614                                save_state(dc);
4615                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4616                                               offsetof(CPUSPARCState, tl));
4617                                dc->npc = DYNAMIC_PC;
4618                                break;
4619                            case 8: // pil
                                /* PIL gates interrupt delivery: helper may
                                   unmask a pending IRQ, hence icount bracket */
4620                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4621                                    gen_io_start();
4622                                }
4623                                gen_helper_wrpil(cpu_env, cpu_tmp0);
4624                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
4625                                    gen_io_end();
4626                                }
4627                                break;
4628                            case 9: // cwp
4629                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
4630                                break;
4631                            case 10: // cansave
4632                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4633                                                offsetof(CPUSPARCState,
4634                                                         cansave));
4635                                break;
4636                            case 11: // canrestore
4637                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4638                                                offsetof(CPUSPARCState,
4639                                                         canrestore));
4640                                break;
4641                            case 12: // cleanwin
4642                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4643                                                offsetof(CPUSPARCState,
4644                                                         cleanwin));
4645                                break;
4646                            case 13: // otherwin
4647                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4648                                                offsetof(CPUSPARCState,
4649                                                         otherwin));
4650                                break;
4651                            case 14: // wstate
4652                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4653                                                offsetof(CPUSPARCState,
4654                                                         wstate));
4655                                break;
4656                            case 16: // UA2005 gl
4657                                CHECK_IU_FEATURE(dc, GL);
4658                                gen_helper_wrgl(cpu_env, cpu_tmp0);
4659                                break;
4660                            case 26: // UA2005 strand status
                                /* hypervisor-only register */
4661                                CHECK_IU_FEATURE(dc, HYPV);
4662                                if (!hypervisor(dc))
4663                                    goto priv_insn;
4664                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4665                                break;
4666                            default:
4667                                goto illegal_insn;
4668                            }
4669#else
                            /* 32-bit SPARC WRWIM: keep only the bits for
                               the implemented register windows */
4670                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4671                            if (dc->def->nwindows != 32) {
4672                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
4673                                                (1 << dc->def->nwindows) - 1);
4674                            }
4675#endif
4676                        }
4677                        break;
4678                    case 0x33: /* wrtbr, UA2005 wrhpr */
4679                        {
4680#ifndef TARGET_SPARC64
                            /* 32-bit SPARC: write TBR with rs1 ^ operand2 */
4681                            if (!supervisor(dc))
4682                                goto priv_insn;
4683                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4684#else
                            /* UA2005 WRHPR: hypervisor privilege required */
4685                            CHECK_IU_FEATURE(dc, HYPV);
4686                            if (!hypervisor(dc))
4687                                goto priv_insn;
4688                            cpu_tmp0 = get_temp_tl(dc);
4689                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            /* rd selects the hyperprivileged register */
4690                            switch (rd) {
4691                            case 0: // hpstate
4692                                tcg_gen_st_i64(cpu_tmp0, cpu_env,
4693                                               offsetof(CPUSPARCState,
4694                                                        hpstate));
                                /* hpstate affects execution mode: end TB */
4695                                save_state(dc);
4696                                gen_op_next_insn();
4697                                tcg_gen_exit_tb(NULL, 0);
4698                                dc->base.is_jmp = DISAS_NORETURN;
4699                                break;
4700                            case 1: // htstate
4701                                // XXX gen_op_wrhtstate();
4702                                break;
4703                            case 3: // hintp
4704                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4705                                break;
4706                            case 5: // htba
4707                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4708                                break;
4709                            case 31: // hstick_cmpr
4710                                {
4711                                    TCGv_ptr r_tickptr;

4713                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4714                                    r_tickptr = tcg_temp_new_ptr();
4715                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4716                                                   offsetof(CPUSPARCState, hstick));
                                    /* timer reprogramming: icount bracket */
4717                                    if (tb_cflags(dc->base.tb) &
4718                                           CF_USE_ICOUNT) {
4719                                        gen_io_start();
4720                                    }
4721                                    gen_helper_tick_set_limit(r_tickptr,
4722                                                              cpu_hstick_cmpr);
4723                                    tcg_temp_free_ptr(r_tickptr);
4724                                    if (tb_cflags(dc->base.tb) &
4725                                           CF_USE_ICOUNT) {
4726                                        gen_io_end();
4727                                    }
4728                                    /* End TB to handle timer interrupt */
4729                                    dc->base.is_jmp = DISAS_EXIT;
4730                                }
4731                                break;
4732                            case 6: // hver readonly
4733                            default:
4734                                goto illegal_insn;
4735                            }
4736#endif
4737                        }
4738                        break;
4739#endif
4740#ifdef TARGET_SPARC64
4741                    case 0x2c: /* V9 movcc */
4742                        {
4743                            int cc = GET_FIELD_SP(insn, 11, 12);
4744                            int cond = GET_FIELD_SP(insn, 14, 17);
4745                            DisasCompare cmp;
4746                            TCGv dst;

                            /* bit 18 set: integer condition codes
                               (cc==0 icc, cc==2 xcc); clear: fcc<cc> */
4748                            if (insn & (1 << 18)) {
4749                                if (cc == 0) {
4750                                    gen_compare(&cmp, 0, cond, dc);
4751                                } else if (cc == 2) {
4752                                    gen_compare(&cmp, 1, cond, dc);
4753                                } else {
4754                                    goto illegal_insn;
4755                                }
4756                            } else {
4757                                gen_fcompare(&cmp, cc, cond);
4758                            }

4760                            /* The get_src2 above loaded the normal 13-bit
4761                               immediate field, not the 11-bit field we have
4762                               in movcc.  But it did handle the reg case.  */
4763                            if (IS_IMM) {
4764                                simm = GET_FIELD_SPs(insn, 0, 10);
4765                                tcg_gen_movi_tl(cpu_src2, simm);
4766                            }

                            /* branch-free conditional move: keep old rd
                               value when the condition is false */
4768                            dst = gen_load_gpr(dc, rd);
4769                            tcg_gen_movcond_tl(cmp.cond, dst,
4770                                               cmp.c1, cmp.c2,
4771                                               cpu_src2, dst);
4772                            free_compare(&cmp);
4773                            gen_store_gpr(dc, rd, dst);
4774                            break;
4775                        }
4776                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; helper handles the traps */
4777                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4778                        gen_store_gpr(dc, rd, cpu_dst);
4779                        break;
4780                    case 0x2e: /* V9 popc */
                        /* population count of rs2 (rs1 is unused) */
4781                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4782                        gen_store_gpr(dc, rd, cpu_dst);
4783                        break;
4784                    case 0x2f: /* V9 movr */
4785                        {
                            /* conditional move on rs1's relation to zero */
4786                            int cond = GET_FIELD_SP(insn, 10, 12);
4787                            DisasCompare cmp;
4788                            TCGv dst;

4790                            gen_compare_reg(&cmp, cond, cpu_src1);

4792                            /* The get_src2 above loaded the normal 13-bit
4793                               immediate field, not the 10-bit field we have
4794                               in movr.  But it did handle the reg case.  */
4795                            if (IS_IMM) {
4796                                simm = GET_FIELD_SPs(insn, 0, 9);
4797                                tcg_gen_movi_tl(cpu_src2, simm);
4798                            }

4800                            dst = gen_load_gpr(dc, rd);
4801                            tcg_gen_movcond_tl(cmp.cond, dst,
4802                                               cmp.c1, cmp.c2,
4803                                               cpu_src2, dst);
4804                            free_compare(&cmp);
4805                            gen_store_gpr(dc, rd, dst);
4806                            break;
4807                        }
4808#endif
4809                    default:
4810                        goto illegal_insn;
4811                    }
4812                }
4813            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4814#ifdef TARGET_SPARC64
4815                int opf = GET_FIELD_SP(insn, 5, 13);
4816                rs1 = GET_FIELD(insn, 13, 17);
4817                rs2 = GET_FIELD(insn, 27, 31);
4818                if (gen_trap_ifnofpu(dc)) {
4819                    goto jmp_insn;
4820                }
4821
4822                switch (opf) {
4823                case 0x000: /* VIS I edge8cc */
                    /* edge* family: gen_edge(dc, dst, s1, s2, W, cc, l) —
                       W is the element width in bits; the last two args
                       appear to select cc-setting and the little/left
                       variant, matching the cc/n/l mnemonic suffixes —
                       TODO(review) confirm against gen_edge */
4824                    CHECK_FPU_FEATURE(dc, VIS1);
4825                    cpu_src1 = gen_load_gpr(dc, rs1);
4826                    cpu_src2 = gen_load_gpr(dc, rs2);
4827                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4828                    gen_store_gpr(dc, rd, cpu_dst);
4829                    break;
4830                case 0x001: /* VIS II edge8n */
4831                    CHECK_FPU_FEATURE(dc, VIS2);
4832                    cpu_src1 = gen_load_gpr(dc, rs1);
4833                    cpu_src2 = gen_load_gpr(dc, rs2);
4834                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4835                    gen_store_gpr(dc, rd, cpu_dst);
4836                    break;
4837                case 0x002: /* VIS I edge8lcc */
4838                    CHECK_FPU_FEATURE(dc, VIS1);
4839                    cpu_src1 = gen_load_gpr(dc, rs1);
4840                    cpu_src2 = gen_load_gpr(dc, rs2);
4841                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4842                    gen_store_gpr(dc, rd, cpu_dst);
4843                    break;
4844                case 0x003: /* VIS II edge8ln */
4845                    CHECK_FPU_FEATURE(dc, VIS2);
4846                    cpu_src1 = gen_load_gpr(dc, rs1);
4847                    cpu_src2 = gen_load_gpr(dc, rs2);
4848                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4849                    gen_store_gpr(dc, rd, cpu_dst);
4850                    break;
4851                case 0x004: /* VIS I edge16cc */
4852                    CHECK_FPU_FEATURE(dc, VIS1);
4853                    cpu_src1 = gen_load_gpr(dc, rs1);
4854                    cpu_src2 = gen_load_gpr(dc, rs2);
4855                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4856                    gen_store_gpr(dc, rd, cpu_dst);
4857                    break;
4858                case 0x005: /* VIS II edge16n */
4859                    CHECK_FPU_FEATURE(dc, VIS2);
4860                    cpu_src1 = gen_load_gpr(dc, rs1);
4861                    cpu_src2 = gen_load_gpr(dc, rs2);
4862                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4863                    gen_store_gpr(dc, rd, cpu_dst);
4864                    break;
4865                case 0x006: /* VIS I edge16lcc */
4866                    CHECK_FPU_FEATURE(dc, VIS1);
4867                    cpu_src1 = gen_load_gpr(dc, rs1);
4868                    cpu_src2 = gen_load_gpr(dc, rs2);
4869                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4870                    gen_store_gpr(dc, rd, cpu_dst);
4871                    break;
4872                case 0x007: /* VIS II edge16ln */
4873                    CHECK_FPU_FEATURE(dc, VIS2);
4874                    cpu_src1 = gen_load_gpr(dc, rs1);
4875                    cpu_src2 = gen_load_gpr(dc, rs2);
4876                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4877                    gen_store_gpr(dc, rd, cpu_dst);
4878                    break;
4879                case 0x008: /* VIS I edge32cc */
4880                    CHECK_FPU_FEATURE(dc, VIS1);
4881                    cpu_src1 = gen_load_gpr(dc, rs1);
4882                    cpu_src2 = gen_load_gpr(dc, rs2);
4883                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4884                    gen_store_gpr(dc, rd, cpu_dst);
4885                    break;
4886                case 0x009: /* VIS II edge32n */
4887                    CHECK_FPU_FEATURE(dc, VIS2);
4888                    cpu_src1 = gen_load_gpr(dc, rs1);
4889                    cpu_src2 = gen_load_gpr(dc, rs2);
4890                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4891                    gen_store_gpr(dc, rd, cpu_dst);
4892                    break;
4893                case 0x00a: /* VIS I edge32lcc */
4894                    CHECK_FPU_FEATURE(dc, VIS1);
4895                    cpu_src1 = gen_load_gpr(dc, rs1);
4896                    cpu_src2 = gen_load_gpr(dc, rs2);
4897                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4898                    gen_store_gpr(dc, rd, cpu_dst);
4899                    break;
4900                case 0x00b: /* VIS II edge32ln */
4901                    CHECK_FPU_FEATURE(dc, VIS2);
4902                    cpu_src1 = gen_load_gpr(dc, rs1);
4903                    cpu_src2 = gen_load_gpr(dc, rs2);
4904                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4905                    gen_store_gpr(dc, rd, cpu_dst);
4906                    break;
4907                case 0x010: /* VIS I array8 */
4908                    CHECK_FPU_FEATURE(dc, VIS1);
4909                    cpu_src1 = gen_load_gpr(dc, rs1);
4910                    cpu_src2 = gen_load_gpr(dc, rs2);
4911                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4912                    gen_store_gpr(dc, rd, cpu_dst);
4913                    break;
4914                case 0x012: /* VIS I array16 */
                    /* array16/32 reuse the array8 address computation and
                       scale the result by the element size (<<1, <<2) */
4915                    CHECK_FPU_FEATURE(dc, VIS1);
4916                    cpu_src1 = gen_load_gpr(dc, rs1);
4917                    cpu_src2 = gen_load_gpr(dc, rs2);
4918                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4919                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4920                    gen_store_gpr(dc, rd, cpu_dst);
4921                    break;
4922                case 0x014: /* VIS I array32 */
4923                    CHECK_FPU_FEATURE(dc, VIS1);
4924                    cpu_src1 = gen_load_gpr(dc, rs1);
4925                    cpu_src2 = gen_load_gpr(dc, rs2);
4926                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4927                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4928                    gen_store_gpr(dc, rd, cpu_dst);
4929                    break;
4930                case 0x018: /* VIS I alignaddr */
4931                    CHECK_FPU_FEATURE(dc, VIS1);
4932                    cpu_src1 = gen_load_gpr(dc, rs1);
4933                    cpu_src2 = gen_load_gpr(dc, rs2);
4934                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4935                    gen_store_gpr(dc, rd, cpu_dst);
4936                    break;
4937                case 0x01a: /* VIS I alignaddrl */
                    /* last arg distinguishes the little-endian variant */
4938                    CHECK_FPU_FEATURE(dc, VIS1);
4939                    cpu_src1 = gen_load_gpr(dc, rs1);
4940                    cpu_src2 = gen_load_gpr(dc, rs2);
4941                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4942                    gen_store_gpr(dc, rd, cpu_dst);
4943                    break;
4944                case 0x019: /* VIS II bmask */
                    /* rs1+rs2 goes both to rd and into the upper 32 bits
                       of GSR (the bmask field used by bshuffle) */
4945                    CHECK_FPU_FEATURE(dc, VIS2);
4946                    cpu_src1 = gen_load_gpr(dc, rs1);
4947                    cpu_src2 = gen_load_gpr(dc, rs2);
4948                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4949                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4950                    gen_store_gpr(dc, rd, cpu_dst);
4951                    break;
4952                case 0x020: /* VIS I fcmple16 */
                    /* fcmp* compare packed elements of two double FP regs
                       and deliver the result mask to an integer GPR */
4953                    CHECK_FPU_FEATURE(dc, VIS1);
4954                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4955                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4956                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4957                    gen_store_gpr(dc, rd, cpu_dst);
4958                    break;
4959                case 0x022: /* VIS I fcmpne16 */
4960                    CHECK_FPU_FEATURE(dc, VIS1);
4961                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4962                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4963                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4964                    gen_store_gpr(dc, rd, cpu_dst);
4965                    break;
4966                case 0x024: /* VIS I fcmple32 */
4967                    CHECK_FPU_FEATURE(dc, VIS1);
4968                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4969                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4970                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4971                    gen_store_gpr(dc, rd, cpu_dst);
4972                    break;
4973                case 0x026: /* VIS I fcmpne32 */
4974                    CHECK_FPU_FEATURE(dc, VIS1);
4975                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4976                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4977                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4978                    gen_store_gpr(dc, rd, cpu_dst);
4979                    break;
4980                case 0x028: /* VIS I fcmpgt16 */
4981                    CHECK_FPU_FEATURE(dc, VIS1);
4982                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4983                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4984                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4985                    gen_store_gpr(dc, rd, cpu_dst);
4986                    break;
4987                case 0x02a: /* VIS I fcmpeq16 */
4988                    CHECK_FPU_FEATURE(dc, VIS1);
4989                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4990                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4991                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4992                    gen_store_gpr(dc, rd, cpu_dst);
4993                    break;
4994                case 0x02c: /* VIS I fcmpgt32 */
4995                    CHECK_FPU_FEATURE(dc, VIS1);
4996                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4997                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4998                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4999                    gen_store_gpr(dc, rd, cpu_dst);
5000                    break;
5001                case 0x02e: /* VIS I fcmpeq32 */
5002                    CHECK_FPU_FEATURE(dc, VIS1);
5003                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5004                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
5005                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
5006                    gen_store_gpr(dc, rd, cpu_dst);
5007                    break;
5008                case 0x031: /* VIS I fmul8x16 */
                    /* fmul* family: double-reg op double-reg -> double-reg,
                       generated via the gen_ne_fop_DDD wrapper */
5009                    CHECK_FPU_FEATURE(dc, VIS1);
5010                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
5011                    break;
5012                case 0x033: /* VIS I fmul8x16au */
5013                    CHECK_FPU_FEATURE(dc, VIS1);
5014                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
5015                    break;
5016                case 0x035: /* VIS I fmul8x16al */
5017                    CHECK_FPU_FEATURE(dc, VIS1);
5018                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
5019                    break;
5020                case 0x036: /* VIS I fmul8sux16 */
5021                    CHECK_FPU_FEATURE(dc, VIS1);
5022                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
5023                    break;
5024                case 0x037: /* VIS I fmul8ulx16 */
5025                    CHECK_FPU_FEATURE(dc, VIS1);
5026                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
5027                    break;
5028                case 0x038: /* VIS I fmuld8sux16 */
5029                    CHECK_FPU_FEATURE(dc, VIS1);
5030                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
5031                    break;
5032                case 0x039: /* VIS I fmuld8ulx16 */
5033                    CHECK_FPU_FEATURE(dc, VIS1);
5034                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
5035                    break;
5036                case 0x03a: /* VIS I fpack32 */
                    /* fpack32 consumes GSR state (helper takes cpu_gsr via
                       the gen_gsr_fop_DDD wrapper) */
5037                    CHECK_FPU_FEATURE(dc, VIS1);
5038                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
5039                    break;
5040                case 0x03b: /* VIS I fpack16 */
                    /* rs1 is unused: source is rs2 only; result is a
                       single-precision FP register */
5041                    CHECK_FPU_FEATURE(dc, VIS1);
5042                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5043                    cpu_dst_32 = gen_dest_fpr_F(dc);
5044                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
5045                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5046                    break;
5047                case 0x03d: /* VIS I fpackfix */
5048                    CHECK_FPU_FEATURE(dc, VIS1);
5049                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5050                    cpu_dst_32 = gen_dest_fpr_F(dc);
5051                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
5052                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5053                    break;
5054                case 0x03e: /* VIS I pdist */
5055                    CHECK_FPU_FEATURE(dc, VIS1);
5056                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
5057                    break;
5058                case 0x048: /* VIS I faligndata */
5059                    CHECK_FPU_FEATURE(dc, VIS1);
5060                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
5061                    break;
5062                case 0x04b: /* VIS I fpmerge */
5063                    CHECK_FPU_FEATURE(dc, VIS1);
5064                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
5065                    break;
5066                case 0x04c: /* VIS II bshuffle */
5067                    CHECK_FPU_FEATURE(dc, VIS2);
5068                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
5069                    break;
5070                case 0x04d: /* VIS I fexpand */
5071                    CHECK_FPU_FEATURE(dc, VIS1);
5072                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
5073                    break;
5074                case 0x050: /* VIS I fpadd16 */
5075                    CHECK_FPU_FEATURE(dc, VIS1);
5076                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
5077                    break;
5078                case 0x051: /* VIS I fpadd16s */
5079                    CHECK_FPU_FEATURE(dc, VIS1);
5080                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
5081                    break;
5082                case 0x052: /* VIS I fpadd32 */
5083                    CHECK_FPU_FEATURE(dc, VIS1);
5084                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
5085                    break;
5086                case 0x053: /* VIS I fpadd32s */
5087                    CHECK_FPU_FEATURE(dc, VIS1);
5088                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5089                    break;
5090                case 0x054: /* VIS I fpsub16 */
5091                    CHECK_FPU_FEATURE(dc, VIS1);
5092                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5093                    break;
5094                case 0x055: /* VIS I fpsub16s */
5095                    CHECK_FPU_FEATURE(dc, VIS1);
5096                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5097                    break;
5098                case 0x056: /* VIS I fpsub32 */
5099                    CHECK_FPU_FEATURE(dc, VIS1);
5100                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5101                    break;
5102                case 0x057: /* VIS I fpsub32s */
5103                    CHECK_FPU_FEATURE(dc, VIS1);
5104                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5105                    break;
5106                case 0x060: /* VIS I fzero */
5107                    CHECK_FPU_FEATURE(dc, VIS1);
5108                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5109                    tcg_gen_movi_i64(cpu_dst_64, 0);
5110                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5111                    break;
5112                case 0x061: /* VIS I fzeros */
5113                    CHECK_FPU_FEATURE(dc, VIS1);
5114                    cpu_dst_32 = gen_dest_fpr_F(dc);
5115                    tcg_gen_movi_i32(cpu_dst_32, 0);
5116                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5117                    break;
5118                case 0x062: /* VIS I fnor */
5119                    CHECK_FPU_FEATURE(dc, VIS1);
5120                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5121                    break;
5122                case 0x063: /* VIS I fnors */
5123                    CHECK_FPU_FEATURE(dc, VIS1);
5124                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5125                    break;
5126                case 0x064: /* VIS I fandnot2 */
5127                    CHECK_FPU_FEATURE(dc, VIS1);
5128                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5129                    break;
5130                case 0x065: /* VIS I fandnot2s */
5131                    CHECK_FPU_FEATURE(dc, VIS1);
5132                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5133                    break;
5134                case 0x066: /* VIS I fnot2 */
5135                    CHECK_FPU_FEATURE(dc, VIS1);
5136                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5137                    break;
5138                case 0x067: /* VIS I fnot2s */
5139                    CHECK_FPU_FEATURE(dc, VIS1);
5140                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5141                    break;
5142                case 0x068: /* VIS I fandnot1 */
5143                    CHECK_FPU_FEATURE(dc, VIS1);
5144                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5145                    break;
5146                case 0x069: /* VIS I fandnot1s */
5147                    CHECK_FPU_FEATURE(dc, VIS1);
5148                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5149                    break;
5150                case 0x06a: /* VIS I fnot1 */
5151                    CHECK_FPU_FEATURE(dc, VIS1);
5152                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5153                    break;
5154                case 0x06b: /* VIS I fnot1s */
5155                    CHECK_FPU_FEATURE(dc, VIS1);
5156                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5157                    break;
5158                case 0x06c: /* VIS I fxor */
5159                    CHECK_FPU_FEATURE(dc, VIS1);
5160                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5161                    break;
5162                case 0x06d: /* VIS I fxors */
5163                    CHECK_FPU_FEATURE(dc, VIS1);
5164                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5165                    break;
5166                case 0x06e: /* VIS I fnand */
5167                    CHECK_FPU_FEATURE(dc, VIS1);
5168                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5169                    break;
5170                case 0x06f: /* VIS I fnands */
5171                    CHECK_FPU_FEATURE(dc, VIS1);
5172                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5173                    break;
5174                case 0x070: /* VIS I fand */
5175                    CHECK_FPU_FEATURE(dc, VIS1);
5176                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5177                    break;
5178                case 0x071: /* VIS I fands */
5179                    CHECK_FPU_FEATURE(dc, VIS1);
5180                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5181                    break;
5182                case 0x072: /* VIS I fxnor */
5183                    CHECK_FPU_FEATURE(dc, VIS1);
5184                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5185                    break;
5186                case 0x073: /* VIS I fxnors */
5187                    CHECK_FPU_FEATURE(dc, VIS1);
5188                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5189                    break;
5190                case 0x074: /* VIS I fsrc1 */
5191                    CHECK_FPU_FEATURE(dc, VIS1);
5192                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5193                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5194                    break;
5195                case 0x075: /* VIS I fsrc1s */
5196                    CHECK_FPU_FEATURE(dc, VIS1);
5197                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5198                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5199                    break;
5200                case 0x076: /* VIS I fornot2 */
5201                    CHECK_FPU_FEATURE(dc, VIS1);
5202                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5203                    break;
5204                case 0x077: /* VIS I fornot2s */
5205                    CHECK_FPU_FEATURE(dc, VIS1);
5206                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5207                    break;
5208                case 0x078: /* VIS I fsrc2 */
5209                    CHECK_FPU_FEATURE(dc, VIS1);
5210                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5211                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5212                    break;
5213                case 0x079: /* VIS I fsrc2s */
5214                    CHECK_FPU_FEATURE(dc, VIS1);
5215                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5216                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5217                    break;
5218                case 0x07a: /* VIS I fornot1 */
5219                    CHECK_FPU_FEATURE(dc, VIS1);
5220                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5221                    break;
5222                case 0x07b: /* VIS I fornot1s */
5223                    CHECK_FPU_FEATURE(dc, VIS1);
5224                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5225                    break;
5226                case 0x07c: /* VIS I for */
5227                    CHECK_FPU_FEATURE(dc, VIS1);
5228                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5229                    break;
5230                case 0x07d: /* VIS I fors */
5231                    CHECK_FPU_FEATURE(dc, VIS1);
5232                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5233                    break;
5234                case 0x07e: /* VIS I fone */
5235                    CHECK_FPU_FEATURE(dc, VIS1);
5236                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5237                    tcg_gen_movi_i64(cpu_dst_64, -1);
5238                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5239                    break;
5240                case 0x07f: /* VIS I fones */
5241                    CHECK_FPU_FEATURE(dc, VIS1);
5242                    cpu_dst_32 = gen_dest_fpr_F(dc);
5243                    tcg_gen_movi_i32(cpu_dst_32, -1);
5244                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5245                    break;
5246                case 0x080: /* VIS I shutdown */
5247                case 0x081: /* VIS II siam */
5248                    // XXX
5249                    goto illegal_insn;
5250                default:
5251                    goto illegal_insn;
5252                }
5253#else
5254                goto ncp_insn;
5255#endif
5256            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5257#ifdef TARGET_SPARC64
5258                goto illegal_insn;
5259#else
5260                goto ncp_insn;
5261#endif
5262#ifdef TARGET_SPARC64
5263            } else if (xop == 0x39) { /* V9 return */
5264                save_state(dc);
5265                cpu_src1 = get_src1(dc, insn);
5266                cpu_tmp0 = get_temp_tl(dc);
5267                if (IS_IMM) {   /* immediate */
5268                    simm = GET_FIELDs(insn, 19, 31);
5269                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5270                } else {                /* register */
5271                    rs2 = GET_FIELD(insn, 27, 31);
5272                    if (rs2) {
5273                        cpu_src2 = gen_load_gpr(dc, rs2);
5274                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5275                    } else {
5276                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5277                    }
5278                }
5279                gen_helper_restore(cpu_env);
5280                gen_mov_pc_npc(dc);
5281                gen_check_align(cpu_tmp0, 3);
5282                tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5283                dc->npc = DYNAMIC_PC;
5284                goto jmp_insn;
5285#endif
5286            } else {
5287                cpu_src1 = get_src1(dc, insn);
5288                cpu_tmp0 = get_temp_tl(dc);
5289                if (IS_IMM) {   /* immediate */
5290                    simm = GET_FIELDs(insn, 19, 31);
5291                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5292                } else {                /* register */
5293                    rs2 = GET_FIELD(insn, 27, 31);
5294                    if (rs2) {
5295                        cpu_src2 = gen_load_gpr(dc, rs2);
5296                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5297                    } else {
5298                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5299                    }
5300                }
5301                switch (xop) {
5302                case 0x38:      /* jmpl */
5303                    {
5304                        TCGv t = gen_dest_gpr(dc, rd);
5305                        tcg_gen_movi_tl(t, dc->pc);
5306                        gen_store_gpr(dc, rd, t);
5307
5308                        gen_mov_pc_npc(dc);
5309                        gen_check_align(cpu_tmp0, 3);
5310                        gen_address_mask(dc, cpu_tmp0);
5311                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5312                        dc->npc = DYNAMIC_PC;
5313                    }
5314                    goto jmp_insn;
5315#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5316                case 0x39:      /* rett, V9 return */
5317                    {
5318                        if (!supervisor(dc))
5319                            goto priv_insn;
5320                        gen_mov_pc_npc(dc);
5321                        gen_check_align(cpu_tmp0, 3);
5322                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5323                        dc->npc = DYNAMIC_PC;
5324                        gen_helper_rett(cpu_env);
5325                    }
5326                    goto jmp_insn;
5327#endif
5328                case 0x3b: /* flush */
5329                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5330                        goto unimp_flush;
5331                    /* nop */
5332                    break;
5333                case 0x3c:      /* save */
5334                    gen_helper_save(cpu_env);
5335                    gen_store_gpr(dc, rd, cpu_tmp0);
5336                    break;
5337                case 0x3d:      /* restore */
5338                    gen_helper_restore(cpu_env);
5339                    gen_store_gpr(dc, rd, cpu_tmp0);
5340                    break;
5341#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5342                case 0x3e:      /* V9 done/retry */
5343                    {
5344                        switch (rd) {
5345                        case 0:
5346                            if (!supervisor(dc))
5347                                goto priv_insn;
5348                            dc->npc = DYNAMIC_PC;
5349                            dc->pc = DYNAMIC_PC;
5350                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5351                                gen_io_start();
5352                            }
5353                            gen_helper_done(cpu_env);
5354                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5355                                gen_io_end();
5356                            }
5357                            goto jmp_insn;
5358                        case 1:
5359                            if (!supervisor(dc))
5360                                goto priv_insn;
5361                            dc->npc = DYNAMIC_PC;
5362                            dc->pc = DYNAMIC_PC;
5363                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5364                                gen_io_start();
5365                            }
5366                            gen_helper_retry(cpu_env);
5367                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5368                                gen_io_end();
5369                            }
5370                            goto jmp_insn;
5371                        default:
5372                            goto illegal_insn;
5373                        }
5374                    }
5375                    break;
5376#endif
5377                default:
5378                    goto illegal_insn;
5379                }
5380            }
5381            break;
5382        }
5383        break;
5384    case 3:                     /* load/store instructions */
5385        {
5386            unsigned int xop = GET_FIELD(insn, 7, 12);
5387            /* ??? gen_address_mask prevents us from using a source
5388               register directly.  Always generate a temporary.  */
5389            TCGv cpu_addr = get_temp_tl(dc);
5390
5391            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5392            if (xop == 0x3c || xop == 0x3e) {
5393                /* V9 casa/casxa : no offset */
5394            } else if (IS_IMM) {     /* immediate */
5395                simm = GET_FIELDs(insn, 19, 31);
5396                if (simm != 0) {
5397                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5398                }
5399            } else {            /* register */
5400                rs2 = GET_FIELD(insn, 27, 31);
5401                if (rs2 != 0) {
5402                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5403                }
5404            }
5405            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5406                (xop > 0x17 && xop <= 0x1d ) ||
5407                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5408                TCGv cpu_val = gen_dest_gpr(dc, rd);
5409
5410                switch (xop) {
5411                case 0x0:       /* ld, V9 lduw, load unsigned word */
5412                    gen_address_mask(dc, cpu_addr);
5413                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5414                    break;
5415                case 0x1:       /* ldub, load unsigned byte */
5416                    gen_address_mask(dc, cpu_addr);
5417                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5418                    break;
5419                case 0x2:       /* lduh, load unsigned halfword */
5420                    gen_address_mask(dc, cpu_addr);
5421                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5422                    break;
5423                case 0x3:       /* ldd, load double word */
5424                    if (rd & 1)
5425                        goto illegal_insn;
5426                    else {
5427                        TCGv_i64 t64;
5428
5429                        gen_address_mask(dc, cpu_addr);
5430                        t64 = tcg_temp_new_i64();
5431                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5432                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5433                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5434                        gen_store_gpr(dc, rd + 1, cpu_val);
5435                        tcg_gen_shri_i64(t64, t64, 32);
5436                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5437                        tcg_temp_free_i64(t64);
5438                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5439                    }
5440                    break;
5441                case 0x9:       /* ldsb, load signed byte */
5442                    gen_address_mask(dc, cpu_addr);
5443                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5444                    break;
5445                case 0xa:       /* ldsh, load signed halfword */
5446                    gen_address_mask(dc, cpu_addr);
5447                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5448                    break;
5449                case 0xd:       /* ldstub */
5450                    gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5451                    break;
5452                case 0x0f:
5453                    /* swap, swap register with memory. Also atomically */
5454                    CHECK_IU_FEATURE(dc, SWAP);
5455                    cpu_src1 = gen_load_gpr(dc, rd);
5456                    gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5457                             dc->mem_idx, MO_TEUL);
5458                    break;
5459#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5460                case 0x10:      /* lda, V9 lduwa, load word alternate */
5461                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5462                    break;
5463                case 0x11:      /* lduba, load unsigned byte alternate */
5464                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5465                    break;
5466                case 0x12:      /* lduha, load unsigned halfword alternate */
5467                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5468                    break;
5469                case 0x13:      /* ldda, load double word alternate */
5470                    if (rd & 1) {
5471                        goto illegal_insn;
5472                    }
5473                    gen_ldda_asi(dc, cpu_addr, insn, rd);
5474                    goto skip_move;
5475                case 0x19:      /* ldsba, load signed byte alternate */
5476                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5477                    break;
5478                case 0x1a:      /* ldsha, load signed halfword alternate */
5479                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5480                    break;
5481                case 0x1d:      /* ldstuba -- XXX: should be atomically */
5482                    gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5483                    break;
5484                case 0x1f:      /* swapa, swap reg with alt. memory. Also
5485                                   atomically */
5486                    CHECK_IU_FEATURE(dc, SWAP);
5487                    cpu_src1 = gen_load_gpr(dc, rd);
5488                    gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5489                    break;
5490
5491#ifndef TARGET_SPARC64
5492                case 0x30: /* ldc */
5493                case 0x31: /* ldcsr */
5494                case 0x33: /* lddc */
5495                    goto ncp_insn;
5496#endif
5497#endif
5498#ifdef TARGET_SPARC64
5499                case 0x08: /* V9 ldsw */
5500                    gen_address_mask(dc, cpu_addr);
5501                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5502                    break;
5503                case 0x0b: /* V9 ldx */
5504                    gen_address_mask(dc, cpu_addr);
5505                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5506                    break;
5507                case 0x18: /* V9 ldswa */
5508                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5509                    break;
5510                case 0x1b: /* V9 ldxa */
5511                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5512                    break;
5513                case 0x2d: /* V9 prefetch, no effect */
5514                    goto skip_move;
5515                case 0x30: /* V9 ldfa */
5516                    if (gen_trap_ifnofpu(dc)) {
5517                        goto jmp_insn;
5518                    }
5519                    gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5520                    gen_update_fprs_dirty(dc, rd);
5521                    goto skip_move;
5522                case 0x33: /* V9 lddfa */
5523                    if (gen_trap_ifnofpu(dc)) {
5524                        goto jmp_insn;
5525                    }
5526                    gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5527                    gen_update_fprs_dirty(dc, DFPREG(rd));
5528                    goto skip_move;
5529                case 0x3d: /* V9 prefetcha, no effect */
5530                    goto skip_move;
5531                case 0x32: /* V9 ldqfa */
5532                    CHECK_FPU_FEATURE(dc, FLOAT128);
5533                    if (gen_trap_ifnofpu(dc)) {
5534                        goto jmp_insn;
5535                    }
5536                    gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5537                    gen_update_fprs_dirty(dc, QFPREG(rd));
5538                    goto skip_move;
5539#endif
5540                default:
5541                    goto illegal_insn;
5542                }
5543                gen_store_gpr(dc, rd, cpu_val);
5544#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5545            skip_move: ;
5546#endif
5547            } else if (xop >= 0x20 && xop < 0x24) {
5548                if (gen_trap_ifnofpu(dc)) {
5549                    goto jmp_insn;
5550                }
5551                switch (xop) {
5552                case 0x20:      /* ldf, load fpreg */
5553                    gen_address_mask(dc, cpu_addr);
5554                    cpu_dst_32 = gen_dest_fpr_F(dc);
5555                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5556                                        dc->mem_idx, MO_TEUL);
5557                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5558                    break;
5559                case 0x21:      /* ldfsr, V9 ldxfsr */
5560#ifdef TARGET_SPARC64
5561                    gen_address_mask(dc, cpu_addr);
5562                    if (rd == 1) {
5563                        TCGv_i64 t64 = tcg_temp_new_i64();
5564                        tcg_gen_qemu_ld_i64(t64, cpu_addr,
5565                                            dc->mem_idx, MO_TEQ);
5566                        gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5567                        tcg_temp_free_i64(t64);
5568                        break;
5569                    }
5570#endif
5571                    cpu_dst_32 = get_temp_i32(dc);
5572                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5573                                        dc->mem_idx, MO_TEUL);
5574                    gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5575                    break;
5576                case 0x22:      /* ldqf, load quad fpreg */
5577                    CHECK_FPU_FEATURE(dc, FLOAT128);
5578                    gen_address_mask(dc, cpu_addr);
5579                    cpu_src1_64 = tcg_temp_new_i64();
5580                    tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5581                                        MO_TEQ | MO_ALIGN_4);
5582                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5583                    cpu_src2_64 = tcg_temp_new_i64();
5584                    tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5585                                        MO_TEQ | MO_ALIGN_4);
5586                    gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5587                    tcg_temp_free_i64(cpu_src1_64);
5588                    tcg_temp_free_i64(cpu_src2_64);
5589                    break;
5590                case 0x23:      /* lddf, load double fpreg */
5591                    gen_address_mask(dc, cpu_addr);
5592                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5593                    tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5594                                        MO_TEQ | MO_ALIGN_4);
5595                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5596                    break;
5597                default:
5598                    goto illegal_insn;
5599                }
5600            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5601                       xop == 0xe || xop == 0x1e) {
5602                TCGv cpu_val = gen_load_gpr(dc, rd);
5603
5604                switch (xop) {
5605                case 0x4: /* st, store word */
5606                    gen_address_mask(dc, cpu_addr);
5607                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5608                    break;
5609                case 0x5: /* stb, store byte */
5610                    gen_address_mask(dc, cpu_addr);
5611                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5612                    break;
5613                case 0x6: /* sth, store halfword */
5614                    gen_address_mask(dc, cpu_addr);
5615                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5616                    break;
5617                case 0x7: /* std, store double word */
5618                    if (rd & 1)
5619                        goto illegal_insn;
5620                    else {
5621                        TCGv_i64 t64;
5622                        TCGv lo;
5623
5624                        gen_address_mask(dc, cpu_addr);
5625                        lo = gen_load_gpr(dc, rd + 1);
5626                        t64 = tcg_temp_new_i64();
5627                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5628                        tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5629                        tcg_temp_free_i64(t64);
5630                    }
5631                    break;
5632#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5633                case 0x14: /* sta, V9 stwa, store word alternate */
5634                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5635                    break;
5636                case 0x15: /* stba, store byte alternate */
5637                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5638                    break;
5639                case 0x16: /* stha, store halfword alternate */
5640                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5641                    break;
5642                case 0x17: /* stda, store double word alternate */
5643                    if (rd & 1) {
5644                        goto illegal_insn;
5645                    }
5646                    gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5647                    break;
5648#endif
5649#ifdef TARGET_SPARC64
5650                case 0x0e: /* V9 stx */
5651                    gen_address_mask(dc, cpu_addr);
5652                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5653                    break;
5654                case 0x1e: /* V9 stxa */
5655                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5656                    break;
5657#endif
5658                default:
5659                    goto illegal_insn;
5660                }
5661            } else if (xop > 0x23 && xop < 0x28) {
5662                if (gen_trap_ifnofpu(dc)) {
5663                    goto jmp_insn;
5664                }
5665                switch (xop) {
5666                case 0x24: /* stf, store fpreg */
5667                    gen_address_mask(dc, cpu_addr);
5668                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
5669                    tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5670                                        dc->mem_idx, MO_TEUL);
5671                    break;
5672                case 0x25: /* stfsr, V9 stxfsr */
5673                    {
5674#ifdef TARGET_SPARC64
5675                        gen_address_mask(dc, cpu_addr);
5676                        if (rd == 1) {
5677                            tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5678                            break;
5679                        }
5680#endif
5681                        tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5682                    }
5683                    break;
5684                case 0x26:
5685#ifdef TARGET_SPARC64
5686                    /* V9 stqf, store quad fpreg */
5687                    CHECK_FPU_FEATURE(dc, FLOAT128);
5688                    gen_address_mask(dc, cpu_addr);
5689                    /* ??? While stqf only requires 4-byte alignment, it is
5690                       legal for the cpu to signal the unaligned exception.
5691                       The OS trap handler is then required to fix it up.
5692                       For qemu, this avoids having to probe the second page
5693                       before performing the first write.  */
5694                    cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5695                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5696                                        dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5697                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5698                    cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5699                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5700                                        dc->mem_idx, MO_TEQ);
5701                    break;
5702#else /* !TARGET_SPARC64 */
5703                    /* stdfq, store floating point queue */
5704#if defined(CONFIG_USER_ONLY)
5705                    goto illegal_insn;
5706#else
5707                    if (!supervisor(dc))
5708                        goto priv_insn;
5709                    if (gen_trap_ifnofpu(dc)) {
5710                        goto jmp_insn;
5711                    }
5712                    goto nfq_insn;
5713#endif
5714#endif
5715                case 0x27: /* stdf, store double fpreg */
5716                    gen_address_mask(dc, cpu_addr);
5717                    cpu_src1_64 = gen_load_fpr_D(dc, rd);
5718                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5719                                        MO_TEQ | MO_ALIGN_4);
5720                    break;
5721                default:
5722                    goto illegal_insn;
5723                }
5724            } else if (xop > 0x33 && xop < 0x3f) {
5725                switch (xop) {
5726#ifdef TARGET_SPARC64
5727                case 0x34: /* V9 stfa */
5728                    if (gen_trap_ifnofpu(dc)) {
5729                        goto jmp_insn;
5730                    }
5731                    gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5732                    break;
5733                case 0x36: /* V9 stqfa */
5734                    {
5735                        CHECK_FPU_FEATURE(dc, FLOAT128);
5736                        if (gen_trap_ifnofpu(dc)) {
5737                            goto jmp_insn;
5738                        }
5739                        gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5740                    }
5741                    break;
5742                case 0x37: /* V9 stdfa */
5743                    if (gen_trap_ifnofpu(dc)) {
5744                        goto jmp_insn;
5745                    }
5746                    gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5747                    break;
5748                case 0x3e: /* V9 casxa */
5749                    rs2 = GET_FIELD(insn, 27, 31);
5750                    cpu_src2 = gen_load_gpr(dc, rs2);
5751                    gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5752                    break;
5753#else
5754                case 0x34: /* stc */
5755                case 0x35: /* stcsr */
5756                case 0x36: /* stdcq */
5757                case 0x37: /* stdc */
5758                    goto ncp_insn;
5759#endif
5760#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5761                case 0x3c: /* V9 or LEON3 casa */
5762#ifndef TARGET_SPARC64
5763                    CHECK_IU_FEATURE(dc, CASA);
5764#endif
5765                    rs2 = GET_FIELD(insn, 27, 31);
5766                    cpu_src2 = gen_load_gpr(dc, rs2);
5767                    gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5768                    break;
5769#endif
5770                default:
5771                    goto illegal_insn;
5772                }
5773            } else {
5774                goto illegal_insn;
5775            }
5776        }
5777        break;
5778    }
5779    /* default case for non jump instructions */
5780    if (dc->npc == DYNAMIC_PC) {
5781        dc->pc = DYNAMIC_PC;
5782        gen_op_next_insn();
5783    } else if (dc->npc == JUMP_PC) {
5784        /* we can do a static jump */
5785        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5786        dc->base.is_jmp = DISAS_NORETURN;
5787    } else {
5788        dc->pc = dc->npc;
5789        dc->npc = dc->npc + 4;
5790    }
5791 jmp_insn:
5792    goto egress;
5793 illegal_insn:
5794    gen_exception(dc, TT_ILL_INSN);
5795    goto egress;
5796 unimp_flush:
5797    gen_exception(dc, TT_UNIMP_FLUSH);
5798    goto egress;
5799#if !defined(CONFIG_USER_ONLY)
5800 priv_insn:
5801    gen_exception(dc, TT_PRIV_INSN);
5802    goto egress;
5803#endif
5804 nfpu_insn:
5805    gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5806    goto egress;
5807#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5808 nfq_insn:
5809    gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5810    goto egress;
5811#endif
5812#ifndef TARGET_SPARC64
5813 ncp_insn:
5814    gen_exception(dc, TT_NCP_INSN);
5815    goto egress;
5816#endif
5817 egress:
5818    if (dc->n_t32 != 0) {
5819        int i;
5820        for (i = dc->n_t32 - 1; i >= 0; --i) {
5821            tcg_temp_free_i32(dc->t32[i]);
5822        }
5823        dc->n_t32 = 0;
5824    }
5825    if (dc->n_ttl != 0) {
5826        int i;
5827        for (i = dc->n_ttl - 1; i >= 0; --i) {
5828            tcg_temp_free(dc->ttl[i]);
5829        }
5830        dc->n_ttl = 0;
5831    }
5832}
5833
5834static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5835{
5836    DisasContext *dc = container_of(dcbase, DisasContext, base);
5837    CPUSPARCState *env = cs->env_ptr;
5838    int bound;
5839
5840    dc->pc = dc->base.pc_first;
5841    dc->npc = (target_ulong)dc->base.tb->cs_base;
5842    dc->cc_op = CC_OP_DYNAMIC;
5843    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5844    dc->def = &env->def;
5845    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5846    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5847#ifndef CONFIG_USER_ONLY
5848    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5849#endif
5850#ifdef TARGET_SPARC64
5851    dc->fprs_dirty = 0;
5852    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5853#ifndef CONFIG_USER_ONLY
5854    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5855#endif
5856#endif
5857    /*
5858     * if we reach a page boundary, we stop generation so that the
5859     * PC of a TT_TFAULT exception is always in the right page
5860     */
5861    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5862    dc->base.max_insns = MIN(dc->base.max_insns, bound);
5863}
5864
/* No per-TB work is needed before the first instruction; intentional no-op. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5868
5869static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5870{
5871    DisasContext *dc = container_of(dcbase, DisasContext, base);
5872
5873    if (dc->npc & JUMP_PC) {
5874        assert(dc->jump_pc[1] == dc->pc + 4);
5875        tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5876    } else {
5877        tcg_gen_insn_start(dc->pc, dc->npc);
5878    }
5879}
5880
/*
 * Translator-loop hook: a guest breakpoint was hit at dc->pc.  Emit a
 * call into the debug helper and terminate the TB.  Returning true
 * tells the generic loop the breakpoint has been handled here.
 */
static bool sparc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                      const CPUBreakpoint *bp)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    /* Sync translator state back to env first (save_state), unless we
       are still at the TB entry where env is presumably already
       up to date — NOTE(review): relies on save_state semantics.  */
    if (dc->pc != dc->base.pc_first) {
        save_state(dc);
    }
    gen_helper_debug(cpu_env);
    tcg_gen_exit_tb(NULL, 0);
    dc->base.is_jmp = DISAS_NORETURN;
    /* update pc_next so that the current instruction is included in tb->size */
    dc->base.pc_next += 4;
    return true;
}
5896
5897static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5898{
5899    DisasContext *dc = container_of(dcbase, DisasContext, base);
5900    CPUSPARCState *env = cs->env_ptr;
5901    unsigned int insn;
5902
5903    insn = cpu_ldl_code(env, dc->pc);
5904    dc->base.pc_next += 4;
5905    disas_sparc_insn(dc, insn);
5906
5907    if (dc->base.is_jmp == DISAS_NORETURN) {
5908        return;
5909    }
5910    if (dc->pc != dc->base.pc_next) {
5911        dc->base.is_jmp = DISAS_TOO_MANY;
5912    }
5913}
5914
5915static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5916{
5917    DisasContext *dc = container_of(dcbase, DisasContext, base);
5918
5919    switch (dc->base.is_jmp) {
5920    case DISAS_NEXT:
5921    case DISAS_TOO_MANY:
5922        if (dc->pc != DYNAMIC_PC &&
5923            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5924            /* static PC and NPC: we can use direct chaining */
5925            gen_goto_tb(dc, 0, dc->pc, dc->npc);
5926        } else {
5927            if (dc->pc != DYNAMIC_PC) {
5928                tcg_gen_movi_tl(cpu_pc, dc->pc);
5929            }
5930            save_npc(dc);
5931            tcg_gen_exit_tb(NULL, 0);
5932        }
5933        break;
5934
5935    case DISAS_NORETURN:
5936       break;
5937
5938    case DISAS_EXIT:
5939        /* Exit TB */
5940        save_state(dc);
5941        tcg_gen_exit_tb(NULL, 0);
5942        break;
5943
5944    default:
5945        g_assert_not_reached();
5946    }
5947}
5948
5949static void sparc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
5950{
5951    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
5952    log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
5953}
5954
/* Hooks that plug the SPARC decoder into the generic translator loop.  */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .breakpoint_check   = sparc_tr_breakpoint_check,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5964
/*
 * Entry point from the common code-generation machinery: translate one
 * TB.  The DisasContext lives on the stack and is zero-initialized;
 * sparc_tr_init_disas_context fills in the rest.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
{
    DisasContext dc = {};

    translator_loop(&sparc_tr_ops, &dc.base, cs, tb);
}
5971
/*
 * Allocate the TCG globals backing the SPARC CPU state.  Run once at
 * startup; afterwards translation uses the cpu_* file-scope handles
 * declared at the top of this file.
 */
void sparc_tcg_init(void)
{
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* Each name covers a 64-bit even/odd float register pair.  */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals, keyed by their offset inside CPUSPARCState.  */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-sized globals, likewise keyed by env offset.  */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 is the architectural zero register: no backing TCG global.  */
    cpu_regs[0] = NULL;
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* Windowed registers %o/%l/%i are reached indirectly via regwptr.  */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
6057
6058void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
6059                          target_ulong *data)
6060{
6061    target_ulong pc = data[0];
6062    target_ulong npc = data[1];
6063
6064    env->pc = pc;
6065    if (npc == DYNAMIC_PC) {
6066        /* dynamic NPC: already stored */
6067    } else if (npc & JUMP_PC) {
6068        /* jump PC: use 'cond' and the jump targets of the translation */
6069        if (env->cond) {
6070            env->npc = npc & ~3;
6071        } else {
6072            env->npc = pc + 4;
6073        }
6074    } else {
6075        env->npc = npc;
6076    }
6077}
6078