/* qemu/target/sparc/translate.c */
   1/*
   2   SPARC translation
   3
   4   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   5   Copyright (C) 2003-2005 Fabrice Bellard
   6
   7   This library is free software; you can redistribute it and/or
   8   modify it under the terms of the GNU Lesser General Public
   9   License as published by the Free Software Foundation; either
  10   version 2.1 of the License, or (at your option) any later version.
  11
  12   This library is distributed in the hope that it will be useful,
  13   but WITHOUT ANY WARRANTY; without even the implied warranty of
  14   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  15   Lesser General Public License for more details.
  16
  17   You should have received a copy of the GNU Lesser General Public
  18   License along with this library; if not, see <http://www.gnu.org/licenses/>.
  19 */
  20
  21#include "qemu/osdep.h"
  22
  23#include "cpu.h"
  24#include "disas/disas.h"
  25#include "exec/helper-proto.h"
  26#include "exec/exec-all.h"
  27#include "tcg/tcg-op.h"
  28#include "exec/cpu_ldst.h"
  29
  30#include "exec/helper-gen.h"
  31
  32#include "exec/translator.h"
  33#include "exec/log.h"
  34#include "asi.h"
  35
  36
#define DEBUG_DISAS

/* Sentinel values stored in dc->pc / dc->npc when the address is not
   known at translation time (real instruction addresses never take
   these small values -- presumably because they are 4-aligned; confirm).  */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

#define DISAS_EXIT  DISAS_TARGET_0
  44
/* global register indexes */
static TCGv_ptr cpu_regwptr;                      /* base of the current register window */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;  /* operands/result for lazy cc evaluation */
static TCGv_i32 cpu_cc_op;                        /* current CC_OP_* in the CPU state */
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];                         /* %g0-%o7 plus current window ins/locals */
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;                             /* scratch used for deferred branches */
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];           /* each element backs a pair of singles */
  67
  68#include "exec/gen-icount.h"
  69
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;             /* MMU index for memory accesses */
    bool fpu_enabled;        /* FP ops allowed without trapping */
    bool address_mask_32bit; /* mask addresses to 32 bits (see AM_CHECK) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;         /* translating supervisor-mode code */
#ifdef TARGET_SPARC64
    bool hypervisor;         /* translating hypervisor-mode code */
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;        /* CPU model definition */
    TCGv_i32 t32[3];         /* per-insn 32-bit temporaries (see get_temp_i32) */
    TCGv ttl[5];             /* per-insn target-long temporaries (see get_temp_tl) */
    int n_t32;               /* entries used in t32[] */
    int n_ttl;               /* entries used in ttl[] */
#ifdef TARGET_SPARC64
    int fprs_dirty;          /* FPRS dirty bits already set in this TB */
    int asi;                 /* current ASI value (sparc64) */
#endif
} DisasContext;
  96
/* A decomposed comparison: COND applied to (C1, C2) decides the branch.
   is_bool: c1 presumably already holds a 0/1 truth value -- confirm
   against gen_compare.  g1/g2 mark c1/c2 as global TCG values that
   must not be freed (see free_compare).  */
typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
 103
// This function uses non-native bit order, i.e. bit 0 is the MSB
// (as drawn in the instruction-format diagrams).
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/* Map an instruction FP register number to a double/quad register index.
   On sparc64, bit 0 of the encoded number selects the upper bank
   (registers 32 and up).  */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Widths of the usable trap-number field: 8 bits for UA2005 hypervisor
   traps, 7 bits for SPARC V8.  */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
 125
 126static int sign_extend(int x, int len)
 127{
 128    len = 32 - len;
 129    return (x << len) >> len;
 130}
 131
/* Instruction bit 13: set when the second operand is an immediate.  */
#define IS_IMM (insn & (1<<13))
 133
 134static inline TCGv_i32 get_temp_i32(DisasContext *dc)
 135{
 136    TCGv_i32 t;
 137    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
 138    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
 139    return t;
 140}
 141
 142static inline TCGv get_temp_tl(DisasContext *dc)
 143{
 144    TCGv t;
 145    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
 146    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
 147    return t;
 148}
 149
/* Mark the half of the FP register file containing RD as dirty:
   rd < 32 sets bit 1, the upper bank sets bit 2 (presumably the FPRS
   DL/DU bits -- confirm against cpu.h).  No-op on 32-bit sparc.  */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
 162
/* floating point registers moves */

/* Return a 32-bit view of single-precision register SRC.  Each element
   of cpu_fpr[] backs a pair of singles: the odd-numbered register lives
   in the low 32 bits, the even-numbered one in the high 32 bits.  On
   32-bit hosts the halves of the i64 global can be aliased directly;
   otherwise the half is extracted into a per-insn temporary.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    TCGv_i32 ret = get_temp_i32(dc);
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
#endif
}
 182
/* Store 32-bit value V into single-precision register DST, leaving the
   other half of the containing 64-bit pair untouched (odd register =
   low half, even register = high half), and mark FPRS dirty.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* On 64-bit hosts an i32 is the low half of an i64 register, so the
       deposit can read V through a plain reinterpreting cast.  */
    TCGv_i64 t = (TCGv_i64)v;
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
 198
/* Return a fresh 32-bit temporary in which to build a single-precision
   result; the caller stores it with gen_store_fpr_F.  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
 203
 204static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
 205{
 206    src = DFPREG(src);
 207    return cpu_fpr[src / 2];
 208}
 209
 210static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
 211{
 212    dst = DFPREG(dst);
 213    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
 214    gen_update_fprs_dirty(dc, dst);
 215}
 216
/* Destination for a double-precision result: the backing i64 register
   itself, written in place.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
 221
 222static void gen_op_load_fpr_QT0(unsigned int src)
 223{
 224    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
 225                   offsetof(CPU_QuadU, ll.upper));
 226    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
 227                   offsetof(CPU_QuadU, ll.lower));
 228}
 229
 230static void gen_op_load_fpr_QT1(unsigned int src)
 231{
 232    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
 233                   offsetof(CPU_QuadU, ll.upper));
 234    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
 235                   offsetof(CPU_QuadU, ll.lower));
 236}
 237
 238static void gen_op_store_QT0_fpr(unsigned int dst)
 239{
 240    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
 241                   offsetof(CPU_QuadU, ll.upper));
 242    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
 243                   offsetof(CPU_QuadU, ll.lower));
 244}
 245
 246static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
 247                            TCGv_i64 v1, TCGv_i64 v2)
 248{
 249    dst = QFPREG(dst);
 250
 251    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
 252    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
 253    gen_update_fprs_dirty(dc, dst);
 254}
 255
 256#ifdef TARGET_SPARC64
 257static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
 258{
 259    src = QFPREG(src);
 260    return cpu_fpr[src / 2];
 261}
 262
 263static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
 264{
 265    src = QFPREG(src);
 266    return cpu_fpr[src / 2 + 1];
 267}
 268
 269static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
 270{
 271    rd = QFPREG(rd);
 272    rs = QFPREG(rs);
 273
 274    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
 275    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
 276    gen_update_fprs_dirty(dc, rd);
 277}
 278#endif
 279
/* moves */

/* Privilege-level predicates.  User-only emulation never runs
   privileged code, so both collapse to constant 0 there; on sparc64
   hypervisor mode implies supervisor.  */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* Whether addresses must be masked to 32 bits: always for 32-bit ABI
   processes, otherwise per the translation-time address_mask_32bit flag.  */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
 302
 303static inline void gen_address_mask(DisasContext *dc, TCGv addr)
 304{
 305#ifdef TARGET_SPARC64
 306    if (AM_CHECK(dc))
 307        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
 308#endif
 309}
 310
 311static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
 312{
 313    if (reg > 0) {
 314        assert(reg < 32);
 315        return cpu_regs[reg];
 316    } else {
 317        TCGv t = get_temp_tl(dc);
 318        tcg_gen_movi_tl(t, 0);
 319        return t;
 320    }
 321}
 322
 323static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
 324{
 325    if (reg > 0) {
 326        assert(reg < 32);
 327        tcg_gen_mov_tl(cpu_regs[reg], v);
 328    }
 329}
 330
 331static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
 332{
 333    if (reg > 0) {
 334        assert(reg < 32);
 335        return cpu_regs[reg];
 336    } else {
 337        return get_temp_tl(dc);
 338    }
 339}
 340
 341static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
 342{
 343    return translator_use_goto_tb(&s->base, pc) &&
 344           translator_use_goto_tb(&s->base, npc);
 345}
 346
/* Emit code to continue execution at (PC, NPC), chaining directly to
   the next TB via chain slot TB_NUM when use_goto_tb allows it, and
   otherwise returning to the main loop.  Note the goto_tb op must be
   emitted before the pc/npc stores and the matching exit_tb.  */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(NULL, 0);
    }
}
 363
// XXX suboptimal
/* Extract a single condition-code flag from a 32-bit PSR image SRC
   into REG as 0 or 1 (one helper per flag: N, Z, V, C).  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
 388
/* dst = src1 + src2, additionally latching operands and result into
   cpu_cc_* for later lazy condition-code evaluation.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 396
/* Recompute the 32-bit carry-out of the most recent addition as
   (uint32_t)cc_dst < (uint32_t)cc_src.  Returns a new i32 temporary
   holding 0 or 1; the caller frees it.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
 422
/* Recompute the 32-bit borrow-out of the most recent subtraction as
   (uint32_t)cc_src < (uint32_t)cc_src2.  Returns a new i32 temporary
   holding 0 or 1; the caller frees it.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
 448
/* Generate dst = src1 + src2 + carry (ADDX/ADDC), obtaining the carry
   as cheaply as the tracked cc_op allows: known zero after DIV/LOGIC,
   recomputed from cpu_cc_* after ADD*/SUB*, or via the compute_C_icc
   helper otherwise.  With UPDATE_CC, also latch operands/result and
   switch the current cc state to CC_OP_ADDX.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to a target-long addend if necessary.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
 519
/* dst = src1 - src2, additionally latching operands and result into
   cpu_cc_* for later lazy condition-code evaluation.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 527
/* Generate dst = src1 - src2 - carry (SUBX/SUBC); mirror image of
   gen_op_addx_int: borrow is known zero after DIV/LOGIC, recomputed
   from cpu_cc_* after ADD*/SUB*, or produced by helper otherwise.
   With UPDATE_CC, latch operands/result and switch to CC_OP_SUBX.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to a target-long subtrahend if necessary.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
 598
/* One multiply step (SPARC MULScc): add src2 to the shifted src1 only
   when the low bit of %y is set, shift %y right one bit inserting
   src1's LSB at the top, and shift N^V into src1's MSB.  Operands and
   result are latched in cpu_cc_* for lazy cc evaluation.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
 640
/* 32x32 -> 64 multiply for UMUL/SMUL: DST receives the product and %y
   the high 32 bits.  SIGN_EXT selects signed vs unsigned extension of
   the 32-bit operands.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
 668
/* UMUL: unsigned 32-bit multiply (see gen_op_multiply).  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32-bit multiply (see gen_op_multiply).  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
 680
/*
 * Integer condition evaluators: each computes one icc/xcc branch
 * condition from a PSR-format flags image SRC and leaves 0 or 1 in
 * DST.  The comment above each gives the flag expression evaluated.
 */

// 1 (ba: branch always)
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z (be: branch on equal)
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V) (ble: branch on less or equal)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// N ^ V (bl: branch on less)
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C | Z (bleu: branch on less or equal, unsigned)
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C (bcs: branch on carry set)
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V (bvs: branch on overflow set)
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0 (bn: branch never)
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N (bneg: branch on negative)
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z (bne: branch on not equal)
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V)) (bg: branch on greater)
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V) (bge: branch on greater or equal)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z) (bgu: branch on greater, unsigned)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C (bcc: branch on carry clear)
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N (bpos: branch on positive)
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V (bvc: branch on overflow clear)
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
 797
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract the FCC0 bit of the fcc field at FCC_OFFSET from FSR image
   SRC into REG as 0 or 1.  */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract the FCC1 bit of the fcc field at FCC_OFFSET from FSR image
   SRC into REG as 0 or 1.  */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
 818
/*
 * Float condition evaluators: each computes one fcc branch condition
 * from an FSR image SRC (fcc field selected by FCC_OFFSET) into DST
 * as 0 or 1.  The comments use the fcc encoding listed above
 * (0 =, 1 <, 2 >, 3 unordered).
 */

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
 963
/* Two-way conditional TB exit: continue at PC1 when R_COND is non-zero,
   otherwise at PC2, each with the trivial npc = pc + 4.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
 976
/* Conditional branch with the annul bit set: when cpu_cond is true the
   delay slot at npc executes and control continues at PC1; when false
   the delay slot is annulled and execution resumes at npc + 4.  Ends
   the TB.  */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
 991
/* Conditional branch without annul: the delay slot always executes,
   then control goes to PC1 if cpu_cond is true, else to npc + 4.
   When npc is known statically the decision is deferred through
   JUMP_PC/jump_pc[]; with a dynamic npc the new npc is selected with
   a movcond immediately.  */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        /* npc = (cond != 0) ? pc1 : npc + 4 */
        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1016
1017static inline void gen_generic_branch(DisasContext *dc)
1018{
1019    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
1020    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
1021    TCGv zero = tcg_const_tl(0);
1022
1023    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
1024
1025    tcg_temp_free(npc0);
1026    tcg_temp_free(npc1);
1027    tcg_temp_free(zero);
1028}
1029
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the pending two-way npc into cpu_npc now.  */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1039
/* Write the (possibly conditional) translation-time npc back to
   cpu_npc; nothing to do when cpu_npc is already up to date.  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1049
/* Force the lazily-tracked condition codes into the architectural
   state before the PSR is read.  */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1057
/* Synchronize cpu_pc/cpu_npc with the translation-time values.  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1063
/* Raise exception WHICH at the current pc/npc and end the TB.  */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    /* pc/npc must be up to date so the exception unwinds correctly.  */
    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->base.is_jmp = DISAS_NORETURN;
}
1074
/* Emit a runtime alignment check of ADDR against MASK via helper
   (presumably traps when ADDR & MASK != 0 -- confirm against the
   check_align helper).  */
static void gen_check_align(TCGv addr, int mask)
{
    TCGv_i32 r_mask = tcg_const_i32(mask);
    gen_helper_check_align(cpu_env, addr, r_mask);
    tcg_temp_free_i32(r_mask);
}
1081
1082static inline void gen_mov_pc_npc(DisasContext *dc)
1083{
1084    if (dc->npc == JUMP_PC) {
1085        gen_generic_branch(dc);
1086        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1087        dc->pc = DYNAMIC_PC;
1088    } else if (dc->npc == DYNAMIC_PC) {
1089        tcg_gen_mov_tl(cpu_pc, cpu_npc);
1090        dc->pc = DYNAMIC_PC;
1091    } else {
1092        dc->pc = dc->npc;
1093    }
1094}
1095
/* Emit the fall-through step: pc <- npc, npc <- npc + 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1101
/* Release the comparison operands; g1/g2 mark operands that alias
   global TCG values and therefore must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1111
/* Translate a SPARC integer condition field 'cond' into a DisasCompare,
   exploiting the lazily-tracked condition-code state (dc->cc_op) so the
   full flags need not be materialised when the last cc-setting op lets
   the condition be expressed as a direct TCG comparison.  On sparc64,
   'xcc' selects the 64-bit condition codes instead of the 32-bit icc.  */
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    /* TCG comparison of cc_src vs cc_src2 equivalent to each cond value
       after a SUBcc; -1 marks conditions that cannot be expressed as a
       plain two-operand compare and are handled separately below.  */
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    /* After a logic op, C = V = 0 and cc_dst holds the result, so each
       condition reduces to comparing cc_dst against zero.  */
    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        /* Compare cc_dst against zero with the condition chosen above.  */
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            /* For icc, only the low 32 bits of the result matter.  */
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            /* neg/pos only examine N, i.e. the sign of the result.  */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            /* V cannot be recovered from the operands alone.  */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        /* Fall back to materialising the flags and testing them.  */
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
1285
/* Translate an FP condition field 'cond' on fcc set 'cc' into a
   DisasCompare.  The fcc bits are extracted from cpu_fsr; 'offset' is
   the bit distance from the fcc0 field to the selected fcc set.  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offsets of fcc1..fcc3 relative to fcc0 in the FSR
       (fcc0 is at bit 10, fcc1..3 start at bit 32).  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1365
1366static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1367                     DisasContext *dc)
1368{
1369    DisasCompare cmp;
1370    gen_compare(&cmp, cc, cond, dc);
1371
1372    /* The interface is to return a boolean in r_dst.  */
1373    if (cmp.is_bool) {
1374        tcg_gen_mov_tl(r_dst, cmp.c1);
1375    } else {
1376        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1377    }
1378
1379    free_compare(&cmp);
1380}
1381
1382static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1383{
1384    DisasCompare cmp;
1385    gen_fcompare(&cmp, cc, cond);
1386
1387    /* The interface is to return a boolean in r_dst.  */
1388    if (cmp.is_bool) {
1389        tcg_gen_mov_tl(r_dst, cmp.c1);
1390    } else {
1391        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1392    }
1393
1394    free_compare(&cmp);
1395}
1396
1397#ifdef TARGET_SPARC64
/* TCG conditions for the v9 branch/move-on-register-condition encodings.
   The table holds the INVERSE of each architectural condition; users
   apply tcg_invert_cond() (see gen_compare_reg).  -1 marks reserved
   encodings (0 and 4), which callers must not pass.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1409
/* Build a DisasCompare testing register r_src against zero for the v9
   register-condition encoding 'cond' (must be a valid encoding).  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    /* Undo the table's inversion to get the architectural condition.  */
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;   /* c1 aliases the caller's register; do not free */
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1419
/* Evaluate a v9 register condition and leave a boolean in r_dst.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1430#endif
1431
/* Translate an integer conditional branch (Bicc/BPcc).  'a' is the
   annul bit: for bn/ba it controls whether the delay slot executes;
   for real conditions it means the delay slot is annulled when the
   branch is not taken.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* Truncate to 32 bits when PSTATE.AM is set.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: the delay slot is skipped; jump straight there.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* Needed when dc->npc is DYNAMIC_PC (value only in cpu_npc).  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1471
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).
   Mirrors do_branch() but evaluates the condition from the FSR fcc
   field via gen_fcond().  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* Truncate to 32 bits when PSTATE.AM is set.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: the delay slot is skipped; jump straight there.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* Needed when dc->npc is DYNAMIC_PC (value only in cpu_npc).  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1511
1512#ifdef TARGET_SPARC64
/* Translate a v9 branch-on-register-condition (BPr).  There are no
   unconditional encodings, so the condition is always evaluated.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* Truncate to 32 bits when PSTATE.AM is set.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1530
/* FCMPs: single-precision compare, result into FSR.fcc[fccno].  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1548
/* FCMPd: double-precision compare, result into FSR.fcc[fccno].  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1566
/* FCMPq: quad-precision compare of QT0/QT1, result into FSR.fcc[fccno].  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1584
/* FCMPEs: single-precision compare, signalling on unordered operands.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1602
/* FCMPEd: double-precision compare, signalling on unordered operands.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1620
/* FCMPEq: quad-precision compare of QT0/QT1, signalling on unordered.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1638
1639#else
1640
/* Pre-v9: only fcc0 exists, so fccno is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1645
/* Pre-v9: only fcc0 exists, so fccno is ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1650
/* Pre-v9: only fcc0 exists, so fccno is ignored.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1655
/* Pre-v9: only fcc0 exists, so fccno is ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1660
/* Pre-v9: only fcc0 exists, so fccno is ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1665
/* Pre-v9: only fcc0 exists, so fccno is ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1670#endif
1671
/* Raise an FP exception with FSR.ftt replaced by fsr_flags.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1678
/* If the FPU is disabled, raise fp_disabled and return 1 so the caller
   can abandon translation of the insn.  Always 0 for user emulation.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1689
/* Clear FSR.ftt and the current IEEE exception (cexc) bits.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1694
/* rd = gen(rs), single precision, with IEEE exception check.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1708
/* rd = gen(rs), single precision; "ne" = no IEEE exception check.  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
1721
/* rd = gen(rs1, rs2), single precision, with IEEE exception check.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1736
1737#ifdef TARGET_SPARC64
/* rd = gen(rs1, rs2), single precision; no IEEE exception check.  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1751#endif
1752
/* rd = gen(rs), double precision, with IEEE exception check.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1766
1767#ifdef TARGET_SPARC64
/* rd = gen(rs), double precision; no IEEE exception check.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1780#endif
1781
/* rd = gen(rs1, rs2), double precision, with IEEE exception check.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1796
1797#ifdef TARGET_SPARC64
/* rd = gen(rs1, rs2), double precision; no IEEE exception check.  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1811
/* rd = gen(gsr, rs1, rs2): VIS ops that take the GSR as an extra input.  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1825
/* rd = gen(rd, rs1, rs2): three-operand ops where rd is also a source.  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1840#endif
1841
/* Quad-precision unary op via the QT0/QT1 staging registers,
   with IEEE exception check.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1853
1854#ifdef TARGET_SPARC64
/* Quad-precision unary op via QT0/QT1; no IEEE exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1865#endif
1866
/* Quad-precision binary op: rs1 -> QT0, rs2 -> QT1, result from QT0;
   with IEEE exception check.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1879
/* Double rd = gen(single rs1, single rs2), with IEEE exception check
   (e.g. FsMULd).  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1895
/* Quad rd (via QT0) = gen(double rs1, double rs2), with IEEE
   exception check (e.g. FdMULq).  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1910
1911#ifdef TARGET_SPARC64
/* Double rd = gen(single rs), with IEEE exception check.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1926#endif
1927
/* Double rd = gen(single rs); helper takes cpu_env but no IEEE
   exception check is performed afterwards.  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1941
/* Single rd = gen(double rs), with IEEE exception check.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1956
/* Single rd = gen(quad rs via QT1), with IEEE exception check.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1970
/* Double rd = gen(quad rs via QT1), with IEEE exception check.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1984
/* Quad rd (via QT0) = gen(single rs); no IEEE exception check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1997
/* Quad rd (via QT0) = gen(double rs); no IEEE exception check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2010
/* SWAP: atomically exchange src with memory at addr; old value in dst.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2017
/* LDSTUB: atomically load the byte at addr into dst and store 0xff.  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
    tcg_temp_free(m1);
}
2025
2026/* asi moves */
2027#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI access, decoded once by get_asi().  */
typedef enum {
    GET_ASI_HELPER,   /* not handled inline; call the generic helper */
    GET_ASI_EXCP,     /* decoding already raised an exception */
    GET_ASI_DIRECT,   /* plain load/store with memop + mem_idx */
    GET_ASI_DTWINX,   /* 128-bit twin load/store (v9) */
    GET_ASI_BLOCK,    /* 64-byte block transfer */
    GET_ASI_SHORT,    /* 8/16-bit FP load/store */
    GET_ASI_BCOPY,    /* sparc32 block copy (sta) */
    GET_ASI_BFILL,    /* sparc32 block fill (stda) */
} ASIType;

/* Decoded ASI: dispatch type, raw ASI number, MMU index and memop
   (endianness/size) to use for the access.  */
typedef struct {
    ASIType type;
    int asi;
    int mem_idx;
    MemOp memop;
} DisasASI;
2045
2046static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
2047{
2048    int asi = GET_FIELD(insn, 19, 26);
2049    ASIType type = GET_ASI_HELPER;
2050    int mem_idx = dc->mem_idx;
2051
2052#ifndef TARGET_SPARC64
2053    /* Before v9, all asis are immediate and privileged.  */
2054    if (IS_IMM) {
2055        gen_exception(dc, TT_ILL_INSN);
2056        type = GET_ASI_EXCP;
2057    } else if (supervisor(dc)
2058               /* Note that LEON accepts ASI_USERDATA in user mode, for
2059                  use with CASA.  Also note that previous versions of
2060                  QEMU allowed (and old versions of gcc emitted) ASI_P
2061                  for LEON, which is incorrect.  */
2062               || (asi == ASI_USERDATA
2063                   && (dc->def->features & CPU_FEATURE_CASA))) {
2064        switch (asi) {
2065        case ASI_USERDATA:   /* User data access */
2066            mem_idx = MMU_USER_IDX;
2067            type = GET_ASI_DIRECT;
2068            break;
2069        case ASI_KERNELDATA: /* Supervisor data access */
2070            mem_idx = MMU_KERNEL_IDX;
2071            type = GET_ASI_DIRECT;
2072            break;
2073        case ASI_M_BYPASS:    /* MMU passthrough */
2074        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2075            mem_idx = MMU_PHYS_IDX;
2076            type = GET_ASI_DIRECT;
2077            break;
2078        case ASI_M_BCOPY: /* Block copy, sta access */
2079            mem_idx = MMU_KERNEL_IDX;
2080            type = GET_ASI_BCOPY;
2081            break;
2082        case ASI_M_BFILL: /* Block fill, stda access */
2083            mem_idx = MMU_KERNEL_IDX;
2084            type = GET_ASI_BFILL;
2085            break;
2086        }
2087
2088        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
2089         * permissions check in get_physical_address(..).
2090         */
2091        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
2092    } else {
2093        gen_exception(dc, TT_PRIV_INSN);
2094        type = GET_ASI_EXCP;
2095    }
2096#else
2097    if (IS_IMM) {
2098        asi = dc->asi;
2099    }
2100    /* With v9, all asis below 0x80 are privileged.  */
2101    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2102       down that bit into DisasContext.  For the moment that's ok,
2103       since the direct implementations below doesn't have any ASIs
2104       in the restricted [0x30, 0x7f] range, and the check will be
2105       done properly in the helper.  */
2106    if (!supervisor(dc) && asi < 0x80) {
2107        gen_exception(dc, TT_PRIV_ACT);
2108        type = GET_ASI_EXCP;
2109    } else {
2110        switch (asi) {
2111        case ASI_REAL:      /* Bypass */
2112        case ASI_REAL_IO:   /* Bypass, non-cacheable */
2113        case ASI_REAL_L:    /* Bypass LE */
2114        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2115        case ASI_TWINX_REAL:   /* Real address, twinx */
2116        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2117        case ASI_QUAD_LDD_PHYS:
2118        case ASI_QUAD_LDD_PHYS_L:
2119            mem_idx = MMU_PHYS_IDX;
2120            break;
2121        case ASI_N:  /* Nucleus */
2122        case ASI_NL: /* Nucleus LE */
2123        case ASI_TWINX_N:
2124        case ASI_TWINX_NL:
2125        case ASI_NUCLEUS_QUAD_LDD:
2126        case ASI_NUCLEUS_QUAD_LDD_L:
2127            if (hypervisor(dc)) {
2128                mem_idx = MMU_PHYS_IDX;
2129            } else {
2130                mem_idx = MMU_NUCLEUS_IDX;
2131            }
2132            break;
2133        case ASI_AIUP:  /* As if user primary */
2134        case ASI_AIUPL: /* As if user primary LE */
2135        case ASI_TWINX_AIUP:
2136        case ASI_TWINX_AIUP_L:
2137        case ASI_BLK_AIUP_4V:
2138        case ASI_BLK_AIUP_L_4V:
2139        case ASI_BLK_AIUP:
2140        case ASI_BLK_AIUPL:
2141            mem_idx = MMU_USER_IDX;
2142            break;
2143        case ASI_AIUS:  /* As if user secondary */
2144        case ASI_AIUSL: /* As if user secondary LE */
2145        case ASI_TWINX_AIUS:
2146        case ASI_TWINX_AIUS_L:
2147        case ASI_BLK_AIUS_4V:
2148        case ASI_BLK_AIUS_L_4V:
2149        case ASI_BLK_AIUS:
2150        case ASI_BLK_AIUSL:
2151            mem_idx = MMU_USER_SECONDARY_IDX;
2152            break;
2153        case ASI_S:  /* Secondary */
2154        case ASI_SL: /* Secondary LE */
2155        case ASI_TWINX_S:
2156        case ASI_TWINX_SL:
2157        case ASI_BLK_COMMIT_S:
2158        case ASI_BLK_S:
2159        case ASI_BLK_SL:
2160        case ASI_FL8_S:
2161        case ASI_FL8_SL:
2162        case ASI_FL16_S:
2163        case ASI_FL16_SL:
2164            if (mem_idx == MMU_USER_IDX) {
2165                mem_idx = MMU_USER_SECONDARY_IDX;
2166            } else if (mem_idx == MMU_KERNEL_IDX) {
2167                mem_idx = MMU_KERNEL_SECONDARY_IDX;
2168            }
2169            break;
2170        case ASI_P:  /* Primary */
2171        case ASI_PL: /* Primary LE */
2172        case ASI_TWINX_P:
2173        case ASI_TWINX_PL:
2174        case ASI_BLK_COMMIT_P:
2175        case ASI_BLK_P:
2176        case ASI_BLK_PL:
2177        case ASI_FL8_P:
2178        case ASI_FL8_PL:
2179        case ASI_FL16_P:
2180        case ASI_FL16_PL:
2181            break;
2182        }
2183        switch (asi) {
2184        case ASI_REAL:
2185        case ASI_REAL_IO:
2186        case ASI_REAL_L:
2187        case ASI_REAL_IO_L:
2188        case ASI_N:
2189        case ASI_NL:
2190        case ASI_AIUP:
2191        case ASI_AIUPL:
2192        case ASI_AIUS:
2193        case ASI_AIUSL:
2194        case ASI_S:
2195        case ASI_SL:
2196        case ASI_P:
2197        case ASI_PL:
2198            type = GET_ASI_DIRECT;
2199            break;
2200        case ASI_TWINX_REAL:
2201        case ASI_TWINX_REAL_L:
2202        case ASI_TWINX_N:
2203        case ASI_TWINX_NL:
2204        case ASI_TWINX_AIUP:
2205        case ASI_TWINX_AIUP_L:
2206        case ASI_TWINX_AIUS:
2207        case ASI_TWINX_AIUS_L:
2208        case ASI_TWINX_P:
2209        case ASI_TWINX_PL:
2210        case ASI_TWINX_S:
2211        case ASI_TWINX_SL:
2212        case ASI_QUAD_LDD_PHYS:
2213        case ASI_QUAD_LDD_PHYS_L:
2214        case ASI_NUCLEUS_QUAD_LDD:
2215        case ASI_NUCLEUS_QUAD_LDD_L:
2216            type = GET_ASI_DTWINX;
2217            break;
2218        case ASI_BLK_COMMIT_P:
2219        case ASI_BLK_COMMIT_S:
2220        case ASI_BLK_AIUP_4V:
2221        case ASI_BLK_AIUP_L_4V:
2222        case ASI_BLK_AIUP:
2223        case ASI_BLK_AIUPL:
2224        case ASI_BLK_AIUS_4V:
2225        case ASI_BLK_AIUS_L_4V:
2226        case ASI_BLK_AIUS:
2227        case ASI_BLK_AIUSL:
2228        case ASI_BLK_S:
2229        case ASI_BLK_SL:
2230        case ASI_BLK_P:
2231        case ASI_BLK_PL:
2232            type = GET_ASI_BLOCK;
2233            break;
2234        case ASI_FL8_S:
2235        case ASI_FL8_SL:
2236        case ASI_FL8_P:
2237        case ASI_FL8_PL:
2238            memop = MO_UB;
2239            type = GET_ASI_SHORT;
2240            break;
2241        case ASI_FL16_S:
2242        case ASI_FL16_SL:
2243        case ASI_FL16_P:
2244        case ASI_FL16_PL:
2245            memop = MO_TEUW;
2246            type = GET_ASI_SHORT;
2247            break;
2248        }
2249        /* The little-endian asis all have bit 3 set.  */
2250        if (asi & 8) {
2251            memop ^= MO_BSWAP;
2252        }
2253    }
2254#endif
2255
2256    return (DisasASI){ type, asi, mem_idx, memop };
2257}
2258
/*
 * Generate code for an integer load-alternate instruction (lda and
 * friends): load one value of MEMOP size from ADDR, in the address
 * space selected by the instruction's ASI, into DST.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps directly onto a softmmu index: inline load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Uncommon ASI: go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to target width.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2296
/*
 * Generate code for an integer store-alternate instruction (sta and
 * friends): store SRC of MEMOP size to ADDR, in the address space
 * selected by the instruction's ASI.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        /* Uncommon ASI: go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; zero-extend SRC.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2380
2381static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2382                         TCGv addr, int insn)
2383{
2384    DisasASI da = get_asi(dc, insn, MO_TEUL);
2385
2386    switch (da.type) {
2387    case GET_ASI_EXCP:
2388        break;
2389    case GET_ASI_DIRECT:
2390        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2391        break;
2392    default:
2393        /* ??? Should be DAE_invalid_asi.  */
2394        gen_exception(dc, TT_DATA_ACCESS);
2395        break;
2396    }
2397}
2398
2399static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2400                        int insn, int rd)
2401{
2402    DisasASI da = get_asi(dc, insn, MO_TEUL);
2403    TCGv oldv;
2404
2405    switch (da.type) {
2406    case GET_ASI_EXCP:
2407        return;
2408    case GET_ASI_DIRECT:
2409        oldv = tcg_temp_new();
2410        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2411                                  da.mem_idx, da.memop);
2412        gen_store_gpr(dc, rd, oldv);
2413        tcg_temp_free(oldv);
2414        break;
2415    default:
2416        /* ??? Should be DAE_invalid_asi.  */
2417        gen_exception(dc, TT_DATA_ACCESS);
2418        break;
2419    }
2420}
2421
/*
 * Generate code for LDSTUBA: atomically load the byte at ADDR (in the
 * ASI-selected address space) into DST while storing 0xff back.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper-based load+store sequence below is not atomic;
               have the insn re-executed in an exclusive context.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2461#endif
2462
2463#ifdef TARGET_SPARC64
/*
 * Generate code for a floating-point load-alternate instruction
 * (ldfa/lddfa/ldqfa).  SIZE is 4, 8 or 16 bytes; RD is the FP
 * destination register number.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Stage the first 8 bytes in a temp, presumably so that a
               fault on the second access leaves the even register
               unmodified -- TODO confirm.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* Same staging through a temp as in the direct case.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2577
/*
 * Generate code for a floating-point store-alternate instruction
 * (stfa/stdfa/stqfa).  SIZE is 4, 8 or 16 bytes; RD is the FP
 * source register number.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2660
/*
 * Generate code for the sparc64 LDDA instruction: load 128 bits from
 * the ASI-selected address space into the register pair %rd/%rd+1.
 * Twinx ASIs load two adjacent 64-bit words; all other ASIs load one
 * 64-bit word which is split into two 32-bit register halves.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        return;

    case GET_ASI_DTWINX:
        /* Two adjacent 64-bit loads; only the first checks alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2726
/*
 * Generate code for the sparc64 STDA instruction: store the register
 * pair HI (%rd) and %rd+1 to the ASI-selected address space.  Twinx
 * ASIs store two 64-bit words; other ASIs store one 64-bit word built
 * from the two 32-bit register halves.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DTWINX:
        /* Two adjacent 64-bit stores; only the first checks alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2786
2787static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2788                         int insn, int rd)
2789{
2790    DisasASI da = get_asi(dc, insn, MO_TEQ);
2791    TCGv oldv;
2792
2793    switch (da.type) {
2794    case GET_ASI_EXCP:
2795        return;
2796    case GET_ASI_DIRECT:
2797        oldv = tcg_temp_new();
2798        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2799                                  da.mem_idx, da.memop);
2800        gen_store_gpr(dc, rd, oldv);
2801        tcg_temp_free(oldv);
2802        break;
2803    default:
2804        /* ??? Should be DAE_invalid_asi.  */
2805        gen_exception(dc, TT_DATA_ACCESS);
2806        break;
2807    }
2808}
2809
2810#elif !defined(CONFIG_USER_ONLY)
/*
 * Generate code for the sparc32 LDDA instruction: load one 64-bit
 * word from the ASI-selected address space and split it into the
 * register pair %rd (high half) and %rd+1 (low half).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Uncommon ASI: go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2848
/*
 * Generate code for the sparc32 STDA instruction: concatenate the
 * register pair HI (%rd) and %rd+1 into one 64-bit word and store it
 * to the ASI-selected address space.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        /* Uncommon ASI: go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2901#endif
2902
2903static TCGv get_src1(DisasContext *dc, unsigned int insn)
2904{
2905    unsigned int rs1 = GET_FIELD(insn, 13, 17);
2906    return gen_load_gpr(dc, rs1);
2907}
2908
2909static TCGv get_src2(DisasContext *dc, unsigned int insn)
2910{
2911    if (IS_IMM) { /* immediate */
2912        target_long simm = GET_FIELDs(insn, 19, 31);
2913        TCGv t = get_temp_tl(dc);
2914        tcg_gen_movi_tl(t, simm);
2915        return t;
2916    } else {      /* register */
2917        unsigned int rs2 = GET_FIELD(insn, 27, 31);
2918        return gen_load_gpr(dc, rs2);
2919    }
2920}
2921
2922#ifdef TARGET_SPARC64
2923static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2924{
2925    TCGv_i32 c32, zero, dst, s1, s2;
2926
2927    /* We have two choices here: extend the 32 bit data and use movcond_i64,
2928       or fold the comparison down to 32 bits and use movcond_i32.  Choose
2929       the later.  */
2930    c32 = tcg_temp_new_i32();
2931    if (cmp->is_bool) {
2932        tcg_gen_extrl_i64_i32(c32, cmp->c1);
2933    } else {
2934        TCGv_i64 c64 = tcg_temp_new_i64();
2935        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2936        tcg_gen_extrl_i64_i32(c32, c64);
2937        tcg_temp_free_i64(c64);
2938    }
2939
2940    s1 = gen_load_fpr_F(dc, rs);
2941    s2 = gen_load_fpr_F(dc, rd);
2942    dst = gen_dest_fpr_F(dc);
2943    zero = tcg_const_i32(0);
2944
2945    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2946
2947    tcg_temp_free_i32(c32);
2948    tcg_temp_free_i32(zero);
2949    gen_store_fpr_F(dc, rd, dst);
2950}
2951
2952static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2953{
2954    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2955    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2956                        gen_load_fpr_D(dc, rs),
2957                        gen_load_fpr_D(dc, rd));
2958    gen_store_fpr_D(dc, rd, dst);
2959}
2960
2961static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2962{
2963    int qd = QFPREG(rd);
2964    int qs = QFPREG(rs);
2965
2966    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2967                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2968    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2969                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2970
2971    gen_update_fprs_dirty(dc, qd);
2972}
2973
2974#ifndef CONFIG_USER_ONLY
/* Set R_TSPTR = &env->ts[env->tl & MAXTL_MASK], i.e. a pointer to the
   trap state for the current trap level.  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the i32 byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2999#endif
3000
/*
 * Generate code for the VIS edge instructions (edge8/16/32, with the
 * L and CC variants selected by LEFT and CC): compute the edge mask
 * for addresses S1 and S2 into DST, optionally setting the condition
 * codes from S1 - S2.
 *
 * NOTE(review): this clobbers S1 and S2 (they are masked by AMASK
 * below), so callers must not rely on their values afterwards.
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* Set the flags as a subcc of the two addresses would.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Mask off the low bits (and, in 32-bit mode, the high 32 bits)
       of the addresses before the equality comparison below.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3103
3104static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3105{
3106    TCGv tmp = tcg_temp_new();
3107
3108    tcg_gen_add_tl(tmp, s1, s2);
3109    tcg_gen_andi_tl(dst, tmp, -8);
3110    if (left) {
3111        tcg_gen_neg_tl(tmp, tmp);
3112    }
3113    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3114
3115    tcg_temp_free(tmp);
3116}
3117
/* Emit code for VIS FALIGNDATA: treat s1:s2 as one 16-byte value and
   extract the 8 bytes starting at byte offset GSR.align (low 3 bits
   of gsr), i.e. dst = (s1 << align*8) | (s2 >> (64 - align*8)).  */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8: the left-shift count in bits (0..56).  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    /* Since shift's low 3 bits are zero, xor with 63 == 63 - shift;
       the extra shri by 1 below makes the total (64 - align*8).  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
3142#endif
3143
/* Bail out of instruction translation unless the CPU model advertises
   the given integer-unit feature; jumps to the illegal_insn label, so
   these macros are only usable inside disas_sparc_insn.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise for FPU features; jumps to the nfpu_insn label instead
   (presumably raising an FP-disabled/unimplemented trap — defined
   later in disas_sparc_insn, outside this view).  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3150
3151/* before an instruction, dc->pc must be static */
3152static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3153{
3154    unsigned int opc, rs1, rs2, rd;
3155    TCGv cpu_src1, cpu_src2;
3156    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3157    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3158    target_long simm;
3159
3160    opc = GET_FIELD(insn, 0, 1);
3161    rd = GET_FIELD(insn, 2, 6);
3162
3163    switch (opc) {
3164    case 0:                     /* branches/sethi */
3165        {
3166            unsigned int xop = GET_FIELD(insn, 7, 9);
3167            int32_t target;
3168            switch (xop) {
3169#ifdef TARGET_SPARC64
3170            case 0x1:           /* V9 BPcc */
3171                {
3172                    int cc;
3173
3174                    target = GET_FIELD_SP(insn, 0, 18);
3175                    target = sign_extend(target, 19);
3176                    target <<= 2;
3177                    cc = GET_FIELD_SP(insn, 20, 21);
3178                    if (cc == 0)
3179                        do_branch(dc, target, insn, 0);
3180                    else if (cc == 2)
3181                        do_branch(dc, target, insn, 1);
3182                    else
3183                        goto illegal_insn;
3184                    goto jmp_insn;
3185                }
3186            case 0x3:           /* V9 BPr */
3187                {
3188                    target = GET_FIELD_SP(insn, 0, 13) |
3189                        (GET_FIELD_SP(insn, 20, 21) << 14);
3190                    target = sign_extend(target, 16);
3191                    target <<= 2;
3192                    cpu_src1 = get_src1(dc, insn);
3193                    do_branch_reg(dc, target, insn, cpu_src1);
3194                    goto jmp_insn;
3195                }
3196            case 0x5:           /* V9 FBPcc */
3197                {
3198                    int cc = GET_FIELD_SP(insn, 20, 21);
3199                    if (gen_trap_ifnofpu(dc)) {
3200                        goto jmp_insn;
3201                    }
3202                    target = GET_FIELD_SP(insn, 0, 18);
3203                    target = sign_extend(target, 19);
3204                    target <<= 2;
3205                    do_fbranch(dc, target, insn, cc);
3206                    goto jmp_insn;
3207                }
3208#else
3209            case 0x7:           /* CBN+x */
3210                {
3211                    goto ncp_insn;
3212                }
3213#endif
3214            case 0x2:           /* BN+x */
3215                {
3216                    target = GET_FIELD(insn, 10, 31);
3217                    target = sign_extend(target, 22);
3218                    target <<= 2;
3219                    do_branch(dc, target, insn, 0);
3220                    goto jmp_insn;
3221                }
3222            case 0x6:           /* FBN+x */
3223                {
3224                    if (gen_trap_ifnofpu(dc)) {
3225                        goto jmp_insn;
3226                    }
3227                    target = GET_FIELD(insn, 10, 31);
3228                    target = sign_extend(target, 22);
3229                    target <<= 2;
3230                    do_fbranch(dc, target, insn, 0);
3231                    goto jmp_insn;
3232                }
3233            case 0x4:           /* SETHI */
3234                /* Special-case %g0 because that's the canonical nop.  */
3235                if (rd) {
3236                    uint32_t value = GET_FIELD(insn, 10, 31);
3237                    TCGv t = gen_dest_gpr(dc, rd);
3238                    tcg_gen_movi_tl(t, value << 10);
3239                    gen_store_gpr(dc, rd, t);
3240                }
3241                break;
3242            case 0x0:           /* UNIMPL */
3243            default:
3244                goto illegal_insn;
3245            }
3246            break;
3247        }
3248        break;
3249    case 1:                     /*CALL*/
3250        {
3251            target_long target = GET_FIELDs(insn, 2, 31) << 2;
3252            TCGv o7 = gen_dest_gpr(dc, 15);
3253
3254            tcg_gen_movi_tl(o7, dc->pc);
3255            gen_store_gpr(dc, 15, o7);
3256            target += dc->pc;
3257            gen_mov_pc_npc(dc);
3258#ifdef TARGET_SPARC64
3259            if (unlikely(AM_CHECK(dc))) {
3260                target &= 0xffffffffULL;
3261            }
3262#endif
3263            dc->npc = target;
3264        }
3265        goto jmp_insn;
3266    case 2:                     /* FPU & Logical Operations */
3267        {
3268            unsigned int xop = GET_FIELD(insn, 7, 12);
3269            TCGv cpu_dst = get_temp_tl(dc);
3270            TCGv cpu_tmp0;
3271
3272            if (xop == 0x3a) {  /* generate trap */
3273                int cond = GET_FIELD(insn, 3, 6);
3274                TCGv_i32 trap;
3275                TCGLabel *l1 = NULL;
3276                int mask;
3277
3278                if (cond == 0) {
3279                    /* Trap never.  */
3280                    break;
3281                }
3282
3283                save_state(dc);
3284
3285                if (cond != 8) {
3286                    /* Conditional trap.  */
3287                    DisasCompare cmp;
3288#ifdef TARGET_SPARC64
3289                    /* V9 icc/xcc */
3290                    int cc = GET_FIELD_SP(insn, 11, 12);
3291                    if (cc == 0) {
3292                        gen_compare(&cmp, 0, cond, dc);
3293                    } else if (cc == 2) {
3294                        gen_compare(&cmp, 1, cond, dc);
3295                    } else {
3296                        goto illegal_insn;
3297                    }
3298#else
3299                    gen_compare(&cmp, 0, cond, dc);
3300#endif
3301                    l1 = gen_new_label();
3302                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3303                                      cmp.c1, cmp.c2, l1);
3304                    free_compare(&cmp);
3305                }
3306
3307                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3308                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3309
3310                /* Don't use the normal temporaries, as they may well have
3311                   gone out of scope with the branch above.  While we're
3312                   doing that we might as well pre-truncate to 32-bit.  */
3313                trap = tcg_temp_new_i32();
3314
3315                rs1 = GET_FIELD_SP(insn, 14, 18);
3316                if (IS_IMM) {
3317                    rs2 = GET_FIELD_SP(insn, 0, 7);
3318                    if (rs1 == 0) {
3319                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3320                        /* Signal that the trap value is fully constant.  */
3321                        mask = 0;
3322                    } else {
3323                        TCGv t1 = gen_load_gpr(dc, rs1);
3324                        tcg_gen_trunc_tl_i32(trap, t1);
3325                        tcg_gen_addi_i32(trap, trap, rs2);
3326                    }
3327                } else {
3328                    TCGv t1, t2;
3329                    rs2 = GET_FIELD_SP(insn, 0, 4);
3330                    t1 = gen_load_gpr(dc, rs1);
3331                    t2 = gen_load_gpr(dc, rs2);
3332                    tcg_gen_add_tl(t1, t1, t2);
3333                    tcg_gen_trunc_tl_i32(trap, t1);
3334                }
3335                if (mask != 0) {
3336                    tcg_gen_andi_i32(trap, trap, mask);
3337                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
3338                }
3339
3340                gen_helper_raise_exception(cpu_env, trap);
3341                tcg_temp_free_i32(trap);
3342
3343                if (cond == 8) {
3344                    /* An unconditional trap ends the TB.  */
3345                    dc->base.is_jmp = DISAS_NORETURN;
3346                    goto jmp_insn;
3347                } else {
3348                    /* A conditional trap falls through to the next insn.  */
3349                    gen_set_label(l1);
3350                    break;
3351                }
3352            } else if (xop == 0x28) {
3353                rs1 = GET_FIELD(insn, 13, 17);
3354                switch(rs1) {
3355                case 0: /* rdy */
3356#ifndef TARGET_SPARC64
3357                case 0x01 ... 0x0e: /* undefined in the SPARCv8
3358                                       manual, rdy on the microSPARC
3359                                       II */
3360                case 0x0f:          /* stbar in the SPARCv8 manual,
3361                                       rdy on the microSPARC II */
3362                case 0x10 ... 0x1f: /* implementation-dependent in the
3363                                       SPARCv8 manual, rdy on the
3364                                       microSPARC II */
3365                    /* Read Asr17 */
3366                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3367                        TCGv t = gen_dest_gpr(dc, rd);
3368                        /* Read Asr17 for a Leon3 monoprocessor */
3369                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3370                        gen_store_gpr(dc, rd, t);
3371                        break;
3372                    }
3373#endif
3374                    gen_store_gpr(dc, rd, cpu_y);
3375                    break;
3376#ifdef TARGET_SPARC64
3377                case 0x2: /* V9 rdccr */
3378                    update_psr(dc);
3379                    gen_helper_rdccr(cpu_dst, cpu_env);
3380                    gen_store_gpr(dc, rd, cpu_dst);
3381                    break;
3382                case 0x3: /* V9 rdasi */
3383                    tcg_gen_movi_tl(cpu_dst, dc->asi);
3384                    gen_store_gpr(dc, rd, cpu_dst);
3385                    break;
3386                case 0x4: /* V9 rdtick */
3387                    {
3388                        TCGv_ptr r_tickptr;
3389                        TCGv_i32 r_const;
3390
3391                        r_tickptr = tcg_temp_new_ptr();
3392                        r_const = tcg_const_i32(dc->mem_idx);
3393                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3394                                       offsetof(CPUSPARCState, tick));
3395                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3396                            gen_io_start();
3397                        }
3398                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3399                                                  r_const);
3400                        tcg_temp_free_ptr(r_tickptr);
3401                        tcg_temp_free_i32(r_const);
3402                        gen_store_gpr(dc, rd, cpu_dst);
3403                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3404                            gen_io_end();
3405                        }
3406                    }
3407                    break;
3408                case 0x5: /* V9 rdpc */
3409                    {
3410                        TCGv t = gen_dest_gpr(dc, rd);
3411                        if (unlikely(AM_CHECK(dc))) {
3412                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3413                        } else {
3414                            tcg_gen_movi_tl(t, dc->pc);
3415                        }
3416                        gen_store_gpr(dc, rd, t);
3417                    }
3418                    break;
3419                case 0x6: /* V9 rdfprs */
3420                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3421                    gen_store_gpr(dc, rd, cpu_dst);
3422                    break;
3423                case 0xf: /* V9 membar */
3424                    break; /* no effect */
3425                case 0x13: /* Graphics Status */
3426                    if (gen_trap_ifnofpu(dc)) {
3427                        goto jmp_insn;
3428                    }
3429                    gen_store_gpr(dc, rd, cpu_gsr);
3430                    break;
3431                case 0x16: /* Softint */
3432                    tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3433                                     offsetof(CPUSPARCState, softint));
3434                    gen_store_gpr(dc, rd, cpu_dst);
3435                    break;
3436                case 0x17: /* Tick compare */
3437                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
3438                    break;
3439                case 0x18: /* System tick */
3440                    {
3441                        TCGv_ptr r_tickptr;
3442                        TCGv_i32 r_const;
3443
3444                        r_tickptr = tcg_temp_new_ptr();
3445                        r_const = tcg_const_i32(dc->mem_idx);
3446                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3447                                       offsetof(CPUSPARCState, stick));
3448                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3449                            gen_io_start();
3450                        }
3451                        gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3452                                                  r_const);
3453                        tcg_temp_free_ptr(r_tickptr);
3454                        tcg_temp_free_i32(r_const);
3455                        gen_store_gpr(dc, rd, cpu_dst);
3456                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3457                            gen_io_end();
3458                        }
3459                    }
3460                    break;
3461                case 0x19: /* System tick compare */
3462                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
3463                    break;
3464                case 0x1a: /* UltraSPARC-T1 Strand status */
3465                    /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3466                     * this ASR as impl. dep
3467                     */
3468                    CHECK_IU_FEATURE(dc, HYPV);
3469                    {
3470                        TCGv t = gen_dest_gpr(dc, rd);
3471                        tcg_gen_movi_tl(t, 1UL);
3472                        gen_store_gpr(dc, rd, t);
3473                    }
3474                    break;
3475                case 0x10: /* Performance Control */
3476                case 0x11: /* Performance Instrumentation Counter */
3477                case 0x12: /* Dispatch Control */
3478                case 0x14: /* Softint set, WO */
3479                case 0x15: /* Softint clear, WO */
3480#endif
3481                default:
3482                    goto illegal_insn;
3483                }
3484#if !defined(CONFIG_USER_ONLY)
3485            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3486#ifndef TARGET_SPARC64
3487                if (!supervisor(dc)) {
3488                    goto priv_insn;
3489                }
3490                update_psr(dc);
3491                gen_helper_rdpsr(cpu_dst, cpu_env);
3492#else
3493                CHECK_IU_FEATURE(dc, HYPV);
3494                if (!hypervisor(dc))
3495                    goto priv_insn;
3496                rs1 = GET_FIELD(insn, 13, 17);
3497                switch (rs1) {
3498                case 0: // hpstate
3499                    tcg_gen_ld_i64(cpu_dst, cpu_env,
3500                                   offsetof(CPUSPARCState, hpstate));
3501                    break;
3502                case 1: // htstate
3503                    // gen_op_rdhtstate();
3504                    break;
3505                case 3: // hintp
3506                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3507                    break;
3508                case 5: // htba
3509                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
3510                    break;
3511                case 6: // hver
3512                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
3513                    break;
3514                case 31: // hstick_cmpr
3515                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3516                    break;
3517                default:
3518                    goto illegal_insn;
3519                }
3520#endif
3521                gen_store_gpr(dc, rd, cpu_dst);
3522                break;
3523            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3524                if (!supervisor(dc)) {
3525                    goto priv_insn;
3526                }
3527                cpu_tmp0 = get_temp_tl(dc);
3528#ifdef TARGET_SPARC64
3529                rs1 = GET_FIELD(insn, 13, 17);
3530                switch (rs1) {
3531                case 0: // tpc
3532                    {
3533                        TCGv_ptr r_tsptr;
3534
3535                        r_tsptr = tcg_temp_new_ptr();
3536                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3537                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3538                                      offsetof(trap_state, tpc));
3539                        tcg_temp_free_ptr(r_tsptr);
3540                    }
3541                    break;
3542                case 1: // tnpc
3543                    {
3544                        TCGv_ptr r_tsptr;
3545
3546                        r_tsptr = tcg_temp_new_ptr();
3547                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3548                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3549                                      offsetof(trap_state, tnpc));
3550                        tcg_temp_free_ptr(r_tsptr);
3551                    }
3552                    break;
3553                case 2: // tstate
3554                    {
3555                        TCGv_ptr r_tsptr;
3556
3557                        r_tsptr = tcg_temp_new_ptr();
3558                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3559                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3560                                      offsetof(trap_state, tstate));
3561                        tcg_temp_free_ptr(r_tsptr);
3562                    }
3563                    break;
3564                case 3: // tt
3565                    {
3566                        TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3567
3568                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3569                        tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3570                                         offsetof(trap_state, tt));
3571                        tcg_temp_free_ptr(r_tsptr);
3572                    }
3573                    break;
3574                case 4: // tick
3575                    {
3576                        TCGv_ptr r_tickptr;
3577                        TCGv_i32 r_const;
3578
3579                        r_tickptr = tcg_temp_new_ptr();
3580                        r_const = tcg_const_i32(dc->mem_idx);
3581                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
3582                                       offsetof(CPUSPARCState, tick));
3583                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3584                            gen_io_start();
3585                        }
3586                        gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3587                                                  r_tickptr, r_const);
3588                        tcg_temp_free_ptr(r_tickptr);
3589                        tcg_temp_free_i32(r_const);
3590                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3591                            gen_io_end();
3592                        }
3593                    }
3594                    break;
3595                case 5: // tba
3596                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3597                    break;
3598                case 6: // pstate
3599                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3600                                     offsetof(CPUSPARCState, pstate));
3601                    break;
3602                case 7: // tl
3603                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3604                                     offsetof(CPUSPARCState, tl));
3605                    break;
3606                case 8: // pil
3607                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3608                                     offsetof(CPUSPARCState, psrpil));
3609                    break;
3610                case 9: // cwp
3611                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
3612                    break;
3613                case 10: // cansave
3614                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3615                                     offsetof(CPUSPARCState, cansave));
3616                    break;
3617                case 11: // canrestore
3618                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3619                                     offsetof(CPUSPARCState, canrestore));
3620                    break;
3621                case 12: // cleanwin
3622                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3623                                     offsetof(CPUSPARCState, cleanwin));
3624                    break;
3625                case 13: // otherwin
3626                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3627                                     offsetof(CPUSPARCState, otherwin));
3628                    break;
3629                case 14: // wstate
3630                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3631                                     offsetof(CPUSPARCState, wstate));
3632                    break;
3633                case 16: // UA2005 gl
3634                    CHECK_IU_FEATURE(dc, GL);
3635                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3636                                     offsetof(CPUSPARCState, gl));
3637                    break;
3638                case 26: // UA2005 strand status
3639                    CHECK_IU_FEATURE(dc, HYPV);
3640                    if (!hypervisor(dc))
3641                        goto priv_insn;
3642                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3643                    break;
3644                case 31: // ver
3645                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3646                    break;
3647                case 15: // fq
3648                default:
3649                    goto illegal_insn;
3650                }
3651#else
3652                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3653#endif
3654                gen_store_gpr(dc, rd, cpu_tmp0);
3655                break;
3656#endif
3657#if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3658            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3659#ifdef TARGET_SPARC64
3660                gen_helper_flushw(cpu_env);
3661#else
3662                if (!supervisor(dc))
3663                    goto priv_insn;
3664                gen_store_gpr(dc, rd, cpu_tbr);
3665#endif
3666                break;
3667#endif
3668            } else if (xop == 0x34) {   /* FPU Operations */
3669                if (gen_trap_ifnofpu(dc)) {
3670                    goto jmp_insn;
3671                }
3672                gen_op_clear_ieee_excp_and_FTT();
3673                rs1 = GET_FIELD(insn, 13, 17);
3674                rs2 = GET_FIELD(insn, 27, 31);
3675                xop = GET_FIELD(insn, 18, 26);
3676
3677                switch (xop) {
3678                case 0x1: /* fmovs */
3679                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3680                    gen_store_fpr_F(dc, rd, cpu_src1_32);
3681                    break;
3682                case 0x5: /* fnegs */
3683                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3684                    break;
3685                case 0x9: /* fabss */
3686                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3687                    break;
3688                case 0x29: /* fsqrts */
3689                    CHECK_FPU_FEATURE(dc, FSQRT);
3690                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3691                    break;
3692                case 0x2a: /* fsqrtd */
3693                    CHECK_FPU_FEATURE(dc, FSQRT);
3694                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3695                    break;
3696                case 0x2b: /* fsqrtq */
3697                    CHECK_FPU_FEATURE(dc, FLOAT128);
3698                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3699                    break;
3700                case 0x41: /* fadds */
3701                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3702                    break;
3703                case 0x42: /* faddd */
3704                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3705                    break;
3706                case 0x43: /* faddq */
3707                    CHECK_FPU_FEATURE(dc, FLOAT128);
3708                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3709                    break;
3710                case 0x45: /* fsubs */
3711                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3712                    break;
3713                case 0x46: /* fsubd */
3714                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3715                    break;
3716                case 0x47: /* fsubq */
3717                    CHECK_FPU_FEATURE(dc, FLOAT128);
3718                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3719                    break;
3720                case 0x49: /* fmuls */
3721                    CHECK_FPU_FEATURE(dc, FMUL);
3722                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3723                    break;
3724                case 0x4a: /* fmuld */
3725                    CHECK_FPU_FEATURE(dc, FMUL);
3726                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3727                    break;
3728                case 0x4b: /* fmulq */
3729                    CHECK_FPU_FEATURE(dc, FLOAT128);
3730                    CHECK_FPU_FEATURE(dc, FMUL);
3731                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3732                    break;
3733                case 0x4d: /* fdivs */
3734                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3735                    break;
3736                case 0x4e: /* fdivd */
3737                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3738                    break;
3739                case 0x4f: /* fdivq */
3740                    CHECK_FPU_FEATURE(dc, FLOAT128);
3741                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3742                    break;
3743                case 0x69: /* fsmuld */
3744                    CHECK_FPU_FEATURE(dc, FSMULD);
3745                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3746                    break;
3747                case 0x6e: /* fdmulq */
3748                    CHECK_FPU_FEATURE(dc, FLOAT128);
3749                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3750                    break;
3751                case 0xc4: /* fitos */
3752                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3753                    break;
3754                case 0xc6: /* fdtos */
3755                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3756                    break;
3757                case 0xc7: /* fqtos */
3758                    CHECK_FPU_FEATURE(dc, FLOAT128);
3759                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3760                    break;
3761                case 0xc8: /* fitod */
3762                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3763                    break;
3764                case 0xc9: /* fstod */
3765                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3766                    break;
3767                case 0xcb: /* fqtod */
3768                    CHECK_FPU_FEATURE(dc, FLOAT128);
3769                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3770                    break;
3771                case 0xcc: /* fitoq */
3772                    CHECK_FPU_FEATURE(dc, FLOAT128);
3773                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3774                    break;
3775                case 0xcd: /* fstoq */
3776                    CHECK_FPU_FEATURE(dc, FLOAT128);
3777                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3778                    break;
3779                case 0xce: /* fdtoq */
3780                    CHECK_FPU_FEATURE(dc, FLOAT128);
3781                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3782                    break;
                /*
                 * Tail of the FPop1 decode switch: float -> 32-bit integer
                 * conversions, then (V9 only) the 64/128-bit register
                 * moves, negate, absolute value, and conversions to/from
                 * 64-bit integers.
                 */
3783                case 0xd1: /* fstoi */
3784                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3785                    break;
3786                case 0xd2: /* fdtoi */
3787                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3788                    break;
3789                case 0xd3: /* fqtoi */
3790                    CHECK_FPU_FEATURE(dc, FLOAT128);
3791                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3792                    break;
3793#ifdef TARGET_SPARC64
3794                case 0x2: /* V9 fmovd */
3795                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3796                    gen_store_fpr_D(dc, rd, cpu_src1_64);
3797                    break;
3798                case 0x3: /* V9 fmovq */
3799                    CHECK_FPU_FEATURE(dc, FLOAT128);
3800                    gen_move_Q(dc, rd, rs2);
3801                    break;
                /*
                 * NOTE(review): the gen_ne_fop_* variants appear to bypass
                 * the IEEE exception-check path used by gen_fop_* (negate
                 * and abs are pure sign-bit operations) -- confirm against
                 * the gen_*fop* definitions earlier in this file.
                 */
3802                case 0x6: /* V9 fnegd */
3803                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3804                    break;
3805                case 0x7: /* V9 fnegq */
3806                    CHECK_FPU_FEATURE(dc, FLOAT128);
3807                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3808                    break;
3809                case 0xa: /* V9 fabsd */
3810                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3811                    break;
3812                case 0xb: /* V9 fabsq */
3813                    CHECK_FPU_FEATURE(dc, FLOAT128);
3814                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3815                    break;
3816                case 0x81: /* V9 fstox */
3817                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3818                    break;
3819                case 0x82: /* V9 fdtox */
3820                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3821                    break;
3822                case 0x83: /* V9 fqtox */
3823                    CHECK_FPU_FEATURE(dc, FLOAT128);
3824                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3825                    break;
3826                case 0x84: /* V9 fxtos */
3827                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3828                    break;
3829                case 0x88: /* V9 fxtod */
3830                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3831                    break;
3832                case 0x8c: /* V9 fxtoq */
3833                    CHECK_FPU_FEATURE(dc, FLOAT128);
3834                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3835                    break;
3836#endif
3837                default:
3838                    goto illegal_insn;
3839                }
3840            } else if (xop == 0x35) {   /* FPU Operations */
                /*
                 * FPop2: conditional FP register moves and FP compares.
                 * Trap if the FPU is disabled, clear accrued IEEE
                 * exception / FTT state, then decode the source and
                 * sub-opcode fields.
                 */
3841#ifdef TARGET_SPARC64
3842                int cond;
3843#endif
3844                if (gen_trap_ifnofpu(dc)) {
3845                    goto jmp_insn;
3846                }
3847                gen_op_clear_ieee_excp_and_FTT();
3848                rs1 = GET_FIELD(insn, 13, 17);
3849                rs2 = GET_FIELD(insn, 27, 31);
3850                xop = GET_FIELD(insn, 18, 26);
3851
3852#ifdef TARGET_SPARC64
                /*
                 * FMOVR: move FP register rs2 to rd when integer register
                 * rs1 satisfies the register-condition encoded in insn
                 * bits 10..12.  Function-local macro, #undef'd below.
                 */
3853#define FMOVR(sz)                                                  \
3854                do {                                               \
3855                    DisasCompare cmp;                              \
3856                    cond = GET_FIELD_SP(insn, 10, 12);             \
3857                    cpu_src1 = get_src1(dc, insn);                 \
3858                    gen_compare_reg(&cmp, cond, cpu_src1);         \
3859                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
3860                    free_compare(&cmp);                            \
3861                } while (0)
3862
3863                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3864                    FMOVR(s);
3865                    break;
3866                } else if ((xop & 0x11f) == 0x006) { /* V9 fmovdr */
3867                    FMOVR(d);
3868                    break;
3869                } else if ((xop & 0x11f) == 0x007) { /* V9 fmovqr */
3870                    CHECK_FPU_FEATURE(dc, FLOAT128);
3871                    FMOVR(q);
3872                    break;
3873                }
3874#undef FMOVR
3875#endif
3876                switch (xop) {
3877#ifdef TARGET_SPARC64
                    /*
                     * FMOVCC is defined twice: first for moves conditional
                     * on a floating-point condition code (%fcc0..%fcc3,
                     * via gen_fcompare), then redefined below for the
                     * integer condition codes (%icc / %xcc, via
                     * gen_compare).
                     */
3878#define FMOVCC(fcc, sz)                                                 \
3879                    do {                                                \
3880                        DisasCompare cmp;                               \
3881                        cond = GET_FIELD_SP(insn, 14, 17);              \
3882                        gen_fcompare(&cmp, fcc, cond);                  \
3883                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
3884                        free_compare(&cmp);                             \
3885                    } while (0)
3886
3887                    case 0x001: /* V9 fmovscc %fcc0 */
3888                        FMOVCC(0, s);
3889                        break;
3890                    case 0x002: /* V9 fmovdcc %fcc0 */
3891                        FMOVCC(0, d);
3892                        break;
3893                    case 0x003: /* V9 fmovqcc %fcc0 */
3894                        CHECK_FPU_FEATURE(dc, FLOAT128);
3895                        FMOVCC(0, q);
3896                        break;
3897                    case 0x041: /* V9 fmovscc %fcc1 */
3898                        FMOVCC(1, s);
3899                        break;
3900                    case 0x042: /* V9 fmovdcc %fcc1 */
3901                        FMOVCC(1, d);
3902                        break;
3903                    case 0x043: /* V9 fmovqcc %fcc1 */
3904                        CHECK_FPU_FEATURE(dc, FLOAT128);
3905                        FMOVCC(1, q);
3906                        break;
3907                    case 0x081: /* V9 fmovscc %fcc2 */
3908                        FMOVCC(2, s);
3909                        break;
3910                    case 0x082: /* V9 fmovdcc %fcc2 */
3911                        FMOVCC(2, d);
3912                        break;
3913                    case 0x083: /* V9 fmovqcc %fcc2 */
3914                        CHECK_FPU_FEATURE(dc, FLOAT128);
3915                        FMOVCC(2, q);
3916                        break;
3917                    case 0x0c1: /* V9 fmovscc %fcc3 */
3918                        FMOVCC(3, s);
3919                        break;
3920                    case 0x0c2: /* V9 fmovdcc %fcc3 */
3921                        FMOVCC(3, d);
3922                        break;
3923                    case 0x0c3: /* V9 fmovqcc %fcc3 */
3924                        CHECK_FPU_FEATURE(dc, FLOAT128);
3925                        FMOVCC(3, q);
3926                        break;
3927#undef FMOVCC
3928#define FMOVCC(xcc, sz)                                                 \
3929                    do {                                                \
3930                        DisasCompare cmp;                               \
3931                        cond = GET_FIELD_SP(insn, 14, 17);              \
3932                        gen_compare(&cmp, xcc, cond, dc);               \
3933                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
3934                        free_compare(&cmp);                             \
3935                    } while (0)
3936
3937                    case 0x101: /* V9 fmovscc %icc */
3938                        FMOVCC(0, s);
3939                        break;
3940                    case 0x102: /* V9 fmovdcc %icc */
3941                        FMOVCC(0, d);
3942                        break;
3943                    case 0x103: /* V9 fmovqcc %icc */
3944                        CHECK_FPU_FEATURE(dc, FLOAT128);
3945                        FMOVCC(0, q);
3946                        break;
3947                    case 0x181: /* V9 fmovscc %xcc */
3948                        FMOVCC(1, s);
3949                        break;
3950                    case 0x182: /* V9 fmovdcc %xcc */
3951                        FMOVCC(1, d);
3952                        break;
3953                    case 0x183: /* V9 fmovqcc %xcc */
3954                        CHECK_FPU_FEATURE(dc, FLOAT128);
3955                        FMOVCC(1, q);
3956                        break;
3957#undef FMOVCC
3958#endif
                    /*
                     * FP compares: rd & 3 selects the target %fcc field
                     * (always field 0 on pre-V9).  NOTE(review): the
                     * fcmpe* helpers are presumably the signalling
                     * variants (exception on unordered operands) --
                     * confirm against the gen_op_fcmpe* implementations.
                     */
3959                    case 0x51: /* fcmps, V9 %fcc */
3960                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3961                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3962                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3963                        break;
3964                    case 0x52: /* fcmpd, V9 %fcc */
3965                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3966                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3967                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3968                        break;
3969                    case 0x53: /* fcmpq, V9 %fcc */
3970                        CHECK_FPU_FEATURE(dc, FLOAT128);
3971                        gen_op_load_fpr_QT0(QFPREG(rs1));
3972                        gen_op_load_fpr_QT1(QFPREG(rs2));
3973                        gen_op_fcmpq(rd & 3);
3974                        break;
3975                    case 0x55: /* fcmpes, V9 %fcc */
3976                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3977                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3978                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3979                        break;
3980                    case 0x56: /* fcmped, V9 %fcc */
3981                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3982                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3983                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3984                        break;
3985                    case 0x57: /* fcmpeq, V9 %fcc */
3986                        CHECK_FPU_FEATURE(dc, FLOAT128);
3987                        gen_op_load_fpr_QT0(QFPREG(rs1));
3988                        gen_op_load_fpr_QT1(QFPREG(rs2));
3989                        gen_op_fcmpeq(rd & 3);
3990                        break;
3991                    default:
3992                        goto illegal_insn;
3993                }
3994            } else if (xop == 0x2) {
                /*
                 * xop 0x2: "or", which also serves as the synthetic
                 * mov/clr forms.  %g0 sources (rs1 == 0 or rs2 == 0) are
                 * special-cased so plain register moves and immediate
                 * loads avoid emitting a TCG "or" op.
                 */
3995                TCGv dst = gen_dest_gpr(dc, rd);
3996                rs1 = GET_FIELD(insn, 13, 17);
3997                if (rs1 == 0) {
3998                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3999                    if (IS_IMM) {       /* immediate */
4000                        simm = GET_FIELDs(insn, 19, 31);
4001                        tcg_gen_movi_tl(dst, simm);
4002                        gen_store_gpr(dc, rd, dst);
4003                    } else {            /* register */
4004                        rs2 = GET_FIELD(insn, 27, 31);
4005                        if (rs2 == 0) {
4006                            tcg_gen_movi_tl(dst, 0);
4007                            gen_store_gpr(dc, rd, dst);
4008                        } else {
4009                            cpu_src2 = gen_load_gpr(dc, rs2);
4010                            gen_store_gpr(dc, rd, cpu_src2);
4011                        }
4012                    }
4013                } else {
4014                    cpu_src1 = get_src1(dc, insn);
4015                    if (IS_IMM) {       /* immediate */
4016                        simm = GET_FIELDs(insn, 19, 31);
4017                        tcg_gen_ori_tl(dst, cpu_src1, simm);
4018                        gen_store_gpr(dc, rd, dst);
4019                    } else {            /* register */
4020                        rs2 = GET_FIELD(insn, 27, 31);
4021                        if (rs2 == 0) {
4022                            /* mov shortcut:  or x, %g0, y -> mov x, y */
4023                            gen_store_gpr(dc, rd, cpu_src1);
4024                        } else {
4025                            cpu_src2 = gen_load_gpr(dc, rs2);
4026                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
4027                            gen_store_gpr(dc, rd, dst);
4028                        }
4029                    }
4030                }
4031#ifdef TARGET_SPARC64
            /*
             * V9 shifts: insn bit 12 is the "x" bit.  Set means the 64-bit
             * form (shift count masked to 6 bits); clear means the 32-bit
             * form (count masked to 5 bits, with the source zero-extended
             * for srl / sign-extended for sra first).
             */
4032            } else if (xop == 0x25) { /* sll, V9 sllx */
4033                cpu_src1 = get_src1(dc, insn);
4034                if (IS_IMM) {   /* immediate */
4035                    simm = GET_FIELDs(insn, 20, 31);
4036                    if (insn & (1 << 12)) {
4037                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
4038                    } else {
4039                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
4040                    }
4041                } else {                /* register */
4042                    rs2 = GET_FIELD(insn, 27, 31);
4043                    cpu_src2 = gen_load_gpr(dc, rs2);
4044                    cpu_tmp0 = get_temp_tl(dc);
4045                    if (insn & (1 << 12)) {
4046                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4047                    } else {
4048                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4049                    }
4050                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
4051                }
4052                gen_store_gpr(dc, rd, cpu_dst);
4053            } else if (xop == 0x26) { /* srl, V9 srlx */
4054                cpu_src1 = get_src1(dc, insn);
4055                if (IS_IMM) {   /* immediate */
4056                    simm = GET_FIELDs(insn, 20, 31);
4057                    if (insn & (1 << 12)) {
4058                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
4059                    } else {
4060                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4061                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
4062                    }
4063                } else {                /* register */
4064                    rs2 = GET_FIELD(insn, 27, 31);
4065                    cpu_src2 = gen_load_gpr(dc, rs2);
4066                    cpu_tmp0 = get_temp_tl(dc);
4067                    if (insn & (1 << 12)) {
4068                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4069                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
4070                    } else {
4071                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4072                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4073                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
4074                    }
4075                }
4076                gen_store_gpr(dc, rd, cpu_dst);
4077            } else if (xop == 0x27) { /* sra, V9 srax */
4078                cpu_src1 = get_src1(dc, insn);
4079                if (IS_IMM) {   /* immediate */
4080                    simm = GET_FIELDs(insn, 20, 31);
4081                    if (insn & (1 << 12)) {
4082                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
4083                    } else {
4084                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4085                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
4086                    }
4087                } else {                /* register */
4088                    rs2 = GET_FIELD(insn, 27, 31);
4089                    cpu_src2 = gen_load_gpr(dc, rs2);
4090                    cpu_tmp0 = get_temp_tl(dc);
4091                    if (insn & (1 << 12)) {
4092                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4093                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
4094                    } else {
4095                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4096                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4097                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
4098                    }
4099                }
4100                gen_store_gpr(dc, rd, cpu_dst);
4101#endif
4102            } else if (xop < 0x36) {
4103                if (xop < 0x20) {
                    /*
                     * Two-source ALU group.  Bit 4 of xop (0x10) selects
                     * the "cc" variant that also updates the condition
                     * codes, so the switch decodes on xop & ~0x10.  For
                     * the logical ops the flags are computed lazily from
                     * the result (CC_OP_LOGIC); add/sub use dedicated
                     * flag-computing helpers.
                     */
4104                    cpu_src1 = get_src1(dc, insn);
4105                    cpu_src2 = get_src2(dc, insn);
4106                    switch (xop & ~0x10) {
4107                    case 0x0: /* add */
4108                        if (xop & 0x10) {
4109                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4110                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4111                            dc->cc_op = CC_OP_ADD;
4112                        } else {
4113                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4114                        }
4115                        break;
4116                    case 0x1: /* and */
4117                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
4118                        if (xop & 0x10) {
4119                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4120                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4121                            dc->cc_op = CC_OP_LOGIC;
4122                        }
4123                        break;
4124                    case 0x2: /* or */
4125                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4126                        if (xop & 0x10) {
4127                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4128                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4129                            dc->cc_op = CC_OP_LOGIC;
4130                        }
4131                        break;
4132                    case 0x3: /* xor */
4133                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4134                        if (xop & 0x10) {
4135                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4136                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4137                            dc->cc_op = CC_OP_LOGIC;
4138                        }
4139                        break;
4140                    case 0x4: /* sub */
4141                        if (xop & 0x10) {
4142                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4143                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4144                            dc->cc_op = CC_OP_SUB;
4145                        } else {
4146                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4147                        }
4148                        break;
4149                    case 0x5: /* andn */
4150                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4151                        if (xop & 0x10) {
4152                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4153                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4154                            dc->cc_op = CC_OP_LOGIC;
4155                        }
4156                        break;
4157                    case 0x6: /* orn */
4158                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4159                        if (xop & 0x10) {
4160                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4161                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4162                            dc->cc_op = CC_OP_LOGIC;
4163                        }
4164                        break;
4165                    case 0x7: /* xorn */
4166                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4167                        if (xop & 0x10) {
4168                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4169                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4170                            dc->cc_op = CC_OP_LOGIC;
4171                        }
4172                        break;
4173                    case 0x8: /* addx, V9 addc */
4174                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4175                                        (xop & 0x10));
4176                        break;
4177#ifdef TARGET_SPARC64
4178                    case 0x9: /* V9 mulx */
4179                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4180                        break;
4181#endif
4182                    case 0xa: /* umul */
4183                        CHECK_IU_FEATURE(dc, MUL);
4184                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4185                        if (xop & 0x10) {
4186                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4187                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4188                            dc->cc_op = CC_OP_LOGIC;
4189                        }
4190                        break;
4191                    case 0xb: /* smul */
4192                        CHECK_IU_FEATURE(dc, MUL);
4193                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4194                        if (xop & 0x10) {
4195                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4196                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4197                            dc->cc_op = CC_OP_LOGIC;
4198                        }
4199                        break;
4200                    case 0xc: /* subx, V9 subc */
4201                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4202                                        (xop & 0x10));
4203                        break;
4204#ifdef TARGET_SPARC64
4205                    case 0xd: /* V9 udivx */
4206                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4207                        break;
4208#endif
                    /*
                     * Division goes through helpers (takes cpu_env) since
                     * it can raise a divide-by-zero trap; the _cc helpers
                     * presumably update the flag state themselves, hence
                     * no tcg-level cpu_cc_op write here -- confirm against
                     * helper_udiv_cc/helper_sdiv_cc.
                     */
4209                    case 0xe: /* udiv */
4210                        CHECK_IU_FEATURE(dc, DIV);
4211                        if (xop & 0x10) {
4212                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4213                                               cpu_src2);
4214                            dc->cc_op = CC_OP_DIV;
4215                        } else {
4216                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4217                                            cpu_src2);
4218                        }
4219                        break;
4220                    case 0xf: /* sdiv */
4221                        CHECK_IU_FEATURE(dc, DIV);
4222                        if (xop & 0x10) {
4223                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4224                                               cpu_src2);
4225                            dc->cc_op = CC_OP_DIV;
4226                        } else {
4227                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4228                                            cpu_src2);
4229                        }
4230                        break;
4231                    default:
4232                        goto illegal_insn;
4233                    }
4234                    gen_store_gpr(dc, rd, cpu_dst);
4235                } else {
                    /*
                     * xop 0x20..0x35 (minus the V9 shifts handled above):
                     * tagged add/sub, multiply step, and on pre-V9 CPUs
                     * the 32-bit shifts.
                     */
4236                    cpu_src1 = get_src1(dc, insn);
4237                    cpu_src2 = get_src2(dc, insn);
4238                    switch (xop) {
4239                    case 0x20: /* taddcc */
4240                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4241                        gen_store_gpr(dc, rd, cpu_dst);
4242                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4243                        dc->cc_op = CC_OP_TADD;
4244                        break;
4245                    case 0x21: /* tsubcc */
4246                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4247                        gen_store_gpr(dc, rd, cpu_dst);
4248                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4249                        dc->cc_op = CC_OP_TSUB;
4250                        break;
                    /* The "tv" (trap-on-overflow) forms go through helpers
                       that may raise an exception, hence cpu_env.  */
4251                    case 0x22: /* taddcctv */
4252                        gen_helper_taddcctv(cpu_dst, cpu_env,
4253                                            cpu_src1, cpu_src2);
4254                        gen_store_gpr(dc, rd, cpu_dst);
4255                        dc->cc_op = CC_OP_TADDTV;
4256                        break;
4257                    case 0x23: /* tsubcctv */
4258                        gen_helper_tsubcctv(cpu_dst, cpu_env,
4259                                            cpu_src1, cpu_src2);
4260                        gen_store_gpr(dc, rd, cpu_dst);
4261                        dc->cc_op = CC_OP_TSUBTV;
4262                        break;
4263                    case 0x24: /* mulscc */
                        /* mulscc consumes the current icc flags, so the
                           live PSR state must be materialized first.  */
4264                        update_psr(dc);
4265                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4266                        gen_store_gpr(dc, rd, cpu_dst);
4267                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4268                        dc->cc_op = CC_OP_ADD;
4269                        break;
4270#ifndef TARGET_SPARC64
4271                    case 0x25:  /* sll */
4272                        if (IS_IMM) { /* immediate */
4273                            simm = GET_FIELDs(insn, 20, 31);
4274                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4275                        } else { /* register */
4276                            cpu_tmp0 = get_temp_tl(dc);
4277                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4278                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4279                        }
4280                        gen_store_gpr(dc, rd, cpu_dst);
4281                        break;
4282                    case 0x26:  /* srl */
4283                        if (IS_IMM) { /* immediate */
4284                            simm = GET_FIELDs(insn, 20, 31);
4285                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4286                        } else { /* register */
4287                            cpu_tmp0 = get_temp_tl(dc);
4288                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4289                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4290                        }
4291                        gen_store_gpr(dc, rd, cpu_dst);
4292                        break;
4293                    case 0x27:  /* sra */
4294                        if (IS_IMM) { /* immediate */
4295                            simm = GET_FIELDs(insn, 20, 31);
4296                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4297                        } else { /* register */
4298                            cpu_tmp0 = get_temp_tl(dc);
4299                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4300                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4301                        }
4302                        gen_store_gpr(dc, rd, cpu_dst);
4303                        break;
4304#endif
4305                    case 0x30:
4306                        {
4307                            cpu_tmp0 = get_temp_tl(dc);
4308                            switch(rd) {
4309                            case 0: /* wry */
4310                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4311                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4312                                break;
4313#ifndef TARGET_SPARC64
4314                            case 0x01 ... 0x0f: /* undefined in the
4315                                                   SPARCv8 manual, nop
4316                                                   on the microSPARC
4317                                                   II */
4318                            case 0x10 ... 0x1f: /* implementation-dependent
4319                                                   in the SPARCv8
4320                                                   manual, nop on the
4321                                                   microSPARC II */
4322                                if ((rd == 0x13) && (dc->def->features &
4323                                                     CPU_FEATURE_POWERDOWN)) {
4324                                    /* LEON3 power-down */
4325                                    save_state(dc);
4326                                    gen_helper_power_down(cpu_env);
4327                                }
4328                                break;
4329#else
4330                            case 0x2: /* V9 wrccr */
4331                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4332                                gen_helper_wrccr(cpu_env, cpu_tmp0);
4333                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4334                                dc->cc_op = CC_OP_FLAGS;
4335                                break;
4336                            case 0x3: /* V9 wrasi */
4337                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4338                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4339                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4340                                                offsetof(CPUSPARCState, asi));
4341                                /* End TB to notice changed ASI.  */
4342                                save_state(dc);
4343                                gen_op_next_insn();
4344                                tcg_gen_exit_tb(NULL, 0);
4345                                dc->base.is_jmp = DISAS_NORETURN;
4346                                break;
4347                            case 0x6: /* V9 wrfprs */
4348                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4349                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4350                                dc->fprs_dirty = 0;
4351                                save_state(dc);
4352                                gen_op_next_insn();
4353                                tcg_gen_exit_tb(NULL, 0);
4354                                dc->base.is_jmp = DISAS_NORETURN;
4355                                break;
4356                            case 0xf: /* V9 sir, nop if user */
4357#if !defined(CONFIG_USER_ONLY)
4358                                if (supervisor(dc)) {
4359                                    ; // XXX
4360                                }
4361#endif
4362                                break;
4363                            case 0x13: /* Graphics Status */
4364                                if (gen_trap_ifnofpu(dc)) {
4365                                    goto jmp_insn;
4366                                }
4367                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4368                                break;
4369                            case 0x14: /* Softint set */
4370                                if (!supervisor(dc))
4371                                    goto illegal_insn;
4372                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4373                                gen_helper_set_softint(cpu_env, cpu_tmp0);
4374                                break;
4375                            case 0x15: /* Softint clear */
4376                                if (!supervisor(dc))
4377                                    goto illegal_insn;
4378                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4379                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
4380                                break;
4381                            case 0x16: /* Softint write */
4382                                if (!supervisor(dc))
4383                                    goto illegal_insn;
4384                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4385                                gen_helper_write_softint(cpu_env, cpu_tmp0);
4386                                break;
4387                            case 0x17: /* Tick compare */
4388#if !defined(CONFIG_USER_ONLY)
4389                                if (!supervisor(dc))
4390                                    goto illegal_insn;
4391#endif
4392                                {
4393                                    TCGv_ptr r_tickptr;
4394
4395                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4396                                                   cpu_src2);
4397                                    r_tickptr = tcg_temp_new_ptr();
4398                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
4399                                                   offsetof(CPUSPARCState, tick));
4400                                    if (tb_cflags(dc->base.tb) &
4401                                           CF_USE_ICOUNT) {
4402                                        gen_io_start();
4403                                    }
4404                                    gen_helper_tick_set_limit(r_tickptr,
4405                                                              cpu_tick_cmpr);
4406                                    tcg_temp_free_ptr(r_tickptr);
4407                                    /* End TB to handle timer interrupt */
4408                                    dc->base.is_jmp = DISAS_EXIT;
4409                                }
4410                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* wr %stick: set the system tick
                                       counter to rs1 ^ rs2. */
                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    /* Tick helper may do I/O; open an
                                       icount window. */
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* wr %stick_cmpr: new compare value is
                                       rs1 ^ rs2; program it as the limit
                                       of the "stick" timer. */
                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    /* Tick helper may do I/O; open an
                                       icount window. */
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
4459
                            /* These V9 ASRs are not implemented; they fall
                               through to the illegal-instruction path. */
                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
4470#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* On V9 this opcode encodes window-management
                               ops, selected by the rd field. */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrpsr: %psr <- rs1 ^ rs2 via helper.
                               The condition codes now live in PSR, so
                               switch to CC_OP_FLAGS, then end the TB:
                               a PSR write can change processor state the
                               translator relies on. */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* WR semantics: value written is rs1 ^ rs2. */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register
                               to write. */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    /* Store into the trap state for the
                                       current trap level. */
                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is stored as a 32-bit field. */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* Tick helper may do I/O; open an
                                       icount window. */
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* Sync pc/npc first: wrpstate goes through
                                   a helper that can affect CPU state, so
                                   npc becomes dynamic afterwards. */
                                save_state(dc);
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    gen_io_end();
                                }
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* Changing PIL can affect interrupt
                                   delivery; bracket the helper with the
                                   icount window. */
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    gen_io_end();
                                }
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(cpu_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                /* Hyperprivileged register: needs both the
                                   HYPV feature and hypervisor mode. */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrwim: mask the value down to the number
                               of implemented register windows. */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8 wrtbr: %tbr <- rs1 ^ rs2. */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hyperprivileged registers,
                               selected by the rd field. */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* HPSTATE changed: sync state and end the
                                   TB at the next insn. */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Latch the compare value, then program
                                       it as the hstick timer limit. */
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    /* Tick helper may do I/O. */
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_end();
                                    }
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
4715#endif
4716#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            /* Bit 18 selects integer vs floating-point
                               condition codes; cc picks the cc set (for
                               the integer case only 0 and 2 are legal,
                               mapping to the two integer cc fields). */
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd <- src2 when the condition holds,
                               otherwise rd keeps its old value. */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; the helper receives
                           cpu_env, presumably so it can raise division
                           traps — confirm in helper_sdivx. */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* rd <- population count of src2. */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            /* movr: conditional move based on the contents
                               of register rs1 (not on condition codes). */
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd <- src2 when the register condition
                               holds, otherwise rd keeps its old value. */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4784#endif
4785                    default:
4786                        goto illegal_insn;
4787                    }
4788                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                /* VIS opcode space: opf selects the operation; rs1/rs2
                   come from fixed instruction fields. */
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                /* VIS uses the FP unit: if gen_trap_ifnofpu emitted a
                   trap, skip translating the operation. */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                switch (opf) {
                /* VIS edge instructions.  The gen_edge arguments after
                   the operands are: element width in bits (8/16/32),
                   then a flag for the cc-setting variants, then a flag
                   for the little-endian ("l"/"ln") variants. */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    /* Same address computation as array8, scaled by the
                       element size (<< 1). */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    /* array8 result scaled by << 2. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    /* Little-endian variant (last gen_alignaddr arg). */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    /* rd <- rs1 + rs2; the sum is also deposited into the
                       upper 32 bits of %gsr (the bmask field). */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS partitioned compares: operands are FP doubles, but
                   the comparison result is written to integer register
                   rd via gen_store_gpr. */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* Partitioned multiplies and pack/merge ops on FP double
                   registers.  gen_ne_fop_DDD wraps a plain D = op(D, D)
                   helper; gen_gsr_fop_DDD additionally threads %gsr
                   through (cf. the explicit cpu_gsr operand in the
                   fpack16/fpackfix cases below). */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    /* Double source (rs2), single-precision result;
                       %gsr supplies the scale factor. */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    /* DDDD form: destination register is also an input
                       (accumulating helper). */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
5050                case 0x050: /* VIS I fpadd16 */
5051                    CHECK_FPU_FEATURE(dc, VIS1);
5052                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
5053                    break;
5054                case 0x051: /* VIS I fpadd16s */
5055                    CHECK_FPU_FEATURE(dc, VIS1);
5056                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
5057                    break;
5058                case 0x052: /* VIS I fpadd32 */
5059                    CHECK_FPU_FEATURE(dc, VIS1);
5060                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
5061                    break;
5062                case 0x053: /* VIS I fpadd32s */
5063                    CHECK_FPU_FEATURE(dc, VIS1);
5064                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5065                    break;
5066                case 0x054: /* VIS I fpsub16 */
5067                    CHECK_FPU_FEATURE(dc, VIS1);
5068                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5069                    break;
5070                case 0x055: /* VIS I fpsub16s */
5071                    CHECK_FPU_FEATURE(dc, VIS1);
5072                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5073                    break;
5074                case 0x056: /* VIS I fpsub32 */
5075                    CHECK_FPU_FEATURE(dc, VIS1);
5076                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5077                    break;
5078                case 0x057: /* VIS I fpsub32s */
5079                    CHECK_FPU_FEATURE(dc, VIS1);
5080                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5081                    break;
5082                case 0x060: /* VIS I fzero */
5083                    CHECK_FPU_FEATURE(dc, VIS1);
5084                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5085                    tcg_gen_movi_i64(cpu_dst_64, 0);
5086                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5087                    break;
5088                case 0x061: /* VIS I fzeros */
5089                    CHECK_FPU_FEATURE(dc, VIS1);
5090                    cpu_dst_32 = gen_dest_fpr_F(dc);
5091                    tcg_gen_movi_i32(cpu_dst_32, 0);
5092                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5093                    break;
5094                case 0x062: /* VIS I fnor */
5095                    CHECK_FPU_FEATURE(dc, VIS1);
5096                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5097                    break;
5098                case 0x063: /* VIS I fnors */
5099                    CHECK_FPU_FEATURE(dc, VIS1);
5100                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5101                    break;
5102                case 0x064: /* VIS I fandnot2 */
5103                    CHECK_FPU_FEATURE(dc, VIS1);
5104                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5105                    break;
5106                case 0x065: /* VIS I fandnot2s */
5107                    CHECK_FPU_FEATURE(dc, VIS1);
5108                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5109                    break;
5110                case 0x066: /* VIS I fnot2 */
5111                    CHECK_FPU_FEATURE(dc, VIS1);
5112                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5113                    break;
5114                case 0x067: /* VIS I fnot2s */
5115                    CHECK_FPU_FEATURE(dc, VIS1);
5116                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5117                    break;
5118                case 0x068: /* VIS I fandnot1 */
5119                    CHECK_FPU_FEATURE(dc, VIS1);
5120                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5121                    break;
5122                case 0x069: /* VIS I fandnot1s */
5123                    CHECK_FPU_FEATURE(dc, VIS1);
5124                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5125                    break;
5126                case 0x06a: /* VIS I fnot1 */
5127                    CHECK_FPU_FEATURE(dc, VIS1);
5128                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5129                    break;
5130                case 0x06b: /* VIS I fnot1s */
5131                    CHECK_FPU_FEATURE(dc, VIS1);
5132                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5133                    break;
5134                case 0x06c: /* VIS I fxor */
5135                    CHECK_FPU_FEATURE(dc, VIS1);
5136                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5137                    break;
5138                case 0x06d: /* VIS I fxors */
5139                    CHECK_FPU_FEATURE(dc, VIS1);
5140                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5141                    break;
5142                case 0x06e: /* VIS I fnand */
5143                    CHECK_FPU_FEATURE(dc, VIS1);
5144                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5145                    break;
5146                case 0x06f: /* VIS I fnands */
5147                    CHECK_FPU_FEATURE(dc, VIS1);
5148                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5149                    break;
5150                case 0x070: /* VIS I fand */
5151                    CHECK_FPU_FEATURE(dc, VIS1);
5152                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5153                    break;
5154                case 0x071: /* VIS I fands */
5155                    CHECK_FPU_FEATURE(dc, VIS1);
5156                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5157                    break;
5158                case 0x072: /* VIS I fxnor */
5159                    CHECK_FPU_FEATURE(dc, VIS1);
5160                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5161                    break;
5162                case 0x073: /* VIS I fxnors */
5163                    CHECK_FPU_FEATURE(dc, VIS1);
5164                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5165                    break;
5166                case 0x074: /* VIS I fsrc1 */
5167                    CHECK_FPU_FEATURE(dc, VIS1);
5168                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5169                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5170                    break;
5171                case 0x075: /* VIS I fsrc1s */
5172                    CHECK_FPU_FEATURE(dc, VIS1);
5173                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5174                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5175                    break;
5176                case 0x076: /* VIS I fornot2 */
5177                    CHECK_FPU_FEATURE(dc, VIS1);
5178                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5179                    break;
5180                case 0x077: /* VIS I fornot2s */
5181                    CHECK_FPU_FEATURE(dc, VIS1);
5182                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5183                    break;
5184                case 0x078: /* VIS I fsrc2 */
5185                    CHECK_FPU_FEATURE(dc, VIS1);
5186                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5187                    gen_store_fpr_D(dc, rd, cpu_src1_64);
5188                    break;
5189                case 0x079: /* VIS I fsrc2s */
5190                    CHECK_FPU_FEATURE(dc, VIS1);
5191                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5192                    gen_store_fpr_F(dc, rd, cpu_src1_32);
5193                    break;
5194                case 0x07a: /* VIS I fornot1 */
5195                    CHECK_FPU_FEATURE(dc, VIS1);
5196                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5197                    break;
5198                case 0x07b: /* VIS I fornot1s */
5199                    CHECK_FPU_FEATURE(dc, VIS1);
5200                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5201                    break;
5202                case 0x07c: /* VIS I for */
5203                    CHECK_FPU_FEATURE(dc, VIS1);
5204                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5205                    break;
5206                case 0x07d: /* VIS I fors */
5207                    CHECK_FPU_FEATURE(dc, VIS1);
5208                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5209                    break;
5210                case 0x07e: /* VIS I fone */
5211                    CHECK_FPU_FEATURE(dc, VIS1);
5212                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5213                    tcg_gen_movi_i64(cpu_dst_64, -1);
5214                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5215                    break;
5216                case 0x07f: /* VIS I fones */
5217                    CHECK_FPU_FEATURE(dc, VIS1);
5218                    cpu_dst_32 = gen_dest_fpr_F(dc);
5219                    tcg_gen_movi_i32(cpu_dst_32, -1);
5220                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5221                    break;
5222                case 0x080: /* VIS I shutdown */
5223                case 0x081: /* VIS II siam */
5224                    // XXX
5225                    goto illegal_insn;
5226                default:
5227                    goto illegal_insn;
5228                }
5229#else
5230                goto ncp_insn;
5231#endif
5232            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5233#ifdef TARGET_SPARC64
5234                goto illegal_insn;
5235#else
5236                goto ncp_insn;
5237#endif
5238#ifdef TARGET_SPARC64
5239            } else if (xop == 0x39) { /* V9 return */
5240                save_state(dc);
5241                cpu_src1 = get_src1(dc, insn);
5242                cpu_tmp0 = get_temp_tl(dc);
5243                if (IS_IMM) {   /* immediate */
5244                    simm = GET_FIELDs(insn, 19, 31);
5245                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5246                } else {                /* register */
5247                    rs2 = GET_FIELD(insn, 27, 31);
5248                    if (rs2) {
5249                        cpu_src2 = gen_load_gpr(dc, rs2);
5250                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5251                    } else {
5252                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5253                    }
5254                }
5255                gen_helper_restore(cpu_env);
5256                gen_mov_pc_npc(dc);
5257                gen_check_align(cpu_tmp0, 3);
5258                tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5259                dc->npc = DYNAMIC_PC;
5260                goto jmp_insn;
5261#endif
5262            } else {
5263                cpu_src1 = get_src1(dc, insn);
5264                cpu_tmp0 = get_temp_tl(dc);
5265                if (IS_IMM) {   /* immediate */
5266                    simm = GET_FIELDs(insn, 19, 31);
5267                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5268                } else {                /* register */
5269                    rs2 = GET_FIELD(insn, 27, 31);
5270                    if (rs2) {
5271                        cpu_src2 = gen_load_gpr(dc, rs2);
5272                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5273                    } else {
5274                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5275                    }
5276                }
5277                switch (xop) {
5278                case 0x38:      /* jmpl */
5279                    {
5280                        TCGv t = gen_dest_gpr(dc, rd);
5281                        tcg_gen_movi_tl(t, dc->pc);
5282                        gen_store_gpr(dc, rd, t);
5283
5284                        gen_mov_pc_npc(dc);
5285                        gen_check_align(cpu_tmp0, 3);
5286                        gen_address_mask(dc, cpu_tmp0);
5287                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5288                        dc->npc = DYNAMIC_PC;
5289                    }
5290                    goto jmp_insn;
5291#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5292                case 0x39:      /* rett, V9 return */
5293                    {
5294                        if (!supervisor(dc))
5295                            goto priv_insn;
5296                        gen_mov_pc_npc(dc);
5297                        gen_check_align(cpu_tmp0, 3);
5298                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5299                        dc->npc = DYNAMIC_PC;
5300                        gen_helper_rett(cpu_env);
5301                    }
5302                    goto jmp_insn;
5303#endif
5304                case 0x3b: /* flush */
5305                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5306                        goto unimp_flush;
5307                    /* nop */
5308                    break;
5309                case 0x3c:      /* save */
5310                    gen_helper_save(cpu_env);
5311                    gen_store_gpr(dc, rd, cpu_tmp0);
5312                    break;
5313                case 0x3d:      /* restore */
5314                    gen_helper_restore(cpu_env);
5315                    gen_store_gpr(dc, rd, cpu_tmp0);
5316                    break;
5317#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5318                case 0x3e:      /* V9 done/retry */
5319                    {
5320                        switch (rd) {
5321                        case 0:
5322                            if (!supervisor(dc))
5323                                goto priv_insn;
5324                            dc->npc = DYNAMIC_PC;
5325                            dc->pc = DYNAMIC_PC;
5326                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5327                                gen_io_start();
5328                            }
5329                            gen_helper_done(cpu_env);
5330                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5331                                gen_io_end();
5332                            }
5333                            goto jmp_insn;
5334                        case 1:
5335                            if (!supervisor(dc))
5336                                goto priv_insn;
5337                            dc->npc = DYNAMIC_PC;
5338                            dc->pc = DYNAMIC_PC;
5339                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5340                                gen_io_start();
5341                            }
5342                            gen_helper_retry(cpu_env);
5343                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5344                                gen_io_end();
5345                            }
5346                            goto jmp_insn;
5347                        default:
5348                            goto illegal_insn;
5349                        }
5350                    }
5351                    break;
5352#endif
5353                default:
5354                    goto illegal_insn;
5355                }
5356            }
5357            break;
5358        }
5359        break;
5360    case 3:                     /* load/store instructions */
5361        {
5362            unsigned int xop = GET_FIELD(insn, 7, 12);
5363            /* ??? gen_address_mask prevents us from using a source
5364               register directly.  Always generate a temporary.  */
5365            TCGv cpu_addr = get_temp_tl(dc);
5366
5367            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5368            if (xop == 0x3c || xop == 0x3e) {
5369                /* V9 casa/casxa : no offset */
5370            } else if (IS_IMM) {     /* immediate */
5371                simm = GET_FIELDs(insn, 19, 31);
5372                if (simm != 0) {
5373                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5374                }
5375            } else {            /* register */
5376                rs2 = GET_FIELD(insn, 27, 31);
5377                if (rs2 != 0) {
5378                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5379                }
5380            }
5381            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5382                (xop > 0x17 && xop <= 0x1d ) ||
5383                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5384                TCGv cpu_val = gen_dest_gpr(dc, rd);
5385
5386                switch (xop) {
5387                case 0x0:       /* ld, V9 lduw, load unsigned word */
5388                    gen_address_mask(dc, cpu_addr);
5389                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5390                    break;
5391                case 0x1:       /* ldub, load unsigned byte */
5392                    gen_address_mask(dc, cpu_addr);
5393                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5394                    break;
5395                case 0x2:       /* lduh, load unsigned halfword */
5396                    gen_address_mask(dc, cpu_addr);
5397                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5398                    break;
5399                case 0x3:       /* ldd, load double word */
5400                    if (rd & 1)
5401                        goto illegal_insn;
5402                    else {
5403                        TCGv_i64 t64;
5404
5405                        gen_address_mask(dc, cpu_addr);
5406                        t64 = tcg_temp_new_i64();
5407                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5408                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5409                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5410                        gen_store_gpr(dc, rd + 1, cpu_val);
5411                        tcg_gen_shri_i64(t64, t64, 32);
5412                        tcg_gen_trunc_i64_tl(cpu_val, t64);
5413                        tcg_temp_free_i64(t64);
5414                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
5415                    }
5416                    break;
5417                case 0x9:       /* ldsb, load signed byte */
5418                    gen_address_mask(dc, cpu_addr);
5419                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5420                    break;
5421                case 0xa:       /* ldsh, load signed halfword */
5422                    gen_address_mask(dc, cpu_addr);
5423                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5424                    break;
5425                case 0xd:       /* ldstub */
5426                    gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5427                    break;
5428                case 0x0f:
5429                    /* swap, swap register with memory. Also atomically */
5430                    CHECK_IU_FEATURE(dc, SWAP);
5431                    cpu_src1 = gen_load_gpr(dc, rd);
5432                    gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5433                             dc->mem_idx, MO_TEUL);
5434                    break;
5435#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5436                case 0x10:      /* lda, V9 lduwa, load word alternate */
5437                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5438                    break;
5439                case 0x11:      /* lduba, load unsigned byte alternate */
5440                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5441                    break;
5442                case 0x12:      /* lduha, load unsigned halfword alternate */
5443                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5444                    break;
5445                case 0x13:      /* ldda, load double word alternate */
5446                    if (rd & 1) {
5447                        goto illegal_insn;
5448                    }
5449                    gen_ldda_asi(dc, cpu_addr, insn, rd);
5450                    goto skip_move;
5451                case 0x19:      /* ldsba, load signed byte alternate */
5452                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5453                    break;
5454                case 0x1a:      /* ldsha, load signed halfword alternate */
5455                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5456                    break;
5457                case 0x1d:      /* ldstuba -- XXX: should be atomically */
5458                    gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5459                    break;
5460                case 0x1f:      /* swapa, swap reg with alt. memory. Also
5461                                   atomically */
5462                    CHECK_IU_FEATURE(dc, SWAP);
5463                    cpu_src1 = gen_load_gpr(dc, rd);
5464                    gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5465                    break;
5466
5467#ifndef TARGET_SPARC64
5468                case 0x30: /* ldc */
5469                case 0x31: /* ldcsr */
5470                case 0x33: /* lddc */
5471                    goto ncp_insn;
5472#endif
5473#endif
5474#ifdef TARGET_SPARC64
5475                case 0x08: /* V9 ldsw */
5476                    gen_address_mask(dc, cpu_addr);
5477                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5478                    break;
5479                case 0x0b: /* V9 ldx */
5480                    gen_address_mask(dc, cpu_addr);
5481                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5482                    break;
5483                case 0x18: /* V9 ldswa */
5484                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5485                    break;
5486                case 0x1b: /* V9 ldxa */
5487                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5488                    break;
5489                case 0x2d: /* V9 prefetch, no effect */
5490                    goto skip_move;
5491                case 0x30: /* V9 ldfa */
5492                    if (gen_trap_ifnofpu(dc)) {
5493                        goto jmp_insn;
5494                    }
5495                    gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5496                    gen_update_fprs_dirty(dc, rd);
5497                    goto skip_move;
5498                case 0x33: /* V9 lddfa */
5499                    if (gen_trap_ifnofpu(dc)) {
5500                        goto jmp_insn;
5501                    }
5502                    gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5503                    gen_update_fprs_dirty(dc, DFPREG(rd));
5504                    goto skip_move;
5505                case 0x3d: /* V9 prefetcha, no effect */
5506                    goto skip_move;
5507                case 0x32: /* V9 ldqfa */
5508                    CHECK_FPU_FEATURE(dc, FLOAT128);
5509                    if (gen_trap_ifnofpu(dc)) {
5510                        goto jmp_insn;
5511                    }
5512                    gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5513                    gen_update_fprs_dirty(dc, QFPREG(rd));
5514                    goto skip_move;
5515#endif
5516                default:
5517                    goto illegal_insn;
5518                }
5519                gen_store_gpr(dc, rd, cpu_val);
5520#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5521            skip_move: ;
5522#endif
5523            } else if (xop >= 0x20 && xop < 0x24) {
5524                if (gen_trap_ifnofpu(dc)) {
5525                    goto jmp_insn;
5526                }
5527                switch (xop) {
5528                case 0x20:      /* ldf, load fpreg */
5529                    gen_address_mask(dc, cpu_addr);
5530                    cpu_dst_32 = gen_dest_fpr_F(dc);
5531                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5532                                        dc->mem_idx, MO_TEUL);
5533                    gen_store_fpr_F(dc, rd, cpu_dst_32);
5534                    break;
5535                case 0x21:      /* ldfsr, V9 ldxfsr */
5536#ifdef TARGET_SPARC64
5537                    gen_address_mask(dc, cpu_addr);
5538                    if (rd == 1) {
5539                        TCGv_i64 t64 = tcg_temp_new_i64();
5540                        tcg_gen_qemu_ld_i64(t64, cpu_addr,
5541                                            dc->mem_idx, MO_TEQ);
5542                        gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5543                        tcg_temp_free_i64(t64);
5544                        break;
5545                    }
5546#endif
5547                    cpu_dst_32 = get_temp_i32(dc);
5548                    tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5549                                        dc->mem_idx, MO_TEUL);
5550                    gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5551                    break;
5552                case 0x22:      /* ldqf, load quad fpreg */
5553                    CHECK_FPU_FEATURE(dc, FLOAT128);
5554                    gen_address_mask(dc, cpu_addr);
5555                    cpu_src1_64 = tcg_temp_new_i64();
5556                    tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5557                                        MO_TEQ | MO_ALIGN_4);
5558                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5559                    cpu_src2_64 = tcg_temp_new_i64();
5560                    tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5561                                        MO_TEQ | MO_ALIGN_4);
5562                    gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5563                    tcg_temp_free_i64(cpu_src1_64);
5564                    tcg_temp_free_i64(cpu_src2_64);
5565                    break;
5566                case 0x23:      /* lddf, load double fpreg */
5567                    gen_address_mask(dc, cpu_addr);
5568                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5569                    tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5570                                        MO_TEQ | MO_ALIGN_4);
5571                    gen_store_fpr_D(dc, rd, cpu_dst_64);
5572                    break;
5573                default:
5574                    goto illegal_insn;
5575                }
5576            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5577                       xop == 0xe || xop == 0x1e) {
5578                TCGv cpu_val = gen_load_gpr(dc, rd);
5579
5580                switch (xop) {
5581                case 0x4: /* st, store word */
5582                    gen_address_mask(dc, cpu_addr);
5583                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5584                    break;
5585                case 0x5: /* stb, store byte */
5586                    gen_address_mask(dc, cpu_addr);
5587                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5588                    break;
5589                case 0x6: /* sth, store halfword */
5590                    gen_address_mask(dc, cpu_addr);
5591                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5592                    break;
5593                case 0x7: /* std, store double word */
5594                    if (rd & 1)
5595                        goto illegal_insn;
5596                    else {
5597                        TCGv_i64 t64;
5598                        TCGv lo;
5599
5600                        gen_address_mask(dc, cpu_addr);
5601                        lo = gen_load_gpr(dc, rd + 1);
5602                        t64 = tcg_temp_new_i64();
5603                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5604                        tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5605                        tcg_temp_free_i64(t64);
5606                    }
5607                    break;
5608#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5609                case 0x14: /* sta, V9 stwa, store word alternate */
5610                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5611                    break;
5612                case 0x15: /* stba, store byte alternate */
5613                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5614                    break;
5615                case 0x16: /* stha, store halfword alternate */
5616                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5617                    break;
5618                case 0x17: /* stda, store double word alternate */
5619                    if (rd & 1) {
5620                        goto illegal_insn;
5621                    }
5622                    gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5623                    break;
5624#endif
5625#ifdef TARGET_SPARC64
5626                case 0x0e: /* V9 stx */
5627                    gen_address_mask(dc, cpu_addr);
5628                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5629                    break;
5630                case 0x1e: /* V9 stxa */
5631                    gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5632                    break;
5633#endif
5634                default:
5635                    goto illegal_insn;
5636                }
5637            } else if (xop > 0x23 && xop < 0x28) {
5638                if (gen_trap_ifnofpu(dc)) {
5639                    goto jmp_insn;
5640                }
5641                switch (xop) {
5642                case 0x24: /* stf, store fpreg */
5643                    gen_address_mask(dc, cpu_addr);
5644                    cpu_src1_32 = gen_load_fpr_F(dc, rd);
5645                    tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5646                                        dc->mem_idx, MO_TEUL);
5647                    break;
5648                case 0x25: /* stfsr, V9 stxfsr */
5649                    {
5650#ifdef TARGET_SPARC64
5651                        gen_address_mask(dc, cpu_addr);
5652                        if (rd == 1) {
5653                            tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5654                            break;
5655                        }
5656#endif
5657                        tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5658                    }
5659                    break;
5660                case 0x26:
5661#ifdef TARGET_SPARC64
5662                    /* V9 stqf, store quad fpreg */
5663                    CHECK_FPU_FEATURE(dc, FLOAT128);
5664                    gen_address_mask(dc, cpu_addr);
5665                    /* ??? While stqf only requires 4-byte alignment, it is
5666                       legal for the cpu to signal the unaligned exception.
5667                       The OS trap handler is then required to fix it up.
5668                       For qemu, this avoids having to probe the second page
5669                       before performing the first write.  */
5670                    cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5671                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5672                                        dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5673                    tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5674                    cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5675                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5676                                        dc->mem_idx, MO_TEQ);
5677                    break;
5678#else /* !TARGET_SPARC64 */
5679                    /* stdfq, store floating point queue */
5680#if defined(CONFIG_USER_ONLY)
5681                    goto illegal_insn;
5682#else
5683                    if (!supervisor(dc))
5684                        goto priv_insn;
5685                    if (gen_trap_ifnofpu(dc)) {
5686                        goto jmp_insn;
5687                    }
5688                    goto nfq_insn;
5689#endif
5690#endif
5691                case 0x27: /* stdf, store double fpreg */
5692                    gen_address_mask(dc, cpu_addr);
5693                    cpu_src1_64 = gen_load_fpr_D(dc, rd);
5694                    tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5695                                        MO_TEQ | MO_ALIGN_4);
5696                    break;
5697                default:
5698                    goto illegal_insn;
5699                }
5700            } else if (xop > 0x33 && xop < 0x3f) {
5701                switch (xop) {
5702#ifdef TARGET_SPARC64
5703                case 0x34: /* V9 stfa */
5704                    if (gen_trap_ifnofpu(dc)) {
5705                        goto jmp_insn;
5706                    }
5707                    gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5708                    break;
5709                case 0x36: /* V9 stqfa */
5710                    {
5711                        CHECK_FPU_FEATURE(dc, FLOAT128);
5712                        if (gen_trap_ifnofpu(dc)) {
5713                            goto jmp_insn;
5714                        }
5715                        gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5716                    }
5717                    break;
5718                case 0x37: /* V9 stdfa */
5719                    if (gen_trap_ifnofpu(dc)) {
5720                        goto jmp_insn;
5721                    }
5722                    gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5723                    break;
5724                case 0x3e: /* V9 casxa */
5725                    rs2 = GET_FIELD(insn, 27, 31);
5726                    cpu_src2 = gen_load_gpr(dc, rs2);
5727                    gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5728                    break;
5729#else
5730                case 0x34: /* stc */
5731                case 0x35: /* stcsr */
5732                case 0x36: /* stdcq */
5733                case 0x37: /* stdc */
5734                    goto ncp_insn;
5735#endif
5736#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5737                case 0x3c: /* V9 or LEON3 casa */
5738#ifndef TARGET_SPARC64
5739                    CHECK_IU_FEATURE(dc, CASA);
5740#endif
5741                    rs2 = GET_FIELD(insn, 27, 31);
5742                    cpu_src2 = gen_load_gpr(dc, rs2);
5743                    gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5744                    break;
5745#endif
5746                default:
5747                    goto illegal_insn;
5748                }
5749            } else {
5750                goto illegal_insn;
5751            }
5752        }
5753        break;
5754    }
5755    /* default case for non jump instructions */
5756    if (dc->npc == DYNAMIC_PC) {
5757        dc->pc = DYNAMIC_PC;
5758        gen_op_next_insn();
5759    } else if (dc->npc == JUMP_PC) {
5760        /* we can do a static jump */
5761        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5762        dc->base.is_jmp = DISAS_NORETURN;
5763    } else {
5764        dc->pc = dc->npc;
5765        dc->npc = dc->npc + 4;
5766    }
5767 jmp_insn:
5768    goto egress;
5769 illegal_insn:
5770    gen_exception(dc, TT_ILL_INSN);
5771    goto egress;
5772 unimp_flush:
5773    gen_exception(dc, TT_UNIMP_FLUSH);
5774    goto egress;
5775#if !defined(CONFIG_USER_ONLY)
5776 priv_insn:
5777    gen_exception(dc, TT_PRIV_INSN);
5778    goto egress;
5779#endif
5780 nfpu_insn:
5781    gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5782    goto egress;
5783#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5784 nfq_insn:
5785    gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5786    goto egress;
5787#endif
5788#ifndef TARGET_SPARC64
5789 ncp_insn:
5790    gen_exception(dc, TT_NCP_INSN);
5791    goto egress;
5792#endif
5793 egress:
5794    if (dc->n_t32 != 0) {
5795        int i;
5796        for (i = dc->n_t32 - 1; i >= 0; --i) {
5797            tcg_temp_free_i32(dc->t32[i]);
5798        }
5799        dc->n_t32 = 0;
5800    }
5801    if (dc->n_ttl != 0) {
5802        int i;
5803        for (i = dc->n_ttl - 1; i >= 0; --i) {
5804            tcg_temp_free(dc->ttl[i]);
5805        }
5806        dc->n_ttl = 0;
5807    }
5808}
5809
5810static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5811{
5812    DisasContext *dc = container_of(dcbase, DisasContext, base);
5813    CPUSPARCState *env = cs->env_ptr;
5814    int bound;
5815
5816    dc->pc = dc->base.pc_first;
5817    dc->npc = (target_ulong)dc->base.tb->cs_base;
5818    dc->cc_op = CC_OP_DYNAMIC;
5819    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5820    dc->def = &env->def;
5821    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5822    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5823#ifndef CONFIG_USER_ONLY
5824    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5825#endif
5826#ifdef TARGET_SPARC64
5827    dc->fprs_dirty = 0;
5828    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5829#ifndef CONFIG_USER_ONLY
5830    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5831#endif
5832#endif
5833    /*
5834     * if we reach a page boundary, we stop generation so that the
5835     * PC of a TT_TFAULT exception is always in the right page
5836     */
5837    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5838    dc->base.max_insns = MIN(dc->base.max_insns, bound);
5839}
5840
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* No per-TB setup is needed for the SPARC front end.  */
}
5844
5845static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5846{
5847    DisasContext *dc = container_of(dcbase, DisasContext, base);
5848
5849    if (dc->npc & JUMP_PC) {
5850        assert(dc->jump_pc[1] == dc->pc + 4);
5851        tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5852    } else {
5853        tcg_gen_insn_start(dc->pc, dc->npc);
5854    }
5855}
5856
5857static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5858{
5859    DisasContext *dc = container_of(dcbase, DisasContext, base);
5860    CPUSPARCState *env = cs->env_ptr;
5861    unsigned int insn;
5862
5863    insn = translator_ldl(env, dc->pc);
5864    dc->base.pc_next += 4;
5865    disas_sparc_insn(dc, insn);
5866
5867    if (dc->base.is_jmp == DISAS_NORETURN) {
5868        return;
5869    }
5870    if (dc->pc != dc->base.pc_next) {
5871        dc->base.is_jmp = DISAS_TOO_MANY;
5872    }
5873}
5874
5875static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5876{
5877    DisasContext *dc = container_of(dcbase, DisasContext, base);
5878
5879    switch (dc->base.is_jmp) {
5880    case DISAS_NEXT:
5881    case DISAS_TOO_MANY:
5882        if (dc->pc != DYNAMIC_PC &&
5883            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5884            /* static PC and NPC: we can use direct chaining */
5885            gen_goto_tb(dc, 0, dc->pc, dc->npc);
5886        } else {
5887            if (dc->pc != DYNAMIC_PC) {
5888                tcg_gen_movi_tl(cpu_pc, dc->pc);
5889            }
5890            save_npc(dc);
5891            tcg_gen_exit_tb(NULL, 0);
5892        }
5893        break;
5894
5895    case DISAS_NORETURN:
5896       break;
5897
5898    case DISAS_EXIT:
5899        /* Exit TB */
5900        save_state(dc);
5901        tcg_gen_exit_tb(NULL, 0);
5902        break;
5903
5904    default:
5905        g_assert_not_reached();
5906    }
5907}
5908
/* Log the symbol and the disassembly of the translated guest code.  */
static void sparc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
}
5914
/* Hooks wiring the SPARC front end into the generic translator loop.  */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5923
/*
 * Translate the guest code of @tb into TCG ops, generating at most
 * @max_insns instructions, by running the generic translator loop with
 * the SPARC-specific hooks.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext dc = {};

    translator_loop(&sparc_tr_ops, &dc.base, cs, tb, max_insns);
}
5930
5931void sparc_tcg_init(void)
5932{
5933    static const char gregnames[32][4] = {
5934        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5935        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5936        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5937        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5938    };
5939    static const char fregnames[32][4] = {
5940        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5941        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5942        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5943        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5944    };
5945
5946    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5947#ifdef TARGET_SPARC64
5948        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5949        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5950#else
5951        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5952#endif
5953        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5954        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5955    };
5956
5957    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5958#ifdef TARGET_SPARC64
5959        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5960        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5961        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5962        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5963          "hstick_cmpr" },
5964        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5965        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5966        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5967        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5968        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5969#endif
5970        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5971        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5972        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5973        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5974        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5975        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5976        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5977        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5978#ifndef CONFIG_USER_ONLY
5979        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5980#endif
5981    };
5982
5983    unsigned int i;
5984
5985    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5986                                         offsetof(CPUSPARCState, regwptr),
5987                                         "regwptr");
5988
5989    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5990        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5991    }
5992
5993    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5994        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5995    }
5996
5997    cpu_regs[0] = NULL;
5998    for (i = 1; i < 8; ++i) {
5999        cpu_regs[i] = tcg_global_mem_new(cpu_env,
6000                                         offsetof(CPUSPARCState, gregs[i]),
6001                                         gregnames[i]);
6002    }
6003
6004    for (i = 8; i < 32; ++i) {
6005        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
6006                                         (i - 8) * sizeof(target_ulong),
6007                                         gregnames[i]);
6008    }
6009
6010    for (i = 0; i < TARGET_DPREGS; i++) {
6011        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
6012                                            offsetof(CPUSPARCState, fpr[i]),
6013                                            fregnames[i]);
6014    }
6015}
6016
6017void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
6018                          target_ulong *data)
6019{
6020    target_ulong pc = data[0];
6021    target_ulong npc = data[1];
6022
6023    env->pc = pc;
6024    if (npc == DYNAMIC_PC) {
6025        /* dynamic NPC: already stored */
6026    } else if (npc & JUMP_PC) {
6027        /* jump PC: use 'cond' and the jump targets of the translation */
6028        if (env->cond) {
6029            env->npc = npc & ~3;
6030        } else {
6031            env->npc = pc + 4;
6032        }
6033    } else {
6034        env->npc = npc;
6035    }
6036}
6037