qemu/target-alpha/translate.c
/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "qemu/host-utils.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/log.h"


#undef ALPHA_DEBUG_DISAS
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif

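/* Translator state for the TB currently being generated.  */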
typedef struct DisasContext DisasContext;
struct DisasContext {
    struct TranslationBlock *tb;
    uint64_t pc;
#ifndef CONFIG_USER_ONLY
    uint64_t palbr;
#endif
    int mem_idx;

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;

    /* implver value for this CPU.  */
    int implver;

    /* The set of registers active in the current context.  */
    TCGv *ir;

    /* Temporaries for $31 and $f31 as source and destination.  */
    TCGv zero;
    TCGv sink;
    /* Temporary for immediate constants.  */
    TCGv lit;

    bool singlestep_enabled;
};

/* Return values from translate_one, indicating the state of the TB.
   Note that zero indicates that we are not exiting the TB.  */

typedef enum {
    NO_EXIT,

    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,

    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,

    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,

    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;

/* global register indexes */
static TCGv_env cpu_env;
static TCGv cpu_std_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock_addr;
static TCGv cpu_lock_st_addr;
static TCGv cpu_lock_value;

#ifndef CONFIG_USER_ONLY
static TCGv cpu_pal_ir[31];
#endif

#include "exec/gen-icount.h"

void alpha_translate_init(void)
{
#define DEF_VAR(V)  { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_st_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    static bool done_init = false;
    int i;

    if (done_init) {
        return;
    }
    done_init = true;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
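    /* In PALmode, r8-r14 and r25 are replaced by the shadow registers.  */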
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}

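/* Register accessors.  Reads of $31 and $f31 always yield zero and
   writes to them are discarded; the load_* helpers return a shared
   zero constant for such reads and the dest_* helpers a scratch
   "sink" temporary for such writes, both allocated on demand.  */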
static TCGv load_zero(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->zero)) {
        ctx->zero = tcg_const_i64(0);
    }
    return ctx->zero;
}

static TCGv dest_sink(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->sink)) {
        ctx->sink = tcg_temp_new();
    }
    return ctx->sink;
}

static TCGv load_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
                         uint8_t lit, bool islit)
{
    if (islit) {
        ctx->lit = tcg_const_i64(lit);
        return ctx->lit;
    } else if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static TCGv load_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static void gen_excp_1(int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(cpu_env, tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static ExitStatus gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_excp_1(exception, error_code);
    return EXIT_NORETURN;
}

static inline ExitStatus gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}

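/* The VAX F/G and IEEE S floating-point memory formats differ from the
   64-bit register format, so loads and stores of those types go through
   helpers that shuffle the bits between the two layouts.  */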
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

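/* Load-locked: record the address and the value loaded so that a later
   store-conditional can check that the location is unchanged.  */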
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* LDQ_U with ra == $31 is UNOP.  Other loads with ra == $31 are
       forms of prefetch, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

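/* STL_C/STQ_C.  In user mode this is deferred to cpu_loop via an
   exception; in system mode it is emulated as a non-atomic
   load-compare-store against the recorded lock address and value.  */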
static ExitStatus gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                        int32_t disp16, int quad)
{
    TCGv addr;

    if (ra == 31) {
        /* ??? Don't bother storing anything.  The user can't tell
           the difference, since the zero register always reads zero.  */
        return NO_EXIT;
    }

#if defined(CONFIG_USER_ONLY)
    addr = cpu_lock_st_addr;
#else
    addr = tcg_temp_local_new();
#endif

    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);

#if defined(CONFIG_USER_ONLY)
    /* ??? This is handled via a complicated version of compare-and-swap
       in the cpu_loop.  Hopefully one day we'll have a real CAS opcode
       in TCG so that this isn't necessary.  */
    return gen_excp(ctx, quad ? EXCP_STQ_C : EXCP_STL_C, ra);
#else
    /* ??? In system mode we are never multi-threaded, so CAS can be
       implemented via a non-atomic load-compare-store sequence.  */
    {
        TCGLabel *lab_fail, *lab_done;
        TCGv val;

        lab_fail = gen_new_label();
        lab_done = gen_new_label();
        tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);

        val = tcg_temp_new();
        tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, quad ? MO_LEQ : MO_LESL);
        tcg_gen_brcond_i64(TCG_COND_NE, val, cpu_lock_value, lab_fail);

        tcg_gen_qemu_st_i64(ctx->ir[ra], addr, ctx->mem_idx,
                            quad ? MO_LEQ : MO_LEUL);
        tcg_gen_movi_i64(ctx->ir[ra], 1);
        tcg_gen_br(lab_done);

        gen_set_label(lab_fail);
        tcg_gen_movi_i64(ctx->ir[ra], 0);

        gen_set_label(lab_done);
        tcg_gen_movi_i64(cpu_lock_addr, -1);

        tcg_temp_free(addr);
        return NO_EXIT;
    }
#endif
}

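/* Test for an address in the kernel "superpage" (kseg), a region that
   is only accessible in kernel mode and whose mapping never changes.  */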
static bool in_superpage(DisasContext *ctx, int64_t addr)
{
    return ((ctx->tb->flags & TB_FLAGS_USER_MODE) == 0
            && addr < 0
            && ((addr >> 41) & 3) == 2
            && addr >> TARGET_VIRT_ADDR_SPACE_BITS == addr >> 63);
}

static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Suppress goto_tb in the case of single-stepping and IO.  */
    if ((ctx->tb->cflags & CF_LAST_IO)
        || ctx->singlestep_enabled || singlestep) {
        return false;
    }
    /* If the destination is in the superpage, the page perms can't change.  */
    if (in_superpage(ctx, dest)) {
        return true;
    }
    /* Check for the dest on the same page as the start of the TB.  */
    return ((ctx->tb->pc ^ dest) & TARGET_PAGE_MASK) == 0;
}

static ExitStatus gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);

    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
    }

    /* Notice branch-to-next; used to initialize RA with the PC.  */
    if (disp == 0) {
        return NO_EXIT;
    } else if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);
        return EXIT_GOTO_TB;
    } else {
        tcg_gen_movi_i64(cpu_pc, dest);
        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                     TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);

        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + 1);

        return EXIT_GOTO_TB;
    } else {
        TCGv_i64 z = tcg_const_i64(0);
        TCGv_i64 d = tcg_const_i64(dest);
        TCGv_i64 p = tcg_const_i64(ctx->pc);

        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);

        tcg_temp_free_i64(z);
        tcg_temp_free_i64(d);
        tcg_temp_free_i64(p);
        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                            int32_t disp, int mask)
{
    TCGv cmp_tmp;

    if (mask) {
        cmp_tmp = tcg_temp_new();
        tcg_gen_andi_i64(cmp_tmp, load_gpr(ctx, ra), 1);
    } else {
        cmp_tmp = load_gpr(ctx, ra);
    }

    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

/* Fold -0.0 for comparison with COND.  */

static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}

static ExitStatus gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}

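/* Qualifier bits taken from the fn11 function field of IEEE
   floating-point operate instructions.  */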
#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */

static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "fpu/softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal as per spec.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}

static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}

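/* CVTLQ.  A longword sits in an FP register with bits <31:30> at
   <63:62> and bits <29:0> at <58:29>; gather them back into a
   sign-extended quadword.  */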
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_sari_i64(tmp, vb, 32);
    tcg_gen_shri_i64(vc, vb, 29);
    tcg_gen_andi_i64(tmp, tmp, (int32_t)0xc0000000);
    tcg_gen_andi_i64(vc, vc, 0x3fffffff);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(tmp);
}

static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)

static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    /* No need to set flushzero, since we have an integer output.  */
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}

static void gen_ieee_intcvt(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb, vc;

    gen_qual_roundmode(ctx, fn11);
    vb = load_fpr(ctx, rb);
    vc = dest_fpr(ctx, rc);

    /* The only exception that can be raised by integer conversion
       is inexact.  Thus we only need to worry about exceptions when
       inexact handling is requested.  */
    if (fn11 & QUAL_I) {
        helper(vc, cpu_env, vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(vc, cpu_env, vb);
    }
}

#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)

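/* Compute vc = (vb & ~mask) | ((inv_a ? ~va : va) & mask): the building
   block for the CPYS family of sign-copy operations.  */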
static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}

static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)

static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)

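/* Expand the 8-bit byte-select literal of ZAPNOT into a 64-bit mask;
   e.g. 0x0f selects the low four bytes, giving 0x00000000ffffffff.  */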
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int i;

    for (i = 0; i < 8; ++i) {
        if ((lit >> i) & 1) {
            mask |= 0xffull << (i * 8);
        }
    }
    return mask;
}

/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}

/* EXTWH, EXTLH, EXTQH */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        tcg_gen_shli_i64(vc, va, (64 - lit * 8) & 0x3f);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        tcg_gen_shri_i64(vc, va, (lit & 7) * 8);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* INSWH, INSLH, INSQH */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask and extract
       bits <15:8> and apply that zap at the end.  This is equivalent to simply
       performing the zap first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        lit &= 7;
        if (unlikely(lit == 0)) {
            tcg_gen_movi_i64(vc, 0);
        } else {
            tcg_gen_shri_i64(vc, tmp, 64 - lit * 8);
        }
    } else {
        TCGv shift = tcg_temp_new();

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.  Do this
           portably by splitting the shift into two parts: shift_count-1 and 1.
           Arrange for the -1 by using ones-complement instead of
           twos-complement in the negation: ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}

/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask
       the same number of byte slots as the data and apply the zap
       at the end.  This is equivalent to simply performing the zap
       first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        tcg_gen_shli_i64(vc, tmp, (lit & 7) * 8);
    } else {
        TCGv shift = tcg_temp_new();
        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}

/* MSKWH, MSKLH, MSKQH */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

/* MSKBL, MSKWL, MSKLL, MSKQL */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

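/* RC/RS: copy the current interrupt flag into RA, then force it to SET.  */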
static void gen_rx(DisasContext *ctx, int ra, int set)
{
    TCGv_i32 tmp;

    if (ra != 31) {
        tcg_gen_ld8u_i64(ctx->ir[ra], cpu_env,
                         offsetof(CPUAlphaState, intr_flag));
    }

    tmp = tcg_const_i32(set);
    tcg_gen_st8_i32(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
    tcg_temp_free_i32(tmp);
}

static ExitStatus gen_call_pal(DisasContext *ctx, int palcode)
{
    /* We're emulating OSF/1 PALcode.  Many of these are trivial accesses
       to internal cpu registers.  */

    /* Unprivileged PAL call */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return NO_EXIT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code */
    if (palcode < 0x40 && (ctx->tb->flags & TB_FLAGS_USER_MODE) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35: {
            /* SWPIPL */
            TCGv tmp;

            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));

            /* But make sure to store only the 3 IPL bits from the user.  */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, ps));
            tcg_temp_free(tmp);
            break;
        }

        case 0x36:
            /* RDPS */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));
            break;
        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return NO_EXIT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->pc;
        uint64_t entry = ctx->palbr;

        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination is running in PALmode, we don't really
           need the page permissions check.  We'll see the existence of
           the page when we create the TB, and we'll flush all TBs if
           we change the PAL base register.  */
        if (!ctx->singlestep_enabled && !(ctx->tb->cflags & CF_LAST_IO)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb((uintptr_t)ctx->tb);
            return EXIT_GOTO_TB;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return EXIT_PC_UPDATED;
        }
    }
#endif
}

#ifndef CONFIG_USER_ONLY

#define PR_BYTE         0x100000
#define PR_LONG         0x200000

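/* Map an IPR number, as used by the HW_MFPR/HW_MTPR instructions, to
   the offset of the backing field in CPUAlphaState.  PR_BYTE and
   PR_LONG flag fields narrower than 64 bits; a return of 0 means the
   register is unknown (read-zero, write-ignore).  */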
static int cpu_pr_data(int pr)
{
    switch (pr) {
    case  0: return offsetof(CPUAlphaState, ps) | PR_BYTE;
    case  1: return offsetof(CPUAlphaState, fen) | PR_BYTE;
    case  2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
    case  3: return offsetof(CPUAlphaState, trap_arg0);
    case  4: return offsetof(CPUAlphaState, trap_arg1);
    case  5: return offsetof(CPUAlphaState, trap_arg2);
    case  6: return offsetof(CPUAlphaState, exc_addr);
    case  7: return offsetof(CPUAlphaState, palbr);
    case  8: return offsetof(CPUAlphaState, ptbr);
    case  9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 40 ... 63:
        return offsetof(CPUAlphaState, scratch[pr - 40]);

    case 251:
        return offsetof(CPUAlphaState, alarm_expire);
    }
    return 0;
}

static ExitStatus gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (use_icount) {
            gen_io_start();
            helper(va);
            gen_io_end();
            return EXIT_PC_STALE;
        } else {
            helper(va);
        }
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_BYTE) {
            tcg_gen_ld8u_i64(va, cpu_env, data & ~PR_BYTE);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return NO_EXIT;
}

static ExitStatus gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    TCGv tmp;
    int data;

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT */
        tmp = tcg_const_i64(1);
        tcg_gen_st32_i64(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                       offsetof(CPUState, halted));
        return gen_excp(ctx, EXCP_HLT, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return EXIT_PC_STALE;

    case 251:
        /* ALARM */
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the TBs
           that ended with a CALL_PAL.  Since the base register usually only
           changes during boot, flushing everything works well.  */
        gen_helper_tb_flush(cpu_env);
        return EXIT_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_BYTE) {
                tcg_gen_st8_i64(vb, cpu_env, data & ~PR_BYTE);
            } else if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return NO_EXIT;
}
#endif /* !CONFIG_USER_ONLY */

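/* Decode-time guards for translate_one: branch to invalid_opc unless
   the literal, TB-flag, or register-field constraint is met.  */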
#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tb->flags & (FLAG)) == 0) {   \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    ExitStatus ret;

    /* Decode all instruction fields */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);

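    /* $31 in the RB slot reads as zero, so fold it to a zero literal;
       this lets the special cases below treat register and literal
       operands uniformly.  */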
1443    if (rb == 31 && !islit) {
1444        islit = true;
1445        lit = 0;
1446    }
1447
1448    ret = NO_EXIT;
1449    switch (opc) {
1450    case 0x00:
1451        /* CALL_PAL */
1452        ret = gen_call_pal(ctx, insn & 0x03ffffff);
1453        break;
1454    case 0x01:
1455        /* OPC01 */
1456        goto invalid_opc;
1457    case 0x02:
1458        /* OPC02 */
1459        goto invalid_opc;
1460    case 0x03:
1461        /* OPC03 */
1462        goto invalid_opc;
1463    case 0x04:
1464        /* OPC04 */
1465        goto invalid_opc;
1466    case 0x05:
1467        /* OPC05 */
1468        goto invalid_opc;
1469    case 0x06:
1470        /* OPC06 */
1471        goto invalid_opc;
1472    case 0x07:
1473        /* OPC07 */
1474        goto invalid_opc;
1475
1476    case 0x09:
1477        /* LDAH */
1478        disp16 = (uint32_t)disp16 << 16;
1479        /* fall through */
1480    case 0x08:
1481        /* LDA */
1482        va = dest_gpr(ctx, ra);
1483        /* It's worth special-casing immediate loads.  */
1484        if (rb == 31) {
1485            tcg_gen_movi_i64(va, disp16);
1486        } else {
1487            tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
1488        }
1489        break;
1490
1491    case 0x0A:
1492        /* LDBU */
1493        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1494        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
1495        break;
1496    case 0x0B:
1497        /* LDQ_U */
1498        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
1499        break;
1500    case 0x0C:
1501        /* LDWU */
1502        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1503        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
1504        break;
1505    case 0x0D:
1506        /* STW */
1507        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1508        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
1509        break;
1510    case 0x0E:
1511        /* STB */
1512        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1513        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
1514        break;
1515    case 0x0F:
1516        /* STQ_U */
1517        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
1518        break;
1519
1520    case 0x10:
1521        vc = dest_gpr(ctx, rc);
1522        vb = load_gpr_lit(ctx, rb, lit, islit);
1523
1524        if (ra == 31) {
1525            if (fn7 == 0x00) {
1526                /* Special case ADDL as SEXTL.  */
1527                tcg_gen_ext32s_i64(vc, vb);
1528                break;
1529            }
1530            if (fn7 == 0x29) {
1531                /* Special case SUBQ as NEGQ.  */
1532                tcg_gen_neg_i64(vc, vb);
1533                break;
1534            }
1535        }
1536
1537        va = load_gpr(ctx, ra);
1538        switch (fn7) {
1539        case 0x00:
1540            /* ADDL */
1541            tcg_gen_add_i64(vc, va, vb);
1542            tcg_gen_ext32s_i64(vc, vc);
1543            break;
1544        case 0x02:
1545            /* S4ADDL */
1546            tmp = tcg_temp_new();
1547            tcg_gen_shli_i64(tmp, va, 2);
1548            tcg_gen_add_i64(tmp, tmp, vb);
1549            tcg_gen_ext32s_i64(vc, tmp);
1550            tcg_temp_free(tmp);
1551            break;
1552        case 0x09:
1553            /* SUBL */
1554            tcg_gen_sub_i64(vc, va, vb);
1555            tcg_gen_ext32s_i64(vc, vc);
1556            break;
1557        case 0x0B:
1558            /* S4SUBL */
1559            tmp = tcg_temp_new();
1560            tcg_gen_shli_i64(tmp, va, 2);
1561            tcg_gen_sub_i64(tmp, tmp, vb);
1562            tcg_gen_ext32s_i64(vc, tmp);
1563            tcg_temp_free(tmp);
1564            break;
1565        case 0x0F:
1566            /* CMPBGE */
1567            if (ra == 31) {
1568                /* Special case 0 >= X as X == 0.  */
1569                gen_helper_cmpbe0(vc, vb);
1570            } else {
1571                gen_helper_cmpbge(vc, va, vb);
1572            }
1573            break;
1574        case 0x12:
1575            /* S8ADDL */
1576            tmp = tcg_temp_new();
1577            tcg_gen_shli_i64(tmp, va, 3);
1578            tcg_gen_add_i64(tmp, tmp, vb);
1579            tcg_gen_ext32s_i64(vc, tmp);
1580            tcg_temp_free(tmp);
1581            break;
1582        case 0x1B:
1583            /* S8SUBL */
1584            tmp = tcg_temp_new();
1585            tcg_gen_shli_i64(tmp, va, 3);
1586            tcg_gen_sub_i64(tmp, tmp, vb);
1587            tcg_gen_ext32s_i64(vc, tmp);
1588            tcg_temp_free(tmp);
1589            break;
1590        case 0x1D:
1591            /* CMPULT */
1592            tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
1593            break;
1594        case 0x20:
1595            /* ADDQ */
1596            tcg_gen_add_i64(vc, va, vb);
1597            break;
1598        case 0x22:
1599            /* S4ADDQ */
1600            tmp = tcg_temp_new();
1601            tcg_gen_shli_i64(tmp, va, 2);
1602            tcg_gen_add_i64(vc, tmp, vb);
1603            tcg_temp_free(tmp);
1604            break;
1605        case 0x29:
1606            /* SUBQ */
1607            tcg_gen_sub_i64(vc, va, vb);
1608            break;
1609        case 0x2B:
1610            /* S4SUBQ */
1611            tmp = tcg_temp_new();
1612            tcg_gen_shli_i64(tmp, va, 2);
1613            tcg_gen_sub_i64(vc, tmp, vb);
1614            tcg_temp_free(tmp);
1615            break;
1616        case 0x2D:
1617            /* CMPEQ */
1618            tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
1619            break;
1620        case 0x32:
1621            /* S8ADDQ */
1622            tmp = tcg_temp_new();
1623            tcg_gen_shli_i64(tmp, va, 3);
1624            tcg_gen_add_i64(vc, tmp, vb);
1625            tcg_temp_free(tmp);
1626            break;
1627        case 0x3B:
1628            /* S8SUBQ */
1629            tmp = tcg_temp_new();
1630            tcg_gen_shli_i64(tmp, va, 3);
1631            tcg_gen_sub_i64(vc, tmp, vb);
1632            tcg_temp_free(tmp);
1633            break;
1634        case 0x3D:
1635            /* CMPULE */
1636            tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
1637            break;
1638        case 0x40:
1639            /* ADDL/V */
1640            tmp = tcg_temp_new();
1641            tcg_gen_ext32s_i64(tmp, va);
1642            tcg_gen_ext32s_i64(vc, vb);
1643            tcg_gen_add_i64(tmp, tmp, vc);
1644            tcg_gen_ext32s_i64(vc, tmp);
1645            gen_helper_check_overflow(cpu_env, vc, tmp);
1646            tcg_temp_free(tmp);
1647            break;
1648        case 0x49:
1649            /* SUBL/V */
1650            tmp = tcg_temp_new();
1651            tcg_gen_ext32s_i64(tmp, va);
1652            tcg_gen_ext32s_i64(vc, vb);
1653            tcg_gen_sub_i64(tmp, tmp, vc);
1654            tcg_gen_ext32s_i64(vc, tmp);
1655            gen_helper_check_overflow(cpu_env, vc, tmp);
1656            tcg_temp_free(tmp);
1657            break;
1658        case 0x4D:
1659            /* CMPLT */
1660            tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
1661            break;
1662        case 0x60:
1663            /* ADDQ/V */
1664            tmp = tcg_temp_new();
1665            tmp2 = tcg_temp_new();
1666            tcg_gen_eqv_i64(tmp, va, vb);
1667            tcg_gen_mov_i64(tmp2, va);
1668            tcg_gen_add_i64(vc, va, vb);
1669            tcg_gen_xor_i64(tmp2, tmp2, vc);
1670            tcg_gen_and_i64(tmp, tmp, tmp2);
1671            tcg_gen_shri_i64(tmp, tmp, 63);
1672            tcg_gen_movi_i64(tmp2, 0);
1673            gen_helper_check_overflow(cpu_env, tmp, tmp2);
1674            tcg_temp_free(tmp);
1675            tcg_temp_free(tmp2);
1676            break;
1677        case 0x69:
1678            /* SUBQ/V */
1679            tmp = tcg_temp_new();
1680            tmp2 = tcg_temp_new();
1681            tcg_gen_xor_i64(tmp, va, vb);
1682            tcg_gen_mov_i64(tmp2, va);
1683            tcg_gen_sub_i64(vc, va, vb);
1684            tcg_gen_xor_i64(tmp2, tmp2, vc);
1685            tcg_gen_and_i64(tmp, tmp, tmp2);
1686            tcg_gen_shri_i64(tmp, tmp, 63);
1687            tcg_gen_movi_i64(tmp2, 0);
1688            gen_helper_check_overflow(cpu_env, tmp, tmp2);
1689            tcg_temp_free(tmp);
1690            tcg_temp_free(tmp2);
1691            break;
1692        case 0x6D:
1693            /* CMPLE */
1694            tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
1695            break;
1696        default:
1697            goto invalid_opc;
1698        }
1699        break;
1700
1701    case 0x11:
1702        if (fn7 == 0x20) {
1703            if (rc == 31) {
1704                /* Special case BIS as NOP.  */
1705                break;
1706            }
1707            if (ra == 31) {
1708                /* Special case BIS as MOV.  */
1709                vc = dest_gpr(ctx, rc);
1710                if (islit) {
1711                    tcg_gen_movi_i64(vc, lit);
1712                } else {
1713                    tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
1714                }
1715                break;
1716            }
1717        }
1718
1719        vc = dest_gpr(ctx, rc);
1720        vb = load_gpr_lit(ctx, rb, lit, islit);
1721
1722        if (fn7 == 0x28 && ra == 31) {
1723            /* Special case ORNOT as NOT.  */
1724            tcg_gen_not_i64(vc, vb);
1725            break;
1726        }
1727
1728        va = load_gpr(ctx, ra);
1729        switch (fn7) {
1730        case 0x00:
1731            /* AND */
1732            tcg_gen_and_i64(vc, va, vb);
1733            break;
1734        case 0x08:
1735            /* BIC */
1736            tcg_gen_andc_i64(vc, va, vb);
1737            break;
1738        case 0x14:
1739            /* CMOVLBS */
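                    /* The movcond writes either VB or the old value of RC,
                       so RC must be loaded as a source as well.  */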
1740            tmp = tcg_temp_new();
1741            tcg_gen_andi_i64(tmp, va, 1);
1742            tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
1743                                vb, load_gpr(ctx, rc));
1744            tcg_temp_free(tmp);
1745            break;
1746        case 0x16:
1747            /* CMOVLBC */
1748            tmp = tcg_temp_new();
1749            tcg_gen_andi_i64(tmp, va, 1);
1750            tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
1751                                vb, load_gpr(ctx, rc));
1752            tcg_temp_free(tmp);
1753            break;
1754        case 0x20:
1755            /* BIS */
1756            tcg_gen_or_i64(vc, va, vb);
1757            break;
1758        case 0x24:
1759            /* CMOVEQ */
1760            tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
1761                                vb, load_gpr(ctx, rc));
1762            break;
1763        case 0x26:
1764            /* CMOVNE */
1765            tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
1766                                vb, load_gpr(ctx, rc));
1767            break;
1768        case 0x28:
1769            /* ORNOT */
1770            tcg_gen_orc_i64(vc, va, vb);
1771            break;
1772        case 0x40:
1773            /* XOR */
1774            tcg_gen_xor_i64(vc, va, vb);
1775            break;
1776        case 0x44:
1777            /* CMOVLT */
1778            tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
1779                                vb, load_gpr(ctx, rc));
1780            break;
1781        case 0x46:
1782            /* CMOVGE */
1783            tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
1784                                vb, load_gpr(ctx, rc));
1785            break;
1786        case 0x48:
1787            /* EQV */
1788            tcg_gen_eqv_i64(vc, va, vb);
1789            break;
1790        case 0x61:
1791            /* AMASK */
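                    /* The set of implemented architecture extensions is
                       stashed in the TB flags; AMASK clears those bits
                       from the operand.  */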
1792            REQUIRE_REG_31(ra);
1793            {
1794                uint64_t amask = ctx->tb->flags >> TB_FLAGS_AMASK_SHIFT;
1795                tcg_gen_andi_i64(vc, vb, ~amask);
1796            }
1797            break;
1798        case 0x64:
1799            /* CMOVLE */
1800            tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
1801                                vb, load_gpr(ctx, rc));
1802            break;
1803        case 0x66:
1804            /* CMOVGT */
1805            tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
1806                                vb, load_gpr(ctx, rc));
1807            break;
1808        case 0x6C:
1809            /* IMPLVER */
1810            REQUIRE_REG_31(ra);
1811            tcg_gen_movi_i64(vc, ctx->implver);
1812            break;
1813        default:
1814            goto invalid_opc;
1815        }
1816        break;
1817
1818    case 0x12:
1819        vc = dest_gpr(ctx, rc);
1820        va = load_gpr(ctx, ra);
1821        switch (fn7) {
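            /* For the MSK/EXT/INS generators below, the final argument
               is a byte mask giving the operand size: 0x01 byte, 0x03
               word, 0x0f longword, 0xff quadword.  */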
1822        case 0x02:
1823            /* MSKBL */
1824            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
1825            break;
1826        case 0x06:
1827            /* EXTBL */
1828            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
1829            break;
1830        case 0x0B:
1831            /* INSBL */
1832            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
1833            break;
1834        case 0x12:
1835            /* MSKWL */
1836            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
1837            break;
1838        case 0x16:
1839            /* EXTWL */
1840            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
1841            break;
1842        case 0x1B:
1843            /* INSWL */
1844            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
1845            break;
1846        case 0x22:
1847            /* MSKLL */
1848            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
1849            break;
1850        case 0x26:
1851            /* EXTLL */
1852            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
1853            break;
1854        case 0x2B:
1855            /* INSLL */
1856            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
1857            break;
1858        case 0x30:
1859            /* ZAP */
1860            if (islit) {
1861                gen_zapnoti(vc, va, ~lit);
1862            } else {
1863                gen_helper_zap(vc, va, load_gpr(ctx, rb));
1864            }
1865            break;
1866        case 0x31:
1867            /* ZAPNOT */
1868            if (islit) {
1869                gen_zapnoti(vc, va, lit);
1870            } else {
1871                gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
1872            }
1873            break;
1874        case 0x32:
1875            /* MSKQL */
1876            gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
1877            break;
1878        case 0x34:
1879            /* SRL */
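                    /* Alpha shifts use only the low 6 bits of Rb; the
                       masking also keeps the TCG shift count within its
                       defined 0..63 range.  */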
1880            if (islit) {
1881                tcg_gen_shri_i64(vc, va, lit & 0x3f);
1882            } else {
1883                tmp = tcg_temp_new();
1884                vb = load_gpr(ctx, rb);
1885                tcg_gen_andi_i64(tmp, vb, 0x3f);
1886                tcg_gen_shr_i64(vc, va, tmp);
1887                tcg_temp_free(tmp);
1888            }
1889            break;
1890        case 0x36:
1891            /* EXTQL */
1892            gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
1893            break;
1894        case 0x39:
1895            /* SLL */
1896            if (islit) {
1897                tcg_gen_shli_i64(vc, va, lit & 0x3f);
1898            } else {
1899                tmp = tcg_temp_new();
1900                vb = load_gpr(ctx, rb);
1901                tcg_gen_andi_i64(tmp, vb, 0x3f);
1902                tcg_gen_shl_i64(vc, va, tmp);
1903                tcg_temp_free(tmp);
1904            }
1905            break;
1906        case 0x3B:
1907            /* INSQL */
1908            gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
1909            break;
1910        case 0x3C:
1911            /* SRA */
1912            if (islit) {
1913                tcg_gen_sari_i64(vc, va, lit & 0x3f);
1914            } else {
1915                tmp = tcg_temp_new();
1916                vb = load_gpr(ctx, rb);
1917                tcg_gen_andi_i64(tmp, vb, 0x3f);
1918                tcg_gen_sar_i64(vc, va, tmp);
1919                tcg_temp_free(tmp);
1920            }
1921            break;
1922        case 0x52:
1923            /* MSKWH */
1924            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
1925            break;
1926        case 0x57:
1927            /* INSWH */
1928            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
1929            break;
1930        case 0x5A:
1931            /* EXTWH */
1932            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
1933            break;
1934        case 0x62:
1935            /* MSKLH */
1936            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
1937            break;
1938        case 0x67:
1939            /* INSLH */
1940            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
1941            break;
1942        case 0x6A:
1943            /* EXTLH */
1944            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
1945            break;
1946        case 0x72:
1947            /* MSKQH */
1948            gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
1949            break;
1950        case 0x77:
1951            /* INSQH */
1952            gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
1953            break;
1954        case 0x7A:
1955            /* EXTQH */
1956            gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
1957            break;
1958        default:
1959            goto invalid_opc;
1960        }
1961        break;
1962
1963    case 0x13:
1964        vc = dest_gpr(ctx, rc);
1965        vb = load_gpr_lit(ctx, rb, lit, islit);
1966        va = load_gpr(ctx, ra);
1967        switch (fn7) {
1968        case 0x00:
1969            /* MULL */
1970            tcg_gen_mul_i64(vc, va, vb);
1971            tcg_gen_ext32s_i64(vc, vc);
1972            break;
1973        case 0x20:
1974            /* MULQ */
1975            tcg_gen_mul_i64(vc, va, vb);
1976            break;
1977        case 0x30:
1978            /* UMULH */
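                    /* mulu2 computes the full 128-bit unsigned product:
                       the low half lands in TMP and is discarded, the
                       high half in VC.  */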
1979            tmp = tcg_temp_new();
1980            tcg_gen_mulu2_i64(tmp, vc, va, vb);
1981            tcg_temp_free(tmp);
1982            break;
1983        case 0x40:
1984            /* MULL/V */
1985            tmp = tcg_temp_new();
1986            tcg_gen_ext32s_i64(tmp, va);
1987            tcg_gen_ext32s_i64(vc, vb);
1988            tcg_gen_mul_i64(tmp, tmp, vc);
1989            tcg_gen_ext32s_i64(vc, tmp);
1990            gen_helper_check_overflow(cpu_env, vc, tmp);
1991            tcg_temp_free(tmp);
1992            break;
1993        case 0x60:
1994            /* MULQ/V */
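                    /* The signed 128-bit product overflows a quadword iff
                       the high half (TMP) is not the sign extension of
                       the low half (TMP2 below).  */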
1995            tmp = tcg_temp_new();
1996            tmp2 = tcg_temp_new();
1997            tcg_gen_muls2_i64(vc, tmp, va, vb);
1998            tcg_gen_sari_i64(tmp2, vc, 63);
1999            gen_helper_check_overflow(cpu_env, tmp, tmp2);
2000            tcg_temp_free(tmp);
2001            tcg_temp_free(tmp2);
2002            break;
2003        default:
2004            goto invalid_opc;
2005        }
2006        break;
2007
2008    case 0x14:
2009        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2010        vc = dest_fpr(ctx, rc);
2011        switch (fpfn) { /* fn11 & 0x3F */
2012        case 0x04:
2013            /* ITOFS */
2014            REQUIRE_REG_31(rb);
2015            t32 = tcg_temp_new_i32();
2016            va = load_gpr(ctx, ra);
2017            tcg_gen_extrl_i64_i32(t32, va);
2018            gen_helper_memory_to_s(vc, t32);
2019            tcg_temp_free_i32(t32);
2020            break;
2021        case 0x0A:
2022            /* SQRTF */
2023            REQUIRE_REG_31(ra);
2024            vb = load_fpr(ctx, rb);
2025            gen_helper_sqrtf(vc, cpu_env, vb);
2026            break;
2027        case 0x0B:
2028            /* SQRTS */
2029            REQUIRE_REG_31(ra);
2030            gen_sqrts(ctx, rb, rc, fn11);
2031            break;
2032        case 0x14:
2033            /* ITOFF */
2034            REQUIRE_REG_31(rb);
2035            t32 = tcg_temp_new_i32();
2036            va = load_gpr(ctx, ra);
2037            tcg_gen_extrl_i64_i32(t32, va);
2038            gen_helper_memory_to_f(vc, t32);
2039            tcg_temp_free_i32(t32);
2040            break;
2041        case 0x24:
2042            /* ITOFT */
2043            REQUIRE_REG_31(rb);
2044            va = load_gpr(ctx, ra);
2045            tcg_gen_mov_i64(vc, va);
2046            break;
2047        case 0x2A:
2048            /* SQRTG */
2049            REQUIRE_REG_31(ra);
2050            vb = load_fpr(ctx, rb);
2051            gen_helper_sqrtg(vc, cpu_env, vb);
2052            break;
2053        case 0x2B:
2054            /* SQRTT */
2055            REQUIRE_REG_31(ra);
2056            gen_sqrtt(ctx, rb, rc, fn11);
2057            break;
2058        default:
2059            goto invalid_opc;
2060        }
2061        break;
2062
2063    case 0x15:
2064        /* VAX floating-point */
2065        /* XXX: rounding mode and trap qualifiers are ignored (!) */
2066        vc = dest_fpr(ctx, rc);
2067        vb = load_fpr(ctx, rb);
2068        va = load_fpr(ctx, ra);
2069        switch (fpfn) { /* fn11 & 0x3F */
2070        case 0x00:
2071            /* ADDF */
2072            gen_helper_addf(vc, cpu_env, va, vb);
2073            break;
2074        case 0x01:
2075            /* SUBF */
2076            gen_helper_subf(vc, cpu_env, va, vb);
2077            break;
2078        case 0x02:
2079            /* MULF */
2080            gen_helper_mulf(vc, cpu_env, va, vb);
2081            break;
2082        case 0x03:
2083            /* DIVF */
2084            gen_helper_divf(vc, cpu_env, va, vb);
2085            break;
2086        case 0x1E:
2087            /* CVTDG -- TODO */
2088            REQUIRE_REG_31(ra);
2089            goto invalid_opc;
2090        case 0x20:
2091            /* ADDG */
2092            gen_helper_addg(vc, cpu_env, va, vb);
2093            break;
2094        case 0x21:
2095            /* SUBG */
2096            gen_helper_subg(vc, cpu_env, va, vb);
2097            break;
2098        case 0x22:
2099            /* MULG */
2100            gen_helper_mulg(vc, cpu_env, va, vb);
2101            break;
2102        case 0x23:
2103            /* DIVG */
2104            gen_helper_divg(vc, cpu_env, va, vb);
2105            break;
2106        case 0x25:
2107            /* CMPGEQ */
2108            gen_helper_cmpgeq(vc, cpu_env, va, vb);
2109            break;
2110        case 0x26:
2111            /* CMPGLT */
2112            gen_helper_cmpglt(vc, cpu_env, va, vb);
2113            break;
2114        case 0x27:
2115            /* CMPGLE */
2116            gen_helper_cmpgle(vc, cpu_env, va, vb);
2117            break;
2118        case 0x2C:
2119            /* CVTGF */
2120            REQUIRE_REG_31(ra);
2121            gen_helper_cvtgf(vc, cpu_env, vb);
2122            break;
2123        case 0x2D:
2124            /* CVTGD -- TODO */
2125            REQUIRE_REG_31(ra);
2126            goto invalid_opc;
2127        case 0x2F:
2128            /* CVTGQ */
2129            REQUIRE_REG_31(ra);
2130            gen_helper_cvtgq(vc, cpu_env, vb);
2131            break;
2132        case 0x3C:
2133            /* CVTQF */
2134            REQUIRE_REG_31(ra);
2135            gen_helper_cvtqf(vc, cpu_env, vb);
2136            break;
2137        case 0x3E:
2138            /* CVTQG */
2139            REQUIRE_REG_31(ra);
2140            gen_helper_cvtqg(vc, cpu_env, vb);
2141            break;
2142        default:
2143            goto invalid_opc;
2144        }
2145        break;
2146
2147    case 0x16:
2148        /* IEEE floating-point */
2149        switch (fpfn) { /* fn11 & 0x3F */
2150        case 0x00:
2151            /* ADDS */
2152            gen_adds(ctx, ra, rb, rc, fn11);
2153            break;
2154        case 0x01:
2155            /* SUBS */
2156            gen_subs(ctx, ra, rb, rc, fn11);
2157            break;
2158        case 0x02:
2159            /* MULS */
2160            gen_muls(ctx, ra, rb, rc, fn11);
2161            break;
2162        case 0x03:
2163            /* DIVS */
2164            gen_divs(ctx, ra, rb, rc, fn11);
2165            break;
2166        case 0x20:
2167            /* ADDT */
2168            gen_addt(ctx, ra, rb, rc, fn11);
2169            break;
2170        case 0x21:
2171            /* SUBT */
2172            gen_subt(ctx, ra, rb, rc, fn11);
2173            break;
2174        case 0x22:
2175            /* MULT */
2176            gen_mult(ctx, ra, rb, rc, fn11);
2177            break;
2178        case 0x23:
2179            /* DIVT */
2180            gen_divt(ctx, ra, rb, rc, fn11);
2181            break;
2182        case 0x24:
2183            /* CMPTUN */
2184            gen_cmptun(ctx, ra, rb, rc, fn11);
2185            break;
2186        case 0x25:
2187            /* CMPTEQ */
2188            gen_cmpteq(ctx, ra, rb, rc, fn11);
2189            break;
2190        case 0x26:
2191            /* CMPTLT */
2192            gen_cmptlt(ctx, ra, rb, rc, fn11);
2193            break;
2194        case 0x27:
2195            /* CMPTLE */
2196            gen_cmptle(ctx, ra, rb, rc, fn11);
2197            break;
2198        case 0x2C:
2199            REQUIRE_REG_31(ra);
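                    /* Function code 0x2C is shared: the full 11-bit field
                       distinguishes CVTST (0x2AC, 0x6AC) from CVTTS.  */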
2200            if (fn11 == 0x2AC || fn11 == 0x6AC) {
2201                /* CVTST */
2202                gen_cvtst(ctx, rb, rc, fn11);
2203            } else {
2204                /* CVTTS */
2205                gen_cvtts(ctx, rb, rc, fn11);
2206            }
2207            break;
2208        case 0x2F:
2209            /* CVTTQ */
2210            REQUIRE_REG_31(ra);
2211            gen_cvttq(ctx, rb, rc, fn11);
2212            break;
2213        case 0x3C:
2214            /* CVTQS */
2215            REQUIRE_REG_31(ra);
2216            gen_cvtqs(ctx, rb, rc, fn11);
2217            break;
2218        case 0x3E:
2219            /* CVTQT */
2220            REQUIRE_REG_31(ra);
2221            gen_cvtqt(ctx, rb, rc, fn11);
2222            break;
2223        default:
2224            goto invalid_opc;
2225        }
2226        break;
2227
2228    case 0x17:
2229        switch (fn11) {
2230        case 0x010:
2231            /* CVTLQ */
2232            REQUIRE_REG_31(ra);
2233            vc = dest_fpr(ctx, rc);
2234            vb = load_fpr(ctx, rb);
2235            gen_cvtlq(vc, vb);
2236            break;
2237        case 0x020:
2238            /* CPYS */
2239            if (rc == 31) {
2240                /* Special case CPYS as FNOP.  */
2241            } else {
2242                vc = dest_fpr(ctx, rc);
2243                va = load_fpr(ctx, ra);
2244                if (ra == rb) {
2245                    /* Special case CPYS as FMOV.  */
2246                    tcg_gen_mov_i64(vc, va);
2247                } else {
2248                    vb = load_fpr(ctx, rb);
2249                    gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
2250                }
2251            }
2252            break;
2253        case 0x021:
2254            /* CPYSN */
2255            vc = dest_fpr(ctx, rc);
2256            vb = load_fpr(ctx, rb);
2257            va = load_fpr(ctx, ra);
2258            gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
2259            break;
2260        case 0x022:
2261            /* CPYSE */
2262            vc = dest_fpr(ctx, rc);
2263            vb = load_fpr(ctx, rb);
2264            va = load_fpr(ctx, ra);
2265            gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
2266            break;
2267        case 0x024:
2268            /* MT_FPCR */
2269            va = load_fpr(ctx, ra);
2270            gen_helper_store_fpcr(cpu_env, va);
2271            if (ctx->tb_rm == QUAL_RM_D) {
2272            /* Force the rounding mode to be copied into fp_status again
2273               the next time dynamic rounding is used.  */
2274                ctx->tb_rm = -1;
2275            }
2276            break;
2277        case 0x025:
2278            /* MF_FPCR */
2279            va = dest_fpr(ctx, ra);
2280            gen_helper_load_fpcr(va, cpu_env);
2281            break;
2282        case 0x02A:
2283            /* FCMOVEQ */
2284            gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
2285            break;
2286        case 0x02B:
2287            /* FCMOVNE */
2288            gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
2289            break;
2290        case 0x02C:
2291            /* FCMOVLT */
2292            gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
2293            break;
2294        case 0x02D:
2295            /* FCMOVGE */
2296            gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
2297            break;
2298        case 0x02E:
2299            /* FCMOVLE */
2300            gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
2301            break;
2302        case 0x02F:
2303            /* FCMOVGT */
2304            gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
2305            break;
2306        case 0x030: /* CVTQL */
2307        case 0x130: /* CVTQL/V */
2308        case 0x530: /* CVTQL/SV */
2309            REQUIRE_REG_31(ra);
2310            vc = dest_fpr(ctx, rc);
2311            vb = load_fpr(ctx, rb);
2312            gen_helper_cvtql(vc, cpu_env, vb);
2313            gen_fp_exc_raise(rc, fn11);
2314            break;
2315        default:
2316            goto invalid_opc;
2317        }
2318        break;
2319
2320    case 0x18:
2321        switch ((uint16_t)disp16) {
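            /* The MISC instructions encode their function in the 16-bit
               displacement field.  */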
2322        case 0x0000:
2323            /* TRAPB */
2324            /* No-op.  */
2325            break;
2326        case 0x0400:
2327            /* EXCB */
2328            /* No-op.  */
2329            break;
2330        case 0x4000:
2331            /* MB */
2332            /* No-op */
2333            break;
2334        case 0x4400:
2335            /* WMB */
2336            /* No-op */
2337            break;
2338        case 0x8000:
2339            /* FETCH */
2340            /* No-op */
2341            break;
2342        case 0xA000:
2343            /* FETCH_M */
2344            /* No-op */
2345            break;
2346        case 0xC000:
2347            /* RPCC */
2348            va = dest_gpr(ctx, ra);
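                    /* Under icount, reading the cycle counter counts as
                       I/O, so bracket the helper and end the TB so that
                       the instruction budget stays exact.  */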
2349            if (ctx->tb->cflags & CF_USE_ICOUNT) {
2350                gen_io_start();
2351                gen_helper_load_pcc(va, cpu_env);
2352                gen_io_end();
2353                ret = EXIT_PC_STALE;
2354            } else {
2355                gen_helper_load_pcc(va, cpu_env);
2356            }
2357            break;
2358        case 0xE000:
2359            /* RC */
2360            gen_rx(ctx, ra, 0);
2361            break;
2362        case 0xE800:
2363            /* ECB */
2364            break;
2365        case 0xF000:
2366            /* RS */
2367            gen_rx(ctx, ra, 1);
2368            break;
2369        case 0xF800:
2370            /* WH64 */
2371            /* No-op */
2372            break;
2373        case 0xFC00:
2374            /* WH64EN */
2375            /* No-op */
2376            break;
2377        default:
2378            goto invalid_opc;
2379        }
2380        break;
2381
2382    case 0x19:
2383        /* HW_MFPR (PALcode) */
2384#ifndef CONFIG_USER_ONLY
2385        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2386        va = dest_gpr(ctx, ra);
2387        ret = gen_mfpr(ctx, va, insn & 0xffff);
2388        break;
2389#else
2390        goto invalid_opc;
2391#endif
2392
2393    case 0x1A:
2394        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
2395           prediction stack action, which of course we don't implement.  */
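            /* The PC is always longword aligned, so the low two bits of
               the computed target are discarded.  */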
2396        vb = load_gpr(ctx, rb);
2397        tcg_gen_andi_i64(cpu_pc, vb, ~3);
2398        if (ra != 31) {
2399            tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
2400        }
2401        ret = EXIT_PC_UPDATED;
2402        break;
2403
2404    case 0x1B:
2405        /* HW_LD (PALcode) */
2406#ifndef CONFIG_USER_ONLY
2407        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2408        {
2409            TCGv addr = tcg_temp_new();
2410            vb = load_gpr(ctx, rb);
2411            va = dest_gpr(ctx, ra);
2412
2413            tcg_gen_addi_i64(addr, vb, disp12);
2414            switch ((insn >> 12) & 0xF) {
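                    /* Bits <15:12> of the instruction select the access
                       type.  */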
2415            case 0x0:
2416                /* Longword physical access (hw_ldl/p) */
2417                gen_helper_ldl_phys(va, cpu_env, addr);
2418                break;
2419            case 0x1:
2420                /* Quadword physical access (hw_ldq/p) */
2421                gen_helper_ldq_phys(va, cpu_env, addr);
2422                break;
2423            case 0x2:
2424                /* Longword physical access with lock (hw_ldl_l/p) */
2425                gen_helper_ldl_l_phys(va, cpu_env, addr);
2426                break;
2427            case 0x3:
2428                /* Quadword physical access with lock (hw_ldq_l/p) */
2429                gen_helper_ldq_l_phys(va, cpu_env, addr);
2430                break;
2431            case 0x4:
2432                /* Longword virtual PTE fetch (hw_ldl/v) */
2433                goto invalid_opc;
2434            case 0x5:
2435                /* Quadword virtual PTE fetch (hw_ldq/v) */
2436                goto invalid_opc;
2438            case 0x6:
2439                /* Invalid */
2440                goto invalid_opc;
2441            case 0x7:
2442                /* Invalid */
2443                goto invalid_opc;
2444            case 0x8:
2445                /* Longword virtual access (hw_ldl) */
2446                goto invalid_opc;
2447            case 0x9:
2448                /* Quadword virtual access (hw_ldq) */
2449                goto invalid_opc;
2450            case 0xA:
2451                /* Longword virtual access with protection check (hw_ldl/w) */
2452                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
2453                break;
2454            case 0xB:
2455                /* Quadword virtual access with protection check (hw_ldq/w) */
2456                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
2457                break;
2458            case 0xC:
2459                /* Longword virtual access with alt access mode (hw_ldl/a) */
2460                goto invalid_opc;
2461            case 0xD:
2462                /* Quadword virtual access with alt access mode (hw_ldq/a) */
2463                goto invalid_opc;
2464            case 0xE:
2465                /* Longword virtual access with alternate access mode and
2466                   protection checks (hw_ldl/wa) */
2467                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
2468                break;
2469            case 0xF:
2470                /* Quadword virtual access with alternate access mode and
2471                   protection checks (hw_ldq/wa) */
2472                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
2473                break;
2474            }
2475            tcg_temp_free(addr);
2476            break;
2477        }
2478#else
2479        goto invalid_opc;
2480#endif
2481
2482    case 0x1C:
2483        vc = dest_gpr(ctx, rc);
2484        if (fn7 == 0x70) {
2485            /* FTOIT */
2486            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2487            REQUIRE_REG_31(rb);
2488            va = load_fpr(ctx, ra);
2489            tcg_gen_mov_i64(vc, va);
2490            break;
2491        } else if (fn7 == 0x78) {
2492            /* FTOIS */
2493            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2494            REQUIRE_REG_31(rb);
2495            t32 = tcg_temp_new_i32();
2496            va = load_fpr(ctx, ra);
2497            gen_helper_s_to_memory(t32, va);
2498            tcg_gen_ext_i32_i64(vc, t32);
2499            tcg_temp_free_i32(t32);
2500            break;
2501        }
2502
2503        vb = load_gpr_lit(ctx, rb, lit, islit);
2504        switch (fn7) {
2505        case 0x00:
2506            /* SEXTB */
2507            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
2508            REQUIRE_REG_31(ra);
2509            tcg_gen_ext8s_i64(vc, vb);
2510            break;
2511        case 0x01:
2512            /* SEXTW */
2513            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
2514            REQUIRE_REG_31(ra);
2515            tcg_gen_ext16s_i64(vc, vb);
2516            break;
2517        case 0x30:
2518            /* CTPOP */
2519            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2520            REQUIRE_REG_31(ra);
2521            REQUIRE_NO_LIT;
2522            gen_helper_ctpop(vc, vb);
2523            break;
2524        case 0x31:
2525            /* PERR */
2526            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2527            REQUIRE_NO_LIT;
2528            va = load_gpr(ctx, ra);
2529            gen_helper_perr(vc, va, vb);
2530            break;
2531        case 0x32:
2532            /* CTLZ */
2533            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2534            REQUIRE_REG_31(ra);
2535            REQUIRE_NO_LIT;
2536            gen_helper_ctlz(vc, vb);
2537            break;
2538        case 0x33:
2539            /* CTTZ */
2540            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2541            REQUIRE_REG_31(ra);
2542            REQUIRE_NO_LIT;
2543            gen_helper_cttz(vc, vb);
2544            break;
2545        case 0x34:
2546            /* UNPKBW */
2547            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2548            REQUIRE_REG_31(ra);
2549            REQUIRE_NO_LIT;
2550            gen_helper_unpkbw(vc, vb);
2551            break;
2552        case 0x35:
2553            /* UNPKBL */
2554            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2555            REQUIRE_REG_31(ra);
2556            REQUIRE_NO_LIT;
2557            gen_helper_unpkbl(vc, vb);
2558            break;
2559        case 0x36:
2560            /* PKWB */
2561            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2562            REQUIRE_REG_31(ra);
2563            REQUIRE_NO_LIT;
2564            gen_helper_pkwb(vc, vb);
2565            break;
2566        case 0x37:
2567            /* PKLB */
2568            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2569            REQUIRE_REG_31(ra);
2570            REQUIRE_NO_LIT;
2571            gen_helper_pklb(vc, vb);
2572            break;
2573        case 0x38:
2574            /* MINSB8 */
2575            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2576            va = load_gpr(ctx, ra);
2577            gen_helper_minsb8(vc, va, vb);
2578            break;
2579        case 0x39:
2580            /* MINSW4 */
2581            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2582            va = load_gpr(ctx, ra);
2583            gen_helper_minsw4(vc, va, vb);
2584            break;
2585        case 0x3A:
2586            /* MINUB8 */
2587            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2588            va = load_gpr(ctx, ra);
2589            gen_helper_minub8(vc, va, vb);
2590            break;
2591        case 0x3B:
2592            /* MINUW4 */
2593            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2594            va = load_gpr(ctx, ra);
2595            gen_helper_minuw4(vc, va, vb);
2596            break;
2597        case 0x3C:
2598            /* MAXUB8 */
2599            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2600            va = load_gpr(ctx, ra);
2601            gen_helper_maxub8(vc, va, vb);
2602            break;
2603        case 0x3D:
2604            /* MAXUW4 */
2605            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2606            va = load_gpr(ctx, ra);
2607            gen_helper_maxuw4(vc, va, vb);
2608            break;
2609        case 0x3E:
2610            /* MAXSB8 */
2611            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2612            va = load_gpr(ctx, ra);
2613            gen_helper_maxsb8(vc, va, vb);
2614            break;
2615        case 0x3F:
2616            /* MAXSW4 */
2617            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2618            va = load_gpr(ctx, ra);
2619            gen_helper_maxsw4(vc, va, vb);
2620            break;
2621        default:
2622            goto invalid_opc;
2623        }
2624        break;
2625
2626    case 0x1D:
2627        /* HW_MTPR (PALcode) */
2628#ifndef CONFIG_USER_ONLY
2629        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2630        vb = load_gpr(ctx, rb);
2631        ret = gen_mtpr(ctx, vb, insn & 0xffff);
2632        break;
2633#else
2634        goto invalid_opc;
2635#endif
2636
2637    case 0x1E:
2638        /* HW_RET (PALcode) */
2639#ifndef CONFIG_USER_ONLY
2640        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2641        if (rb == 31) {
2642            /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
2643               address from EXC_ADDR.  This turns out to be useful for our
2644               emulation PALcode, so continue to accept it.  */
2645            ctx->lit = vb = tcg_temp_new();
2646            tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
2647        } else {
2648            vb = load_gpr(ctx, rb);
2649        }
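            /* Clear intr_flag and any outstanding lock, then let bit 0 of
               the target select the new PAL mode.  */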
2650        tmp = tcg_temp_new();
2651        tcg_gen_movi_i64(tmp, 0);
2652        tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
2653        tcg_gen_movi_i64(cpu_lock_addr, -1);
2654        tcg_gen_andi_i64(tmp, vb, 1);
2655        tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
2656        tcg_gen_andi_i64(cpu_pc, vb, ~3);
2657        ret = EXIT_PC_UPDATED;
2658        break;
2659#else
2660        goto invalid_opc;
2661#endif
2662
2663    case 0x1F:
2664        /* HW_ST (PALcode) */
2665#ifndef CONFIG_USER_ONLY
2666        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2667        {
2668            TCGv addr = tcg_temp_new();
2669            va = load_gpr(ctx, ra);
2670            vb = load_gpr(ctx, rb);
2671
2672            tcg_gen_addi_i64(addr, vb, disp12);
2673            switch ((insn >> 12) & 0xF) {
2674            case 0x0:
2675                /* Longword physical access */
2676                gen_helper_stl_phys(cpu_env, addr, va);
2677                break;
2678            case 0x1:
2679                /* Quadword physical access */
2680                gen_helper_stq_phys(cpu_env, addr, va);
2681                break;
2682            case 0x2:
2683                /* Longword physical access with lock */
2684                gen_helper_stl_c_phys(dest_gpr(ctx, ra), cpu_env, addr, va);
2685                break;
2686            case 0x3:
2687                /* Quadword physical access with lock */
2688                gen_helper_stq_c_phys(dest_gpr(ctx, ra), cpu_env, addr, va);
2689                break;
2690            case 0x4:
2691                /* Longword virtual access */
2692                goto invalid_opc;
2693            case 0x5:
2694                /* Quadword virtual access */
2695                goto invalid_opc;
2696            case 0x6:
2697                /* Invalid */
2698                goto invalid_opc;
2699            case 0x7:
2700                /* Invalid */
2701                goto invalid_opc;
2702            case 0x8:
2703                /* Invalid */
2704                goto invalid_opc;
2705            case 0x9:
2706                /* Invalid */
2707                goto invalid_opc;
2708            case 0xA:
2709                /* Invalid */
2710                goto invalid_opc;
2711            case 0xB:
2712                /* Invalid */
2713                goto invalid_opc;
2714            case 0xC:
2715                /* Longword virtual access with alternate access mode */
2716                goto invalid_opc;
2717            case 0xD:
2718                /* Quadword virtual access with alternate access mode */
2719                goto invalid_opc;
2720            case 0xE:
2721                /* Invalid */
2722                goto invalid_opc;
2723            case 0xF:
2724                /* Invalid */
2725                goto invalid_opc;
2726            }
2727            tcg_temp_free(addr);
2728            break;
2729        }
2730#else
2731        goto invalid_opc;
2732#endif
2733    case 0x20:
2734        /* LDF */
2735        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2736        break;
2737    case 0x21:
2738        /* LDG */
2739        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2740        break;
2741    case 0x22:
2742        /* LDS */
2743        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2744        break;
2745    case 0x23:
2746        /* LDT */
2747        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2748        break;
2749    case 0x24:
2750        /* STF */
2751        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
2752        break;
2753    case 0x25:
2754        /* STG */
2755        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
2756        break;
2757    case 0x26:
2758        /* STS */
2759        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
2760        break;
2761    case 0x27:
2762        /* STT */
2763        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
2764        break;
2765    case 0x28:
2766        /* LDL */
2767        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2768        break;
2769    case 0x29:
2770        /* LDQ */
2771        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2772        break;
2773    case 0x2A:
2774        /* LDL_L */
2775        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2776        break;
2777    case 0x2B:
2778        /* LDQ_L */
2779        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2780        break;
2781    case 0x2C:
2782        /* STL */
2783        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
2784        break;
2785    case 0x2D:
2786        /* STQ */
2787        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
2788        break;
2789    case 0x2E:
2790        /* STL_C */
2791        ret = gen_store_conditional(ctx, ra, rb, disp16, 0);
2792        break;
2793    case 0x2F:
2794        /* STQ_C */
2795        ret = gen_store_conditional(ctx, ra, rb, disp16, 1);
2796        break;
2797    case 0x30:
2798        /* BR */
2799        ret = gen_bdirect(ctx, ra, disp21);
2800        break;
2801    case 0x31: /* FBEQ */
2802        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
2803        break;
2804    case 0x32: /* FBLT */
2805        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
2806        break;
2807    case 0x33: /* FBLE */
2808        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
2809        break;
2810    case 0x34:
2811        /* BSR */
2812        ret = gen_bdirect(ctx, ra, disp21);
2813        break;
2814    case 0x35: /* FBNE */
2815        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
2816        break;
2817    case 0x36: /* FBGE */
2818        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
2819        break;
2820    case 0x37: /* FBGT */
2821        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
2822        break;
2823    case 0x38:
2824        /* BLBC */
2825        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2826        break;
2827    case 0x39:
2828        /* BEQ */
2829        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2830        break;
2831    case 0x3A:
2832        /* BLT */
2833        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2834        break;
2835    case 0x3B:
2836        /* BLE */
2837        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2838        break;
2839    case 0x3C:
2840        /* BLBS */
2841        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2842        break;
2843    case 0x3D:
2844        /* BNE */
2845        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2846        break;
2847    case 0x3E:
2848        /* BGE */
2849        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2850        break;
2851    case 0x3F:
2852        /* BGT */
2853        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2854        break;
2855    invalid_opc:
2856        ret = gen_invalid(ctx);
2857        break;
2858    }
2859
2860    return ret;
2861}
2862
2863void gen_intermediate_code(CPUAlphaState *env, struct TranslationBlock *tb)
2864{
2865    AlphaCPU *cpu = alpha_env_get_cpu(env);
2866    CPUState *cs = CPU(cpu);
2867    DisasContext ctx, *ctxp = &ctx;
2868    target_ulong pc_start;
2869    target_ulong pc_mask;
2870    uint32_t insn;
2871    ExitStatus ret;
2872    int num_insns;
2873    int max_insns;
2874
2875    pc_start = tb->pc;
2876
2877    ctx.tb = tb;
2878    ctx.pc = pc_start;
2879    ctx.mem_idx = cpu_mmu_index(env, false);
2880    ctx.implver = env->implver;
2881    ctx.singlestep_enabled = cs->singlestep_enabled;
2882
2883#ifdef CONFIG_USER_ONLY
2884    ctx.ir = cpu_std_ir;
2885#else
2886    ctx.palbr = env->palbr;
2887    ctx.ir = (tb->flags & TB_FLAGS_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
2888#endif
2889
2890    /* ??? Every TB begins with unset rounding mode, to be initialized on
2891       the first fp insn of the TB.  Alternatively we could define a proper
2892       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
2893       to reset the FP_STATUS to that default at the end of any TB that
2894       changes the default.  We could even (gasp) dynamically figure out
2895       what default would be most efficient given the running program.  */
2896    ctx.tb_rm = -1;
2897    /* Similarly for flush-to-zero.  */
2898    ctx.tb_ftz = -1;
2899
2900    num_insns = 0;
2901    max_insns = tb->cflags & CF_COUNT_MASK;
2902    if (max_insns == 0) {
2903        max_insns = CF_COUNT_MASK;
2904    }
2905    if (max_insns > TCG_MAX_INSNS) {
2906        max_insns = TCG_MAX_INSNS;
2907    }
2908
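    /* In a superpage the TB may extend up to the 41-bit region boundary;
       otherwise translation stops at the end of the page.  */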
2909    if (in_superpage(&ctx, pc_start)) {
2910        pc_mask = (1ULL << 41) - 1;
2911    } else {
2912        pc_mask = ~TARGET_PAGE_MASK;
2913    }
2914
2915    gen_tb_start(tb);
2916    do {
2917        tcg_gen_insn_start(ctx.pc);
2918        num_insns++;
2919
2920        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
2921            ret = gen_excp(&ctx, EXCP_DEBUG, 0);
2922            /* The address covered by the breakpoint must be included in
2923               [tb->pc, tb->pc + tb->size) in order for it to be
2924               properly cleared -- thus we increment the PC here so that
2925               the logic setting tb->size below does the right thing.  */
2926            ctx.pc += 4;
2927            break;
2928        }
2929        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
2930            gen_io_start();
2931        }
2932        insn = cpu_ldl_code(env, ctx.pc);
2933
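        /* The $31/$f31 source, write sink, and literal temporaries are
           allocated lazily by translate_one; free whichever were used.  */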
2934        TCGV_UNUSED_I64(ctx.zero);
2935        TCGV_UNUSED_I64(ctx.sink);
2936        TCGV_UNUSED_I64(ctx.lit);
2937
2938        ctx.pc += 4;
2939        ret = translate_one(ctxp, insn);
2940
2941        if (!TCGV_IS_UNUSED_I64(ctx.sink)) {
2942            tcg_gen_discard_i64(ctx.sink);
2943            tcg_temp_free(ctx.sink);
2944        }
2945        if (!TCGV_IS_UNUSED_I64(ctx.zero)) {
2946            tcg_temp_free(ctx.zero);
2947        }
2948        if (!TCGV_IS_UNUSED_I64(ctx.lit)) {
2949            tcg_temp_free(ctx.lit);
2950        }
2951
2952        /* If we reach a page boundary, are single-stepping, fill the
2953           TCG op buffer, or exhaust the instruction count, stop generation.  */
2954        if (ret == NO_EXIT
2955            && ((ctx.pc & pc_mask) == 0
2956                || tcg_op_buf_full()
2957                || num_insns >= max_insns
2958                || singlestep
2959                || ctx.singlestep_enabled)) {
2960            ret = EXIT_PC_STALE;
2961        }
2962    } while (ret == NO_EXIT);
2963
2964    if (tb->cflags & CF_LAST_IO) {
2965        gen_io_end();
2966    }
2967
2968    switch (ret) {
2969    case EXIT_GOTO_TB:
2970    case EXIT_NORETURN:
2971        break;
2972    case EXIT_PC_STALE:
2973        tcg_gen_movi_i64(cpu_pc, ctx.pc);
2974        /* FALLTHRU */
2975    case EXIT_PC_UPDATED:
2976        if (ctx.singlestep_enabled) {
2977            gen_excp_1(EXCP_DEBUG, 0);
2978        } else {
2979            tcg_gen_exit_tb(0);
2980        }
2981        break;
2982    default:
2983        abort();
2984    }
2985
2986    gen_tb_end(tb, num_insns);
2987
2988    tb->size = ctx.pc - pc_start;
2989    tb->icount = num_insns;
2990
2991#ifdef DEBUG_DISAS
2992    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
2993        qemu_log("IN: %s\n", lookup_symbol(pc_start));
2994        log_target_disas(cs, pc_start, ctx.pc - pc_start, 1);
2995        qemu_log("\n");
2996    }
2997#endif
2998}
2999
3000void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
3001                          target_ulong *data)
3002{
3003    env->pc = data[0];
3004}
3005