qemu/target/m68k/translate.c
   1/*
   2 *  m68k translation
   3 *
   4 *  Copyright (c) 2005-2007 CodeSourcery
   5 *  Written by Paul Brook
   6 *
   7 * This library is free software; you can redistribute it and/or
   8 * modify it under the terms of the GNU Lesser General Public
   9 * License as published by the Free Software Foundation; either
  10 * version 2.1 of the License, or (at your option) any later version.
  11 *
  12 * This library is distributed in the hope that it will be useful,
  13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  15 * Lesser General Public License for more details.
  16 *
  17 * You should have received a copy of the GNU Lesser General Public
  18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
  19 */
  20
  21#include "qemu/osdep.h"
  22#include "cpu.h"
  23#include "disas/disas.h"
  24#include "exec/exec-all.h"
  25#include "tcg/tcg-op.h"
  26#include "qemu/log.h"
  27#include "qemu/qemu-print.h"
  28#include "exec/cpu_ldst.h"
  29#include "exec/translator.h"
  30
  31#include "exec/helper-proto.h"
  32#include "exec/helper-gen.h"
  33
  34#include "trace-tcg.h"
  35#include "exec/log.h"
  36#include "fpu/softfloat.h"
  37
  38
  39//#define DEBUG_DISPATCH 1
  40
  41#define DEFO32(name, offset) static TCGv QREG_##name;
  42#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
  43#include "qregs.def"
  44#undef DEFO32
  45#undef DEFO64
  46
  47static TCGv_i32 cpu_halted;
  48static TCGv_i32 cpu_exception_index;
  49
  50static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
  51static TCGv cpu_dregs[8];
  52static TCGv cpu_aregs[8];
  53static TCGv_i64 cpu_macc[4];
  54
  55#define REG(insn, pos)  (((insn) >> (pos)) & 7)
  56#define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
  57#define AREG(insn, pos) get_areg(s, REG(insn, pos))
  58#define MACREG(acc)     cpu_macc[acc]
  59#define QREG_SP         get_areg(s, 7)
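    /*
     * REG() extracts a 3-bit register field from an opcode.  For example
     * (illustrative), REG(0x2a48, 9) = ((0x2a48 >> 9) & 7) = 5, so DREG
     * and AREG with pos 9 name the register encoded in bits 11..9.
     */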
  60
  61static TCGv NULL_QREG;
  62#define IS_NULL_QREG(t) (t == NULL_QREG)
  63/* Used to distinguish stores from bad addressing modes.  */
  64static TCGv store_dummy;
  65
  66#include "exec/gen-icount.h"
  67
  68void m68k_tcg_init(void)
  69{
  70    char *p;
  71    int i;
  72
  73#define DEFO32(name, offset) \
  74    QREG_##name = tcg_global_mem_new_i32(cpu_env, \
  75        offsetof(CPUM68KState, offset), #name);
  76#define DEFO64(name, offset) \
  77    QREG_##name = tcg_global_mem_new_i64(cpu_env, \
  78        offsetof(CPUM68KState, offset), #name);
  79#include "qregs.def"
  80#undef DEFO32
  81#undef DEFO64
  82
  83    cpu_halted = tcg_global_mem_new_i32(cpu_env,
  84                                        -offsetof(M68kCPU, env) +
  85                                        offsetof(CPUState, halted), "HALTED");
  86    cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
  87                                                 -offsetof(M68kCPU, env) +
  88                                                 offsetof(CPUState, exception_index),
  89                                                 "EXCEPTION");
  90
  91    p = cpu_reg_names;
  92    for (i = 0; i < 8; i++) {
  93        sprintf(p, "D%d", i);
  94        cpu_dregs[i] = tcg_global_mem_new(cpu_env,
  95                                          offsetof(CPUM68KState, dregs[i]), p);
  96        p += 3;
  97        sprintf(p, "A%d", i);
  98        cpu_aregs[i] = tcg_global_mem_new(cpu_env,
  99                                          offsetof(CPUM68KState, aregs[i]), p);
 100        p += 3;
 101    }
 102    for (i = 0; i < 4; i++) {
 103        sprintf(p, "ACC%d", i);
 104        cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
 105                                         offsetof(CPUM68KState, macc[i]), p);
 106        p += 5;
 107    }
 108
 109    NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
 110    store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
 111}
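
    /*
     * NULL_QREG and store_dummy are never actually loaded or stored; they
     * exist at dummy offsets only so that handle comparisons such as
     * IS_NULL_QREG() can tell "bad addressing mode" and "value was stored"
     * results apart from real values.
     */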
 112
 113/* internal defines */
 114typedef struct DisasContext {
 115    DisasContextBase base;
 116    CPUM68KState *env;
 117    target_ulong pc;
 118    CCOp cc_op; /* Current CC operation */
 119    int cc_op_synced;
 120    TCGv_i64 mactmp;
 121    int done_mac;
 122    int writeback_mask;
 123    TCGv writeback[8];
 124#define MAX_TO_RELEASE 8
 125    int release_count;
 126    TCGv release[MAX_TO_RELEASE];
 127} DisasContext;
 128
 129static void init_release_array(DisasContext *s)
 130{
 131#ifdef CONFIG_DEBUG_TCG
 132    memset(s->release, 0, sizeof(s->release));
 133#endif
 134    s->release_count = 0;
 135}
 136
 137static void do_release(DisasContext *s)
 138{
 139    int i;
 140    for (i = 0; i < s->release_count; i++) {
 141        tcg_temp_free(s->release[i]);
 142    }
 143    init_release_array(s);
 144}
 145
 146static TCGv mark_to_release(DisasContext *s, TCGv tmp)
 147{
 148    g_assert(s->release_count < MAX_TO_RELEASE);
 149    return s->release[s->release_count++] = tmp;
 150}
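
    /*
     * Temporaries created while decoding an effective address are recorded
     * with mark_to_release() and freed in bulk by do_release(), which the
     * translation loop is expected to invoke once the current instruction
     * has been handled, so the EA helpers need not track ownership.
     */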
 151
 152static TCGv get_areg(DisasContext *s, unsigned regno)
 153{
 154    if (s->writeback_mask & (1 << regno)) {
 155        return s->writeback[regno];
 156    } else {
 157        return cpu_aregs[regno];
 158    }
 159}
 160
 161static void delay_set_areg(DisasContext *s, unsigned regno,
 162                           TCGv val, bool give_temp)
 163{
 164    if (s->writeback_mask & (1 << regno)) {
 165        if (give_temp) {
 166            tcg_temp_free(s->writeback[regno]);
 167            s->writeback[regno] = val;
 168        } else {
 169            tcg_gen_mov_i32(s->writeback[regno], val);
 170        }
 171    } else {
 172        s->writeback_mask |= 1 << regno;
 173        if (give_temp) {
 174            s->writeback[regno] = val;
 175        } else {
 176            TCGv tmp = tcg_temp_new();
 177            s->writeback[regno] = tmp;
 178            tcg_gen_mov_i32(tmp, val);
 179        }
 180    }
 181}
 182
 183static void do_writebacks(DisasContext *s)
 184{
 185    unsigned mask = s->writeback_mask;
 186    if (mask) {
 187        s->writeback_mask = 0;
 188        do {
 189            unsigned regno = ctz32(mask);
 190            tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
 191            tcg_temp_free(s->writeback[regno]);
 192            mask &= mask - 1;
 193        } while (mask);
 194    }
 195}
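
    /*
     * Address register updates (e.g. the adjustments made by postincrement
     * and predecrement modes) are staged in s->writeback[] and only copied
     * into cpu_aregs[] by do_writebacks(), so that an access that faults
     * mid-instruction does not leave a half-updated address register.
     */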
 196
 197/* is_jmp field values */
 198#define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
 199#define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
 200
 201#if defined(CONFIG_USER_ONLY)
 202#define IS_USER(s) 1
 203#else
 204#define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
 205#define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
 206                      MMU_KERNEL_IDX : MMU_USER_IDX)
 207#define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
 208                      MMU_KERNEL_IDX : MMU_USER_IDX)
 209#endif
 210
 211typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
 212
 213#ifdef DEBUG_DISPATCH
 214#define DISAS_INSN(name)                                                \
 215    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
 216                                  uint16_t insn);                       \
 217    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
 218                             uint16_t insn)                             \
 219    {                                                                   \
 220        qemu_log("Dispatch " #name "\n");                               \
 221        real_disas_##name(env, s, insn);                                \
 222    }                                                                   \
 223    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
 224                                  uint16_t insn)
 225#else
 226#define DISAS_INSN(name)                                                \
 227    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
 228                             uint16_t insn)
 229#endif
 230
 231static const uint8_t cc_op_live[CC_OP_NB] = {
 232    [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
 233    [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
 234    [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
 235    [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
 236    [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
 237    [CC_OP_LOGIC] = CCF_X | CCF_N
 238};
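
    /*
     * cc_op_live[] records which CC fields hold live data for each cc_op so
     * that set_cc_op() can discard the rest.  For example, switching from
     * CC_OP_ADDL (X, N and V live) to CC_OP_LOGIC (X and N live) allows
     * QREG_CC_V to be discarded.
     */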
 239
 240static void set_cc_op(DisasContext *s, CCOp op)
 241{
 242    CCOp old_op = s->cc_op;
 243    int dead;
 244
 245    if (old_op == op) {
 246        return;
 247    }
 248    s->cc_op = op;
 249    s->cc_op_synced = 0;
 250
 251    /*
 252     * Discard CC computation that will no longer be used.
 253     * Note that X and N are never dead.
 254     */
 255    dead = cc_op_live[old_op] & ~cc_op_live[op];
 256    if (dead & CCF_C) {
 257        tcg_gen_discard_i32(QREG_CC_C);
 258    }
 259    if (dead & CCF_Z) {
 260        tcg_gen_discard_i32(QREG_CC_Z);
 261    }
 262    if (dead & CCF_V) {
 263        tcg_gen_discard_i32(QREG_CC_V);
 264    }
 265}
 266
 267/* Update the CPU env CC_OP state.  */
 268static void update_cc_op(DisasContext *s)
 269{
 270    if (!s->cc_op_synced) {
 271        s->cc_op_synced = 1;
 272        tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
 273    }
 274}
 275
 276/* Generate a jump to an immediate address.  */
 277static void gen_jmp_im(DisasContext *s, uint32_t dest)
 278{
 279    update_cc_op(s);
 280    tcg_gen_movi_i32(QREG_PC, dest);
 281    s->base.is_jmp = DISAS_JUMP;
 282}
 283
 284/* Generate a jump to the address in qreg DEST.  */
 285static void gen_jmp(DisasContext *s, TCGv dest)
 286{
 287    update_cc_op(s);
 288    tcg_gen_mov_i32(QREG_PC, dest);
 289    s->base.is_jmp = DISAS_JUMP;
 290}
 291
 292static void gen_raise_exception(int nr)
 293{
 294    TCGv_i32 tmp;
 295
 296    tmp = tcg_const_i32(nr);
 297    gen_helper_raise_exception(cpu_env, tmp);
 298    tcg_temp_free_i32(tmp);
 299}
 300
 301static void gen_exception(DisasContext *s, uint32_t dest, int nr)
 302{
 303    update_cc_op(s);
 304    tcg_gen_movi_i32(QREG_PC, dest);
 305
 306    gen_raise_exception(nr);
 307
 308    s->base.is_jmp = DISAS_NORETURN;
 309}
 310
 311static inline void gen_addr_fault(DisasContext *s)
 312{
 313    gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
 314}
 315
 316/*
 317 * Generate a load from the specified address.  Narrow values are
 318 * sign- or zero-extended to full register width, as selected by SIGN.
 319 */
 320static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
 321                            int sign, int index)
 322{
 323    TCGv tmp;
 324    tmp = tcg_temp_new_i32();
 325    switch (opsize) {
 326    case OS_BYTE:
 327        if (sign)
 328            tcg_gen_qemu_ld8s(tmp, addr, index);
 329        else
 330            tcg_gen_qemu_ld8u(tmp, addr, index);
 331        break;
 332    case OS_WORD:
 333        if (sign)
 334            tcg_gen_qemu_ld16s(tmp, addr, index);
 335        else
 336            tcg_gen_qemu_ld16u(tmp, addr, index);
 337        break;
 338    case OS_LONG:
 339        tcg_gen_qemu_ld32u(tmp, addr, index);
 340        break;
 341    default:
 342        g_assert_not_reached();
 343    }
 344    return tmp;
 345}
 346
 347/* Generate a store.  */
 348static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
 349                             int index)
 350{
 351    switch (opsize) {
 352    case OS_BYTE:
 353        tcg_gen_qemu_st8(val, addr, index);
 354        break;
 355    case OS_WORD:
 356        tcg_gen_qemu_st16(val, addr, index);
 357        break;
 358    case OS_LONG:
 359        tcg_gen_qemu_st32(val, addr, index);
 360        break;
 361    default:
 362        g_assert_not_reached();
 363    }
 364}
 365
 366typedef enum {
 367    EA_STORE,
 368    EA_LOADU,
 369    EA_LOADS
 370} ea_what;
 371
 372/*
 373 * Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT is
 374 * EA_LOADS, otherwise (EA_STORE) generate a store of VAL.
 375 */
 376static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
 377                     ea_what what, int index)
 378{
 379    if (what == EA_STORE) {
 380        gen_store(s, opsize, addr, val, index);
 381        return store_dummy;
 382    } else {
 383        return mark_to_release(s, gen_load(s, opsize, addr,
 384                                           what == EA_LOADS, index));
 385    }
 386}
 387
 388/* Read a 16-bit immediate constant */
 389static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
 390{
 391    uint16_t im;
 392    im = translator_lduw(env, s->pc);
 393    s->pc += 2;
 394    return im;
 395}
 396
 397/* Read an 8-bit immediate constant */
 398static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
 399{
 400    return read_im16(env, s);
 401}
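
    /*
     * Byte immediates occupy the low half of a full 16-bit extension word in
     * the instruction stream, so read_im8() fetches a whole word and returns
     * only its low byte.
     */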
 402
 403/* Read a 32-bit immediate constant.  */
 404static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
 405{
 406    uint32_t im;
 407    im = read_im16(env, s) << 16;
 408    im |= 0xffff & read_im16(env, s);
 409    return im;
 410}
 411
 412/* Read a 64-bit immediate constant.  */
 413static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
 414{
 415    uint64_t im;
 416    im = (uint64_t)read_im32(env, s) << 32;
 417    im |= (uint64_t)read_im32(env, s);
 418    return im;
 419}
 420
 421/* Calculate an address index.  */
 422static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
 423{
 424    TCGv add;
 425    int scale;
 426
 427    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
 428    if ((ext & 0x800) == 0) {
 429        tcg_gen_ext16s_i32(tmp, add);
 430        add = tmp;
 431    }
 432    scale = (ext >> 9) & 3;
 433    if (scale != 0) {
 434        tcg_gen_shli_i32(tmp, add, scale);
 435        add = tmp;
 436    }
 437    return add;
 438}
 439
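    /*
     * Rough layout of the index extension words decoded by gen_addr_index()
     * above and gen_lea_indexed() below (illustrative summary):
     *
     *   brief format:  D/A(15) reg(14:12) W/L(11) scale(10:9) 0(8) disp8(7:0)
     *   full format:   D/A(15) reg(14:12) W/L(11) scale(10:9) 1(8)
     *                  BS(7) IS(6) bd-size(5:4) 0(3) I/IS(2:0)
     *
     * For example (illustrative), a brief word of 0xa400 selects index
     * register A2, sign-extended from 16 bits and scaled by 4.
     */
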
 440/*
 441 * Handle a base + index + displacement effective address.
 442 * A NULL_QREG base means pc-relative.
 443 */
 444static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
 445{
 446    uint32_t offset;
 447    uint16_t ext;
 448    TCGv add;
 449    TCGv tmp;
 450    uint32_t bd, od;
 451
 452    offset = s->pc;
 453    ext = read_im16(env, s);
 454
 455    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
 456        return NULL_QREG;
 457
 458    if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
 459        !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
 460        ext &= ~(3 << 9);
 461    }
 462
 463    if (ext & 0x100) {
 464        /* full extension word format */
 465        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
 466            return NULL_QREG;
 467
 468        if ((ext & 0x30) > 0x10) {
 469            /* base displacement */
 470            if ((ext & 0x30) == 0x20) {
 471                bd = (int16_t)read_im16(env, s);
 472            } else {
 473                bd = read_im32(env, s);
 474            }
 475        } else {
 476            bd = 0;
 477        }
 478        tmp = mark_to_release(s, tcg_temp_new());
 479        if ((ext & 0x44) == 0) {
 480            /* pre-index */
 481            add = gen_addr_index(s, ext, tmp);
 482        } else {
 483            add = NULL_QREG;
 484        }
 485        if ((ext & 0x80) == 0) {
 486            /* base not suppressed */
 487            if (IS_NULL_QREG(base)) {
 488                base = mark_to_release(s, tcg_const_i32(offset + bd));
 489                bd = 0;
 490            }
 491            if (!IS_NULL_QREG(add)) {
 492                tcg_gen_add_i32(tmp, add, base);
 493                add = tmp;
 494            } else {
 495                add = base;
 496            }
 497        }
 498        if (!IS_NULL_QREG(add)) {
 499            if (bd != 0) {
 500                tcg_gen_addi_i32(tmp, add, bd);
 501                add = tmp;
 502            }
 503        } else {
 504            add = mark_to_release(s, tcg_const_i32(bd));
 505        }
 506        if ((ext & 3) != 0) {
 507            /* memory indirect */
 508            base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
 509            if ((ext & 0x44) == 4) {
 510                add = gen_addr_index(s, ext, tmp);
 511                tcg_gen_add_i32(tmp, add, base);
 512                add = tmp;
 513            } else {
 514                add = base;
 515            }
 516            if ((ext & 3) > 1) {
 517                /* outer displacement */
 518                if ((ext & 3) == 2) {
 519                    od = (int16_t)read_im16(env, s);
 520                } else {
 521                    od = read_im32(env, s);
 522                }
 523            } else {
 524                od = 0;
 525            }
 526            if (od != 0) {
 527                tcg_gen_addi_i32(tmp, add, od);
 528                add = tmp;
 529            }
 530        }
 531    } else {
 532        /* brief extension word format */
 533        tmp = mark_to_release(s, tcg_temp_new());
 534        add = gen_addr_index(s, ext, tmp);
 535        if (!IS_NULL_QREG(base)) {
 536            tcg_gen_add_i32(tmp, add, base);
 537            if ((int8_t)ext)
 538                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
 539        } else {
 540            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
 541        }
 542        add = tmp;
 543    }
 544    return add;
 545}
 546
 547/* Sign or zero extend a value.  */
 548
 549static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
 550{
 551    switch (opsize) {
 552    case OS_BYTE:
 553        if (sign) {
 554            tcg_gen_ext8s_i32(res, val);
 555        } else {
 556            tcg_gen_ext8u_i32(res, val);
 557        }
 558        break;
 559    case OS_WORD:
 560        if (sign) {
 561            tcg_gen_ext16s_i32(res, val);
 562        } else {
 563            tcg_gen_ext16u_i32(res, val);
 564        }
 565        break;
 566    case OS_LONG:
 567        tcg_gen_mov_i32(res, val);
 568        break;
 569    default:
 570        g_assert_not_reached();
 571    }
 572}
 573
 574/* Evaluate all the CC flags.  */
 575
 576static void gen_flush_flags(DisasContext *s)
 577{
 578    TCGv t0, t1;
 579
 580    switch (s->cc_op) {
 581    case CC_OP_FLAGS:
 582        return;
 583
 584    case CC_OP_ADDB:
 585    case CC_OP_ADDW:
 586    case CC_OP_ADDL:
 587        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
 588        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 589        /* Compute signed overflow for addition.  */
 590        t0 = tcg_temp_new();
 591        t1 = tcg_temp_new();
 592        tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
 593        gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
 594        tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
 595        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
 596        tcg_temp_free(t0);
 597        tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
 598        tcg_temp_free(t1);
 599        break;
 600
 601    case CC_OP_SUBB:
 602    case CC_OP_SUBW:
 603    case CC_OP_SUBL:
 604        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
 605        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 606        /* Compute signed overflow for subtraction.  */
 607        t0 = tcg_temp_new();
 608        t1 = tcg_temp_new();
 609        tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
 610        gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
 611        tcg_gen_xor_i32(t1, QREG_CC_N, t0);
 612        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
 613        tcg_temp_free(t0);
 614        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
 615        tcg_temp_free(t1);
 616        break;
 617
 618    case CC_OP_CMPB:
 619    case CC_OP_CMPW:
 620    case CC_OP_CMPL:
 621        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
 622        tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
 623        gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
 624        /* Compute signed overflow for subtraction.  */
 625        t0 = tcg_temp_new();
 626        tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
 627        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
 628        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
 629        tcg_temp_free(t0);
 630        tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
 631        break;
 632
 633    case CC_OP_LOGIC:
 634        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 635        tcg_gen_movi_i32(QREG_CC_C, 0);
 636        tcg_gen_movi_i32(QREG_CC_V, 0);
 637        break;
 638
 639    case CC_OP_DYNAMIC:
 640        gen_helper_flush_flags(cpu_env, QREG_CC_OP);
 641        s->cc_op_synced = 1;
 642        break;
 643
 644    default:
 645        t0 = tcg_const_i32(s->cc_op);
 646        gen_helper_flush_flags(cpu_env, t0);
 647        tcg_temp_free(t0);
 648        s->cc_op_synced = 1;
 649        break;
 650    }
 651
 652    /* Note that the flush_flags helper also assigns CC_OP_FLAGS to env->cc_op.  */
 653    s->cc_op = CC_OP_FLAGS;
 654}
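
    /*
     * Between instructions the flags live in the decomposed form named by
     * cc_op: for CC_OP_ADDx/SUBx, QREG_CC_N holds the (extended) result,
     * QREG_CC_V the second operand and QREG_CC_X the carry; for CC_OP_CMPx,
     * QREG_CC_N and QREG_CC_V hold the two operands; CC_OP_LOGIC keeps just
     * the result in QREG_CC_N.  gen_flush_flags() folds whichever form is
     * current into the canonical CC_OP_FLAGS representation.
     */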
 655
 656static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
 657{
 658    TCGv tmp;
 659
 660    if (opsize == OS_LONG) {
 661        tmp = val;
 662    } else {
 663        tmp = mark_to_release(s, tcg_temp_new());
 664        gen_ext(tmp, val, opsize, sign);
 665    }
 666
 667    return tmp;
 668}
 669
 670static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
 671{
 672    gen_ext(QREG_CC_N, val, opsize, 1);
 673    set_cc_op(s, CC_OP_LOGIC);
 674}
 675
 676static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
 677{
 678    tcg_gen_mov_i32(QREG_CC_N, dest);
 679    tcg_gen_mov_i32(QREG_CC_V, src);
 680    set_cc_op(s, CC_OP_CMPB + opsize);
 681}
 682
 683static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
 684{
 685    gen_ext(QREG_CC_N, dest, opsize, 1);
 686    tcg_gen_mov_i32(QREG_CC_V, src);
 687}
 688
 689static inline int opsize_bytes(int opsize)
 690{
 691    switch (opsize) {
 692    case OS_BYTE: return 1;
 693    case OS_WORD: return 2;
 694    case OS_LONG: return 4;
 695    case OS_SINGLE: return 4;
 696    case OS_DOUBLE: return 8;
 697    case OS_EXTENDED: return 12;
 698    case OS_PACKED: return 12;
 699    default:
 700        g_assert_not_reached();
 701    }
 702}
 703
 704static inline int insn_opsize(int insn)
 705{
 706    switch ((insn >> 6) & 3) {
 707    case 0: return OS_BYTE;
 708    case 1: return OS_WORD;
 709    case 2: return OS_LONG;
 710    default:
 711        g_assert_not_reached();
 712    }
 713}
 714
 715static inline int ext_opsize(int ext, int pos)
 716{
 717    switch ((ext >> pos) & 7) {
 718    case 0: return OS_LONG;
 719    case 1: return OS_SINGLE;
 720    case 2: return OS_EXTENDED;
 721    case 3: return OS_PACKED;
 722    case 4: return OS_WORD;
 723    case 5: return OS_DOUBLE;
 724    case 6: return OS_BYTE;
 725    default:
 726        g_assert_not_reached();
 727    }
 728}
 729
 730/*
 731 * Assign value to a register.  If the width is less than the register width
 732 * only the low part of the register is set.
 733 */
 734static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
 735{
 736    TCGv tmp;
 737    switch (opsize) {
 738    case OS_BYTE:
 739        tcg_gen_andi_i32(reg, reg, 0xffffff00);
 740        tmp = tcg_temp_new();
 741        tcg_gen_ext8u_i32(tmp, val);
 742        tcg_gen_or_i32(reg, reg, tmp);
 743        tcg_temp_free(tmp);
 744        break;
 745    case OS_WORD:
 746        tcg_gen_andi_i32(reg, reg, 0xffff0000);
 747        tmp = tcg_temp_new();
 748        tcg_gen_ext16u_i32(tmp, val);
 749        tcg_gen_or_i32(reg, reg, tmp);
 750        tcg_temp_free(tmp);
 751        break;
 752    case OS_LONG:
 753    case OS_SINGLE:
 754        tcg_gen_mov_i32(reg, val);
 755        break;
 756    default:
 757        g_assert_not_reached();
 758    }
 759}
 760
 761/*
 762 * Generate code for an "effective address".  Does not adjust the base
 763 * register for autoincrement addressing modes.
 764 */
 765static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
 766                         int mode, int reg0, int opsize)
 767{
 768    TCGv reg;
 769    TCGv tmp;
 770    uint16_t ext;
 771    uint32_t offset;
 772
 773    switch (mode) {
 774    case 0: /* Data register direct.  */
 775    case 1: /* Address register direct.  */
 776        return NULL_QREG;
 777    case 3: /* Indirect postincrement.  */
 778        if (opsize == OS_UNSIZED) {
 779            return NULL_QREG;
 780        }
 781        /* fallthru */
 782    case 2: /* Indirect register */
 783        return get_areg(s, reg0);
 784    case 4: /* Indirect predecrement.  */
 785        if (opsize == OS_UNSIZED) {
 786            return NULL_QREG;
 787        }
 788        reg = get_areg(s, reg0);
 789        tmp = mark_to_release(s, tcg_temp_new());
 790        if (reg0 == 7 && opsize == OS_BYTE &&
 791            m68k_feature(s->env, M68K_FEATURE_M68000)) {
 792            tcg_gen_subi_i32(tmp, reg, 2);
 793        } else {
 794            tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
 795        }
 796        return tmp;
 797    case 5: /* Indirect displacement.  */
 798        reg = get_areg(s, reg0);
 799        tmp = mark_to_release(s, tcg_temp_new());
 800        ext = read_im16(env, s);
 801        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
 802        return tmp;
 803    case 6: /* Indirect index + displacement.  */
 804        reg = get_areg(s, reg0);
 805        return gen_lea_indexed(env, s, reg);
 806    case 7: /* Other */
 807        switch (reg0) {
 808        case 0: /* Absolute short.  */
 809            offset = (int16_t)read_im16(env, s);
 810            return mark_to_release(s, tcg_const_i32(offset));
 811        case 1: /* Absolute long.  */
 812            offset = read_im32(env, s);
 813            return mark_to_release(s, tcg_const_i32(offset));
 814        case 2: /* pc displacement  */
 815            offset = s->pc;
 816            offset += (int16_t)read_im16(env, s);
 817            return mark_to_release(s, tcg_const_i32(offset));
 818        case 3: /* pc index+displacement.  */
 819            return gen_lea_indexed(env, s, NULL_QREG);
 820        case 4: /* Immediate.  */
 821        default:
 822            return NULL_QREG;
 823        }
 824    }
 825    /* Should never happen.  */
 826    return NULL_QREG;
 827}
 828
 829static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
 830                    int opsize)
 831{
 832    int mode = extract32(insn, 3, 3);
 833    int reg0 = REG(insn, 0);
 834    return gen_lea_mode(env, s, mode, reg0, opsize);
 835}
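
    /*
     * The effective-address field decoded here is bits 5..3 (mode) and
     * bits 2..0 (register) of the opcode.  For example (illustrative), an
     * EA field of 0b101010 is mode 5, register 2: address register indirect
     * with a 16-bit displacement, based on A2.
     */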
 836
 837/*
 838 * Generate code to load/store a value from/into an EA.  If WHAT is EA_STORE
 839 * this is a write, otherwise it is a read (EA_LOADS sign-extends, EA_LOADU
 840 * zero-extends).  ADDRP is non-null for read-write operands.
 841 */
 842static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
 843                        int opsize, TCGv val, TCGv *addrp, ea_what what,
 844                        int index)
 845{
 846    TCGv reg, tmp, result;
 847    int32_t offset;
 848
 849    switch (mode) {
 850    case 0: /* Data register direct.  */
 851        reg = cpu_dregs[reg0];
 852        if (what == EA_STORE) {
 853            gen_partset_reg(opsize, reg, val);
 854            return store_dummy;
 855        } else {
 856            return gen_extend(s, reg, opsize, what == EA_LOADS);
 857        }
 858    case 1: /* Address register direct.  */
 859        reg = get_areg(s, reg0);
 860        if (what == EA_STORE) {
 861            tcg_gen_mov_i32(reg, val);
 862            return store_dummy;
 863        } else {
 864            return gen_extend(s, reg, opsize, what == EA_LOADS);
 865        }
 866    case 2: /* Indirect register */
 867        reg = get_areg(s, reg0);
 868        return gen_ldst(s, opsize, reg, val, what, index);
 869    case 3: /* Indirect postincrement.  */
 870        reg = get_areg(s, reg0);
 871        result = gen_ldst(s, opsize, reg, val, what, index);
 872        if (what == EA_STORE || !addrp) {
 873            TCGv tmp = tcg_temp_new();
 874            if (reg0 == 7 && opsize == OS_BYTE &&
 875                m68k_feature(s->env, M68K_FEATURE_M68000)) {
 876                tcg_gen_addi_i32(tmp, reg, 2);
 877            } else {
 878                tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
 879            }
 880            delay_set_areg(s, reg0, tmp, true);
 881        }
 882        return result;
 883    case 4: /* Indirect predecrement.  */
 884        if (addrp && what == EA_STORE) {
 885            tmp = *addrp;
 886        } else {
 887            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
 888            if (IS_NULL_QREG(tmp)) {
 889                return tmp;
 890            }
 891            if (addrp) {
 892                *addrp = tmp;
 893            }
 894        }
 895        result = gen_ldst(s, opsize, tmp, val, what, index);
 896        if (what == EA_STORE || !addrp) {
 897            delay_set_areg(s, reg0, tmp, false);
 898        }
 899        return result;
 900    case 5: /* Indirect displacement.  */
 901    case 6: /* Indirect index + displacement.  */
 902    do_indirect:
 903        if (addrp && what == EA_STORE) {
 904            tmp = *addrp;
 905        } else {
 906            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
 907            if (IS_NULL_QREG(tmp)) {
 908                return tmp;
 909            }
 910            if (addrp) {
 911                *addrp = tmp;
 912            }
 913        }
 914        return gen_ldst(s, opsize, tmp, val, what, index);
 915    case 7: /* Other */
 916        switch (reg0) {
 917        case 0: /* Absolute short.  */
 918        case 1: /* Absolute long.  */
 919        case 2: /* pc displacement  */
 920        case 3: /* pc index+displacement.  */
 921            goto do_indirect;
 922        case 4: /* Immediate.  */
 923            /* Sign extend values for consistency.  */
 924            switch (opsize) {
 925            case OS_BYTE:
 926                if (what == EA_LOADS) {
 927                    offset = (int8_t)read_im8(env, s);
 928                } else {
 929                    offset = read_im8(env, s);
 930                }
 931                break;
 932            case OS_WORD:
 933                if (what == EA_LOADS) {
 934                    offset = (int16_t)read_im16(env, s);
 935                } else {
 936                    offset = read_im16(env, s);
 937                }
 938                break;
 939            case OS_LONG:
 940                offset = read_im32(env, s);
 941                break;
 942            default:
 943                g_assert_not_reached();
 944            }
 945            return mark_to_release(s, tcg_const_i32(offset));
 946        default:
 947            return NULL_QREG;
 948        }
 949    }
 950    /* Should never happen.  */
 951    return NULL_QREG;
 952}
 953
 954static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
 955                   int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
 956{
 957    int mode = extract32(insn, 3, 3);
 958    int reg0 = REG(insn, 0);
 959    return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
 960}
 961
 962static TCGv_ptr gen_fp_ptr(int freg)
 963{
 964    TCGv_ptr fp = tcg_temp_new_ptr();
 965    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
 966    return fp;
 967}
 968
 969static TCGv_ptr gen_fp_result_ptr(void)
 970{
 971    TCGv_ptr fp = tcg_temp_new_ptr();
 972    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
 973    return fp;
 974}
 975
 976static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
 977{
 978    TCGv t32;
 979    TCGv_i64 t64;
 980
 981    t32 = tcg_temp_new();
 982    tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
 983    tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
 984    tcg_temp_free(t32);
 985
 986    t64 = tcg_temp_new_i64();
 987    tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
 988    tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
 989    tcg_temp_free_i64(t64);
 990}
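
    /*
     * FPU values are held in 80-bit extended format: FPReg.l.upper carries
     * the 16-bit sign/exponent word and FPReg.l.lower the 64-bit mantissa,
     * which is why moves and extended-precision accesses handle the two
     * pieces separately.
     */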
 991
 992static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
 993                        int index)
 994{
 995    TCGv tmp;
 996    TCGv_i64 t64;
 997
 998    t64 = tcg_temp_new_i64();
 999    tmp = tcg_temp_new();
1000    switch (opsize) {
1001    case OS_BYTE:
1002        tcg_gen_qemu_ld8s(tmp, addr, index);
1003        gen_helper_exts32(cpu_env, fp, tmp);
1004        break;
1005    case OS_WORD:
1006        tcg_gen_qemu_ld16s(tmp, addr, index);
1007        gen_helper_exts32(cpu_env, fp, tmp);
1008        break;
1009    case OS_LONG:
1010        tcg_gen_qemu_ld32u(tmp, addr, index);
1011        gen_helper_exts32(cpu_env, fp, tmp);
1012        break;
1013    case OS_SINGLE:
1014        tcg_gen_qemu_ld32u(tmp, addr, index);
1015        gen_helper_extf32(cpu_env, fp, tmp);
1016        break;
1017    case OS_DOUBLE:
1018        tcg_gen_qemu_ld64(t64, addr, index);
1019        gen_helper_extf64(cpu_env, fp, t64);
1020        break;
1021    case OS_EXTENDED:
1022        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1023            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1024            break;
1025        }
1026        tcg_gen_qemu_ld32u(tmp, addr, index);
1027        tcg_gen_shri_i32(tmp, tmp, 16);
1028        tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1029        tcg_gen_addi_i32(tmp, addr, 4);
1030        tcg_gen_qemu_ld64(t64, tmp, index);
1031        tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1032        break;
1033    case OS_PACKED:
1034        /*
1035         * unimplemented data type on 68040/ColdFire
1036         * FIXME if needed for another FPU
1037         */
1038        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1039        break;
1040    default:
1041        g_assert_not_reached();
1042    }
1043    tcg_temp_free(tmp);
1044    tcg_temp_free_i64(t64);
1045}
1046
1047static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1048                         int index)
1049{
1050    TCGv tmp;
1051    TCGv_i64 t64;
1052
1053    t64 = tcg_temp_new_i64();
1054    tmp = tcg_temp_new();
1055    switch (opsize) {
1056    case OS_BYTE:
1057        gen_helper_reds32(tmp, cpu_env, fp);
1058        tcg_gen_qemu_st8(tmp, addr, index);
1059        break;
1060    case OS_WORD:
1061        gen_helper_reds32(tmp, cpu_env, fp);
1062        tcg_gen_qemu_st16(tmp, addr, index);
1063        break;
1064    case OS_LONG:
1065        gen_helper_reds32(tmp, cpu_env, fp);
1066        tcg_gen_qemu_st32(tmp, addr, index);
1067        break;
1068    case OS_SINGLE:
1069        gen_helper_redf32(tmp, cpu_env, fp);
1070        tcg_gen_qemu_st32(tmp, addr, index);
1071        break;
1072    case OS_DOUBLE:
1073        gen_helper_redf64(t64, cpu_env, fp);
1074        tcg_gen_qemu_st64(t64, addr, index);
1075        break;
1076    case OS_EXTENDED:
1077        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1078            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1079            break;
1080        }
1081        tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1082        tcg_gen_shli_i32(tmp, tmp, 16);
1083        tcg_gen_qemu_st32(tmp, addr, index);
1084        tcg_gen_addi_i32(tmp, addr, 4);
1085        tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1086        tcg_gen_qemu_st64(t64, tmp, index);
1087        break;
1088    case OS_PACKED:
1089        /*
1090         * unimplemented data type on 68040/ColdFire
1091         * FIXME if needed for another FPU
1092         */
1093        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1094        break;
1095    default:
1096        g_assert_not_reached();
1097    }
1098    tcg_temp_free(tmp);
1099    tcg_temp_free_i64(t64);
1100}
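
    /*
     * In memory, OS_EXTENDED occupies 12 bytes: the sign/exponent word sits
     * in the upper half of the first long word (the lower half is padding),
     * followed by the 8-byte mantissa; the 16-bit shifts in gen_load_fp()
     * and gen_store_fp() above convert between that layout and FPReg.l.upper.
     */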
1101
1102static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1103                        TCGv_ptr fp, ea_what what, int index)
1104{
1105    if (what == EA_STORE) {
1106        gen_store_fp(s, opsize, addr, fp, index);
1107    } else {
1108        gen_load_fp(s, opsize, addr, fp, index);
1109    }
1110}
1111
1112static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1113                          int reg0, int opsize, TCGv_ptr fp, ea_what what,
1114                          int index)
1115{
1116    TCGv reg, addr, tmp;
1117    TCGv_i64 t64;
1118
1119    switch (mode) {
1120    case 0: /* Data register direct.  */
1121        reg = cpu_dregs[reg0];
1122        if (what == EA_STORE) {
1123            switch (opsize) {
1124            case OS_BYTE:
1125            case OS_WORD:
1126            case OS_LONG:
1127                gen_helper_reds32(reg, cpu_env, fp);
1128                break;
1129            case OS_SINGLE:
1130                gen_helper_redf32(reg, cpu_env, fp);
1131                break;
1132            default:
1133                g_assert_not_reached();
1134            }
1135        } else {
1136            tmp = tcg_temp_new();
1137            switch (opsize) {
1138            case OS_BYTE:
1139                tcg_gen_ext8s_i32(tmp, reg);
1140                gen_helper_exts32(cpu_env, fp, tmp);
1141                break;
1142            case OS_WORD:
1143                tcg_gen_ext16s_i32(tmp, reg);
1144                gen_helper_exts32(cpu_env, fp, tmp);
1145                break;
1146            case OS_LONG:
1147                gen_helper_exts32(cpu_env, fp, reg);
1148                break;
1149            case OS_SINGLE:
1150                gen_helper_extf32(cpu_env, fp, reg);
1151                break;
1152            default:
1153                g_assert_not_reached();
1154            }
1155            tcg_temp_free(tmp);
1156        }
1157        return 0;
1158    case 1: /* Address register direct.  */
1159        return -1;
1160    case 2: /* Indirect register */
1161        addr = get_areg(s, reg0);
1162        gen_ldst_fp(s, opsize, addr, fp, what, index);
1163        return 0;
1164    case 3: /* Indirect postincrement.  */
1165        addr = cpu_aregs[reg0];
1166        gen_ldst_fp(s, opsize, addr, fp, what, index);
1167        tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1168        return 0;
1169    case 4: /* Indirect predecrement.  */
1170        addr = gen_lea_mode(env, s, mode, reg0, opsize);
1171        if (IS_NULL_QREG(addr)) {
1172            return -1;
1173        }
1174        gen_ldst_fp(s, opsize, addr, fp, what, index);
1175        tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1176        return 0;
1177    case 5: /* Indirect displacement.  */
1178    case 6: /* Indirect index + displacement.  */
1179    do_indirect:
1180        addr = gen_lea_mode(env, s, mode, reg0, opsize);
1181        if (IS_NULL_QREG(addr)) {
1182            return -1;
1183        }
1184        gen_ldst_fp(s, opsize, addr, fp, what, index);
1185        return 0;
1186    case 7: /* Other */
1187        switch (reg0) {
1188        case 0: /* Absolute short.  */
1189        case 1: /* Absolute long.  */
1190        case 2: /* pc displacement  */
1191        case 3: /* pc index+displacement.  */
1192            goto do_indirect;
1193        case 4: /* Immediate.  */
1194            if (what == EA_STORE) {
1195                return -1;
1196            }
1197            switch (opsize) {
1198            case OS_BYTE:
1199                tmp = tcg_const_i32((int8_t)read_im8(env, s));
1200                gen_helper_exts32(cpu_env, fp, tmp);
1201                tcg_temp_free(tmp);
1202                break;
1203            case OS_WORD:
1204                tmp = tcg_const_i32((int16_t)read_im16(env, s));
1205                gen_helper_exts32(cpu_env, fp, tmp);
1206                tcg_temp_free(tmp);
1207                break;
1208            case OS_LONG:
1209                tmp = tcg_const_i32(read_im32(env, s));
1210                gen_helper_exts32(cpu_env, fp, tmp);
1211                tcg_temp_free(tmp);
1212                break;
1213            case OS_SINGLE:
1214                tmp = tcg_const_i32(read_im32(env, s));
1215                gen_helper_extf32(cpu_env, fp, tmp);
1216                tcg_temp_free(tmp);
1217                break;
1218            case OS_DOUBLE:
1219                t64 = tcg_const_i64(read_im64(env, s));
1220                gen_helper_extf64(cpu_env, fp, t64);
1221                tcg_temp_free_i64(t64);
1222                break;
1223            case OS_EXTENDED:
1224                if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1225                    gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1226                    break;
1227                }
1228                tmp = tcg_const_i32(read_im32(env, s) >> 16);
1229                tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1230                tcg_temp_free(tmp);
1231                t64 = tcg_const_i64(read_im64(env, s));
1232                tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1233                tcg_temp_free_i64(t64);
1234                break;
1235            case OS_PACKED:
1236                /*
1237                 * unimplemented data type on 68040/ColdFire
1238                 * FIXME if needed for another FPU
1239                 */
1240                gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1241                break;
1242            default:
1243                g_assert_not_reached();
1244            }
1245            return 0;
1246        default:
1247            return -1;
1248        }
1249    }
1250    return -1;
1251}
1252
1253static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1254                       int opsize, TCGv_ptr fp, ea_what what, int index)
1255{
1256    int mode = extract32(insn, 3, 3);
1257    int reg0 = REG(insn, 0);
1258    return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1259}
1260
1261typedef struct {
1262    TCGCond tcond;
1263    bool g1;
1264    bool g2;
1265    TCGv v1;
1266    TCGv v2;
1267} DisasCompare;
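
    /*
     * g1/g2 mark v1/v2 as pre-existing globals (e.g. QREG_CC_N) rather than
     * temporaries owned by the comparison, so that free_cond() knows not to
     * free them.
     */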
1268
1269static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1270{
1271    TCGv tmp, tmp2;
1272    TCGCond tcond;
1273    CCOp op = s->cc_op;
1274
1275    /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1276    if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1277        c->g1 = c->g2 = 1;
1278        c->v1 = QREG_CC_N;
1279        c->v2 = QREG_CC_V;
1280        switch (cond) {
1281        case 2: /* HI */
1282        case 3: /* LS */
1283            tcond = TCG_COND_LEU;
1284            goto done;
1285        case 4: /* CC */
1286        case 5: /* CS */
1287            tcond = TCG_COND_LTU;
1288            goto done;
1289        case 6: /* NE */
1290        case 7: /* EQ */
1291            tcond = TCG_COND_EQ;
1292            goto done;
1293        case 10: /* PL */
1294        case 11: /* MI */
1295            c->g1 = c->g2 = 0;
1296            c->v2 = tcg_const_i32(0);
1297            c->v1 = tmp = tcg_temp_new();
1298            tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1299            gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1300            /* fallthru */
1301        case 12: /* GE */
1302        case 13: /* LT */
1303            tcond = TCG_COND_LT;
1304            goto done;
1305        case 14: /* GT */
1306        case 15: /* LE */
1307            tcond = TCG_COND_LE;
1308            goto done;
1309        }
1310    }
1311
1312    c->g1 = 1;
1313    c->g2 = 0;
1314    c->v2 = tcg_const_i32(0);
1315
1316    switch (cond) {
1317    case 0: /* T */
1318    case 1: /* F */
1319        c->v1 = c->v2;
1320        tcond = TCG_COND_NEVER;
1321        goto done;
1322    case 14: /* GT (!(Z || (N ^ V))) */
1323    case 15: /* LE (Z || (N ^ V)) */
1324        /*
1325         * Logic operations clear V, which simplifies LE to (Z || N),
1326         * and since Z and N are co-located, this becomes a normal
1327         * comparison vs N.
1328         */
1329        if (op == CC_OP_LOGIC) {
1330            c->v1 = QREG_CC_N;
1331            tcond = TCG_COND_LE;
1332            goto done;
1333        }
1334        break;
1335    case 12: /* GE (!(N ^ V)) */
1336    case 13: /* LT (N ^ V) */
1337        /* Logic operations clear V, which simplifies this to N.  */
1338        if (op != CC_OP_LOGIC) {
1339            break;
1340        }
1341        /* fallthru */
1342    case 10: /* PL (!N) */
1343    case 11: /* MI (N) */
1344        /* Several cases represent N normally.  */
1345        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1346            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1347            op == CC_OP_LOGIC) {
1348            c->v1 = QREG_CC_N;
1349            tcond = TCG_COND_LT;
1350            goto done;
1351        }
1352        break;
1353    case 6: /* NE (!Z) */
1354    case 7: /* EQ (Z) */
1355        /* Some cases fold Z into N.  */
1356        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1357            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1358            op == CC_OP_LOGIC) {
1359            tcond = TCG_COND_EQ;
1360            c->v1 = QREG_CC_N;
1361            goto done;
1362        }
1363        break;
1364    case 4: /* CC (!C) */
1365    case 5: /* CS (C) */
1366        /* Some cases fold C into X.  */
1367        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1368            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1369            tcond = TCG_COND_NE;
1370            c->v1 = QREG_CC_X;
1371            goto done;
1372        }
1373        /* fallthru */
1374    case 8: /* VC (!V) */
1375    case 9: /* VS (V) */
1376        /* Logic operations clear V and C.  */
1377        if (op == CC_OP_LOGIC) {
1378            tcond = TCG_COND_NEVER;
1379            c->v1 = c->v2;
1380            goto done;
1381        }
1382        break;
1383    }
1384
1385    /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1386    gen_flush_flags(s);
1387
1388    switch (cond) {
1389    case 0: /* T */
1390    case 1: /* F */
1391    default:
1392        /* Invalid, or handled above.  */
1393        abort();
1394    case 2: /* HI (!C && !Z) -> !(C || Z) */
1395    case 3: /* LS (C || Z) */
1396        c->v1 = tmp = tcg_temp_new();
1397        c->g1 = 0;
1398        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1399        tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1400        tcond = TCG_COND_NE;
1401        break;
1402    case 4: /* CC (!C) */
1403    case 5: /* CS (C) */
1404        c->v1 = QREG_CC_C;
1405        tcond = TCG_COND_NE;
1406        break;
1407    case 6: /* NE (!Z) */
1408    case 7: /* EQ (Z) */
1409        c->v1 = QREG_CC_Z;
1410        tcond = TCG_COND_EQ;
1411        break;
1412    case 8: /* VC (!V) */
1413    case 9: /* VS (V) */
1414        c->v1 = QREG_CC_V;
1415        tcond = TCG_COND_LT;
1416        break;
1417    case 10: /* PL (!N) */
1418    case 11: /* MI (N) */
1419        c->v1 = QREG_CC_N;
1420        tcond = TCG_COND_LT;
1421        break;
1422    case 12: /* GE (!(N ^ V)) */
1423    case 13: /* LT (N ^ V) */
1424        c->v1 = tmp = tcg_temp_new();
1425        c->g1 = 0;
1426        tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1427        tcond = TCG_COND_LT;
1428        break;
1429    case 14: /* GT (!(Z || (N ^ V))) */
1430    case 15: /* LE (Z || (N ^ V)) */
1431        c->v1 = tmp = tcg_temp_new();
1432        c->g1 = 0;
1433        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1434        tcg_gen_neg_i32(tmp, tmp);
1435        tmp2 = tcg_temp_new();
1436        tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1437        tcg_gen_or_i32(tmp, tmp, tmp2);
1438        tcg_temp_free(tmp2);
1439        tcond = TCG_COND_LT;
1440        break;
1441    }
1442
1443 done:
1444    if ((cond & 1) == 0) {
1445        tcond = tcg_invert_cond(tcond);
1446    }
1447    c->tcond = tcond;
1448}
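
    /*
     * Condition codes come in true/false pairs differing only in bit 0
     * (e.g. 6 = NE, 7 = EQ), so gen_cc_cond() computes the condition for the
     * odd encoding of each pair and inverts it when the low bit of COND is
     * clear.
     */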
1449
1450static void free_cond(DisasCompare *c)
1451{
1452    if (!c->g1) {
1453        tcg_temp_free(c->v1);
1454    }
1455    if (!c->g2) {
1456        tcg_temp_free(c->v2);
1457    }
1458}
1459
1460static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1461{
1462    DisasCompare c;
1463
1464    gen_cc_cond(&c, s, cond);
1465    update_cc_op(s);
1466    tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1467    free_cond(&c);
1468}
1469
1470/* Force a TB lookup after an instruction that changes the CPU state.  */
1471static void gen_exit_tb(DisasContext *s)
1472{
1473    update_cc_op(s);
1474    tcg_gen_movi_i32(QREG_PC, s->pc);
1475    s->base.is_jmp = DISAS_EXIT;
1476}
1477
1478#define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1479        result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1480                        op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1481        if (IS_NULL_QREG(result)) {                                     \
1482            gen_addr_fault(s);                                          \
1483            return;                                                     \
1484        }                                                               \
1485    } while (0)
1486
1487#define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1488        TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1489                                EA_STORE, IS_USER(s));                  \
1490        if (IS_NULL_QREG(ea_result)) {                                  \
1491            gen_addr_fault(s);                                          \
1492            return;                                                     \
1493        }                                                               \
1494    } while (0)
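
    /*
     * SRC_EA and DEST_EA expand inside DISAS_INSN bodies: they rely on "s"
     * (and, for SRC_EA, "insn") being in scope, and on an invalid effective
     * address they raise an address fault and return from the enclosing
     * function.
     */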
1495
1496static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1497{
1498#ifndef CONFIG_USER_ONLY
1499    return (s->base.pc_first & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)
1500        || (s->base.pc_next & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1501#else
1502    return true;
1503#endif
1504}
1505
1506/* Generate a jump to an immediate address.  */
1507static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1508{
1509    if (unlikely(s->base.singlestep_enabled)) {
1510        gen_exception(s, dest, EXCP_DEBUG);
1511    } else if (use_goto_tb(s, dest)) {
1512        tcg_gen_goto_tb(n);
1513        tcg_gen_movi_i32(QREG_PC, dest);
1514        tcg_gen_exit_tb(s->base.tb, n);
1515    } else {
1516        gen_jmp_im(s, dest);
1517        tcg_gen_exit_tb(NULL, 0);
1518    }
1519    s->base.is_jmp = DISAS_NORETURN;
1520}
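
    /*
     * Direct block chaining (goto_tb) is only used when the destination lies
     * on the same guest page as the start or current end of this TB, per
     * use_goto_tb(); otherwise the jump stores PC and exits to the main loop,
     * and under single-stepping a debug exception is raised instead.
     */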
1521
1522DISAS_INSN(scc)
1523{
1524    DisasCompare c;
1525    int cond;
1526    TCGv tmp;
1527
1528    cond = (insn >> 8) & 0xf;
1529    gen_cc_cond(&c, s, cond);
1530
1531    tmp = tcg_temp_new();
1532    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1533    free_cond(&c);
1534
1535    tcg_gen_neg_i32(tmp, tmp);
1536    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1537    tcg_temp_free(tmp);
1538}
1539
1540DISAS_INSN(dbcc)
1541{
1542    TCGLabel *l1;
1543    TCGv reg;
1544    TCGv tmp;
1545    int16_t offset;
1546    uint32_t base;
1547
1548    reg = DREG(insn, 0);
1549    base = s->pc;
1550    offset = (int16_t)read_im16(env, s);
1551    l1 = gen_new_label();
1552    gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1553
1554    tmp = tcg_temp_new();
1555    tcg_gen_ext16s_i32(tmp, reg);
1556    tcg_gen_addi_i32(tmp, tmp, -1);
1557    gen_partset_reg(OS_WORD, reg, tmp);
1558    tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1559    gen_jmp_tb(s, 1, base + offset);
1560    gen_set_label(l1);
1561    gen_jmp_tb(s, 0, s->pc);
1562}
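
    /*
     * DBcc as implemented above: if the condition holds, fall through to the
     * next instruction; otherwise decrement the low word of Dn and branch
     * back unless the counter reached -1.  DBF/DBRA (condition always false)
     * therefore gives a plain decrement-and-branch loop.
     */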
1563
1564DISAS_INSN(undef_mac)
1565{
1566    gen_exception(s, s->base.pc_next, EXCP_LINEA);
1567}
1568
1569DISAS_INSN(undef_fpu)
1570{
1571    gen_exception(s, s->base.pc_next, EXCP_LINEF);
1572}
1573
1574DISAS_INSN(undef)
1575{
1576    /*
1577     * ??? This covers both instructions that are not yet implemented
1578     * for the 680x0 series and those that are implemented but are
1579     * actually illegal on CPU32 or pre-68020 parts.
1580     */
1581    qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1582                  insn, s->base.pc_next);
1583    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1584}
1585
1586DISAS_INSN(mulw)
1587{
1588    TCGv reg;
1589    TCGv tmp;
1590    TCGv src;
1591    int sign;
1592
1593    sign = (insn & 0x100) != 0;
1594    reg = DREG(insn, 9);
1595    tmp = tcg_temp_new();
1596    if (sign)
1597        tcg_gen_ext16s_i32(tmp, reg);
1598    else
1599        tcg_gen_ext16u_i32(tmp, reg);
1600    SRC_EA(env, src, OS_WORD, sign, NULL);
1601    tcg_gen_mul_i32(tmp, tmp, src);
1602    tcg_gen_mov_i32(reg, tmp);
1603    gen_logic_cc(s, tmp, OS_LONG);
1604    tcg_temp_free(tmp);
1605}
1606
1607DISAS_INSN(divw)
1608{
1609    int sign;
1610    TCGv src;
1611    TCGv destr;
1612
1613    /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1614
1615    sign = (insn & 0x100) != 0;
1616
1617    /* dest.l / src.w */
1618
1619    SRC_EA(env, src, OS_WORD, sign, NULL);
1620    destr = tcg_const_i32(REG(insn, 9));
1621    if (sign) {
1622        gen_helper_divsw(cpu_env, destr, src);
1623    } else {
1624        gen_helper_divuw(cpu_env, destr, src);
1625    }
1626    tcg_temp_free(destr);
1627
1628    set_cc_op(s, CC_OP_FLAGS);
1629}
1630
1631DISAS_INSN(divl)
1632{
1633    TCGv num, reg, den;
1634    int sign;
1635    uint16_t ext;
1636
1637    ext = read_im16(env, s);
1638
1639    sign = (ext & 0x0800) != 0;
1640
1641    if (ext & 0x400) {
1642        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1643            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1644            return;
1645        }
1646
1647        /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1648
1649        SRC_EA(env, den, OS_LONG, 0, NULL);
1650        num = tcg_const_i32(REG(ext, 12));
1651        reg = tcg_const_i32(REG(ext, 0));
1652        if (sign) {
1653            gen_helper_divsll(cpu_env, num, reg, den);
1654        } else {
1655            gen_helper_divull(cpu_env, num, reg, den);
1656        }
1657        tcg_temp_free(reg);
1658        tcg_temp_free(num);
1659        set_cc_op(s, CC_OP_FLAGS);
1660        return;
1661    }
1662
1663    /* divX.l <EA>, Dq        32/32 -> 32q     */
1664    /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1665
1666    SRC_EA(env, den, OS_LONG, 0, NULL);
1667    num = tcg_const_i32(REG(ext, 12));
1668    reg = tcg_const_i32(REG(ext, 0));
1669    if (sign) {
1670        gen_helper_divsl(cpu_env, num, reg, den);
1671    } else {
1672        gen_helper_divul(cpu_env, num, reg, den);
1673    }
1674    tcg_temp_free(reg);
1675    tcg_temp_free(num);
1676
1677    set_cc_op(s, CC_OP_FLAGS);
1678}
1679
1680static void bcd_add(TCGv dest, TCGv src)
1681{
1682    TCGv t0, t1;
1683
1684    /*
1685     * dest10 = dest10 + src10 + X
1686     *
1687     *        t1 = src
1688     *        t2 = t1 + 0x066
1689     *        t3 = t2 + dest + X
1690     *        t4 = t2 ^ dest
1691     *        t5 = t3 ^ t4
1692     *        t6 = ~t5 & 0x110
1693     *        t7 = (t6 >> 2) | (t6 >> 3)
1694     *        return t3 - t7
1695     */
1696
1697    /*
1698     * t1 = (src + 0x066) + dest + X
1699     *    = result with a possible excess 0x6 in each digit
1700     */
1701
1702    t0 = tcg_const_i32(0x066);
1703    tcg_gen_add_i32(t0, t0, src);
1704
1705    t1 = tcg_temp_new();
1706    tcg_gen_add_i32(t1, t0, dest);
1707    tcg_gen_add_i32(t1, t1, QREG_CC_X);
1708
1709    /* we will remove the excess 0x6 where there is no carry */
1710
1711    /*
1712     * t0 = (src + 0x0066) ^ dest
1713     *    = t1 without carries
1714     */
1715
1716    tcg_gen_xor_i32(t0, t0, dest);
1717
1718    /*
1719     * extract the carries
1720     * t0 = t0 ^ t1
1721     *    = only the carries
1722     */
1723
1724    tcg_gen_xor_i32(t0, t0, t1);
1725
1726    /*
1727     * generate 0x1 where there is no carry
1728     * and for each 0x10, generate a 0x6
1729     */
1730
1731    tcg_gen_shri_i32(t0, t0, 3);
1732    tcg_gen_not_i32(t0, t0);
1733    tcg_gen_andi_i32(t0, t0, 0x22);
1734    tcg_gen_add_i32(dest, t0, t0);
1735    tcg_gen_add_i32(dest, dest, t0);
1736    tcg_temp_free(t0);
1737
1738    /*
1739     * remove the excess 0x6
1740     * for digits that have not generated a carry
1741     */
1742
1743    tcg_gen_sub_i32(dest, t1, dest);
1744    tcg_temp_free(t1);
1745}
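
    /*
     * Worked example (illustrative): dest = 0x27, src = 0x35, X = 0.
     * t1 = (0x35 + 0x66) + 0x27 = 0xc2; the low digit produced a decimal
     * carry so its pre-added 0x6 is kept, while the high digit did not, so
     * 0x6 is subtracted again: 0xc2 - 0x60 = 0x62, i.e. BCD 27 + 35 = 62.
     */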
1746
1747static void bcd_sub(TCGv dest, TCGv src)
1748{
1749    TCGv t0, t1, t2;
1750
1751    /*
1752     *  dest10 = dest10 - src10 - X
1753     *         = bcd_add(dest + 1 - X, 0x199 - src)
1754     */
1755
1756    /* t0 = 0x066 + (0x199 - src) */
1757
1758    t0 = tcg_temp_new();
1759    tcg_gen_subfi_i32(t0, 0x1ff, src);
1760
1761    /* t1 = t0 + dest + 1 - X */
1762
1763    t1 = tcg_temp_new();
1764    tcg_gen_add_i32(t1, t0, dest);
1765    tcg_gen_addi_i32(t1, t1, 1);
1766    tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1767
1768    /* t2 = t0 ^ dest */
1769
1770    t2 = tcg_temp_new();
1771    tcg_gen_xor_i32(t2, t0, dest);
1772
1773    /* t0 = t1 ^ t2 */
1774
1775    tcg_gen_xor_i32(t0, t1, t2);
1776
1777    /*
1778     * t2 = ~t0 & 0x110
1779     * t0 = (t2 >> 2) | (t2 >> 3)
1780     *
1781     * to fit on 8bit operands, changed in:
1782     *
1783     * t2 = ~(t0 >> 3) & 0x22
1784     * t0 = t2 + t2
1785     * t0 = t0 + t2
1786     */
1787
1788    tcg_gen_shri_i32(t2, t0, 3);
1789    tcg_gen_not_i32(t2, t2);
1790    tcg_gen_andi_i32(t2, t2, 0x22);
1791    tcg_gen_add_i32(t0, t2, t2);
1792    tcg_gen_add_i32(t0, t0, t2);
1793    tcg_temp_free(t2);
1794
1795    /* return t1 - t0 */
1796
1797    tcg_gen_sub_i32(dest, t1, t0);
1798    tcg_temp_free(t0);
1799    tcg_temp_free(t1);
1800}
1801
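    /*
     * Set the flags for a BCD result: !Z is sticky (Z accumulates the low
     * byte of VAL), while bit 8, the decimal carry/borrow, becomes both
     * C and X.
     */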
1802static void bcd_flags(TCGv val)
1803{
1804    tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1805    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1806
1807    tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1808
1809    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1810}
1811
1812DISAS_INSN(abcd_reg)
1813{
1814    TCGv src;
1815    TCGv dest;
1816
1817    gen_flush_flags(s); /* !Z is sticky */
1818
1819    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1820    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1821    bcd_add(dest, src);
1822    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1823
1824    bcd_flags(dest);
1825}
1826
1827DISAS_INSN(abcd_mem)
1828{
1829    TCGv src, dest, addr;
1830
1831    gen_flush_flags(s); /* !Z is sticky */
1832
1833    /* Indirect pre-decrement load (mode 4) */
1834
1835    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1836                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1837    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1838                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1839
1840    bcd_add(dest, src);
1841
1842    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1843                EA_STORE, IS_USER(s));
1844
1845    bcd_flags(dest);
1846}
1847
1848DISAS_INSN(sbcd_reg)
1849{
1850    TCGv src, dest;
1851
1852    gen_flush_flags(s); /* !Z is sticky */
1853
1854    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1855    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1856
1857    bcd_sub(dest, src);
1858
1859    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1860
1861    bcd_flags(dest);
1862}
1863
1864DISAS_INSN(sbcd_mem)
1865{
1866    TCGv src, dest, addr;
1867
1868    gen_flush_flags(s); /* !Z is sticky */
1869
1870    /* Indirect pre-decrement load (mode 4) */
1871
1872    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1873                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1874    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1875                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1876
1877    bcd_sub(dest, src);
1878
1879    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1880                EA_STORE, IS_USER(s));
1881
1882    bcd_flags(dest);
1883}
1884
1885DISAS_INSN(nbcd)
1886{
1887    TCGv src, dest;
1888    TCGv addr;
1889
1890    gen_flush_flags(s); /* !Z is sticky */
1891
1892    SRC_EA(env, src, OS_BYTE, 0, &addr);
1893
1894    dest = tcg_const_i32(0);
1895    bcd_sub(dest, src);
1896
1897    DEST_EA(env, insn, OS_BYTE, dest, &addr);
1898
1899    bcd_flags(dest);
1900
1901    tcg_temp_free(dest);
1902}
1903
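    /*
     * add/sub: ADD or SUB between a data register and <EA>.  Bit 14 of the
     * opcode distinguishes add (set) from sub (clear); bit 8 selects <EA>
     * as the destination.
     */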
1904DISAS_INSN(addsub)
1905{
1906    TCGv reg;
1907    TCGv dest;
1908    TCGv src;
1909    TCGv tmp;
1910    TCGv addr;
1911    int add;
1912    int opsize;
1913
1914    add = (insn & 0x4000) != 0;
1915    opsize = insn_opsize(insn);
1916    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1917    dest = tcg_temp_new();
1918    if (insn & 0x100) {
1919        SRC_EA(env, tmp, opsize, 1, &addr);
1920        src = reg;
1921    } else {
1922        tmp = reg;
1923        SRC_EA(env, src, opsize, 1, NULL);
1924    }
1925    if (add) {
1926        tcg_gen_add_i32(dest, tmp, src);
1927        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1928        set_cc_op(s, CC_OP_ADDB + opsize);
1929    } else {
1930        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1931        tcg_gen_sub_i32(dest, tmp, src);
1932        set_cc_op(s, CC_OP_SUBB + opsize);
1933    }
1934    gen_update_cc_add(dest, src, opsize);
1935    if (insn & 0x100) {
1936        DEST_EA(env, insn, opsize, dest, &addr);
1937    } else {
1938        gen_partset_reg(opsize, DREG(insn, 9), dest);
1939    }
1940    tcg_temp_free(dest);
1941}
1942
1943/* Reverse the order of the bits in REG.  */
1944DISAS_INSN(bitrev)
1945{
1946    TCGv reg;
1947    reg = DREG(insn, 0);
1948    gen_helper_bitrev(reg, reg);
1949}
1950
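    /* btst/bchg/bclr/bset with the bit number taken from a data register.  */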
1951DISAS_INSN(bitop_reg)
1952{
1953    int opsize;
1954    int op;
1955    TCGv src1;
1956    TCGv src2;
1957    TCGv tmp;
1958    TCGv addr;
1959    TCGv dest;
1960
1961    if ((insn & 0x38) != 0)
1962        opsize = OS_BYTE;
1963    else
1964        opsize = OS_LONG;
1965    op = (insn >> 6) & 3;
1966    SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
1967
1968    gen_flush_flags(s);
1969    src2 = tcg_temp_new();
1970    if (opsize == OS_BYTE)
1971        tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1972    else
1973        tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1974
1975    tmp = tcg_const_i32(1);
1976    tcg_gen_shl_i32(tmp, tmp, src2);
1977    tcg_temp_free(src2);
1978
1979    tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1980
1981    dest = tcg_temp_new();
1982    switch (op) {
1983    case 1: /* bchg */
1984        tcg_gen_xor_i32(dest, src1, tmp);
1985        break;
1986    case 2: /* bclr */
1987        tcg_gen_andc_i32(dest, src1, tmp);
1988        break;
1989    case 3: /* bset */
1990        tcg_gen_or_i32(dest, src1, tmp);
1991        break;
1992    default: /* btst */
1993        break;
1994    }
1995    tcg_temp_free(tmp);
1996    if (op) {
1997        DEST_EA(env, insn, opsize, dest, &addr);
1998    }
1999    tcg_temp_free(dest);
2000}
2001
2002DISAS_INSN(sats)
2003{
2004    TCGv reg;
2005    reg = DREG(insn, 0);
2006    gen_flush_flags(s);
2007    gen_helper_sats(reg, reg, QREG_CC_V);
2008    gen_logic_cc(s, reg, OS_LONG);
2009}
2010
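    /* Push VAL onto the stack as a longword, pre-decrementing SP.  */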
2011static void gen_push(DisasContext *s, TCGv val)
2012{
2013    TCGv tmp;
2014
2015    tmp = tcg_temp_new();
2016    tcg_gen_subi_i32(tmp, QREG_SP, 4);
2017    gen_store(s, OS_LONG, tmp, val, IS_USER(s));
2018    tcg_gen_mov_i32(QREG_SP, tmp);
2019    tcg_temp_free(tmp);
2020}
2021
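    /* Map a movem-style register index onto D0-D7 (0-7) or A0-A7 (8-15).  */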
2022static TCGv mreg(int reg)
2023{
2024    if (reg < 8) {
2025        /* Dx */
2026        return cpu_dregs[reg];
2027    }
2028    /* Ax */
2029    return cpu_aregs[reg & 7];
2030}
2031
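    /*
     * movem: move multiple registers to or from memory; the register mask
     * comes from the extension word.
     */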
2032DISAS_INSN(movem)
2033{
2034    TCGv addr, incr, tmp, r[16];
2035    int is_load = (insn & 0x0400) != 0;
2036    int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
2037    uint16_t mask = read_im16(env, s);
2038    int mode = extract32(insn, 3, 3);
2039    int reg0 = REG(insn, 0);
2040    int i;
2041
2042    tmp = cpu_aregs[reg0];
2043
2044    switch (mode) {
2045    case 0: /* data register direct */
2046    case 1: /* addr register direct */
2047    do_addr_fault:
2048        gen_addr_fault(s);
2049        return;
2050
2051    case 2: /* indirect */
2052        break;
2053
2054    case 3: /* indirect post-increment */
2055        if (!is_load) {
2056            /* post-increment is not allowed */
2057            goto do_addr_fault;
2058        }
2059        break;
2060
2061    case 4: /* indirect pre-decrement */
2062        if (is_load) {
2063            /* pre-decrement is not allowed */
2064            goto do_addr_fault;
2065        }
2066        /*
2067         * We want a bare copy of the address reg, without the pre-decrement
2068         * adjustment that gen_lea would apply.
2069         */
2070        break;
2071
2072    default:
2073        tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2074        if (IS_NULL_QREG(tmp)) {
2075            goto do_addr_fault;
2076        }
2077        break;
2078    }
2079
2080    addr = tcg_temp_new();
2081    tcg_gen_mov_i32(addr, tmp);
2082    incr = tcg_const_i32(opsize_bytes(opsize));
2083
2084    if (is_load) {
2085        /* memory to register */
2086        for (i = 0; i < 16; i++) {
2087            if (mask & (1 << i)) {
2088                r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2089                tcg_gen_add_i32(addr, addr, incr);
2090            }
2091        }
2092        for (i = 0; i < 16; i++) {
2093            if (mask & (1 << i)) {
2094                tcg_gen_mov_i32(mreg(i), r[i]);
2095                tcg_temp_free(r[i]);
2096            }
2097        }
2098        if (mode == 3) {
2099            /* post-increment: movem (An)+,X */
2100            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2101        }
2102    } else {
2103        /* register to memory */
2104        if (mode == 4) {
2105            /* pre-decrement: movem X,-(An) */
2106            for (i = 15; i >= 0; i--) {
2107                if ((mask << i) & 0x8000) {
2108                    tcg_gen_sub_i32(addr, addr, incr);
2109                    if (reg0 + 8 == i &&
2110                        m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2111                        /*
2112                         * M68020+: if the addressing register is the
2113                         * register moved to memory, the value written
2114                         * is the initial value decremented by the size of
2115                         * the operation, regardless of how many actual
2116                         * stores have been performed up to this point.
2117                         * M68000/M68010: the value is the initial value.
2118                         */
2119                        tmp = tcg_temp_new();
2120                        tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2121                        gen_store(s, opsize, addr, tmp, IS_USER(s));
2122                        tcg_temp_free(tmp);
2123                    } else {
2124                        gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2125                    }
2126                }
2127            }
2128            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2129        } else {
2130            for (i = 0; i < 16; i++) {
2131                if (mask & (1 << i)) {
2132                    gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2133                    tcg_gen_add_i32(addr, addr, incr);
2134                }
2135            }
2136        }
2137    }
2138
2139    tcg_temp_free(incr);
2140    tcg_temp_free(addr);
2141}
2142
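    /*
     * movep: transfer a data register to or from alternate memory bytes
     * (every other byte), starting at (An) + 16-bit displacement.
     */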
2143DISAS_INSN(movep)
2144{
2145    uint8_t i;
2146    int16_t displ;
2147    TCGv reg;
2148    TCGv addr;
2149    TCGv abuf;
2150    TCGv dbuf;
2151
2152    displ = read_im16(env, s);
2153
2154    addr = AREG(insn, 0);
2155    reg = DREG(insn, 9);
2156
2157    abuf = tcg_temp_new();
2158    tcg_gen_addi_i32(abuf, addr, displ);
2159    dbuf = tcg_temp_new();
2160
2161    if (insn & 0x40) {
2162        i = 4;
2163    } else {
2164        i = 2;
2165    }
2166
2167    if (insn & 0x80) {
2168        for ( ; i > 0 ; i--) {
2169            tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2170            tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2171            if (i > 1) {
2172                tcg_gen_addi_i32(abuf, abuf, 2);
2173            }
2174        }
2175    } else {
2176        for ( ; i > 0 ; i--) {
2177            tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2178            tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2179            if (i > 1) {
2180                tcg_gen_addi_i32(abuf, abuf, 2);
2181            }
2182        }
2183    }
2184    tcg_temp_free(abuf);
2185    tcg_temp_free(dbuf);
2186}
2187
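    /* btst/bchg/bclr/bset with the bit number given in an extension word.  */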
2188DISAS_INSN(bitop_im)
2189{
2190    int opsize;
2191    int op;
2192    TCGv src1;
2193    uint32_t mask;
2194    int bitnum;
2195    TCGv tmp;
2196    TCGv addr;
2197
2198    if ((insn & 0x38) != 0)
2199        opsize = OS_BYTE;
2200    else
2201        opsize = OS_LONG;
2202    op = (insn >> 6) & 3;
2203
2204    bitnum = read_im16(env, s);
2205    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2206        if (bitnum & 0xfe00) {
2207            disas_undef(env, s, insn);
2208            return;
2209        }
2210    } else {
2211        if (bitnum & 0xff00) {
2212            disas_undef(env, s, insn);
2213            return;
2214        }
2215    }
2216
2217    SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
2218
2219    gen_flush_flags(s);
2220    if (opsize == OS_BYTE)
2221        bitnum &= 7;
2222    else
2223        bitnum &= 31;
2224    mask = 1 << bitnum;
2225
2226    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2227
2228    if (op) {
2229        tmp = tcg_temp_new();
2230        switch (op) {
2231        case 1: /* bchg */
2232            tcg_gen_xori_i32(tmp, src1, mask);
2233            break;
2234        case 2: /* bclr */
2235            tcg_gen_andi_i32(tmp, src1, ~mask);
2236            break;
2237        case 3: /* bset */
2238            tcg_gen_ori_i32(tmp, src1, mask);
2239            break;
2240        default: /* btst */
2241            break;
2242        }
2243        DEST_EA(env, insn, opsize, tmp, &addr);
2244        tcg_temp_free(tmp);
2245    }
2246}
2247
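    /* Compute the CCR value into a new temporary, syncing cc_op first.  */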
2248static TCGv gen_get_ccr(DisasContext *s)
2249{
2250    TCGv dest;
2251
2252    update_cc_op(s);
2253    dest = tcg_temp_new();
2254    gen_helper_get_ccr(dest, cpu_env);
2255    return dest;
2256}
2257
2258static TCGv gen_get_sr(DisasContext *s)
2259{
2260    TCGv ccr;
2261    TCGv sr;
2262
2263    ccr = gen_get_ccr(s);
2264    sr = tcg_temp_new();
2265    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2266    tcg_gen_or_i32(sr, sr, ccr);
2267    tcg_temp_free(ccr);
2268    return sr;
2269}
2270
2271static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2272{
2273    if (ccr_only) {
2274        tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2275        tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2276        tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2277        tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2278        tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2279    } else {
2280        TCGv sr = tcg_const_i32(val);
2281        gen_helper_set_sr(cpu_env, sr);
2282        tcg_temp_free(sr);
2283    }
2284    set_cc_op(s, CC_OP_FLAGS);
2285}
2286
2287static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2288{
2289    if (ccr_only) {
2290        gen_helper_set_ccr(cpu_env, val);
2291    } else {
2292        gen_helper_set_sr(cpu_env, val);
2293    }
2294    set_cc_op(s, CC_OP_FLAGS);
2295}
2296
2297static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2298                           bool ccr_only)
2299{
2300    if ((insn & 0x3f) == 0x3c) {
2301        uint16_t val;
2302        val = read_im16(env, s);
2303        gen_set_sr_im(s, val, ccr_only);
2304    } else {
2305        TCGv src;
2306        SRC_EA(env, src, OS_WORD, 0, NULL);
2307        gen_set_sr(s, src, ccr_only);
2308    }
2309}
2310
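    /*
     * Immediate arithmetic and logic: ori/andi/subi/addi/eori/cmpi.
     * ori, andi and eori may also target CCR (byte) or SR (word).
     */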
2311DISAS_INSN(arith_im)
2312{
2313    int op;
2314    TCGv im;
2315    TCGv src1;
2316    TCGv dest;
2317    TCGv addr;
2318    int opsize;
2319    bool with_SR = ((insn & 0x3f) == 0x3c);
2320
2321    op = (insn >> 9) & 7;
2322    opsize = insn_opsize(insn);
2323    switch (opsize) {
2324    case OS_BYTE:
2325        im = tcg_const_i32((int8_t)read_im8(env, s));
2326        break;
2327    case OS_WORD:
2328        im = tcg_const_i32((int16_t)read_im16(env, s));
2329        break;
2330    case OS_LONG:
2331        im = tcg_const_i32(read_im32(env, s));
2332        break;
2333    default:
2334        g_assert_not_reached();
2335    }
2336
2337    if (with_SR) {
2338        /* SR/CCR can only be used with andi/eori/ori */
2339        if (op == 2 || op == 3 || op == 6) {
2340            disas_undef(env, s, insn);
2341            return;
2342        }
2343        switch (opsize) {
2344        case OS_BYTE:
2345            src1 = gen_get_ccr(s);
2346            break;
2347        case OS_WORD:
2348            if (IS_USER(s)) {
2349                gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2350                return;
2351            }
2352            src1 = gen_get_sr(s);
2353            break;
2354        default:
2355            /* OS_LONG; others already g_assert_not_reached.  */
2356            disas_undef(env, s, insn);
2357            return;
2358        }
2359    } else {
2360        SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2361    }
2362    dest = tcg_temp_new();
2363    switch (op) {
2364    case 0: /* ori */
2365        tcg_gen_or_i32(dest, src1, im);
2366        if (with_SR) {
2367            gen_set_sr(s, dest, opsize == OS_BYTE);
2368        } else {
2369            DEST_EA(env, insn, opsize, dest, &addr);
2370            gen_logic_cc(s, dest, opsize);
2371        }
2372        break;
2373    case 1: /* andi */
2374        tcg_gen_and_i32(dest, src1, im);
2375        if (with_SR) {
2376            gen_set_sr(s, dest, opsize == OS_BYTE);
2377        } else {
2378            DEST_EA(env, insn, opsize, dest, &addr);
2379            gen_logic_cc(s, dest, opsize);
2380        }
2381        break;
2382    case 2: /* subi */
2383        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2384        tcg_gen_sub_i32(dest, src1, im);
2385        gen_update_cc_add(dest, im, opsize);
2386        set_cc_op(s, CC_OP_SUBB + opsize);
2387        DEST_EA(env, insn, opsize, dest, &addr);
2388        break;
2389    case 3: /* addi */
2390        tcg_gen_add_i32(dest, src1, im);
2391        gen_update_cc_add(dest, im, opsize);
2392        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2393        set_cc_op(s, CC_OP_ADDB + opsize);
2394        DEST_EA(env, insn, opsize, dest, &addr);
2395        break;
2396    case 5: /* eori */
2397        tcg_gen_xor_i32(dest, src1, im);
2398        if (with_SR) {
2399            gen_set_sr(s, dest, opsize == OS_BYTE);
2400        } else {
2401            DEST_EA(env, insn, opsize, dest, &addr);
2402            gen_logic_cc(s, dest, opsize);
2403        }
2404        break;
2405    case 6: /* cmpi */
2406        gen_update_cc_cmp(s, src1, im, opsize);
2407        break;
2408    default:
2409        abort();
2410    }
2411    tcg_temp_free(im);
2412    tcg_temp_free(dest);
2413}
2414
2415DISAS_INSN(cas)
2416{
2417    int opsize;
2418    TCGv addr;
2419    uint16_t ext;
2420    TCGv load;
2421    TCGv cmp;
2422    MemOp opc;
2423
2424    switch ((insn >> 9) & 3) {
2425    case 1:
2426        opsize = OS_BYTE;
2427        opc = MO_SB;
2428        break;
2429    case 2:
2430        opsize = OS_WORD;
2431        opc = MO_TESW;
2432        break;
2433    case 3:
2434        opsize = OS_LONG;
2435        opc = MO_TESL;
2436        break;
2437    default:
2438        g_assert_not_reached();
2439    }
2440
2441    ext = read_im16(env, s);
2442
2443    /* cas Dc,Du,<EA> */
2444
2445    addr = gen_lea(env, s, insn, opsize);
2446    if (IS_NULL_QREG(addr)) {
2447        gen_addr_fault(s);
2448        return;
2449    }
2450
2451    cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2452
2453    /*
2454     * if  <EA> == Dc then
2455     *     <EA> = Du
2456     *     Dc = <EA> (because <EA> == Dc)
2457     * else
2458     *     Dc = <EA>
2459     */
2460
2461    load = tcg_temp_new();
2462    tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2463                               IS_USER(s), opc);
2464    /* update flags before setting cmp to load */
2465    gen_update_cc_cmp(s, load, cmp, opsize);
2466    gen_partset_reg(opsize, DREG(ext, 0), load);
2467
2468    tcg_temp_free(load);
2469
2470    switch (extract32(insn, 3, 3)) {
2471    case 3: /* Indirect postincrement.  */
2472        tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2473        break;
2474    case 4: /* Indirect predecrement.  */
2475        tcg_gen_mov_i32(AREG(insn, 0), addr);
2476        break;
2477    }
2478}
2479
2480DISAS_INSN(cas2w)
2481{
2482    uint16_t ext1, ext2;
2483    TCGv addr1, addr2;
2484    TCGv regs;
2485
2486    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2487
2488    ext1 = read_im16(env, s);
2489
2490    if (ext1 & 0x8000) {
2491        /* Address Register */
2492        addr1 = AREG(ext1, 12);
2493    } else {
2494        /* Data Register */
2495        addr1 = DREG(ext1, 12);
2496    }
2497
2498    ext2 = read_im16(env, s);
2499    if (ext2 & 0x8000) {
2500        /* Address Register */
2501        addr2 = AREG(ext2, 12);
2502    } else {
2503        /* Data Register */
2504        addr2 = DREG(ext2, 12);
2505    }
2506
2507    /*
2508     * if (R1) == Dc1 && (R2) == Dc2 then
2509     *     (R1) = Du1
2510     *     (R2) = Du2
2511     * else
2512     *     Dc1 = (R1)
2513     *     Dc2 = (R2)
2514     */
2515
2516    regs = tcg_const_i32(REG(ext2, 6) |
2517                         (REG(ext1, 6) << 3) |
2518                         (REG(ext2, 0) << 6) |
2519                         (REG(ext1, 0) << 9));
2520    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2521        gen_helper_exit_atomic(cpu_env);
2522    } else {
2523        gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2524    }
2525    tcg_temp_free(regs);
2526
2527    /* Note that the cas2w helper also assigns to env->cc_op.  */
2528    s->cc_op = CC_OP_CMPW;
2529    s->cc_op_synced = 1;
2530}
2531
2532DISAS_INSN(cas2l)
2533{
2534    uint16_t ext1, ext2;
2535    TCGv addr1, addr2, regs;
2536
2537    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2538
2539    ext1 = read_im16(env, s);
2540
2541    if (ext1 & 0x8000) {
2542        /* Address Register */
2543        addr1 = AREG(ext1, 12);
2544    } else {
2545        /* Data Register */
2546        addr1 = DREG(ext1, 12);
2547    }
2548
2549    ext2 = read_im16(env, s);
2550    if (ext2 & 0x8000) {
2551        /* Address Register */
2552        addr2 = AREG(ext2, 12);
2553    } else {
2554        /* Data Register */
2555        addr2 = DREG(ext2, 12);
2556    }
2557
2558    /*
2559     * if (R1) == Dc1 && (R2) == Dc2 then
2560     *     (R1) = Du1
2561     *     (R2) = Du2
2562     * else
2563     *     Dc1 = (R1)
2564     *     Dc2 = (R2)
2565     */
2566
2567    regs = tcg_const_i32(REG(ext2, 6) |
2568                         (REG(ext1, 6) << 3) |
2569                         (REG(ext2, 0) << 6) |
2570                         (REG(ext1, 0) << 9));
2571    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2572        gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2573    } else {
2574        gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2575    }
2576    tcg_temp_free(regs);
2577
2578    /* Note that the cas2l helper also assigns to env->cc_op.  */
2579    s->cc_op = CC_OP_CMPL;
2580    s->cc_op_synced = 1;
2581}
2582
2583DISAS_INSN(byterev)
2584{
2585    TCGv reg;
2586
2587    reg = DREG(insn, 0);
2588    tcg_gen_bswap32_i32(reg, reg);
2589}
2590
2591DISAS_INSN(move)
2592{
2593    TCGv src;
2594    TCGv dest;
2595    int op;
2596    int opsize;
2597
2598    switch (insn >> 12) {
2599    case 1: /* move.b */
2600        opsize = OS_BYTE;
2601        break;
2602    case 2: /* move.l */
2603        opsize = OS_LONG;
2604        break;
2605    case 3: /* move.w */
2606        opsize = OS_WORD;
2607        break;
2608    default:
2609        abort();
2610    }
2611    SRC_EA(env, src, opsize, 1, NULL);
2612    op = (insn >> 6) & 7;
2613    if (op == 1) {
2614        /* movea */
2615        /* The value will already have been sign extended.  */
2616        dest = AREG(insn, 9);
2617        tcg_gen_mov_i32(dest, src);
2618    } else {
2619        /* normal move */
2620        uint16_t dest_ea;
2621        dest_ea = ((insn >> 9) & 7) | (op << 3);
2622        DEST_EA(env, dest_ea, opsize, src, NULL);
2623        /* This will be correct because loads sign extend.  */
2624        gen_logic_cc(s, src, opsize);
2625    }
2626}
2627
2628DISAS_INSN(negx)
2629{
2630    TCGv z;
2631    TCGv src;
2632    TCGv addr;
2633    int opsize;
2634
2635    opsize = insn_opsize(insn);
2636    SRC_EA(env, src, opsize, 1, &addr);
2637
2638    gen_flush_flags(s); /* compute old Z */
2639
2640    /*
2641     * Perform subtract with borrow.
2642     * (X, N) = -(src + X);
2643     */
2644
2645    z = tcg_const_i32(0);
2646    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2647    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2648    tcg_temp_free(z);
2649    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2650
2651    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2652
2653    /*
2654     * Compute signed-overflow for negation.  The normal formula for
2655     * subtraction is (res ^ dest) & (dest ^ src), but with dest == 0
2656     * this simplifies to res & src.
2657     */
2658
2659    tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2660
2661    /* Copy the rest of the results into place.  */
2662    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2663    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2664
2665    set_cc_op(s, CC_OP_FLAGS);
2666
2667    /* result is in QREG_CC_N */
2668
2669    DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2670}
2671
2672DISAS_INSN(lea)
2673{
2674    TCGv reg;
2675    TCGv tmp;
2676
2677    reg = AREG(insn, 9);
2678    tmp = gen_lea(env, s, insn, OS_LONG);
2679    if (IS_NULL_QREG(tmp)) {
2680        gen_addr_fault(s);
2681        return;
2682    }
2683    tcg_gen_mov_i32(reg, tmp);
2684}
2685
2686DISAS_INSN(clr)
2687{
2688    int opsize;
2689    TCGv zero;
2690
2691    zero = tcg_const_i32(0);
2692
2693    opsize = insn_opsize(insn);
2694    DEST_EA(env, insn, opsize, zero, NULL);
2695    gen_logic_cc(s, zero, opsize);
2696    tcg_temp_free(zero);
2697}
2698
2699DISAS_INSN(move_from_ccr)
2700{
2701    TCGv ccr;
2702
2703    ccr = gen_get_ccr(s);
2704    DEST_EA(env, insn, OS_WORD, ccr, NULL);
2705}
2706
2707DISAS_INSN(neg)
2708{
2709    TCGv src1;
2710    TCGv dest;
2711    TCGv addr;
2712    int opsize;
2713
2714    opsize = insn_opsize(insn);
2715    SRC_EA(env, src1, opsize, 1, &addr);
2716    dest = tcg_temp_new();
2717    tcg_gen_neg_i32(dest, src1);
2718    set_cc_op(s, CC_OP_SUBB + opsize);
2719    gen_update_cc_add(dest, src1, opsize);
2720    tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2721    DEST_EA(env, insn, opsize, dest, &addr);
2722    tcg_temp_free(dest);
2723}
2724
2725DISAS_INSN(move_to_ccr)
2726{
2727    gen_move_to_sr(env, s, insn, true);
2728}
2729
2730DISAS_INSN(not)
2731{
2732    TCGv src1;
2733    TCGv dest;
2734    TCGv addr;
2735    int opsize;
2736
2737    opsize = insn_opsize(insn);
2738    SRC_EA(env, src1, opsize, 1, &addr);
2739    dest = tcg_temp_new();
2740    tcg_gen_not_i32(dest, src1);
2741    DEST_EA(env, insn, opsize, dest, &addr);
2742    gen_logic_cc(s, dest, opsize);
2743}
2744
2745DISAS_INSN(swap)
2746{
2747    TCGv src1;
2748    TCGv src2;
2749    TCGv reg;
2750
2751    src1 = tcg_temp_new();
2752    src2 = tcg_temp_new();
2753    reg = DREG(insn, 0);
2754    tcg_gen_shli_i32(src1, reg, 16);
2755    tcg_gen_shri_i32(src2, reg, 16);
2756    tcg_gen_or_i32(reg, src1, src2);
2757    tcg_temp_free(src2);
2758    tcg_temp_free(src1);
2759    gen_logic_cc(s, reg, OS_LONG);
2760}
2761
2762DISAS_INSN(bkpt)
2763{
2764    gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2765}
2766
2767DISAS_INSN(pea)
2768{
2769    TCGv tmp;
2770
2771    tmp = gen_lea(env, s, insn, OS_LONG);
2772    if (IS_NULL_QREG(tmp)) {
2773        gen_addr_fault(s);
2774        return;
2775    }
2776    gen_push(s, tmp);
2777}
2778
2779DISAS_INSN(ext)
2780{
2781    int op;
2782    TCGv reg;
2783    TCGv tmp;
2784
2785    reg = DREG(insn, 0);
2786    op = (insn >> 6) & 7;
2787    tmp = tcg_temp_new();
2788    if (op == 3)
2789        tcg_gen_ext16s_i32(tmp, reg);
2790    else
2791        tcg_gen_ext8s_i32(tmp, reg);
2792    if (op == 2)
2793        gen_partset_reg(OS_WORD, reg, tmp);
2794    else
2795        tcg_gen_mov_i32(reg, tmp);
2796    gen_logic_cc(s, tmp, OS_LONG);
2797    tcg_temp_free(tmp);
2798}
2799
2800DISAS_INSN(tst)
2801{
2802    int opsize;
2803    TCGv tmp;
2804
2805    opsize = insn_opsize(insn);
2806    SRC_EA(env, tmp, opsize, 1, NULL);
2807    gen_logic_cc(s, tmp, opsize);
2808}
2809
2810DISAS_INSN(pulse)
2811{
2812    /* Implemented as a NOP.  */
2813}
2814
2815DISAS_INSN(illegal)
2816{
2817    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2818}
2819
2820/* ??? This should be atomic.  */
2821DISAS_INSN(tas)
2822{
2823    TCGv dest;
2824    TCGv src1;
2825    TCGv addr;
2826
2827    dest = tcg_temp_new();
2828    SRC_EA(env, src1, OS_BYTE, 1, &addr);
2829    gen_logic_cc(s, src1, OS_BYTE);
2830    tcg_gen_ori_i32(dest, src1, 0x80);
2831    DEST_EA(env, insn, OS_BYTE, dest, &addr);
2832    tcg_temp_free(dest);
2833}
2834
2835DISAS_INSN(mull)
2836{
2837    uint16_t ext;
2838    TCGv src1;
2839    int sign;
2840
2841    ext = read_im16(env, s);
2842
2843    sign = ext & 0x800;
2844
2845    if (ext & 0x400) {
2846        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2847            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2848            return;
2849        }
2850
2851        SRC_EA(env, src1, OS_LONG, 0, NULL);
2852
2853        if (sign) {
2854            tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2855        } else {
2856            tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2857        }
2858        /* if Dl == Dh, 68040 returns low word */
2859        tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2860        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2861        tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2862
2863        tcg_gen_movi_i32(QREG_CC_V, 0);
2864        tcg_gen_movi_i32(QREG_CC_C, 0);
2865
2866        set_cc_op(s, CC_OP_FLAGS);
2867        return;
2868    }
2869    SRC_EA(env, src1, OS_LONG, 0, NULL);
2870    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2871        tcg_gen_movi_i32(QREG_CC_C, 0);
2872        if (sign) {
2873            tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2874            /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2875            tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2876            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2877        } else {
2878            tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2879            /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2880            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2881        }
2882        tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2883        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2884
2885        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2886
2887        set_cc_op(s, CC_OP_FLAGS);
2888    } else {
2889        /*
2890         * The upper 32 bits of the product are discarded, so
2891         * muls.l and mulu.l are functionally equivalent.
2892         */
2893        tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2894        gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2895    }
2896}
2897
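    /*
     * link: push An, load An with the updated SP (unless An is SP itself),
     * then add the signed offset to SP to allocate the frame.
     */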
2898static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2899{
2900    TCGv reg;
2901    TCGv tmp;
2902
2903    reg = AREG(insn, 0);
2904    tmp = tcg_temp_new();
2905    tcg_gen_subi_i32(tmp, QREG_SP, 4);
2906    gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2907    if ((insn & 7) != 7) {
2908        tcg_gen_mov_i32(reg, tmp);
2909    }
2910    tcg_gen_addi_i32(QREG_SP, tmp, offset);
2911    tcg_temp_free(tmp);
2912}
2913
2914DISAS_INSN(link)
2915{
2916    int16_t offset;
2917
2918    offset = read_im16(env, s);
2919    gen_link(s, insn, offset);
2920}
2921
2922DISAS_INSN(linkl)
2923{
2924    int32_t offset;
2925
2926    offset = read_im32(env, s);
2927    gen_link(s, insn, offset);
2928}
2929
2930DISAS_INSN(unlk)
2931{
2932    TCGv src;
2933    TCGv reg;
2934    TCGv tmp;
2935
2936    src = tcg_temp_new();
2937    reg = AREG(insn, 0);
2938    tcg_gen_mov_i32(src, reg);
2939    tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2940    tcg_gen_mov_i32(reg, tmp);
2941    tcg_gen_addi_i32(QREG_SP, src, 4);
2942    tcg_temp_free(src);
2943    tcg_temp_free(tmp);
2944}
2945
2946#if defined(CONFIG_SOFTMMU)
2947DISAS_INSN(reset)
2948{
2949    if (IS_USER(s)) {
2950        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2951        return;
2952    }
2953
2954    gen_helper_reset(cpu_env);
2955}
2956#endif
2957
2958DISAS_INSN(nop)
2959{
2960}
2961
2962DISAS_INSN(rtd)
2963{
2964    TCGv tmp;
2965    int16_t offset = read_im16(env, s);
2966
2967    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2968    tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2969    gen_jmp(s, tmp);
2970}
2971
2972DISAS_INSN(rts)
2973{
2974    TCGv tmp;
2975
2976    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2977    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2978    gen_jmp(s, tmp);
2979}
2980
2981DISAS_INSN(jump)
2982{
2983    TCGv tmp;
2984
2985    /*
2986     * Load the target address first to ensure correct exception
2987     * behavior.
2988     */
2989    tmp = gen_lea(env, s, insn, OS_LONG);
2990    if (IS_NULL_QREG(tmp)) {
2991        gen_addr_fault(s);
2992        return;
2993    }
2994    if ((insn & 0x40) == 0) {
2995        /* jsr */
2996        gen_push(s, tcg_const_i32(s->pc));
2997    }
2998    gen_jmp(s, tmp);
2999}
3000
3001DISAS_INSN(addsubq)
3002{
3003    TCGv src;
3004    TCGv dest;
3005    TCGv val;
3006    int imm;
3007    TCGv addr;
3008    int opsize;
3009
3010    if ((insn & 070) == 010) {
3011        /* Operation on address register is always long.  */
3012        opsize = OS_LONG;
3013    } else {
3014        opsize = insn_opsize(insn);
3015    }
3016    SRC_EA(env, src, opsize, 1, &addr);
3017    imm = (insn >> 9) & 7;
3018    if (imm == 0) {
3019        imm = 8;
3020    }
3021    val = tcg_const_i32(imm);
3022    dest = tcg_temp_new();
3023    tcg_gen_mov_i32(dest, src);
3024    if ((insn & 0x38) == 0x08) {
3025        /*
3026         * Don't update condition codes if the destination is an
3027         * address register.
3028         */
3029        if (insn & 0x0100) {
3030            tcg_gen_sub_i32(dest, dest, val);
3031        } else {
3032            tcg_gen_add_i32(dest, dest, val);
3033        }
3034    } else {
3035        if (insn & 0x0100) {
3036            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3037            tcg_gen_sub_i32(dest, dest, val);
3038            set_cc_op(s, CC_OP_SUBB + opsize);
3039        } else {
3040            tcg_gen_add_i32(dest, dest, val);
3041            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3042            set_cc_op(s, CC_OP_ADDB + opsize);
3043        }
3044        gen_update_cc_add(dest, val, opsize);
3045    }
3046    tcg_temp_free(val);
3047    DEST_EA(env, insn, opsize, dest, &addr);
3048    tcg_temp_free(dest);
3049}
3050
3051DISAS_INSN(tpf)
3052{
3053    switch (insn & 7) {
3054    case 2: /* One extension word.  */
3055        s->pc += 2;
3056        break;
3057    case 3: /* Two extension words.  */
3058        s->pc += 4;
3059        break;
3060    case 4: /* No extension words.  */
3061        break;
3062    default:
3063        disas_undef(env, s, insn);
3064    }
3065}
3066
3067DISAS_INSN(branch)
3068{
3069    int32_t offset;
3070    uint32_t base;
3071    int op;
3072
3073    base = s->pc;
3074    op = (insn >> 8) & 0xf;
3075    offset = (int8_t)insn;
3076    if (offset == 0) {
3077        offset = (int16_t)read_im16(env, s);
3078    } else if (offset == -1) {
3079        offset = read_im32(env, s);
3080    }
3081    if (op == 1) {
3082        /* bsr */
3083        gen_push(s, tcg_const_i32(s->pc));
3084    }
3085    if (op > 1) {
3086        /* Bcc */
3087        TCGLabel *l1 = gen_new_label();
3088        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3089        gen_jmp_tb(s, 1, base + offset);
3090        gen_set_label(l1);
3091        gen_jmp_tb(s, 0, s->pc);
3092    } else {
3093        /* Unconditional branch.  */
3094        update_cc_op(s);
3095        gen_jmp_tb(s, 0, base + offset);
3096    }
3097}
3098
3099DISAS_INSN(moveq)
3100{
3101    tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3102    gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3103}
3104
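    /*
     * mvs/mvz (ColdFire): load a byte or word from <EA> and sign- or
     * zero-extend it into a data register.
     */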
3105DISAS_INSN(mvzs)
3106{
3107    int opsize;
3108    TCGv src;
3109    TCGv reg;
3110
3111    if (insn & 0x40)
3112        opsize = OS_WORD;
3113    else
3114        opsize = OS_BYTE;
3115    SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3116    reg = DREG(insn, 9);
3117    tcg_gen_mov_i32(reg, src);
3118    gen_logic_cc(s, src, opsize);
3119}
3120
3121DISAS_INSN(or)
3122{
3123    TCGv reg;
3124    TCGv dest;
3125    TCGv src;
3126    TCGv addr;
3127    int opsize;
3128
3129    opsize = insn_opsize(insn);
3130    reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3131    dest = tcg_temp_new();
3132    if (insn & 0x100) {
3133        SRC_EA(env, src, opsize, 0, &addr);
3134        tcg_gen_or_i32(dest, src, reg);
3135        DEST_EA(env, insn, opsize, dest, &addr);
3136    } else {
3137        SRC_EA(env, src, opsize, 0, NULL);
3138        tcg_gen_or_i32(dest, src, reg);
3139        gen_partset_reg(opsize, DREG(insn, 9), dest);
3140    }
3141    gen_logic_cc(s, dest, opsize);
3142    tcg_temp_free(dest);
3143}
3144
3145DISAS_INSN(suba)
3146{
3147    TCGv src;
3148    TCGv reg;
3149
3150    SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3151    reg = AREG(insn, 9);
3152    tcg_gen_sub_i32(reg, reg, src);
3153}
3154
3155static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3156{
3157    TCGv tmp;
3158
3159    gen_flush_flags(s); /* compute old Z */
3160
3161    /*
3162     * Perform subtract with borrow.
3163     * (X, N) = dest - (src + X);
3164     */
3165
3166    tmp = tcg_const_i32(0);
3167    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3168    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3169    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3170    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3171
3172    /* Compute signed-overflow for subtraction.  */
3173
3174    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3175    tcg_gen_xor_i32(tmp, dest, src);
3176    tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3177    tcg_temp_free(tmp);
3178
3179    /* Copy the rest of the results into place.  */
3180    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3181    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3182
3183    set_cc_op(s, CC_OP_FLAGS);
3184
3185    /* result is in QREG_CC_N */
3186}
3187
3188DISAS_INSN(subx_reg)
3189{
3190    TCGv dest;
3191    TCGv src;
3192    int opsize;
3193
3194    opsize = insn_opsize(insn);
3195
3196    src = gen_extend(s, DREG(insn, 0), opsize, 1);
3197    dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3198
3199    gen_subx(s, src, dest, opsize);
3200
3201    gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3202}
3203
3204DISAS_INSN(subx_mem)
3205{
3206    TCGv src;
3207    TCGv addr_src;
3208    TCGv dest;
3209    TCGv addr_dest;
3210    int opsize;
3211
3212    opsize = insn_opsize(insn);
3213
3214    addr_src = AREG(insn, 0);
3215    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3216    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3217
3218    addr_dest = AREG(insn, 9);
3219    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3220    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3221
3222    gen_subx(s, src, dest, opsize);
3223
3224    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3225
3226    tcg_temp_free(dest);
3227    tcg_temp_free(src);
3228}
3229
3230DISAS_INSN(mov3q)
3231{
3232    TCGv src;
3233    int val;
3234
3235    val = (insn >> 9) & 7;
3236    if (val == 0)
3237        val = -1;
3238    src = tcg_const_i32(val);
3239    gen_logic_cc(s, src, OS_LONG);
3240    DEST_EA(env, insn, OS_LONG, src, NULL);
3241    tcg_temp_free(src);
3242}
3243
3244DISAS_INSN(cmp)
3245{
3246    TCGv src;
3247    TCGv reg;
3248    int opsize;
3249
3250    opsize = insn_opsize(insn);
3251    SRC_EA(env, src, opsize, 1, NULL);
3252    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3253    gen_update_cc_cmp(s, reg, src, opsize);
3254}
3255
3256DISAS_INSN(cmpa)
3257{
3258    int opsize;
3259    TCGv src;
3260    TCGv reg;
3261
3262    if (insn & 0x100) {
3263        opsize = OS_LONG;
3264    } else {
3265        opsize = OS_WORD;
3266    }
3267    SRC_EA(env, src, opsize, 1, NULL);
3268    reg = AREG(insn, 9);
3269    gen_update_cc_cmp(s, reg, src, OS_LONG);
3270}
3271
3272DISAS_INSN(cmpm)
3273{
3274    int opsize = insn_opsize(insn);
3275    TCGv src, dst;
3276
3277    /* Post-increment load (mode 3) from Ay.  */
3278    src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3279                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3280    /* Post-increment load (mode 3) from Ax.  */
3281    dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3282                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3283
3284    gen_update_cc_cmp(s, dst, src, opsize);
3285}
3286
3287DISAS_INSN(eor)
3288{
3289    TCGv src;
3290    TCGv dest;
3291    TCGv addr;
3292    int opsize;
3293
3294    opsize = insn_opsize(insn);
3295
3296    SRC_EA(env, src, opsize, 0, &addr);
3297    dest = tcg_temp_new();
3298    tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3299    gen_logic_cc(s, dest, opsize);
3300    DEST_EA(env, insn, opsize, dest, &addr);
3301    tcg_temp_free(dest);
3302}
3303
3304static void do_exg(TCGv reg1, TCGv reg2)
3305{
3306    TCGv temp = tcg_temp_new();
3307    tcg_gen_mov_i32(temp, reg1);
3308    tcg_gen_mov_i32(reg1, reg2);
3309    tcg_gen_mov_i32(reg2, temp);
3310    tcg_temp_free(temp);
3311}
3312
3313DISAS_INSN(exg_dd)
3314{
3315    /* exchange Dx and Dy */
3316    do_exg(DREG(insn, 9), DREG(insn, 0));
3317}
3318
3319DISAS_INSN(exg_aa)
3320{
3321    /* exchange Ax and Ay */
3322    do_exg(AREG(insn, 9), AREG(insn, 0));
3323}
3324
3325DISAS_INSN(exg_da)
3326{
3327    /* exchange Dx and Ay */
3328    do_exg(DREG(insn, 9), AREG(insn, 0));
3329}
3330
3331DISAS_INSN(and)
3332{
3333    TCGv src;
3334    TCGv reg;
3335    TCGv dest;
3336    TCGv addr;
3337    int opsize;
3338
3339    dest = tcg_temp_new();
3340
3341    opsize = insn_opsize(insn);
3342    reg = DREG(insn, 9);
3343    if (insn & 0x100) {
3344        SRC_EA(env, src, opsize, 0, &addr);
3345        tcg_gen_and_i32(dest, src, reg);
3346        DEST_EA(env, insn, opsize, dest, &addr);
3347    } else {
3348        SRC_EA(env, src, opsize, 0, NULL);
3349        tcg_gen_and_i32(dest, src, reg);
3350        gen_partset_reg(opsize, reg, dest);
3351    }
3352    gen_logic_cc(s, dest, opsize);
3353    tcg_temp_free(dest);
3354}
3355
3356DISAS_INSN(adda)
3357{
3358    TCGv src;
3359    TCGv reg;
3360
3361    SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3362    reg = AREG(insn, 9);
3363    tcg_gen_add_i32(reg, reg, src);
3364}
3365
3366static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3367{
3368    TCGv tmp;
3369
3370    gen_flush_flags(s); /* compute old Z */
3371
3372    /*
3373     * Perform addition with carry.
3374     * (X, N) = src + dest + X;
3375     */
3376
3377    tmp = tcg_const_i32(0);
3378    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3379    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3380    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3381
3382    /* Compute signed-overflow for addition.  */
3383
3384    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3385    tcg_gen_xor_i32(tmp, dest, src);
3386    tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3387    tcg_temp_free(tmp);
3388
3389    /* Copy the rest of the results into place.  */
3390    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3391    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3392
3393    set_cc_op(s, CC_OP_FLAGS);
3394
3395    /* result is in QREG_CC_N */
3396}
3397
3398DISAS_INSN(addx_reg)
3399{
3400    TCGv dest;
3401    TCGv src;
3402    int opsize;
3403
3404    opsize = insn_opsize(insn);
3405
3406    dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3407    src = gen_extend(s, DREG(insn, 0), opsize, 1);
3408
3409    gen_addx(s, src, dest, opsize);
3410
3411    gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3412}
3413
3414DISAS_INSN(addx_mem)
3415{
3416    TCGv src;
3417    TCGv addr_src;
3418    TCGv dest;
3419    TCGv addr_dest;
3420    int opsize;
3421
3422    opsize = insn_opsize(insn);
3423
3424    addr_src = AREG(insn, 0);
3425    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3426    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3427
3428    addr_dest = AREG(insn, 9);
3429    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3430    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3431
3432    gen_addx(s, src, dest, opsize);
3433
3434    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3435
3436    tcg_temp_free(dest);
3437    tcg_temp_free(src);
3438}
3439
3440static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3441{
3442    int count = (insn >> 9) & 7;
3443    int logical = insn & 8;
3444    int left = insn & 0x100;
3445    int bits = opsize_bytes(opsize) * 8;
3446    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3447
3448    if (count == 0) {
3449        count = 8;
3450    }
3451
3452    tcg_gen_movi_i32(QREG_CC_V, 0);
3453    if (left) {
3454        tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3455        tcg_gen_shli_i32(QREG_CC_N, reg, count);
3456
3457        /*
3458         * Note that ColdFire always clears V (done above),
3459         * while M68000 sets V if the most significant bit is changed at
3460         * any time during the shift operation.
3461         */
3462        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3463            /* if shift count >= bits, V is (reg != 0) */
3464            if (count >= bits) {
3465                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3466            } else {
3467                TCGv t0 = tcg_temp_new();
3468                tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3469                tcg_gen_sari_i32(t0, reg, bits - count - 1);
3470                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3471                tcg_temp_free(t0);
3472            }
3473            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3474        }
3475    } else {
3476        tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3477        if (logical) {
3478            tcg_gen_shri_i32(QREG_CC_N, reg, count);
3479        } else {
3480            tcg_gen_sari_i32(QREG_CC_N, reg, count);
3481        }
3482    }
3483
3484    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3485    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3486    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3487    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3488
3489    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3490    set_cc_op(s, CC_OP_FLAGS);
3491}
3492
3493static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3494{
3495    int logical = insn & 8;
3496    int left = insn & 0x100;
3497    int bits = opsize_bytes(opsize) * 8;
3498    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3499    TCGv s32;
3500    TCGv_i64 t64, s64;
3501
3502    t64 = tcg_temp_new_i64();
3503    s64 = tcg_temp_new_i64();
3504    s32 = tcg_temp_new();
3505
3506    /*
3507     * Note that m68k truncates the shift count modulo 64, not 32.
3508     * In addition, a 64-bit shift makes it easy to find "the last
3509     * bit shifted out", for the carry flag.
3510     */
3511    tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3512    tcg_gen_extu_i32_i64(s64, s32);
3513    tcg_gen_extu_i32_i64(t64, reg);
3514
3515    /* Optimistically set V=0.  Also used as a zero source below.  */
3516    tcg_gen_movi_i32(QREG_CC_V, 0);
3517    if (left) {
3518        tcg_gen_shl_i64(t64, t64, s64);
3519
3520        if (opsize == OS_LONG) {
3521            tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3522            /* Note that C=0 if shift count is 0, and we get that for free.  */
3523        } else {
3524            TCGv zero = tcg_const_i32(0);
3525            tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3526            tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3527            tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3528                                s32, zero, zero, QREG_CC_C);
3529            tcg_temp_free(zero);
3530        }
3531        tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3532
3533        /* X = C, but only if the shift count was non-zero.  */
3534        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3535                            QREG_CC_C, QREG_CC_X);
3536
3537        /*
3538         * M68000 sets V if the most significant bit is changed at
3539         * any time during the shift operation.  Do this via creating
3540         * an extension of the sign bit, comparing, and discarding
3541         * the bits below the sign bit.  I.e.
3542         *     int64_t s = (intN_t)reg;
3543         *     int64_t t = (int64_t)(intN_t)reg << count;
3544         *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3545         */
3546        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3547            TCGv_i64 tt = tcg_const_i64(32);
3548            /* if shift is greater than 32, use 32 */
3549            tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3550            tcg_temp_free_i64(tt);
3551            /* Sign extend the input to 64 bits; re-do the shift.  */
3552            tcg_gen_ext_i32_i64(t64, reg);
3553            tcg_gen_shl_i64(s64, t64, s64);
3554            /* Clear all bits that are unchanged.  */
3555            tcg_gen_xor_i64(t64, t64, s64);
3556            /* Ignore the bits below the sign bit.  */
3557            tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3558            /* If any bits remain set, we have overflow.  */
3559            tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3560            tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3561            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3562        }
3563    } else {
3564        tcg_gen_shli_i64(t64, t64, 32);
3565        if (logical) {
3566            tcg_gen_shr_i64(t64, t64, s64);
3567        } else {
3568            tcg_gen_sar_i64(t64, t64, s64);
3569        }
3570        tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3571
3572        /* Note that C=0 if shift count is 0, and we get that for free.  */
3573        tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3574
3575        /* X = C, but only if the shift count was non-zero.  */
3576        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3577                            QREG_CC_C, QREG_CC_X);
3578    }
3579    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3580    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3581
3582    tcg_temp_free(s32);
3583    tcg_temp_free_i64(s64);
3584    tcg_temp_free_i64(t64);
3585
3586    /* Write back the result.  */
3587    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3588    set_cc_op(s, CC_OP_FLAGS);
3589}
3590
3591DISAS_INSN(shift8_im)
3592{
3593    shift_im(s, insn, OS_BYTE);
3594}
3595
3596DISAS_INSN(shift16_im)
3597{
3598    shift_im(s, insn, OS_WORD);
3599}
3600
3601DISAS_INSN(shift_im)
3602{
3603    shift_im(s, insn, OS_LONG);
3604}
3605
3606DISAS_INSN(shift8_reg)
3607{
3608    shift_reg(s, insn, OS_BYTE);
3609}
3610
3611DISAS_INSN(shift16_reg)
3612{
3613    shift_reg(s, insn, OS_WORD);
3614}
3615
3616DISAS_INSN(shift_reg)
3617{
3618    shift_reg(s, insn, OS_LONG);
3619}
3620
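    /* Memory form of the shifts: shift a word at <EA> by exactly one bit.  */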
3621DISAS_INSN(shift_mem)
3622{
3623    int logical = insn & 8;
3624    int left = insn & 0x100;
3625    TCGv src;
3626    TCGv addr;
3627
3628    SRC_EA(env, src, OS_WORD, !logical, &addr);
3629    tcg_gen_movi_i32(QREG_CC_V, 0);
3630    if (left) {
3631        tcg_gen_shri_i32(QREG_CC_C, src, 15);
3632        tcg_gen_shli_i32(QREG_CC_N, src, 1);
3633
3634        /*
3635         * Note that ColdFire always clears V,
3636         * while M68000 sets V if the most significant bit is changed at
3637         * any time during the shift operation.
3638         */
3639        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3640            src = gen_extend(s, src, OS_WORD, 1);
3641            tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3642        }
3643    } else {
3644        tcg_gen_mov_i32(QREG_CC_C, src);
3645        if (logical) {
3646            tcg_gen_shri_i32(QREG_CC_N, src, 1);
3647        } else {
3648            tcg_gen_sari_i32(QREG_CC_N, src, 1);
3649        }
3650    }
3651
3652    gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3653    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3654    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3655    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3656
3657    DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3658    set_cc_op(s, CC_OP_FLAGS);
3659}
3660
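    /*
     * Rotate REG (size 8, 16 or 32 bits) by SHIFT and set N, Z and C.
     * V is always cleared; X is not affected.
     */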
3661static void rotate(TCGv reg, TCGv shift, int left, int size)
3662{
3663    switch (size) {
3664    case 8:
3665        /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3666        tcg_gen_ext8u_i32(reg, reg);
3667        tcg_gen_muli_i32(reg, reg, 0x01010101);
3668        goto do_long;
3669    case 16:
3670        /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3671        tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3672        goto do_long;
3673    do_long:
3674    default:
3675        if (left) {
3676            tcg_gen_rotl_i32(reg, reg, shift);
3677        } else {
3678            tcg_gen_rotr_i32(reg, reg, shift);
3679        }
3680    }
3681
3682    /* compute flags */
3683
3684    switch (size) {
3685    case 8:
3686        tcg_gen_ext8s_i32(reg, reg);
3687        break;
3688    case 16:
3689        tcg_gen_ext16s_i32(reg, reg);
3690        break;
3691    default:
3692        break;
3693    }
3694
3695    /* QREG_CC_X is not affected */
3696
3697    tcg_gen_mov_i32(QREG_CC_N, reg);
3698    tcg_gen_mov_i32(QREG_CC_Z, reg);
3699
3700    if (left) {
3701        tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3702    } else {
3703        tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3704    }
3705
3706    tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3707}
3708
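    /*
     * Set N and Z from a rotate-through-X result, copy the extracted X bit
     * into both C and X, and clear V.
     */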
3709static void rotate_x_flags(TCGv reg, TCGv X, int size)
3710{
3711    switch (size) {
3712    case 8:
3713        tcg_gen_ext8s_i32(reg, reg);
3714        break;
3715    case 16:
3716        tcg_gen_ext16s_i32(reg, reg);
3717        break;
3718    default:
3719        break;
3720    }
3721    tcg_gen_mov_i32(QREG_CC_N, reg);
3722    tcg_gen_mov_i32(QREG_CC_Z, reg);
3723    tcg_gen_mov_i32(QREG_CC_X, X);
3724    tcg_gen_mov_i32(QREG_CC_C, X);
3725    tcg_gen_movi_i32(QREG_CC_V, 0);
3726}
3727
3728/* Result of rotate_x() is valid if 0 <= shift <= size */
3729static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3730{
3731    TCGv X, shl, shr, shx, sz, zero;
3732
3733    sz = tcg_const_i32(size);
3734
3735    shr = tcg_temp_new();
3736    shl = tcg_temp_new();
3737    shx = tcg_temp_new();
3738    if (left) {
3739        tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3740        tcg_gen_movi_i32(shr, size + 1);
3741        tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3742        tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3743        /* shx = shx < 0 ? size : shx; */
3744        zero = tcg_const_i32(0);
3745        tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3746        tcg_temp_free(zero);
3747    } else {
3748        tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3749        tcg_gen_movi_i32(shl, size + 1);
3750        tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3751        tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3752    }
3753    tcg_temp_free_i32(sz);
3754
3755    /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
3756
3757    tcg_gen_shl_i32(shl, reg, shl);
3758    tcg_gen_shr_i32(shr, reg, shr);
3759    tcg_gen_or_i32(reg, shl, shr);
3760    tcg_temp_free(shl);
3761    tcg_temp_free(shr);
3762    tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3763    tcg_gen_or_i32(reg, reg, shx);
3764    tcg_temp_free(shx);
3765
3766    /* X = (reg >> size) & 1 */
3767
3768    X = tcg_temp_new();
3769    tcg_gen_extract_i32(X, reg, size, 1);
3770
3771    return X;
3772}
3773
3774/* Result of rotate32_x() is valid if 0 <= shift < 33 */
3775static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3776{
3777    TCGv_i64 t0, shift64;
3778    TCGv X, lo, hi, zero;
3779
3780    shift64 = tcg_temp_new_i64();
3781    tcg_gen_extu_i32_i64(shift64, shift);
3782
3783    t0 = tcg_temp_new_i64();
3784
3785    X = tcg_temp_new();
3786    lo = tcg_temp_new();
3787    hi = tcg_temp_new();
3788
3789    if (left) {
3790        /* create [reg:X:..] */
3791
3792        tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3793        tcg_gen_concat_i32_i64(t0, lo, reg);
3794
3795        /* rotate */
3796
3797        tcg_gen_rotl_i64(t0, t0, shift64);
3798        tcg_temp_free_i64(shift64);
3799
3800        /* result is [reg:..:reg:X] */
3801
3802        tcg_gen_extr_i64_i32(lo, hi, t0);
3803        tcg_gen_andi_i32(X, lo, 1);
3804
3805        tcg_gen_shri_i32(lo, lo, 1);
3806    } else {
3807        /* create [..:X:reg] */
3808
3809        tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3810
3811        tcg_gen_rotr_i64(t0, t0, shift64);
3812        tcg_temp_free_i64(shift64);
3813
3814        /* result is value: [X:reg:..:reg] */
3815
3816        tcg_gen_extr_i64_i32(lo, hi, t0);
3817
3818        /* extract X */
3819
3820        tcg_gen_shri_i32(X, hi, 31);
3821
3822        /* extract result */
3823
3824        tcg_gen_shli_i32(hi, hi, 1);
3825    }
3826    tcg_temp_free_i64(t0);
3827    tcg_gen_or_i32(lo, lo, hi);
3828    tcg_temp_free(hi);
3829
3830    /* if shift == 0, register and X are not affected */
3831
3832    zero = tcg_const_i32(0);
3833    tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3834    tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3835    tcg_temp_free(zero);
3836    tcg_temp_free(lo);
3837
3838    return X;
3839}
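
    /*
     * Reader's note on rotate32_x() above: the 33-bit circle [reg:X] does
     * not fit a 32-bit rotate, so it is assembled in a 64-bit temporary,
     * rotated with rotl/rotr_i64 and split back apart.  Callers keep the
     * count below 33 (rotate_reg reduces it modulo 33), and a zero count
     * must leave both the register and X untouched, hence the final
     * movcond pair.
     */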
3840
3841DISAS_INSN(rotate_im)
3842{
3843    TCGv shift;
3844    int tmp;
3845    int left = (insn & 0x100);
3846
3847    tmp = (insn >> 9) & 7;
3848    if (tmp == 0) {
3849        tmp = 8;
3850    }
3851
3852    shift = tcg_const_i32(tmp);
3853    if (insn & 8) {
3854        rotate(DREG(insn, 0), shift, left, 32);
3855    } else {
3856        TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3857        rotate_x_flags(DREG(insn, 0), X, 32);
3858        tcg_temp_free(X);
3859    }
3860    tcg_temp_free(shift);
3861
3862    set_cc_op(s, CC_OP_FLAGS);
3863}
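
    /*
     * For the immediate forms here, the 3-bit count field encodes 1..8
     * with 0 meaning 8, bit 8 of the opcode selects the direction, and
     * bit 3 distinguishes ROd (set) from ROXd (clear) in this decode.
     * The byte and word variants below follow the same pattern.
     */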
3864
3865DISAS_INSN(rotate8_im)
3866{
3867    int left = (insn & 0x100);
3868    TCGv reg;
3869    TCGv shift;
3870    int tmp;
3871
3872    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3873
3874    tmp = (insn >> 9) & 7;
3875    if (tmp == 0) {
3876        tmp = 8;
3877    }
3878
3879    shift = tcg_const_i32(tmp);
3880    if (insn & 8) {
3881        rotate(reg, shift, left, 8);
3882    } else {
3883        TCGv X = rotate_x(reg, shift, left, 8);
3884        rotate_x_flags(reg, X, 8);
3885        tcg_temp_free(X);
3886    }
3887    tcg_temp_free(shift);
3888    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3889    set_cc_op(s, CC_OP_FLAGS);
3890}
3891
3892DISAS_INSN(rotate16_im)
3893{
3894    int left = (insn & 0x100);
3895    TCGv reg;
3896    TCGv shift;
3897    int tmp;
3898
3899    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3900    tmp = (insn >> 9) & 7;
3901    if (tmp == 0) {
3902        tmp = 8;
3903    }
3904
3905    shift = tcg_const_i32(tmp);
3906    if (insn & 8) {
3907        rotate(reg, shift, left, 16);
3908    } else {
3909        TCGv X = rotate_x(reg, shift, left, 16);
3910        rotate_x_flags(reg, X, 16);
3911        tcg_temp_free(X);
3912    }
3913    tcg_temp_free(shift);
3914    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3915    set_cc_op(s, CC_OP_FLAGS);
3916}
3917
3918DISAS_INSN(rotate_reg)
3919{
3920    TCGv reg;
3921    TCGv src;
3922    TCGv t0, t1;
3923    int left = (insn & 0x100);
3924
3925    reg = DREG(insn, 0);
3926    src = DREG(insn, 9);
3927    /* shift in [0..63] */
3928    t0 = tcg_temp_new();
3929    tcg_gen_andi_i32(t0, src, 63);
3930    t1 = tcg_temp_new_i32();
3931    if (insn & 8) {
3932        tcg_gen_andi_i32(t1, src, 31);
3933        rotate(reg, t1, left, 32);
3934        /* if shift == 0, clear C */
3935        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3936                            t0, QREG_CC_V /* 0 */,
3937                            QREG_CC_V /* 0 */, QREG_CC_C);
3938    } else {
3939        TCGv X;
3940        /* modulo 33 */
3941        tcg_gen_movi_i32(t1, 33);
3942        tcg_gen_remu_i32(t1, t0, t1);
3943        X = rotate32_x(DREG(insn, 0), t1, left);
3944        rotate_x_flags(DREG(insn, 0), X, 32);
3945        tcg_temp_free(X);
3946    }
3947    tcg_temp_free(t1);
3948    tcg_temp_free(t0);
3949    set_cc_op(s, CC_OP_FLAGS);
3950}
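
    /*
     * Register-count forms: the count comes from the source data register
     * modulo 64.  For ROd only the low bits matter for the rotation
     * itself, but C must still be cleared when the full 6-bit count is
     * zero, hence the extra movcond against t0 above.  For ROXd the count
     * is reduced modulo 33 (9 and 17 in the byte and word variants below)
     * because the X bit takes part in the rotation.
     */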
3951
3952DISAS_INSN(rotate8_reg)
3953{
3954    TCGv reg;
3955    TCGv src;
3956    TCGv t0, t1;
3957    int left = (insn & 0x100);
3958
3959    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3960    src = DREG(insn, 9);
3961    /* shift in [0..63] */
3962    t0 = tcg_temp_new_i32();
3963    tcg_gen_andi_i32(t0, src, 63);
3964    t1 = tcg_temp_new_i32();
3965    if (insn & 8) {
3966        tcg_gen_andi_i32(t1, src, 7);
3967        rotate(reg, t1, left, 8);
3968        /* if shift == 0, clear C */
3969        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3970                            t0, QREG_CC_V /* 0 */,
3971                            QREG_CC_V /* 0 */, QREG_CC_C);
3972    } else {
3973        TCGv X;
3974        /* modulo 9 */
3975        tcg_gen_movi_i32(t1, 9);
3976        tcg_gen_remu_i32(t1, t0, t1);
3977        X = rotate_x(reg, t1, left, 8);
3978        rotate_x_flags(reg, X, 8);
3979        tcg_temp_free(X);
3980    }
3981    tcg_temp_free(t1);
3982    tcg_temp_free(t0);
3983    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3984    set_cc_op(s, CC_OP_FLAGS);
3985}
3986
3987DISAS_INSN(rotate16_reg)
3988{
3989    TCGv reg;
3990    TCGv src;
3991    TCGv t0, t1;
3992    int left = (insn & 0x100);
3993
3994    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3995    src = DREG(insn, 9);
3996    /* shift in [0..63] */
3997    t0 = tcg_temp_new_i32();
3998    tcg_gen_andi_i32(t0, src, 63);
3999    t1 = tcg_temp_new_i32();
4000    if (insn & 8) {
4001        tcg_gen_andi_i32(t1, src, 15);
4002        rotate(reg, t1, left, 16);
4003        /* if shift == 0, clear C */
4004        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
4005                            t0, QREG_CC_V /* 0 */,
4006                            QREG_CC_V /* 0 */, QREG_CC_C);
4007    } else {
4008        TCGv X;
4009        /* modulo 17 */
4010        tcg_gen_movi_i32(t1, 17);
4011        tcg_gen_remu_i32(t1, t0, t1);
4012        X = rotate_x(reg, t1, left, 16);
4013        rotate_x_flags(reg, X, 16);
4014        tcg_temp_free(X);
4015    }
4016    tcg_temp_free(t1);
4017    tcg_temp_free(t0);
4018    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
4019    set_cc_op(s, CC_OP_FLAGS);
4020}
4021
4022DISAS_INSN(rotate_mem)
4023{
4024    TCGv src;
4025    TCGv addr;
4026    TCGv shift;
4027    int left = (insn & 0x100);
4028
4029    SRC_EA(env, src, OS_WORD, 0, &addr);
4030
4031    shift = tcg_const_i32(1);
4032    if (insn & 0x0200) {
4033        rotate(src, shift, left, 16);
4034    } else {
4035        TCGv X = rotate_x(src, shift, left, 16);
4036        rotate_x_flags(src, X, 16);
4037        tcg_temp_free(X);
4038    }
4039    tcg_temp_free(shift);
4040    DEST_EA(env, insn, OS_WORD, src, &addr);
4041    set_cc_op(s, CC_OP_FLAGS);
4042}
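
    /*
     * The memory form above always rotates a word by an implicit count of
     * one; bit 9 of the opcode selects ROd versus ROXd here.
     */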
4043
4044DISAS_INSN(bfext_reg)
4045{
4046    int ext = read_im16(env, s);
4047    int is_sign = insn & 0x200;
4048    TCGv src = DREG(insn, 0);
4049    TCGv dst = DREG(ext, 12);
4050    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4051    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4052    int pos = 32 - ofs - len;        /* little bit-endian */
4053    TCGv tmp = tcg_temp_new();
4054    TCGv shift;
4055
4056    /*
4057     * In general, we're going to rotate the field so that it's at the
4058     * top of the word and then right-shift by the complement of the
4059     * width to extend the field.
4060     */
4061    if (ext & 0x20) {
4062        /* Variable width.  */
4063        if (ext & 0x800) {
4064            /* Variable offset.  */
4065            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4066            tcg_gen_rotl_i32(tmp, src, tmp);
4067        } else {
4068            tcg_gen_rotli_i32(tmp, src, ofs);
4069        }
4070
4071        shift = tcg_temp_new();
4072        tcg_gen_neg_i32(shift, DREG(ext, 0));
4073        tcg_gen_andi_i32(shift, shift, 31);
4074        tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
4075        if (is_sign) {
4076            tcg_gen_mov_i32(dst, QREG_CC_N);
4077        } else {
4078            tcg_gen_shr_i32(dst, tmp, shift);
4079        }
4080        tcg_temp_free(shift);
4081    } else {
4082        /* Immediate width.  */
4083        if (ext & 0x800) {
4084            /* Variable offset */
4085            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4086            tcg_gen_rotl_i32(tmp, src, tmp);
4087            src = tmp;
4088            pos = 32 - len;
4089        } else {
4090            /*
4091             * Immediate offset.  If the field doesn't wrap around the
4092             * end of the word, rely on (s)extract completely.
4093             */
4094            if (pos < 0) {
4095                tcg_gen_rotli_i32(tmp, src, ofs);
4096                src = tmp;
4097                pos = 32 - len;
4098            }
4099        }
4100
4101        tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
4102        if (is_sign) {
4103            tcg_gen_mov_i32(dst, QREG_CC_N);
4104        } else {
4105            tcg_gen_extract_i32(dst, src, pos, len);
4106        }
4107    }
4108
4109    tcg_temp_free(tmp);
4110    set_cc_op(s, CC_OP_LOGIC);
4111}
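
    /*
     * Bit-field decode notes (illustrative): ofs counts from the most
     * significant bit, len encodes 1..32 with 0 meaning 32, and
     * pos = 32 - ofs - len converts to the LSB-based position that
     * (s)extract expects.  A field that wraps (pos < 0) is rotated to the
     * top of the word first, e.g. ofs = 28, len = 8 rotates left by 28
     * and then extracts the top byte.
     */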
4112
4113DISAS_INSN(bfext_mem)
4114{
4115    int ext = read_im16(env, s);
4116    int is_sign = insn & 0x200;
4117    TCGv dest = DREG(ext, 12);
4118    TCGv addr, len, ofs;
4119
4120    addr = gen_lea(env, s, insn, OS_UNSIZED);
4121    if (IS_NULL_QREG(addr)) {
4122        gen_addr_fault(s);
4123        return;
4124    }
4125
4126    if (ext & 0x20) {
4127        len = DREG(ext, 0);
4128    } else {
4129        len = tcg_const_i32(extract32(ext, 0, 5));
4130    }
4131    if (ext & 0x800) {
4132        ofs = DREG(ext, 6);
4133    } else {
4134        ofs = tcg_const_i32(extract32(ext, 6, 5));
4135    }
4136
4137    if (is_sign) {
4138        gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4139        tcg_gen_mov_i32(QREG_CC_N, dest);
4140    } else {
4141        TCGv_i64 tmp = tcg_temp_new_i64();
4142        gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4143        tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4144        tcg_temp_free_i64(tmp);
4145    }
4146    set_cc_op(s, CC_OP_LOGIC);
4147
4148    if (!(ext & 0x20)) {
4149        tcg_temp_free(len);
4150    }
4151    if (!(ext & 0x800)) {
4152        tcg_temp_free(ofs);
4153    }
4154}
4155
4156DISAS_INSN(bfop_reg)
4157{
4158    int ext = read_im16(env, s);
4159    TCGv src = DREG(insn, 0);
4160    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4161    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4162    TCGv mask, tofs, tlen;
4163
4164    tofs = NULL;
4165    tlen = NULL;
4166    if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4167        tofs = tcg_temp_new();
4168        tlen = tcg_temp_new();
4169    }
4170
4171    if ((ext & 0x820) == 0) {
4172        /* Immediate width and offset.  */
4173        uint32_t maski = 0x7fffffffu >> (len - 1);
4174        if (ofs + len <= 32) {
4175            tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4176        } else {
4177            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4178        }
4179        tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4180        mask = tcg_const_i32(ror32(maski, ofs));
4181        if (tofs) {
4182            tcg_gen_movi_i32(tofs, ofs);
4183            tcg_gen_movi_i32(tlen, len);
4184        }
4185    } else {
4186        TCGv tmp = tcg_temp_new();
4187        if (ext & 0x20) {
4188            /* Variable width */
4189            tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4190            tcg_gen_andi_i32(tmp, tmp, 31);
4191            mask = tcg_const_i32(0x7fffffffu);
4192            tcg_gen_shr_i32(mask, mask, tmp);
4193            if (tlen) {
4194                tcg_gen_addi_i32(tlen, tmp, 1);
4195            }
4196        } else {
4197            /* Immediate width */
4198            mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4199            if (tlen) {
4200                tcg_gen_movi_i32(tlen, len);
4201            }
4202        }
4203        if (ext & 0x800) {
4204            /* Variable offset */
4205            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4206            tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4207            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4208            tcg_gen_rotr_i32(mask, mask, tmp);
4209            if (tofs) {
4210                tcg_gen_mov_i32(tofs, tmp);
4211            }
4212        } else {
4213            /* Immediate offset (and variable width) */
4214            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4215            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4216            tcg_gen_rotri_i32(mask, mask, ofs);
4217            if (tofs) {
4218                tcg_gen_movi_i32(tofs, ofs);
4219            }
4220        }
4221        tcg_temp_free(tmp);
4222    }
4223    set_cc_op(s, CC_OP_LOGIC);
4224
4225    switch (insn & 0x0f00) {
4226    case 0x0a00: /* bfchg */
4227        tcg_gen_eqv_i32(src, src, mask);
4228        break;
4229    case 0x0c00: /* bfclr */
4230        tcg_gen_and_i32(src, src, mask);
4231        break;
4232    case 0x0d00: /* bfffo */
4233        gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4234        tcg_temp_free(tlen);
4235        tcg_temp_free(tofs);
4236        break;
4237    case 0x0e00: /* bfset */
4238        tcg_gen_orc_i32(src, src, mask);
4239        break;
4240    case 0x0800: /* bftst */
4241        /* flags already set; no other work to do.  */
4242        break;
4243    default:
4244        g_assert_not_reached();
4245    }
4246    tcg_temp_free(mask);
4247}
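
    /*
     * Reading aid for bfop_reg() above: the working mask is built
     * inverted -- 0x7fffffff >> (len - 1) rotated right by ofs has zeros
     * exactly over the field -- so bfclr is an AND, bfset an ORC and
     * bfchg an EQV against it, while QREG_CC_N holds the field
     * left-aligned for the flags.
     */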
4248
4249DISAS_INSN(bfop_mem)
4250{
4251    int ext = read_im16(env, s);
4252    TCGv addr, len, ofs;
4253    TCGv_i64 t64;
4254
4255    addr = gen_lea(env, s, insn, OS_UNSIZED);
4256    if (IS_NULL_QREG(addr)) {
4257        gen_addr_fault(s);
4258        return;
4259    }
4260
4261    if (ext & 0x20) {
4262        len = DREG(ext, 0);
4263    } else {
4264        len = tcg_const_i32(extract32(ext, 0, 5));
4265    }
4266    if (ext & 0x800) {
4267        ofs = DREG(ext, 6);
4268    } else {
4269        ofs = tcg_const_i32(extract32(ext, 6, 5));
4270    }
4271
4272    switch (insn & 0x0f00) {
4273    case 0x0a00: /* bfchg */
4274        gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4275        break;
4276    case 0x0c00: /* bfclr */
4277        gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4278        break;
4279    case 0x0d00: /* bfffo */
4280        t64 = tcg_temp_new_i64();
4281        gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4282        tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4283        tcg_temp_free_i64(t64);
4284        break;
4285    case 0x0e00: /* bfset */
4286        gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4287        break;
4288    case 0x0800: /* bftst */
4289        gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4290        break;
4291    default:
4292        g_assert_not_reached();
4293    }
4294    set_cc_op(s, CC_OP_LOGIC);
4295
4296    if (!(ext & 0x20)) {
4297        tcg_temp_free(len);
4298    }
4299    if (!(ext & 0x800)) {
4300        tcg_temp_free(ofs);
4301    }
4302}
4303
4304DISAS_INSN(bfins_reg)
4305{
4306    int ext = read_im16(env, s);
4307    TCGv dst = DREG(insn, 0);
4308    TCGv src = DREG(ext, 12);
4309    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4310    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4311    int pos = 32 - ofs - len;        /* little bit-endian */
4312    TCGv tmp;
4313
4314    tmp = tcg_temp_new();
4315
4316    if (ext & 0x20) {
4317        /* Variable width */
4318        tcg_gen_neg_i32(tmp, DREG(ext, 0));
4319        tcg_gen_andi_i32(tmp, tmp, 31);
4320        tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4321    } else {
4322        /* Immediate width */
4323        tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4324    }
4325    set_cc_op(s, CC_OP_LOGIC);
4326
4327    /* Immediate width and offset */
4328    if ((ext & 0x820) == 0) {
4329        /* Check for suitability for deposit.  */
4330        if (pos >= 0) {
4331            tcg_gen_deposit_i32(dst, dst, src, pos, len);
4332        } else {
4333            uint32_t maski = -2U << (len - 1);
4334            uint32_t roti = (ofs + len) & 31;
4335            tcg_gen_andi_i32(tmp, src, ~maski);
4336            tcg_gen_rotri_i32(tmp, tmp, roti);
4337            tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4338            tcg_gen_or_i32(dst, dst, tmp);
4339        }
4340    } else {
4341        TCGv mask = tcg_temp_new();
4342        TCGv rot = tcg_temp_new();
4343
4344        if (ext & 0x20) {
4345            /* Variable width */
4346            tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4347            tcg_gen_andi_i32(rot, rot, 31);
4348            tcg_gen_movi_i32(mask, -2);
4349            tcg_gen_shl_i32(mask, mask, rot);
4350            tcg_gen_mov_i32(rot, DREG(ext, 0));
4351            tcg_gen_andc_i32(tmp, src, mask);
4352        } else {
4353            /* Immediate width (variable offset) */
4354            uint32_t maski = -2U << (len - 1);
4355            tcg_gen_andi_i32(tmp, src, ~maski);
4356            tcg_gen_movi_i32(mask, maski);
4357            tcg_gen_movi_i32(rot, len & 31);
4358        }
4359        if (ext & 0x800) {
4360            /* Variable offset */
4361            tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4362        } else {
4363            /* Immediate offset (variable width) */
4364            tcg_gen_addi_i32(rot, rot, ofs);
4365        }
4366        tcg_gen_andi_i32(rot, rot, 31);
4367        tcg_gen_rotr_i32(mask, mask, rot);
4368        tcg_gen_rotr_i32(tmp, tmp, rot);
4369        tcg_gen_and_i32(dst, dst, mask);
4370        tcg_gen_or_i32(dst, dst, tmp);
4371
4372        tcg_temp_free(rot);
4373        tcg_temp_free(mask);
4374    }
4375    tcg_temp_free(tmp);
4376}
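
    /*
     * In bfins_reg() above, a field that does not wrap (pos >= 0) is
     * inserted with a plain deposit; otherwise the source is masked to
     * len bits, rotated to the field position and merged by hand.
     * QREG_CC_N is set from the left-aligned source value so the usual
     * logic flags apply to the inserted field.
     */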
4377
4378DISAS_INSN(bfins_mem)
4379{
4380    int ext = read_im16(env, s);
4381    TCGv src = DREG(ext, 12);
4382    TCGv addr, len, ofs;
4383
4384    addr = gen_lea(env, s, insn, OS_UNSIZED);
4385    if (IS_NULL_QREG(addr)) {
4386        gen_addr_fault(s);
4387        return;
4388    }
4389
4390    if (ext & 0x20) {
4391        len = DREG(ext, 0);
4392    } else {
4393        len = tcg_const_i32(extract32(ext, 0, 5));
4394    }
4395    if (ext & 0x800) {
4396        ofs = DREG(ext, 6);
4397    } else {
4398        ofs = tcg_const_i32(extract32(ext, 6, 5));
4399    }
4400
4401    gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4402    set_cc_op(s, CC_OP_LOGIC);
4403
4404    if (!(ext & 0x20)) {
4405        tcg_temp_free(len);
4406    }
4407    if (!(ext & 0x800)) {
4408        tcg_temp_free(ofs);
4409    }
4410}
4411
4412DISAS_INSN(ff1)
4413{
4414    TCGv reg;
4415    reg = DREG(insn, 0);
4416    gen_logic_cc(s, reg, OS_LONG);
4417    gen_helper_ff1(reg, reg);
4418}
4419
4420DISAS_INSN(chk)
4421{
4422    TCGv src, reg;
4423    int opsize;
4424
4425    switch ((insn >> 7) & 3) {
4426    case 3:
4427        opsize = OS_WORD;
4428        break;
4429    case 2:
4430        if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4431            opsize = OS_LONG;
4432            break;
4433        }
4434        /* fallthru */
4435    default:
4436        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4437        return;
4438    }
4439    SRC_EA(env, src, opsize, 1, NULL);
4440    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4441
4442    gen_flush_flags(s);
4443    gen_helper_chk(cpu_env, reg, src);
4444}
4445
4446DISAS_INSN(chk2)
4447{
4448    uint16_t ext;
4449    TCGv addr1, addr2, bound1, bound2, reg;
4450    int opsize;
4451
4452    switch ((insn >> 9) & 3) {
4453    case 0:
4454        opsize = OS_BYTE;
4455        break;
4456    case 1:
4457        opsize = OS_WORD;
4458        break;
4459    case 2:
4460        opsize = OS_LONG;
4461        break;
4462    default:
4463        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4464        return;
4465    }
4466
4467    ext = read_im16(env, s);
4468    if ((ext & 0x0800) == 0) {
4469        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4470        return;
4471    }
4472
4473    addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4474    addr2 = tcg_temp_new();
4475    tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4476
4477    bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4478    tcg_temp_free(addr1);
4479    bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4480    tcg_temp_free(addr2);
4481
4482    reg = tcg_temp_new();
4483    if (ext & 0x8000) {
4484        tcg_gen_mov_i32(reg, AREG(ext, 12));
4485    } else {
4486        gen_ext(reg, DREG(ext, 12), opsize, 1);
4487    }
4488
4489    gen_flush_flags(s);
4490    gen_helper_chk2(cpu_env, reg, bound1, bound2);
4491    tcg_temp_free(reg);
4492    tcg_temp_free(bound1);
4493    tcg_temp_free(bound2);
4494}
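
    /*
     * CHK2 as implemented above: the effective address points at a bound
     * pair, the lower bound at addr and the upper bound opsize bytes
     * later.  Both bounds are loaded sign-extended, the compared register
     * is sign-extended likewise when it is a data register, and
     * gen_helper_chk2 performs the range check and raises the exception
     * when the value is out of bounds.
     */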
4495
4496static void m68k_copy_line(TCGv dst, TCGv src, int index)
4497{
4498    TCGv addr;
4499    TCGv_i64 t0, t1;
4500
4501    addr = tcg_temp_new();
4502
4503    t0 = tcg_temp_new_i64();
4504    t1 = tcg_temp_new_i64();
4505
4506    tcg_gen_andi_i32(addr, src, ~15);
4507    tcg_gen_qemu_ld64(t0, addr, index);
4508    tcg_gen_addi_i32(addr, addr, 8);
4509    tcg_gen_qemu_ld64(t1, addr, index);
4510
4511    tcg_gen_andi_i32(addr, dst, ~15);
4512    tcg_gen_qemu_st64(t0, addr, index);
4513    tcg_gen_addi_i32(addr, addr, 8);
4514    tcg_gen_qemu_st64(t1, addr, index);
4515
4516    tcg_temp_free_i64(t0);
4517    tcg_temp_free_i64(t1);
4518    tcg_temp_free(addr);
4519}
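
    /*
     * MOVE16 copies one 16-byte line: both addresses are forced to
     * 16-byte alignment by masking with ~15 and the line is moved as two
     * 64-bit loads followed by two 64-bit stores, as m68k_copy_line()
     * above shows.
     */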
4520
4521DISAS_INSN(move16_reg)
4522{
4523    int index = IS_USER(s);
4524    TCGv tmp;
4525    uint16_t ext;
4526
4527    ext = read_im16(env, s);
4528    if ((ext & (1 << 15)) == 0) {
4529        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4530    }
4531
4532    m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4533
4534    /* Ax can be Ay, so save Ay before incrementing Ax */
4535    tmp = tcg_temp_new();
4536    tcg_gen_mov_i32(tmp, AREG(ext, 12));
4537    tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4538    tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4539    tcg_temp_free(tmp);
4540}
4541
4542DISAS_INSN(move16_mem)
4543{
4544    int index = IS_USER(s);
4545    TCGv reg, addr;
4546
4547    reg = AREG(insn, 0);
4548    addr = tcg_const_i32(read_im32(env, s));
4549
4550    if ((insn >> 3) & 1) {
4551        /* MOVE16 (xxx).L, (Ay) */
4552        m68k_copy_line(reg, addr, index);
4553    } else {
4554        /* MOVE16 (Ay), (xxx).L */
4555        m68k_copy_line(addr, reg, index);
4556    }
4557
4558    tcg_temp_free(addr);
4559
4560    if (((insn >> 3) & 2) == 0) {
4561        /* (Ay)+ */
4562        tcg_gen_addi_i32(reg, reg, 16);
4563    }
4564}
4565
4566DISAS_INSN(strldsr)
4567{
4568    uint16_t ext;
4569    uint32_t addr;
4570
4571    addr = s->pc - 2;
4572    ext = read_im16(env, s);
4573    if (ext != 0x46FC) {
4574        gen_exception(s, addr, EXCP_ILLEGAL);
4575        return;
4576    }
4577    ext = read_im16(env, s);
4578    if (IS_USER(s) || (ext & SR_S) == 0) {
4579        gen_exception(s, addr, EXCP_PRIVILEGE);
4580        return;
4581    }
4582    gen_push(s, gen_get_sr(s));
4583    gen_set_sr_im(s, ext, 0);
4584}
4585
4586DISAS_INSN(move_from_sr)
4587{
4588    TCGv sr;
4589
4590    if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4591        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4592        return;
4593    }
4594    sr = gen_get_sr(s);
4595    DEST_EA(env, insn, OS_WORD, sr, NULL);
4596}
4597
4598#if defined(CONFIG_SOFTMMU)
4599DISAS_INSN(moves)
4600{
4601    int opsize;
4602    uint16_t ext;
4603    TCGv reg;
4604    TCGv addr;
4605    int extend;
4606
4607    if (IS_USER(s)) {
4608        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4609        return;
4610    }
4611
4612    ext = read_im16(env, s);
4613
4614    opsize = insn_opsize(insn);
4615
4616    if (ext & 0x8000) {
4617        /* address register */
4618        reg = AREG(ext, 12);
4619        extend = 1;
4620    } else {
4621        /* data register */
4622        reg = DREG(ext, 12);
4623        extend = 0;
4624    }
4625
4626    addr = gen_lea(env, s, insn, opsize);
4627    if (IS_NULL_QREG(addr)) {
4628        gen_addr_fault(s);
4629        return;
4630    }
4631
4632    if (ext & 0x0800) {
4633        /* from reg to ea */
4634        gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4635    } else {
4636        /* from ea to reg */
4637        TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4638        if (extend) {
4639            gen_ext(reg, tmp, opsize, 1);
4640        } else {
4641            gen_partset_reg(opsize, reg, tmp);
4642        }
4643        tcg_temp_free(tmp);
4644    }
4645    switch (extract32(insn, 3, 3)) {
4646    case 3: /* Indirect postincrement.  */
4647        tcg_gen_addi_i32(AREG(insn, 0), addr,
4648                         REG(insn, 0) == 7 && opsize == OS_BYTE
4649                         ? 2
4650                         : opsize_bytes(opsize));
4651        break;
4652    case 4: /* Indirect predecrement.  */
4653        tcg_gen_mov_i32(AREG(insn, 0), addr);
4654        break;
4655    }
4656}
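
    /*
     * MOVES transfers through the alternate address space: loads use the
     * SFC memory index and stores the DFC index instead of the current
     * privilege level.  Post-increment and pre-decrement writeback is
     * done by hand after the access; a post-incremented A7 byte access
     * bumps the pointer by 2 to keep the stack word-aligned.
     */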
4657
4658DISAS_INSN(move_to_sr)
4659{
4660    if (IS_USER(s)) {
4661        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4662        return;
4663    }
4664    gen_move_to_sr(env, s, insn, false);
4665    gen_exit_tb(s);
4666}
4667
4668DISAS_INSN(move_from_usp)
4669{
4670    if (IS_USER(s)) {
4671        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4672        return;
4673    }
4674    tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4675                   offsetof(CPUM68KState, sp[M68K_USP]));
4676}
4677
4678DISAS_INSN(move_to_usp)
4679{
4680    if (IS_USER(s)) {
4681        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4682        return;
4683    }
4684    tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4685                   offsetof(CPUM68KState, sp[M68K_USP]));
4686}
4687
4688DISAS_INSN(halt)
4689{
4690    if (IS_USER(s)) {
4691        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4692        return;
4693    }
4694
4695    gen_exception(s, s->pc, EXCP_HALT_INSN);
4696}
4697
4698DISAS_INSN(stop)
4699{
4700    uint16_t ext;
4701
4702    if (IS_USER(s)) {
4703        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4704        return;
4705    }
4706
4707    ext = read_im16(env, s);
4708
4709    gen_set_sr_im(s, ext, 0);
4710    tcg_gen_movi_i32(cpu_halted, 1);
4711    gen_exception(s, s->pc, EXCP_HLT);
4712}
4713
4714DISAS_INSN(rte)
4715{
4716    if (IS_USER(s)) {
4717        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4718        return;
4719    }
4720    gen_exception(s, s->base.pc_next, EXCP_RTE);
4721}
4722
4723DISAS_INSN(cf_movec)
4724{
4725    uint16_t ext;
4726    TCGv reg;
4727
4728    if (IS_USER(s)) {
4729        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4730        return;
4731    }
4732
4733    ext = read_im16(env, s);
4734
4735    if (ext & 0x8000) {
4736        reg = AREG(ext, 12);
4737    } else {
4738        reg = DREG(ext, 12);
4739    }
4740    gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4741    gen_exit_tb(s);
4742}
4743
4744DISAS_INSN(m68k_movec)
4745{
4746    uint16_t ext;
4747    TCGv reg;
4748
4749    if (IS_USER(s)) {
4750        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4751        return;
4752    }
4753
4754    ext = read_im16(env, s);
4755
4756    if (ext & 0x8000) {
4757        reg = AREG(ext, 12);
4758    } else {
4759        reg = DREG(ext, 12);
4760    }
4761    if (insn & 1) {
4762        gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4763    } else {
4764        gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4765    }
4766    gen_exit_tb(s);
4767}
4768
4769DISAS_INSN(intouch)
4770{
4771    if (IS_USER(s)) {
4772        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4773        return;
4774    }
4775    /* ICache fetch.  Implement as no-op.  */
4776}
4777
4778DISAS_INSN(cpushl)
4779{
4780    if (IS_USER(s)) {
4781        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4782        return;
4783    }
4784    /* Cache push/invalidate.  Implement as no-op.  */
4785}
4786
4787DISAS_INSN(cpush)
4788{
4789    if (IS_USER(s)) {
4790        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4791        return;
4792    }
4793    /* Cache push/invalidate.  Implement as no-op.  */
4794}
4795
4796DISAS_INSN(cinv)
4797{
4798    if (IS_USER(s)) {
4799        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4800        return;
4801    }
4802    /* Invalidate cache line.  Implement as no-op.  */
4803}
4804
4805#if defined(CONFIG_SOFTMMU)
4806DISAS_INSN(pflush)
4807{
4808    TCGv opmode;
4809
4810    if (IS_USER(s)) {
4811        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4812        return;
4813    }
4814
4815    opmode = tcg_const_i32((insn >> 3) & 3);
4816    gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4817    tcg_temp_free(opmode);
4818}
4819
4820DISAS_INSN(ptest)
4821{
4822    TCGv is_read;
4823
4824    if (IS_USER(s)) {
4825        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4826        return;
4827    }
4828    is_read = tcg_const_i32((insn >> 5) & 1);
4829    gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4830    tcg_temp_free(is_read);
4831}
4832#endif
4833
4834DISAS_INSN(wddata)
4835{
4836    gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4837}
4838
4839DISAS_INSN(wdebug)
4840{
4841    if (IS_USER(s)) {
4842        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4843        return;
4844    }
4845    /* TODO: Implement wdebug.  */
4846    cpu_abort(env_cpu(env), "WDEBUG not implemented");
4847}
4848#endif
4849
4850DISAS_INSN(trap)
4851{
4852    gen_exception(s, s->base.pc_next, EXCP_TRAP0 + (insn & 0xf));
4853}
4854
4855static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4856{
4857    switch (reg) {
4858    case M68K_FPIAR:
4859        tcg_gen_movi_i32(res, 0);
4860        break;
4861    case M68K_FPSR:
4862        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4863        break;
4864    case M68K_FPCR:
4865        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4866        break;
4867    }
4868}
4869
4870static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4871{
4872    switch (reg) {
4873    case M68K_FPIAR:
4874        break;
4875    case M68K_FPSR:
4876        tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4877        break;
4878    case M68K_FPCR:
4879        gen_helper_set_fpcr(cpu_env, val);
4880        break;
4881    }
4882}
4883
4884static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4885{
4886    int index = IS_USER(s);
4887    TCGv tmp;
4888
4889    tmp = tcg_temp_new();
4890    gen_load_fcr(s, tmp, reg);
4891    tcg_gen_qemu_st32(tmp, addr, index);
4892    tcg_temp_free(tmp);
4893}
4894
4895static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4896{
4897    int index = IS_USER(s);
4898    TCGv tmp;
4899
4900    tmp = tcg_temp_new();
4901    tcg_gen_qemu_ld32u(tmp, addr, index);
4902    gen_store_fcr(s, tmp, reg);
4903    tcg_temp_free(tmp);
4904}
4905
4906
4907static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4908                             uint32_t insn, uint32_t ext)
4909{
4910    int mask = (ext >> 10) & 7;
4911    int is_write = (ext >> 13) & 1;
4912    int mode = extract32(insn, 3, 3);
4913    int i;
4914    TCGv addr, tmp;
4915
4916    switch (mode) {
4917    case 0: /* Dn */
4918        if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4919            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4920            return;
4921        }
4922        if (is_write) {
4923            gen_load_fcr(s, DREG(insn, 0), mask);
4924        } else {
4925            gen_store_fcr(s, DREG(insn, 0), mask);
4926        }
4927        return;
4928    case 1: /* An, only with FPIAR */
4929        if (mask != M68K_FPIAR) {
4930            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4931            return;
4932        }
4933        if (is_write) {
4934            gen_load_fcr(s, AREG(insn, 0), mask);
4935        } else {
4936            gen_store_fcr(s, AREG(insn, 0), mask);
4937        }
4938        return;
4939    case 7: /* Immediate */
4940        if (REG(insn, 0) == 4) {
4941            if (is_write ||
4942                (mask != M68K_FPIAR && mask != M68K_FPSR &&
4943                 mask != M68K_FPCR)) {
4944                gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4945                return;
4946            }
4947            tmp = tcg_const_i32(read_im32(env, s));
4948            gen_store_fcr(s, tmp, mask);
4949            tcg_temp_free(tmp);
4950            return;
4951        }
4952        break;
4953    default:
4954        break;
4955    }
4956
4957    tmp = gen_lea(env, s, insn, OS_LONG);
4958    if (IS_NULL_QREG(tmp)) {
4959        gen_addr_fault(s);
4960        return;
4961    }
4962
4963    addr = tcg_temp_new();
4964    tcg_gen_mov_i32(addr, tmp);
4965
4966    /*
4967     * mask:
4968     *
4969     * 0b100 Floating-Point Control Register
4970     * 0b010 Floating-Point Status Register
4971     * 0b001 Floating-Point Instruction Address Register
4972     *
4973     */
4974
4975    if (is_write && mode == 4) {
4976        for (i = 2; i >= 0; i--, mask >>= 1) {
4977            if (mask & 1) {
4978                gen_qemu_store_fcr(s, addr, 1 << i);
4979                if (mask != 1) {
4980                    tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
4981                }
4982            }
4983        }
4984        tcg_gen_mov_i32(AREG(insn, 0), addr);
4985    } else {
4986        for (i = 0; i < 3; i++, mask >>= 1) {
4987            if (mask & 1) {
4988                if (is_write) {
4989                    gen_qemu_store_fcr(s, addr, 1 << i);
4990                } else {
4991                    gen_qemu_load_fcr(s, addr, 1 << i);
4992                }
4993                if (mask != 1 || mode == 3) {
4994                    tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
4995                }
4996            }
4997        }
4998        if (mode == 3) {
4999            tcg_gen_mov_i32(AREG(insn, 0), addr);
5000        }
5001    }
5002    tcg_temp_free_i32(addr);
5003}
5004
5005static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
5006                          uint32_t insn, uint32_t ext)
5007{
5008    int opsize;
5009    TCGv addr, tmp;
5010    int mode = (ext >> 11) & 0x3;
5011    int is_load = ((ext & 0x2000) == 0);
5012
5013    if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
5014        opsize = OS_EXTENDED;
5015    } else {
5016        opsize = OS_DOUBLE;  /* FIXME */
5017    }
5018
5019    addr = gen_lea(env, s, insn, opsize);
5020    if (IS_NULL_QREG(addr)) {
5021        gen_addr_fault(s);
5022        return;
5023    }
5024
5025    tmp = tcg_temp_new();
5026    if (mode & 0x1) {
5027        /* Dynamic register list */
5028        tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
5029    } else {
5030        /* Static register list */
5031        tcg_gen_movi_i32(tmp, ext & 0xff);
5032    }
5033
5034    if (!is_load && (mode & 2) == 0) {
5035        /*
5036         * predecrement addressing mode
5037         * only available to store register to memory
5038         */
5039        if (opsize == OS_EXTENDED) {
5040            gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
5041        } else {
5042            gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
5043        }
5044    } else {
5045        /* postincrement addressing mode */
5046        if (opsize == OS_EXTENDED) {
5047            if (is_load) {
5048                gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
5049            } else {
5050                gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
5051            }
5052        } else {
5053            if (is_load) {
5054                gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
5055            } else {
5056                gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
5057            }
5058        }
5059    }
5060    if ((insn & 070) == 030 || (insn & 070) == 040) {
5061        tcg_gen_mov_i32(AREG(insn, 0), tmp);
5062    }
5063    tcg_temp_free(tmp);
5064}
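
    /*
     * FMOVEM of floating-point data registers, as handled above: the
     * register list is either immediate in the extension word or taken
     * from a data register (dynamic form).  Pre-decrement addressing is
     * only usable for stores; everything else goes through the
     * post-increment helpers, and the updated address is written back for
     * the -(An) and (An)+ modes.
     */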
5065
5066/*
5067 * ??? FP exceptions are not implemented.  Most exceptions are deferred until
5068 * immediately before the next FP instruction is executed.
5069 */
5070DISAS_INSN(fpu)
5071{
5072    uint16_t ext;
5073    int opmode;
5074    int opsize;
5075    TCGv_ptr cpu_src, cpu_dest;
5076
5077    ext = read_im16(env, s);
5078    opmode = ext & 0x7f;
5079    switch ((ext >> 13) & 7) {
5080    case 0:
5081        break;
5082    case 1:
5083        goto undef;
5084    case 2:
5085        if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5086            /* fmovecr */
5087            TCGv rom_offset = tcg_const_i32(opmode);
5088            cpu_dest = gen_fp_ptr(REG(ext, 7));
5089            gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5090            tcg_temp_free_ptr(cpu_dest);
5091            tcg_temp_free(rom_offset);
5092            return;
5093        }
5094        break;
5095    case 3: /* fmove out */
5096        cpu_src = gen_fp_ptr(REG(ext, 7));
5097        opsize = ext_opsize(ext, 10);
5098        if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5099                      EA_STORE, IS_USER(s)) == -1) {
5100            gen_addr_fault(s);
5101        }
5102        gen_helper_ftst(cpu_env, cpu_src);
5103        tcg_temp_free_ptr(cpu_src);
5104        return;
5105    case 4: /* fmove to control register.  */
5106    case 5: /* fmove from control register.  */
5107        gen_op_fmove_fcr(env, s, insn, ext);
5108        return;
5109    case 6: /* fmovem */
5110    case 7:
5111        if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5112            goto undef;
5113        }
5114        gen_op_fmovem(env, s, insn, ext);
5115        return;
5116    }
5117    if (ext & (1 << 14)) {
5118        /* Source effective address.  */
5119        opsize = ext_opsize(ext, 10);
5120        cpu_src = gen_fp_result_ptr();
5121        if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5122                      EA_LOADS, IS_USER(s)) == -1) {
5123            gen_addr_fault(s);
5124            return;
5125        }
5126    } else {
5127        /* Source register.  */
5128        opsize = OS_EXTENDED;
5129        cpu_src = gen_fp_ptr(REG(ext, 10));
5130    }
5131    cpu_dest = gen_fp_ptr(REG(ext, 7));
5132    switch (opmode) {
5133    case 0: /* fmove */
5134        gen_fp_move(cpu_dest, cpu_src);
5135        break;
5136    case 0x40: /* fsmove */
5137        gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5138        break;
5139    case 0x44: /* fdmove */
5140        gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5141        break;
5142    case 1: /* fint */
5143        gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5144        break;
5145    case 2: /* fsinh */
5146        gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5147        break;
5148    case 3: /* fintrz */
5149        gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5150        break;
5151    case 4: /* fsqrt */
5152        gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5153        break;
5154    case 0x41: /* fssqrt */
5155        gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5156        break;
5157    case 0x45: /* fdsqrt */
5158        gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5159        break;
5160    case 0x06: /* flognp1 */
5161        gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5162        break;
5163    case 0x08: /* fetoxm1 */
5164        gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5165        break;
5166    case 0x09: /* ftanh */
5167        gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5168        break;
5169    case 0x0a: /* fatan */
5170        gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5171        break;
5172    case 0x0c: /* fasin */
5173        gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5174        break;
5175    case 0x0d: /* fatanh */
5176        gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5177        break;
5178    case 0x0e: /* fsin */
5179        gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5180        break;
5181    case 0x0f: /* ftan */
5182        gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5183        break;
5184    case 0x10: /* fetox */
5185        gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5186        break;
5187    case 0x11: /* ftwotox */
5188        gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5189        break;
5190    case 0x12: /* ftentox */
5191        gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5192        break;
5193    case 0x14: /* flogn */
5194        gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5195        break;
5196    case 0x15: /* flog10 */
5197        gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5198        break;
5199    case 0x16: /* flog2 */
5200        gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5201        break;
5202    case 0x18: /* fabs */
5203        gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5204        break;
5205    case 0x58: /* fsabs */
5206        gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5207        break;
5208    case 0x5c: /* fdabs */
5209        gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5210        break;
5211    case 0x19: /* fcosh */
5212        gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5213        break;
5214    case 0x1a: /* fneg */
5215        gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5216        break;
5217    case 0x5a: /* fsneg */
5218        gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5219        break;
5220    case 0x5e: /* fdneg */
5221        gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5222        break;
5223    case 0x1c: /* facos */
5224        gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5225        break;
5226    case 0x1d: /* fcos */
5227        gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5228        break;
5229    case 0x1e: /* fgetexp */
5230        gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5231        break;
5232    case 0x1f: /* fgetman */
5233        gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5234        break;
5235    case 0x20: /* fdiv */
5236        gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5237        break;
5238    case 0x60: /* fsdiv */
5239        gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5240        break;
5241    case 0x64: /* fddiv */
5242        gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5243        break;
5244    case 0x21: /* fmod */
5245        gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5246        break;
5247    case 0x22: /* fadd */
5248        gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5249        break;
5250    case 0x62: /* fsadd */
5251        gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5252        break;
5253    case 0x66: /* fdadd */
5254        gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5255        break;
5256    case 0x23: /* fmul */
5257        gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5258        break;
5259    case 0x63: /* fsmul */
5260        gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5261        break;
5262    case 0x67: /* fdmul */
5263        gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5264        break;
5265    case 0x24: /* fsgldiv */
5266        gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5267        break;
5268    case 0x25: /* frem */
5269        gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5270        break;
5271    case 0x26: /* fscale */
5272        gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5273        break;
5274    case 0x27: /* fsglmul */
5275        gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5276        break;
5277    case 0x28: /* fsub */
5278        gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5279        break;
5280    case 0x68: /* fssub */
5281        gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5282        break;
5283    case 0x6c: /* fdsub */
5284        gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5285        break;
5286    case 0x30: case 0x31: case 0x32:
5287    case 0x33: case 0x34: case 0x35:
5288    case 0x36: case 0x37: {
5289            TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5290            gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5291            tcg_temp_free_ptr(cpu_dest2);
5292        }
5293        break;
5294    case 0x38: /* fcmp */
5295        gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5296        return;
5297    case 0x3a: /* ftst */
5298        gen_helper_ftst(cpu_env, cpu_src);
5299        return;
5300    default:
5301        goto undef;
5302    }
5303    tcg_temp_free_ptr(cpu_src);
5304    gen_helper_ftst(cpu_env, cpu_dest);
5305    tcg_temp_free_ptr(cpu_dest);
5306    return;
5307undef:
5308    /* FIXME: Is this right for offset addressing modes?  */
5309    s->pc -= 2;
5310    disas_undef_fpu(env, s, insn);
5311}
5312
5313static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5314{
5315    TCGv fpsr;
5316
5317    c->g1 = 1;
5318    c->v2 = tcg_const_i32(0);
5319    c->g2 = 0;
5320    /* TODO: Raise BSUN exception.  */
5321    fpsr = tcg_temp_new();
5322    gen_load_fcr(s, fpsr, M68K_FPSR);
5323    switch (cond) {
5324    case 0:  /* False */
5325    case 16: /* Signaling False */
5326        c->v1 = c->v2;
5327        c->tcond = TCG_COND_NEVER;
5328        break;
5329    case 1:  /* EQual Z */
5330    case 17: /* Signaling EQual Z */
5331        c->v1 = tcg_temp_new();
5332        c->g1 = 0;
5333        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5334        c->tcond = TCG_COND_NE;
5335        break;
5336    case 2:  /* Ordered Greater Than !(A || Z || N) */
5337    case 18: /* Greater Than !(A || Z || N) */
5338        c->v1 = tcg_temp_new();
5339        c->g1 = 0;
5340        tcg_gen_andi_i32(c->v1, fpsr,
5341                         FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5342        c->tcond = TCG_COND_EQ;
5343        break;
5344    case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5345    case 19: /* Greater than or Equal Z || !(A || N) */
5346        c->v1 = tcg_temp_new();
5347        c->g1 = 0;
5348        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5349        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5350        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5351        tcg_gen_or_i32(c->v1, c->v1, fpsr);
5352        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5353        c->tcond = TCG_COND_NE;
5354        break;
5355    case 4:  /* Ordered Less Than !(!N || A || Z) */
5356    case 20: /* Less Than !(!N || A || Z) */
5357        c->v1 = tcg_temp_new();
5358        c->g1 = 0;
5359        tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5360        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5361        c->tcond = TCG_COND_EQ;
5362        break;
5363    case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5364    case 21: /* Less than or Equal Z || (N && !A) */
5365        c->v1 = tcg_temp_new();
5366        c->g1 = 0;
5367        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5368        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5369        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5370        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5371        c->tcond = TCG_COND_NE;
5372        break;
5373    case 6:  /* Ordered Greater or Less than !(A || Z) */
5374    case 22: /* Greater or Less than !(A || Z) */
5375        c->v1 = tcg_temp_new();
5376        c->g1 = 0;
5377        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5378        c->tcond = TCG_COND_EQ;
5379        break;
5380    case 7:  /* Ordered !A */
5381    case 23: /* Greater, Less or Equal !A */
5382        c->v1 = tcg_temp_new();
5383        c->g1 = 0;
5384        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5385        c->tcond = TCG_COND_EQ;
5386        break;
5387    case 8:  /* Unordered A */
5388    case 24: /* Not Greater, Less or Equal A */
5389        c->v1 = tcg_temp_new();
5390        c->g1 = 0;
5391        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5392        c->tcond = TCG_COND_NE;
5393        break;
5394    case 9:  /* Unordered or Equal A || Z */
5395    case 25: /* Not Greater or Less than A || Z */
5396        c->v1 = tcg_temp_new();
5397        c->g1 = 0;
5398        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5399        c->tcond = TCG_COND_NE;
5400        break;
5401    case 10: /* Unordered or Greater Than A || !(N || Z) */
5402    case 26: /* Not Less or Equal A || !(N || Z) */
5403        c->v1 = tcg_temp_new();
5404        c->g1 = 0;
5405        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5406        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5407        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5408        tcg_gen_or_i32(c->v1, c->v1, fpsr);
5409        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5410        c->tcond = TCG_COND_NE;
5411        break;
5412    case 11: /* Unordered or Greater or Equal A || Z || !N */
5413    case 27: /* Not Less Than A || Z || !N */
5414        c->v1 = tcg_temp_new();
5415        c->g1 = 0;
5416        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5417        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5418        c->tcond = TCG_COND_NE;
5419        break;
5420    case 12: /* Unordered or Less Than A || (N && !Z) */
5421    case 28: /* Not Greater than or Equal A || (N && !Z) */
5422        c->v1 = tcg_temp_new();
5423        c->g1 = 0;
5424        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5425        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5426        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5427        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5428        c->tcond = TCG_COND_NE;
5429        break;
5430    case 13: /* Unordered or Less or Equal A || Z || N */
5431    case 29: /* Not Greater Than A || Z || N */
5432        c->v1 = tcg_temp_new();
5433        c->g1 = 0;
5434        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5435        c->tcond = TCG_COND_NE;
5436        break;
5437    case 14: /* Not Equal !Z */
5438    case 30: /* Signaling Not Equal !Z */
5439        c->v1 = tcg_temp_new();
5440        c->g1 = 0;
5441        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5442        c->tcond = TCG_COND_EQ;
5443        break;
5444    case 15: /* True */
5445    case 31: /* Signaling True */
5446        c->v1 = c->v2;
5447        c->tcond = TCG_COND_ALWAYS;
5448        break;
5449    }
5450    tcg_temp_free(fpsr);
5451}
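
    /*
     * The comments in gen_fcc_cond() above use the 68881 FPCC bit names
     * from the FPSR condition-code byte: N (negative), Z (zero) and A
     * (the NAN bit).  Each predicate is reduced to a single derived value
     * compared against zero so it can feed brcond/setcond directly.
     */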
5452
5453static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5454{
5455    DisasCompare c;
5456
5457    gen_fcc_cond(&c, s, cond);
5458    update_cc_op(s);
5459    tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5460    free_cond(&c);
5461}
5462
5463DISAS_INSN(fbcc)
5464{
5465    uint32_t offset;
5466    uint32_t base;
5467    TCGLabel *l1;
5468
5469    base = s->pc;
5470    offset = (int16_t)read_im16(env, s);
5471    if (insn & (1 << 6)) {
5472        offset = (offset << 16) | read_im16(env, s);
5473    }
5474
5475    l1 = gen_new_label();
5476    update_cc_op(s);
5477    gen_fjmpcc(s, insn & 0x3f, l1);
5478    gen_jmp_tb(s, 0, s->pc);
5479    gen_set_label(l1);
5480    gen_jmp_tb(s, 1, base + offset);
5481}
5482
5483DISAS_INSN(fscc)
5484{
5485    DisasCompare c;
5486    int cond;
5487    TCGv tmp;
5488    uint16_t ext;
5489
5490    ext = read_im16(env, s);
5491    cond = ext & 0x3f;
5492    gen_fcc_cond(&c, s, cond);
5493
5494    tmp = tcg_temp_new();
5495    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5496    free_cond(&c);
5497
5498    tcg_gen_neg_i32(tmp, tmp);
5499    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5500    tcg_temp_free(tmp);
5501}
5502
5503#if defined(CONFIG_SOFTMMU)
5504DISAS_INSN(frestore)
5505{
5506    TCGv addr;
5507
5508    if (IS_USER(s)) {
5509        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5510        return;
5511    }
5512    if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5513        SRC_EA(env, addr, OS_LONG, 0, NULL);
5514        /* FIXME: check the state frame */
5515    } else {
5516        disas_undef(env, s, insn);
5517    }
5518}
5519
5520DISAS_INSN(fsave)
5521{
5522    if (IS_USER(s)) {
5523        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5524        return;
5525    }
5526
5527    if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5528        /* always write IDLE */
5529        TCGv idle = tcg_const_i32(0x41000000);
5530        DEST_EA(env, insn, OS_LONG, idle, NULL);
5531        tcg_temp_free(idle);
5532    } else {
5533        disas_undef(env, s, insn);
5534    }
5535}
5536#endif
5537
5538static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5539{
5540    TCGv tmp = tcg_temp_new();
5541    if (s->env->macsr & MACSR_FI) {
5542        if (upper)
5543            tcg_gen_andi_i32(tmp, val, 0xffff0000);
5544        else
5545            tcg_gen_shli_i32(tmp, val, 16);
5546    } else if (s->env->macsr & MACSR_SU) {
5547        if (upper)
5548            tcg_gen_sari_i32(tmp, val, 16);
5549        else
5550            tcg_gen_ext16s_i32(tmp, val);
5551    } else {
5552        if (upper)
5553            tcg_gen_shri_i32(tmp, val, 16);
5554        else
5555            tcg_gen_ext16u_i32(tmp, val);
5556    }
5557    return tmp;
5558}
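
    /*
     * gen_mac_extract_word() above selects the upper or lower 16-bit half
     * of a MAC operand and positions it for the current MACSR mode:
     * fractional (FI) keeps the half in the upper 16 bits, signed (SU)
     * sign-extends it to 32 bits, and the default mode zero-extends it.
     */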
5559
5560static void gen_mac_clear_flags(void)
5561{
5562    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5563                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5564}
5565
5566DISAS_INSN(mac)
5567{
5568    TCGv rx;
5569    TCGv ry;
5570    uint16_t ext;
5571    int acc;
5572    TCGv tmp;
5573    TCGv addr;
5574    TCGv loadval;
5575    int dual;
5576    TCGv saved_flags;
5577
5578    if (!s->done_mac) {
5579        s->mactmp = tcg_temp_new_i64();
5580        s->done_mac = 1;
5581    }
5582
5583    ext = read_im16(env, s);
5584
5585    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5586    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5587    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5588        disas_undef(env, s, insn);
5589        return;
5590    }
5591    if (insn & 0x30) {
5592        /* MAC with load.  */
5593        tmp = gen_lea(env, s, insn, OS_LONG);
5594        addr = tcg_temp_new();
5595        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5596        /*
5597         * Load the value now to ensure correct exception behavior.
5598         * Perform writeback after reading the MAC inputs.
5599         */
5600        loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5601
5602        acc ^= 1;
5603        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5604        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5605    } else {
5606        loadval = addr = NULL_QREG;
5607        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5608        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5609    }
5610
5611    gen_mac_clear_flags();
5612#if 0
5613    l1 = -1;
5614    /* Disabled because conditional branches clobber temporary vars.  */
5615    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5616        /* Skip the multiply if we know we will ignore it.  */
5617        l1 = gen_new_label();
5618        tmp = tcg_temp_new();
5619        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5620        gen_op_jmp_nz32(tmp, l1);
5621    }
5622#endif
5623
5624    if ((ext & 0x0800) == 0) {
5625        /* Word.  */
5626        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5627        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5628    }
5629    if (s->env->macsr & MACSR_FI) {
5630        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5631    } else {
5632        if (s->env->macsr & MACSR_SU)
5633            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5634        else
5635            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5636        switch ((ext >> 9) & 3) {
5637        case 1:
5638            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5639            break;
5640        case 3:
5641            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5642            break;
5643        }
5644    }
5645
5646    if (dual) {
5647        /* Save the overflow flag from the multiply.  */
5648        saved_flags = tcg_temp_new();
5649        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5650    } else {
5651        saved_flags = NULL_QREG;
5652    }
5653
5654#if 0
5655    /* Disabled because conditional branches clobber temporary vars.  */
5656    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5657        /* Skip the accumulate if the value is already saturated.  */
5658        l1 = gen_new_label();
5659        tmp = tcg_temp_new();
5660        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5661        gen_op_jmp_nz32(tmp, l1);
5662    }
5663#endif
5664
5665    if (insn & 0x100)
5666        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5667    else
5668        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5669
5670    if (s->env->macsr & MACSR_FI)
5671        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5672    else if (s->env->macsr & MACSR_SU)
5673        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5674    else
5675        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5676
5677#if 0
5678    /* Disabled because conditional branches clobber temporary vars.  */
5679    if (l1 != -1)
5680        gen_set_label(l1);
5681#endif
5682
5683    if (dual) {
5684        /* Dual accumulate variant.  */
5685        acc = (ext >> 2) & 3;
5686        /* Restore the overflow flag from the multiplier.  */
5687        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5688#if 0
5689        /* Disabled because conditional branches clobber temporary vars.  */
5690        if ((s->env->macsr & MACSR_OMC) != 0) {
5691            /* Skip the accumulate if the value is already saturated.  */
5692            l1 = gen_new_label();
5693            tmp = tcg_temp_new();
5694            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5695            gen_op_jmp_nz32(tmp, l1);
5696        }
5697#endif
5698        if (ext & 2)
5699            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5700        else
5701            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5702        if (s->env->macsr & MACSR_FI)
5703            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5704        else if (s->env->macsr & MACSR_SU)
5705            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5706        else
5707            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5708#if 0
5709        /* Disabled because conditional branches clobber temporary vars.  */
5710        if (l1 != -1)
5711            gen_set_label(l1);
5712#endif
5713    }
5714    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5715
5716    if (insn & 0x30) {
5717        TCGv rw;
5718        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5719        tcg_gen_mov_i32(rw, loadval);
5720        /*
5721         * FIXME: Should address writeback happen with the masked or
5722         * unmasked value?
5723         */
5724        switch ((insn >> 3) & 7) {
5725        case 3: /* Post-increment.  */
5726            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5727            break;
5728        case 4: /* Pre-decrement.  */
5729            tcg_gen_mov_i32(AREG(insn, 0), addr);
5730        }
5731        tcg_temp_free(loadval);
5732    }
5733}
5734
5735DISAS_INSN(from_mac)
5736{
5737    TCGv rx;
5738    TCGv_i64 acc;
5739    int accnum;
5740
5741    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5742    accnum = (insn >> 9) & 3;
5743    acc = MACREG(accnum);
5744    if (s->env->macsr & MACSR_FI) {
5745        gen_helper_get_macf(rx, cpu_env, acc);
5746    } else if ((s->env->macsr & MACSR_OMC) == 0) {
5747        tcg_gen_extrl_i64_i32(rx, acc);
5748    } else if (s->env->macsr & MACSR_SU) {
5749        gen_helper_get_macs(rx, acc);
5750    } else {
5751        gen_helper_get_macu(rx, acc);
5752    }
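        /*
         * Bit 6 selects the move-and-clear form: zero the accumulator
         * and clear its per-accumulator overflow (PAV) flag after the
         * value has been read out.
         */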
5753    if (insn & 0x40) {
5754        tcg_gen_movi_i64(acc, 0);
5755        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5756    }
5757}
5758
5759DISAS_INSN(move_mac)
5760{
5761    /* FIXME: This can be done without a helper.  */
5762    int src;
5763    TCGv dest;
5764    src = insn & 3;
5765    dest = tcg_const_i32((insn >> 9) & 3);
5766    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5767    gen_mac_clear_flags();
5768    gen_helper_mac_set_flags(cpu_env, dest);
5769}
5770
5771DISAS_INSN(from_macsr)
5772{
5773    TCGv reg;
5774
5775    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5776    tcg_gen_mov_i32(reg, QREG_MACSR);
5777}
5778
5779DISAS_INSN(from_mask)
5780{
5781    TCGv reg;
5782    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5783    tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5784}
5785
5786DISAS_INSN(from_mext)
5787{
5788    TCGv reg;
5789    TCGv acc;
5790    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5791    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5792    if (s->env->macsr & MACSR_FI)
5793        gen_helper_get_mac_extf(reg, cpu_env, acc);
5794    else
5795        gen_helper_get_mac_exti(reg, cpu_env, acc);
5796}
5797
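    /* Copy the low four MACSR flag bits into the CCR. */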
5798DISAS_INSN(macsr_to_ccr)
5799{
5800    TCGv tmp = tcg_temp_new();
5801    tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5802    gen_helper_set_sr(cpu_env, tmp);
5803    tcg_temp_free(tmp);
5804    set_cc_op(s, CC_OP_FLAGS);
5805}
5806
5807DISAS_INSN(to_mac)
5808{
5809    TCGv_i64 acc;
5810    TCGv val;
5811    int accnum;
5812    accnum = (insn >> 9) & 3;
5813    acc = MACREG(accnum);
5814    SRC_EA(env, val, OS_LONG, 0, NULL);
5815    if (s->env->macsr & MACSR_FI) {
5816        tcg_gen_ext_i32_i64(acc, val);
5817        tcg_gen_shli_i64(acc, acc, 8);
5818    } else if (s->env->macsr & MACSR_SU) {
5819        tcg_gen_ext_i32_i64(acc, val);
5820    } else {
5821        tcg_gen_extu_i32_i64(acc, val);
5822    }
5823    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5824    gen_mac_clear_flags();
5825    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5826}
5827
5828DISAS_INSN(to_macsr)
5829{
5830    TCGv val;
5831    SRC_EA(env, val, OS_LONG, 0, NULL);
5832    gen_helper_set_macsr(cpu_env, val);
5833    gen_exit_tb(s);
5834}
5835
5836DISAS_INSN(to_mask)
5837{
5838    TCGv val;
5839    SRC_EA(env, val, OS_LONG, 0, NULL);
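        /*
         * Keep the upper half of MAC_MASK all-ones so that ANDing an
         * address with it (see the MAC-with-load path above) only
         * masks the low 16 bits.
         */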
5840    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5841}
5842
5843DISAS_INSN(to_mext)
5844{
5845    TCGv val;
5846    TCGv acc;
5847    SRC_EA(env, val, OS_LONG, 0, NULL);
5848    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5849    if (s->env->macsr & MACSR_FI)
5850        gen_helper_set_mac_extf(cpu_env, val, acc);
5851    else if (s->env->macsr & MACSR_SU)
5852        gen_helper_set_mac_exts(cpu_env, val, acc);
5853    else
5854        gen_helper_set_mac_extu(cpu_env, val, acc);
5855}
5856
5857static disas_proc opcode_table[65536];
5858
5859static void
5860register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5861{
5862  int i;
5863  int from;
5864  int to;
5865
5866  /* Sanity check.  All set bits must be included in the mask.  */
5867  if (opcode & ~mask) {
5868      fprintf(stderr,
5869              "qemu internal error: bogus opcode definition %04x/%04x\n",
5870              opcode, mask);
5871      abort();
5872  }
5873  /*
5874   * This could probably be cleverer.  For now just optimize the case where
5875   * the top bits are known.
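       *
       * For example, BASE(trap, 4e40, fff0) below finds its first zero
       * mask bit at 0x0008, so i becomes 0x0010, from = 0x4e40 and
       * to = 0x4e50, and the 16 entries 0x4e40..0x4e4f are all filled
       * with disas_trap.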
5876   */
5877  /* Find the first zero bit in the mask.  */
5878  i = 0x8000;
5879  while ((i & mask) != 0)
5880      i >>= 1;
5881  /* Iterate over all combinations of this and lower bits.  */
5882  if (i == 0)
5883      i = 1;
5884  else
5885      i <<= 1;
5886  from = opcode & ~(i - 1);
5887  to = from + i;
5888  for (i = from; i < to; i++) {
5889      if ((i & mask) == opcode)
5890          opcode_table[i] = proc;
5891  }
5892}
5893
5894/*
5895 * Register m68k opcode handlers.  Order is important.
5896 * Later insns override earlier ones.
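     * For example, the catch-all BASE(undef, 0000, 0000) entry is
     * registered first, so any opcode not claimed by a later, more
     * specific pattern falls through to the undefined-instruction
     * handler.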
5897 */
5898void register_m68k_insns (CPUM68KState *env)
5899{
5900    /*
5901     * Build the opcode table only once to avoid
5902     * multithreading issues.
5903     */
5904    if (opcode_table[0] != NULL) {
5905        return;
5906    }
5907
5908    /*
5909     * Use BASE() for instructions available
5910     * on both CF_ISA_A and M68000.
5911     */
5912#define BASE(name, opcode, mask) \
5913    register_opcode(disas_##name, 0x##opcode, 0x##mask)
5914#define INSN(name, opcode, mask, feature) do { \
5915    if (m68k_feature(env, M68K_FEATURE_##feature)) \
5916        BASE(name, opcode, mask); \
5917    } while(0)
5918    BASE(undef,     0000, 0000);
5919    INSN(arith_im,  0080, fff8, CF_ISA_A);
5920    INSN(arith_im,  0000, ff00, M68000);
5921    INSN(chk2,      00c0, f9c0, CHK2);
5922    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5923    BASE(bitop_reg, 0100, f1c0);
5924    BASE(bitop_reg, 0140, f1c0);
5925    BASE(bitop_reg, 0180, f1c0);
5926    BASE(bitop_reg, 01c0, f1c0);
5927    INSN(movep,     0108, f138, MOVEP);
5928    INSN(arith_im,  0280, fff8, CF_ISA_A);
5929    INSN(arith_im,  0200, ff00, M68000);
5930    INSN(undef,     02c0, ffc0, M68000);
5931    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5932    INSN(arith_im,  0480, fff8, CF_ISA_A);
5933    INSN(arith_im,  0400, ff00, M68000);
5934    INSN(undef,     04c0, ffc0, M68000);
5935    INSN(arith_im,  0600, ff00, M68000);
5936    INSN(undef,     06c0, ffc0, M68000);
5937    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5938    INSN(arith_im,  0680, fff8, CF_ISA_A);
5939    INSN(arith_im,  0c00, ff38, CF_ISA_A);
5940    INSN(arith_im,  0c00, ff00, M68000);
5941    BASE(bitop_im,  0800, ffc0);
5942    BASE(bitop_im,  0840, ffc0);
5943    BASE(bitop_im,  0880, ffc0);
5944    BASE(bitop_im,  08c0, ffc0);
5945    INSN(arith_im,  0a80, fff8, CF_ISA_A);
5946    INSN(arith_im,  0a00, ff00, M68000);
5947#if defined(CONFIG_SOFTMMU)
5948    INSN(moves,     0e00, ff00, M68000);
5949#endif
5950    INSN(cas,       0ac0, ffc0, CAS);
5951    INSN(cas,       0cc0, ffc0, CAS);
5952    INSN(cas,       0ec0, ffc0, CAS);
5953    INSN(cas2w,     0cfc, ffff, CAS);
5954    INSN(cas2l,     0efc, ffff, CAS);
5955    BASE(move,      1000, f000);
5956    BASE(move,      2000, f000);
5957    BASE(move,      3000, f000);
5958    INSN(chk,       4000, f040, M68000);
5959    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5960    INSN(negx,      4080, fff8, CF_ISA_A);
5961    INSN(negx,      4000, ff00, M68000);
5962    INSN(undef,     40c0, ffc0, M68000);
5963    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5964    INSN(move_from_sr, 40c0, ffc0, M68000);
5965    BASE(lea,       41c0, f1c0);
5966    BASE(clr,       4200, ff00);
5967    BASE(undef,     42c0, ffc0);
5968    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5969    INSN(move_from_ccr, 42c0, ffc0, M68000);
5970    INSN(neg,       4480, fff8, CF_ISA_A);
5971    INSN(neg,       4400, ff00, M68000);
5972    INSN(undef,     44c0, ffc0, M68000);
5973    BASE(move_to_ccr, 44c0, ffc0);
5974    INSN(not,       4680, fff8, CF_ISA_A);
5975    INSN(not,       4600, ff00, M68000);
5976#if defined(CONFIG_SOFTMMU)
5977    BASE(move_to_sr, 46c0, ffc0);
5978#endif
5979    INSN(nbcd,      4800, ffc0, M68000);
5980    INSN(linkl,     4808, fff8, M68000);
5981    BASE(pea,       4840, ffc0);
5982    BASE(swap,      4840, fff8);
5983    INSN(bkpt,      4848, fff8, BKPT);
5984    INSN(movem,     48d0, fbf8, CF_ISA_A);
5985    INSN(movem,     48e8, fbf8, CF_ISA_A);
5986    INSN(movem,     4880, fb80, M68000);
5987    BASE(ext,       4880, fff8);
5988    BASE(ext,       48c0, fff8);
5989    BASE(ext,       49c0, fff8);
5990    BASE(tst,       4a00, ff00);
5991    INSN(tas,       4ac0, ffc0, CF_ISA_B);
5992    INSN(tas,       4ac0, ffc0, M68000);
5993#if defined(CONFIG_SOFTMMU)
5994    INSN(halt,      4ac8, ffff, CF_ISA_A);
5995#endif
5996    INSN(pulse,     4acc, ffff, CF_ISA_A);
5997    BASE(illegal,   4afc, ffff);
5998    INSN(mull,      4c00, ffc0, CF_ISA_A);
5999    INSN(mull,      4c00, ffc0, LONG_MULDIV);
6000    INSN(divl,      4c40, ffc0, CF_ISA_A);
6001    INSN(divl,      4c40, ffc0, LONG_MULDIV);
6002    INSN(sats,      4c80, fff8, CF_ISA_B);
6003    BASE(trap,      4e40, fff0);
6004    BASE(link,      4e50, fff8);
6005    BASE(unlk,      4e58, fff8);
6006#if defined(CONFIG_SOFTMMU)
6007    INSN(move_to_usp, 4e60, fff8, USP);
6008    INSN(move_from_usp, 4e68, fff8, USP);
6009    INSN(reset,     4e70, ffff, M68000);
6010    BASE(stop,      4e72, ffff);
6011    BASE(rte,       4e73, ffff);
6012    INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
6013    INSN(m68k_movec, 4e7a, fffe, M68000);
6014#endif
6015    BASE(nop,       4e71, ffff);
6016    INSN(rtd,       4e74, ffff, RTD);
6017    BASE(rts,       4e75, ffff);
6018    BASE(jump,      4e80, ffc0);
6019    BASE(jump,      4ec0, ffc0);
6020    INSN(addsubq,   5000, f080, M68000);
6021    BASE(addsubq,   5080, f0c0);
6022    INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
6023    INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
6024    INSN(dbcc,      50c8, f0f8, M68000);
6025    INSN(tpf,       51f8, fff8, CF_ISA_A);
6026
6027    /* Branch instructions.  */
6028    BASE(branch,    6000, f000);
6029    /* Disable long branch instructions, then add back the ones we want.  */
6030    BASE(undef,     60ff, f0ff); /* All long branches.  */
6031    INSN(branch,    60ff, f0ff, CF_ISA_B);
6032    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
6033    INSN(branch,    60ff, ffff, BRAL);
6034    INSN(branch,    60ff, f0ff, BCCL);
6035
6036    BASE(moveq,     7000, f100);
6037    INSN(mvzs,      7100, f100, CF_ISA_B);
6038    BASE(or,        8000, f000);
6039    BASE(divw,      80c0, f0c0);
6040    INSN(sbcd_reg,  8100, f1f8, M68000);
6041    INSN(sbcd_mem,  8108, f1f8, M68000);
6042    BASE(addsub,    9000, f000);
6043    INSN(undef,     90c0, f0c0, CF_ISA_A);
6044    INSN(subx_reg,  9180, f1f8, CF_ISA_A);
6045    INSN(subx_reg,  9100, f138, M68000);
6046    INSN(subx_mem,  9108, f138, M68000);
6047    INSN(suba,      91c0, f1c0, CF_ISA_A);
6048    INSN(suba,      90c0, f0c0, M68000);
6049
6050    BASE(undef_mac, a000, f000);
6051    INSN(mac,       a000, f100, CF_EMAC);
6052    INSN(from_mac,  a180, f9b0, CF_EMAC);
6053    INSN(move_mac,  a110, f9fc, CF_EMAC);
6054    INSN(from_macsr, a980, f9f0, CF_EMAC);
6055    INSN(from_mask, ad80, fff0, CF_EMAC);
6056    INSN(from_mext, ab80, fbf0, CF_EMAC);
6057    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
6058    INSN(to_mac,    a100, f9c0, CF_EMAC);
6059    INSN(to_macsr,  a900, ffc0, CF_EMAC);
6060    INSN(to_mext,   ab00, fbc0, CF_EMAC);
6061    INSN(to_mask,   ad00, ffc0, CF_EMAC);
6062
6063    INSN(mov3q,     a140, f1c0, CF_ISA_B);
6064    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
6065    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
6066    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
6067    INSN(cmp,       b080, f1c0, CF_ISA_A);
6068    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
6069    INSN(cmp,       b000, f100, M68000);
6070    INSN(eor,       b100, f100, M68000);
6071    INSN(cmpm,      b108, f138, M68000);
6072    INSN(cmpa,      b0c0, f0c0, M68000);
6073    INSN(eor,       b180, f1c0, CF_ISA_A);
6074    BASE(and,       c000, f000);
6075    INSN(exg_dd,    c140, f1f8, M68000);
6076    INSN(exg_aa,    c148, f1f8, M68000);
6077    INSN(exg_da,    c188, f1f8, M68000);
6078    BASE(mulw,      c0c0, f0c0);
6079    INSN(abcd_reg,  c100, f1f8, M68000);
6080    INSN(abcd_mem,  c108, f1f8, M68000);
6081    BASE(addsub,    d000, f000);
6082    INSN(undef,     d0c0, f0c0, CF_ISA_A);
6083    INSN(addx_reg,  d180, f1f8, CF_ISA_A);
6084    INSN(addx_reg,  d100, f138, M68000);
6085    INSN(addx_mem,  d108, f138, M68000);
6086    INSN(adda,      d1c0, f1c0, CF_ISA_A);
6087    INSN(adda,      d0c0, f0c0, M68000);
6088    INSN(shift_im,  e080, f0f0, CF_ISA_A);
6089    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
6090    INSN(shift8_im, e000, f0f0, M68000);
6091    INSN(shift16_im, e040, f0f0, M68000);
6092    INSN(shift_im,  e080, f0f0, M68000);
6093    INSN(shift8_reg, e020, f0f0, M68000);
6094    INSN(shift16_reg, e060, f0f0, M68000);
6095    INSN(shift_reg, e0a0, f0f0, M68000);
6096    INSN(shift_mem, e0c0, fcc0, M68000);
6097    INSN(rotate_im, e090, f0f0, M68000);
6098    INSN(rotate8_im, e010, f0f0, M68000);
6099    INSN(rotate16_im, e050, f0f0, M68000);
6100    INSN(rotate_reg, e0b0, f0f0, M68000);
6101    INSN(rotate8_reg, e030, f0f0, M68000);
6102    INSN(rotate16_reg, e070, f0f0, M68000);
6103    INSN(rotate_mem, e4c0, fcc0, M68000);
6104    INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
6105    INSN(bfext_reg, e9c0, fdf8, BITFIELD);
6106    INSN(bfins_mem, efc0, ffc0, BITFIELD);
6107    INSN(bfins_reg, efc0, fff8, BITFIELD);
6108    INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
6109    INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
6110    INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
6111    INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
6112    INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
6113    INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
6114    INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
6115    INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
6116    INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
6117    INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
6118    BASE(undef_fpu, f000, f000);
6119    INSN(fpu,       f200, ffc0, CF_FPU);
6120    INSN(fbcc,      f280, ffc0, CF_FPU);
6121    INSN(fpu,       f200, ffc0, FPU);
6122    INSN(fscc,      f240, ffc0, FPU);
6123    INSN(fbcc,      f280, ff80, FPU);
6124#if defined(CONFIG_SOFTMMU)
6125    INSN(frestore,  f340, ffc0, CF_FPU);
6126    INSN(fsave,     f300, ffc0, CF_FPU);
6127    INSN(frestore,  f340, ffc0, FPU);
6128    INSN(fsave,     f300, ffc0, FPU);
6129    INSN(intouch,   f340, ffc0, CF_ISA_A);
6130    INSN(cpushl,    f428, ff38, CF_ISA_A);
6131    INSN(cpush,     f420, ff20, M68040);
6132    INSN(cinv,      f400, ff20, M68040);
6133    INSN(pflush,    f500, ffe0, M68040);
6134    INSN(ptest,     f548, ffd8, M68040);
6135    INSN(wddata,    fb00, ff00, CF_ISA_A);
6136    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6137#endif
6138    INSN(move16_mem, f600, ffe0, M68040);
6139    INSN(move16_reg, f620, fff8, M68040);
6140#undef INSN
6141}
6142
6143static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6144{
6145    DisasContext *dc = container_of(dcbase, DisasContext, base);
6146    CPUM68KState *env = cpu->env_ptr;
6147
6148    dc->env = env;
6149    dc->pc = dc->base.pc_first;
6150    dc->cc_op = CC_OP_DYNAMIC;
6151    dc->cc_op_synced = 1;
6152    dc->done_mac = 0;
6153    dc->writeback_mask = 0;
6154    init_release_array(dc);
6155}
6156
6157static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6158{
6159}
6160
6161static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6162{
6163    DisasContext *dc = container_of(dcbase, DisasContext, base);
6164    tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6165}
6166
6167static bool m68k_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
6168                                     const CPUBreakpoint *bp)
6169{
6170    DisasContext *dc = container_of(dcbase, DisasContext, base);
6171
6172    gen_exception(dc, dc->base.pc_next, EXCP_DEBUG);
6173    /*
6174     * The address covered by the breakpoint must be included in
6175     * [tb->pc, tb->pc + tb->size) in order for it to be
6176     * properly cleared -- thus we increment the PC here so that
6177     * the logic setting tb->size below does the right thing.
6178     */
6179    dc->base.pc_next += 2;
6180
6181    return true;
6182}
6183
6184static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6185{
6186    DisasContext *dc = container_of(dcbase, DisasContext, base);
6187    CPUM68KState *env = cpu->env_ptr;
6188    uint16_t insn = read_im16(env, dc);
6189
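        /* Dispatch via the 64K-entry table built by register_m68k_insns(). */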
6190    opcode_table[insn](env, dc, insn);
6191    do_writebacks(dc);
6192    do_release(dc);
6193
6194    dc->base.pc_next = dc->pc;
6195
6196    if (dc->base.is_jmp == DISAS_NEXT) {
6197        /*
6198         * Stop translation when the next insn might touch a new page.
6199         * This ensures that prefetch aborts at the right place.
6200         *
6201         * We cannot determine the size of the next insn without
6202         * completely decoding it.  However, the maximum insn size
6203         * is 32 bytes, so end if we do not have that much remaining.
6204         * This may produce several small TBs at the end of each page,
6205         * but they will all be linked with goto_tb.
6206         *
6207         * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6208         * smaller than MC68020's.
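             *
             * If TARGET_PAGE_SIZE is the usual 4 KiB, this ends a TB
             * whenever an insn starts within the last 32 bytes of a
             * page, i.e. at page offset 0xfe0 or beyond.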
6209         */
6210        target_ulong start_page_offset
6211            = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6212
6213        if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6214            dc->base.is_jmp = DISAS_TOO_MANY;
6215        }
6216    }
6217}
6218
6219static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6220{
6221    DisasContext *dc = container_of(dcbase, DisasContext, base);
6222
6223    switch (dc->base.is_jmp) {
6224    case DISAS_NORETURN:
6225        break;
6226    case DISAS_TOO_MANY:
6227        update_cc_op(dc);
6228        if (dc->base.singlestep_enabled) {
6229            tcg_gen_movi_i32(QREG_PC, dc->pc);
6230            gen_raise_exception(EXCP_DEBUG);
6231        } else {
6232            gen_jmp_tb(dc, 0, dc->pc);
6233        }
6234        break;
6235    case DISAS_JUMP:
6236        /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6237        if (dc->base.singlestep_enabled) {
6238            gen_raise_exception(EXCP_DEBUG);
6239        } else {
6240            tcg_gen_lookup_and_goto_ptr();
6241        }
6242        break;
6243    case DISAS_EXIT:
6244        /*
6245         * We updated CC_OP and PC in gen_exit_tb, but also modified
6246         * other state that may require returning to the main loop.
6247         */
6248        if (dc->base.singlestep_enabled) {
6249            gen_raise_exception(EXCP_DEBUG);
6250        } else {
6251            tcg_gen_exit_tb(NULL, 0);
6252        }
6253        break;
6254    default:
6255        g_assert_not_reached();
6256    }
6257}
6258
6259static void m68k_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
6260{
6261    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
6262    log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
6263}
6264
6265static const TranslatorOps m68k_tr_ops = {
6266    .init_disas_context = m68k_tr_init_disas_context,
6267    .tb_start           = m68k_tr_tb_start,
6268    .insn_start         = m68k_tr_insn_start,
6269    .breakpoint_check   = m68k_tr_breakpoint_check,
6270    .translate_insn     = m68k_tr_translate_insn,
6271    .tb_stop            = m68k_tr_tb_stop,
6272    .disas_log          = m68k_tr_disas_log,
6273};
6274
6275void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
6276{
6277    DisasContext dc;
6278    translator_loop(&m68k_tr_ops, &dc.base, cpu, tb, max_insns);
6279}
6280
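    /*
     * Convert an 80-bit FP register image to a host double (via
     * float64), purely for display in the register dump below.
     */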
6281static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6282{
6283    floatx80 a = { .high = high, .low = low };
6284    union {
6285        float64 f64;
6286        double d;
6287    } u;
6288
6289    u.f64 = floatx80_to_float64(a, &env->fp_status);
6290    return u.d;
6291}
6292
6293void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6294{
6295    M68kCPU *cpu = M68K_CPU(cs);
6296    CPUM68KState *env = &cpu->env;
6297    int i;
6298    uint16_t sr;
6299    for (i = 0; i < 8; i++) {
6300        qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6301                     "F%d = %04x %016"PRIx64"  (%12g)\n",
6302                     i, env->dregs[i], i, env->aregs[i],
6303                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6304                     floatx80_to_double(env, env->fregs[i].l.upper,
6305                                        env->fregs[i].l.lower));
6306    }
6307    qemu_fprintf(f, "PC = %08x   ", env->pc);
6308    sr = env->sr | cpu_m68k_get_ccr(env);
6309    qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6310                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6311                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6312                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6313                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6314                 (sr & CCF_C) ? 'C' : '-');
6315    qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6316                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6317                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6318                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6319                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6320    qemu_fprintf(f, "\n                                "
6321                 "FPCR =     %04x ", env->fpcr);
6322    switch (env->fpcr & FPCR_PREC_MASK) {
6323    case FPCR_PREC_X:
6324        qemu_fprintf(f, "X ");
6325        break;
6326    case FPCR_PREC_S:
6327        qemu_fprintf(f, "S ");
6328        break;
6329    case FPCR_PREC_D:
6330        qemu_fprintf(f, "D ");
6331        break;
6332    }
6333    switch (env->fpcr & FPCR_RND_MASK) {
6334    case FPCR_RND_N:
6335        qemu_fprintf(f, "RN ");
6336        break;
6337    case FPCR_RND_Z:
6338        qemu_fprintf(f, "RZ ");
6339        break;
6340    case FPCR_RND_M:
6341        qemu_fprintf(f, "RM ");
6342        break;
6343    case FPCR_RND_P:
6344        qemu_fprintf(f, "RP ");
6345        break;
6346    }
6347    qemu_fprintf(f, "\n");
6348#ifdef CONFIG_SOFTMMU
6349    qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6350                 env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6351                 env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6352                 env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6353    qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6354    qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6355    qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6356                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6357    qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6358                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6359                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6360    qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6361                 env->mmu.mmusr, env->mmu.ar);
6362#endif
6363}
6364
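    /*
     * data[] holds the values recorded by tcg_gen_insn_start() in
     * m68k_tr_insn_start(): data[0] is the insn's PC, data[1] its cc_op.
     */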
6365void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6366                          target_ulong *data)
6367{
6368    int cc_op = data[1];
6369    env->pc = data[0];
6370    if (cc_op != CC_OP_DYNAMIC) {
6371        env->cc_op = cc_op;
6372    }
6373}
6374