qemu/target/m68k/translate.c
   1/*
   2 *  m68k translation
   3 *
   4 *  Copyright (c) 2005-2007 CodeSourcery
   5 *  Written by Paul Brook
   6 *
   7 * This library is free software; you can redistribute it and/or
   8 * modify it under the terms of the GNU Lesser General Public
   9 * License as published by the Free Software Foundation; either
  10 * version 2.1 of the License, or (at your option) any later version.
  11 *
  12 * This library is distributed in the hope that it will be useful,
  13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  15 * Lesser General Public License for more details.
  16 *
  17 * You should have received a copy of the GNU Lesser General Public
  18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
  19 */
  20
  21#include "qemu/osdep.h"
  22#include "cpu.h"
  23#include "disas/disas.h"
  24#include "exec/exec-all.h"
  25#include "tcg/tcg-op.h"
  26#include "qemu/log.h"
  27#include "qemu/qemu-print.h"
  28#include "exec/cpu_ldst.h"
  29#include "exec/translator.h"
  30
  31#include "exec/helper-proto.h"
  32#include "exec/helper-gen.h"
  33
  34#include "trace-tcg.h"
  35#include "exec/log.h"
  36#include "fpu/softfloat.h"
  37
  38
  39//#define DEBUG_DISPATCH 1
  40
  41#define DEFO32(name, offset) static TCGv QREG_##name;
  42#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
  43#include "qregs.def"
  44#undef DEFO32
  45#undef DEFO64
  46
  47static TCGv_i32 cpu_halted;
  48static TCGv_i32 cpu_exception_index;
  49
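    /*
     * TCG globals (and their name storage) for the Dn and An registers
     * and the MAC accumulators.
     */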
  50static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
  51static TCGv cpu_dregs[8];
  52static TCGv cpu_aregs[8];
  53static TCGv_i64 cpu_macc[4];
  54
  55#define REG(insn, pos)  (((insn) >> (pos)) & 7)
  56#define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
  57#define AREG(insn, pos) get_areg(s, REG(insn, pos))
  58#define MACREG(acc)     cpu_macc[acc]
  59#define QREG_SP         get_areg(s, 7)
  60
  61static TCGv NULL_QREG;
  62#define IS_NULL_QREG(t) (t == NULL_QREG)
  63/* Used to distinguish stores from bad addressing modes.  */
  64static TCGv store_dummy;
  65
  66#include "exec/gen-icount.h"
  67
  68void m68k_tcg_init(void)
  69{
  70    char *p;
  71    int i;
  72
  73#define DEFO32(name, offset) \
  74    QREG_##name = tcg_global_mem_new_i32(cpu_env, \
  75        offsetof(CPUM68KState, offset), #name);
  76#define DEFO64(name, offset) \
  77    QREG_##name = tcg_global_mem_new_i64(cpu_env, \
  78        offsetof(CPUM68KState, offset), #name);
  79#include "qregs.def"
  80#undef DEFO32
  81#undef DEFO64
  82
  83    cpu_halted = tcg_global_mem_new_i32(cpu_env,
  84                                        -offsetof(M68kCPU, env) +
  85                                        offsetof(CPUState, halted), "HALTED");
  86    cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
  87                                                 -offsetof(M68kCPU, env) +
  88                                                 offsetof(CPUState, exception_index),
  89                                                 "EXCEPTION");
  90
  91    p = cpu_reg_names;
  92    for (i = 0; i < 8; i++) {
  93        sprintf(p, "D%d", i);
  94        cpu_dregs[i] = tcg_global_mem_new(cpu_env,
  95                                          offsetof(CPUM68KState, dregs[i]), p);
  96        p += 3;
  97        sprintf(p, "A%d", i);
  98        cpu_aregs[i] = tcg_global_mem_new(cpu_env,
  99                                          offsetof(CPUM68KState, aregs[i]), p);
 100        p += 3;
 101    }
 102    for (i = 0; i < 4; i++) {
 103        sprintf(p, "ACC%d", i);
 104        cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
 105                                         offsetof(CPUM68KState, macc[i]), p);
 106        p += 5;
 107    }
 108
 109    NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
 110    store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
 111}
 112
 113/* internal defines */
 114typedef struct DisasContext {
 115    DisasContextBase base;
 116    CPUM68KState *env;
 117    target_ulong pc;
 118    CCOp cc_op; /* Current CC operation */
 119    int cc_op_synced;
 120    TCGv_i64 mactmp;
 121    int done_mac;
 122    int writeback_mask;
 123    TCGv writeback[8];
 124#define MAX_TO_RELEASE 8
 125    int release_count;
 126    TCGv release[MAX_TO_RELEASE];
 127} DisasContext;
 128
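    /*
     * Temporaries allocated while translating one instruction can be queued
     * with mark_to_release(); do_release() then frees them all in one go.
     */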
 129static void init_release_array(DisasContext *s)
 130{
 131#ifdef CONFIG_DEBUG_TCG
 132    memset(s->release, 0, sizeof(s->release));
 133#endif
 134    s->release_count = 0;
 135}
 136
 137static void do_release(DisasContext *s)
 138{
 139    int i;
 140    for (i = 0; i < s->release_count; i++) {
 141        tcg_temp_free(s->release[i]);
 142    }
 143    init_release_array(s);
 144}
 145
 146static TCGv mark_to_release(DisasContext *s, TCGv tmp)
 147{
 148    g_assert(s->release_count < MAX_TO_RELEASE);
 149    return s->release[s->release_count++] = tmp;
 150}
 151
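    /*
     * Return the current value of address register REGNO, honouring any
     * update deferred by delay_set_areg() for this instruction.
     */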
 152static TCGv get_areg(DisasContext *s, unsigned regno)
 153{
 154    if (s->writeback_mask & (1 << regno)) {
 155        return s->writeback[regno];
 156    } else {
 157        return cpu_aregs[regno];
 158    }
 159}
 160
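    /*
     * Defer an update of address register REGNO until the end of the
     * instruction; do_writebacks() commits it.  If GIVE_TEMP is true,
     * ownership of the temporary VAL passes to the writeback slot.
     */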
 161static void delay_set_areg(DisasContext *s, unsigned regno,
 162                           TCGv val, bool give_temp)
 163{
 164    if (s->writeback_mask & (1 << regno)) {
 165        if (give_temp) {
 166            tcg_temp_free(s->writeback[regno]);
 167            s->writeback[regno] = val;
 168        } else {
 169            tcg_gen_mov_i32(s->writeback[regno], val);
 170        }
 171    } else {
 172        s->writeback_mask |= 1 << regno;
 173        if (give_temp) {
 174            s->writeback[regno] = val;
 175        } else {
 176            TCGv tmp = tcg_temp_new();
 177            s->writeback[regno] = tmp;
 178            tcg_gen_mov_i32(tmp, val);
 179        }
 180    }
 181}
 182
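    /* Commit all address register updates deferred by delay_set_areg().  */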
 183static void do_writebacks(DisasContext *s)
 184{
 185    unsigned mask = s->writeback_mask;
 186    if (mask) {
 187        s->writeback_mask = 0;
 188        do {
 189            unsigned regno = ctz32(mask);
 190            tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
 191            tcg_temp_free(s->writeback[regno]);
 192            mask &= mask - 1;
 193        } while (mask);
 194    }
 195}
 196
 197/* is_jmp field values */
 198#define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
 199#define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
 200
 201#if defined(CONFIG_USER_ONLY)
 202#define IS_USER(s) 1
 203#else
 204#define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
 205#define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
 206                      MMU_KERNEL_IDX : MMU_USER_IDX)
 207#define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
 208                      MMU_KERNEL_IDX : MMU_USER_IDX)
 209#endif
 210
 211typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
 212
 213#ifdef DEBUG_DISPATCH
 214#define DISAS_INSN(name)                                                \
 215    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
 216                                  uint16_t insn);                       \
 217    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
 218                             uint16_t insn)                             \
 219    {                                                                   \
 220        qemu_log("Dispatch " #name "\n");                               \
 221        real_disas_##name(env, s, insn);                                \
 222    }                                                                   \
 223    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
 224                                  uint16_t insn)
 225#else
 226#define DISAS_INSN(name)                                                \
 227    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
 228                             uint16_t insn)
 229#endif
 230
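    /*
     * QREG_CC_* values that still carry live data in each CC_OP state;
     * set_cc_op() discards the ones that become dead.  For the ADD/SUB forms
     * QREG_CC_N holds the result and QREG_CC_V the source operand; for CMP,
     * QREG_CC_N holds the destination and QREG_CC_V the source operand.
     * gen_flush_flags() reconstructs the architectural flags from these.
     */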
 231static const uint8_t cc_op_live[CC_OP_NB] = {
 232    [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
 233    [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
 234    [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
 235    [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
 236    [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
 237    [CC_OP_LOGIC] = CCF_X | CCF_N
 238};
 239
 240static void set_cc_op(DisasContext *s, CCOp op)
 241{
 242    CCOp old_op = s->cc_op;
 243    int dead;
 244
 245    if (old_op == op) {
 246        return;
 247    }
 248    s->cc_op = op;
 249    s->cc_op_synced = 0;
 250
 251    /*
 252     * Discard CC computation that will no longer be used.
 253     * Note that X and N are never dead.
 254     */
 255    dead = cc_op_live[old_op] & ~cc_op_live[op];
 256    if (dead & CCF_C) {
 257        tcg_gen_discard_i32(QREG_CC_C);
 258    }
 259    if (dead & CCF_Z) {
 260        tcg_gen_discard_i32(QREG_CC_Z);
 261    }
 262    if (dead & CCF_V) {
 263        tcg_gen_discard_i32(QREG_CC_V);
 264    }
 265}
 266
 267/* Update the CPU env CC_OP state.  */
 268static void update_cc_op(DisasContext *s)
 269{
 270    if (!s->cc_op_synced) {
 271        s->cc_op_synced = 1;
 272        tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
 273    }
 274}
 275
 276/* Generate a jump to an immediate address.  */
 277static void gen_jmp_im(DisasContext *s, uint32_t dest)
 278{
 279    update_cc_op(s);
 280    tcg_gen_movi_i32(QREG_PC, dest);
 281    s->base.is_jmp = DISAS_JUMP;
 282}
 283
 284/* Generate a jump to the address in qreg DEST.  */
 285static void gen_jmp(DisasContext *s, TCGv dest)
 286{
 287    update_cc_op(s);
 288    tcg_gen_mov_i32(QREG_PC, dest);
 289    s->base.is_jmp = DISAS_JUMP;
 290}
 291
 292static void gen_raise_exception(int nr)
 293{
 294    TCGv_i32 tmp;
 295
 296    tmp = tcg_const_i32(nr);
 297    gen_helper_raise_exception(cpu_env, tmp);
 298    tcg_temp_free_i32(tmp);
 299}
 300
 301static void gen_exception(DisasContext *s, uint32_t dest, int nr)
 302{
 303    update_cc_op(s);
 304    tcg_gen_movi_i32(QREG_PC, dest);
 305
 306    gen_raise_exception(nr);
 307
 308    s->base.is_jmp = DISAS_NORETURN;
 309}
 310
 311static inline void gen_addr_fault(DisasContext *s)
 312{
 313    gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
 314}
 315
 316/*
 317 * Generate a load from the specified address.  Narrow values are
 318 *  sign extended to full register width.
 319 */
 320static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
 321                            int sign, int index)
 322{
 323    TCGv tmp;
 324    tmp = tcg_temp_new_i32();
 325    switch(opsize) {
 326    case OS_BYTE:
 327        if (sign)
 328            tcg_gen_qemu_ld8s(tmp, addr, index);
 329        else
 330            tcg_gen_qemu_ld8u(tmp, addr, index);
 331        break;
 332    case OS_WORD:
 333        if (sign)
 334            tcg_gen_qemu_ld16s(tmp, addr, index);
 335        else
 336            tcg_gen_qemu_ld16u(tmp, addr, index);
 337        break;
 338    case OS_LONG:
 339        tcg_gen_qemu_ld32u(tmp, addr, index);
 340        break;
 341    default:
 342        g_assert_not_reached();
 343    }
 344    return tmp;
 345}
 346
 347/* Generate a store.  */
 348static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
 349                             int index)
 350{
 351    switch(opsize) {
 352    case OS_BYTE:
 353        tcg_gen_qemu_st8(val, addr, index);
 354        break;
 355    case OS_WORD:
 356        tcg_gen_qemu_st16(val, addr, index);
 357        break;
 358    case OS_LONG:
 359        tcg_gen_qemu_st32(val, addr, index);
 360        break;
 361    default:
 362        g_assert_not_reached();
 363    }
 364}
 365
 366typedef enum {
 367    EA_STORE,
 368    EA_LOADU,
 369    EA_LOADS
 370} ea_what;
 371
 372/*
 373 * Generate an unsigned load if WHAT is EA_LOADU, a signed load if WHAT is
 374 * EA_LOADS, otherwise (EA_STORE) generate a store of VAL.
 375 */
 376static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
 377                     ea_what what, int index)
 378{
 379    if (what == EA_STORE) {
 380        gen_store(s, opsize, addr, val, index);
 381        return store_dummy;
 382    } else {
 383        return mark_to_release(s, gen_load(s, opsize, addr,
 384                                           what == EA_LOADS, index));
 385    }
 386}
 387
 388/* Read a 16-bit immediate constant */
 389static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
 390{
 391    uint16_t im;
 392    im = translator_lduw(env, s->pc);
 393    s->pc += 2;
 394    return im;
 395}
 396
 397/* Read an 8-bit immediate constant */
 398static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
 399{
 400    return read_im16(env, s);
 401}
 402
 403/* Read a 32-bit immediate constant.  */
 404static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
 405{
 406    uint32_t im;
 407    im = read_im16(env, s) << 16;
 408    im |= 0xffff & read_im16(env, s);
 409    return im;
 410}
 411
 412/* Read a 64-bit immediate constant.  */
 413static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
 414{
 415    uint64_t im;
 416    im = (uint64_t)read_im32(env, s) << 32;
 417    im |= (uint64_t)read_im32(env, s);
 418    return im;
 419}
 420
 421/* Calculate an address index.  */
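    /*
     * Extension word fields used here: bit 15 selects An vs Dn, bits 14-12
     * the index register number, bit 11 long vs sign-extended word index,
     * and bits 10-9 the scale factor.
     */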
 422static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
 423{
 424    TCGv add;
 425    int scale;
 426
 427    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
 428    if ((ext & 0x800) == 0) {
 429        tcg_gen_ext16s_i32(tmp, add);
 430        add = tmp;
 431    }
 432    scale = (ext >> 9) & 3;
 433    if (scale != 0) {
 434        tcg_gen_shli_i32(tmp, add, scale);
 435        add = tmp;
 436    }
 437    return add;
 438}
 439
 440/*
 441 * Handle a base + index + displacement effective address.
 442 * A NULL_QREG base means pc-relative.
 443 */
 444static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
 445{
 446    uint32_t offset;
 447    uint16_t ext;
 448    TCGv add;
 449    TCGv tmp;
 450    uint32_t bd, od;
 451
 452    offset = s->pc;
 453    ext = read_im16(env, s);
 454
 455    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
 456        return NULL_QREG;
 457
 458    if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
 459        !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
 460        ext &= ~(3 << 9);
 461    }
 462
 463    if (ext & 0x100) {
 464        /* full extension word format */
 465        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
 466            return NULL_QREG;
 467
 468        if ((ext & 0x30) > 0x10) {
 469            /* base displacement */
 470            if ((ext & 0x30) == 0x20) {
 471                bd = (int16_t)read_im16(env, s);
 472            } else {
 473                bd = read_im32(env, s);
 474            }
 475        } else {
 476            bd = 0;
 477        }
 478        tmp = mark_to_release(s, tcg_temp_new());
 479        if ((ext & 0x44) == 0) {
 480            /* pre-index */
 481            add = gen_addr_index(s, ext, tmp);
 482        } else {
 483            add = NULL_QREG;
 484        }
 485        if ((ext & 0x80) == 0) {
 486            /* base not suppressed */
 487            if (IS_NULL_QREG(base)) {
 488                base = mark_to_release(s, tcg_const_i32(offset + bd));
 489                bd = 0;
 490            }
 491            if (!IS_NULL_QREG(add)) {
 492                tcg_gen_add_i32(tmp, add, base);
 493                add = tmp;
 494            } else {
 495                add = base;
 496            }
 497        }
 498        if (!IS_NULL_QREG(add)) {
 499            if (bd != 0) {
 500                tcg_gen_addi_i32(tmp, add, bd);
 501                add = tmp;
 502            }
 503        } else {
 504            add = mark_to_release(s, tcg_const_i32(bd));
 505        }
 506        if ((ext & 3) != 0) {
 507            /* memory indirect */
 508            base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
 509            if ((ext & 0x44) == 4) {
 510                add = gen_addr_index(s, ext, tmp);
 511                tcg_gen_add_i32(tmp, add, base);
 512                add = tmp;
 513            } else {
 514                add = base;
 515            }
 516            if ((ext & 3) > 1) {
 517                /* outer displacement */
 518                if ((ext & 3) == 2) {
 519                    od = (int16_t)read_im16(env, s);
 520                } else {
 521                    od = read_im32(env, s);
 522                }
 523            } else {
 524                od = 0;
 525            }
 526            if (od != 0) {
 527                tcg_gen_addi_i32(tmp, add, od);
 528                add = tmp;
 529            }
 530        }
 531    } else {
 532        /* brief extension word format */
 533        tmp = mark_to_release(s, tcg_temp_new());
 534        add = gen_addr_index(s, ext, tmp);
 535        if (!IS_NULL_QREG(base)) {
 536            tcg_gen_add_i32(tmp, add, base);
 537            if ((int8_t)ext)
 538                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
 539        } else {
 540            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
 541        }
 542        add = tmp;
 543    }
 544    return add;
 545}
 546
 547/* Sign or zero extend a value.  */
 548
 549static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
 550{
 551    switch (opsize) {
 552    case OS_BYTE:
 553        if (sign) {
 554            tcg_gen_ext8s_i32(res, val);
 555        } else {
 556            tcg_gen_ext8u_i32(res, val);
 557        }
 558        break;
 559    case OS_WORD:
 560        if (sign) {
 561            tcg_gen_ext16s_i32(res, val);
 562        } else {
 563            tcg_gen_ext16u_i32(res, val);
 564        }
 565        break;
 566    case OS_LONG:
 567        tcg_gen_mov_i32(res, val);
 568        break;
 569    default:
 570        g_assert_not_reached();
 571    }
 572}
 573
 574/* Evaluate all the CC flags.  */
 575
 576static void gen_flush_flags(DisasContext *s)
 577{
 578    TCGv t0, t1;
 579
 580    switch (s->cc_op) {
 581    case CC_OP_FLAGS:
 582        return;
 583
 584    case CC_OP_ADDB:
 585    case CC_OP_ADDW:
 586    case CC_OP_ADDL:
 587        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
 588        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 589        /* Compute signed overflow for addition.  */
 590        t0 = tcg_temp_new();
 591        t1 = tcg_temp_new();
 592        tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
 593        gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
 594        tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
 595        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
 596        tcg_temp_free(t0);
 597        tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
 598        tcg_temp_free(t1);
 599        break;
 600
 601    case CC_OP_SUBB:
 602    case CC_OP_SUBW:
 603    case CC_OP_SUBL:
 604        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
 605        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 606        /* Compute signed overflow for subtraction.  */
 607        t0 = tcg_temp_new();
 608        t1 = tcg_temp_new();
 609        tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
 610        gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
 611        tcg_gen_xor_i32(t1, QREG_CC_N, t0);
 612        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
 613        tcg_temp_free(t0);
 614        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
 615        tcg_temp_free(t1);
 616        break;
 617
 618    case CC_OP_CMPB:
 619    case CC_OP_CMPW:
 620    case CC_OP_CMPL:
 621        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
 622        tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
 623        gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
 624        /* Compute signed overflow for subtraction.  */
 625        t0 = tcg_temp_new();
 626        tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
 627        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
 628        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
 629        tcg_temp_free(t0);
 630        tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
 631        break;
 632
 633    case CC_OP_LOGIC:
 634        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 635        tcg_gen_movi_i32(QREG_CC_C, 0);
 636        tcg_gen_movi_i32(QREG_CC_V, 0);
 637        break;
 638
 639    case CC_OP_DYNAMIC:
 640        gen_helper_flush_flags(cpu_env, QREG_CC_OP);
 641        s->cc_op_synced = 1;
 642        break;
 643
 644    default:
 645        t0 = tcg_const_i32(s->cc_op);
 646        gen_helper_flush_flags(cpu_env, t0);
 647        tcg_temp_free(t0);
 648        s->cc_op_synced = 1;
 649        break;
 650    }
 651
 652    /* Note that flush_flags also assigns CC_OP_FLAGS to env->cc_op.  */
 653    s->cc_op = CC_OP_FLAGS;
 654}
 655
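    /*
     * As gen_ext(), but return the extended value in a new temporary that is
     * released automatically; OS_LONG values are returned unchanged.
     */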
 656static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
 657{
 658    TCGv tmp;
 659
 660    if (opsize == OS_LONG) {
 661        tmp = val;
 662    } else {
 663        tmp = mark_to_release(s, tcg_temp_new());
 664        gen_ext(tmp, val, opsize, sign);
 665    }
 666
 667    return tmp;
 668}
 669
 670static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
 671{
 672    gen_ext(QREG_CC_N, val, opsize, 1);
 673    set_cc_op(s, CC_OP_LOGIC);
 674}
 675
 676static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
 677{
 678    tcg_gen_mov_i32(QREG_CC_N, dest);
 679    tcg_gen_mov_i32(QREG_CC_V, src);
 680    set_cc_op(s, CC_OP_CMPB + opsize);
 681}
 682
 683static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
 684{
 685    gen_ext(QREG_CC_N, dest, opsize, 1);
 686    tcg_gen_mov_i32(QREG_CC_V, src);
 687}
 688
 689static inline int opsize_bytes(int opsize)
 690{
 691    switch (opsize) {
 692    case OS_BYTE: return 1;
 693    case OS_WORD: return 2;
 694    case OS_LONG: return 4;
 695    case OS_SINGLE: return 4;
 696    case OS_DOUBLE: return 8;
 697    case OS_EXTENDED: return 12;
 698    case OS_PACKED: return 12;
 699    default:
 700        g_assert_not_reached();
 701    }
 702}
 703
 704static inline int insn_opsize(int insn)
 705{
 706    switch ((insn >> 6) & 3) {
 707    case 0: return OS_BYTE;
 708    case 1: return OS_WORD;
 709    case 2: return OS_LONG;
 710    default:
 711        g_assert_not_reached();
 712    }
 713}
 714
 715static inline int ext_opsize(int ext, int pos)
 716{
 717    switch ((ext >> pos) & 7) {
 718    case 0: return OS_LONG;
 719    case 1: return OS_SINGLE;
 720    case 2: return OS_EXTENDED;
 721    case 3: return OS_PACKED;
 722    case 4: return OS_WORD;
 723    case 5: return OS_DOUBLE;
 724    case 6: return OS_BYTE;
 725    default:
 726        g_assert_not_reached();
 727    }
 728}
 729
 730/*
 731 * Assign value to a register.  If the width is less than the register width
 732 * only the low part of the register is set.
 733 */
 734static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
 735{
 736    TCGv tmp;
 737    switch (opsize) {
 738    case OS_BYTE:
 739        tcg_gen_andi_i32(reg, reg, 0xffffff00);
 740        tmp = tcg_temp_new();
 741        tcg_gen_ext8u_i32(tmp, val);
 742        tcg_gen_or_i32(reg, reg, tmp);
 743        tcg_temp_free(tmp);
 744        break;
 745    case OS_WORD:
 746        tcg_gen_andi_i32(reg, reg, 0xffff0000);
 747        tmp = tcg_temp_new();
 748        tcg_gen_ext16u_i32(tmp, val);
 749        tcg_gen_or_i32(reg, reg, tmp);
 750        tcg_temp_free(tmp);
 751        break;
 752    case OS_LONG:
 753    case OS_SINGLE:
 754        tcg_gen_mov_i32(reg, val);
 755        break;
 756    default:
 757        g_assert_not_reached();
 758    }
 759}
 760
 761/*
 762 * Generate code for an "effective address".  Does not adjust the base
 763 * register for autoincrement addressing modes.
 764 */
 765static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
 766                         int mode, int reg0, int opsize)
 767{
 768    TCGv reg;
 769    TCGv tmp;
 770    uint16_t ext;
 771    uint32_t offset;
 772
 773    switch (mode) {
 774    case 0: /* Data register direct.  */
 775    case 1: /* Address register direct.  */
 776        return NULL_QREG;
 777    case 3: /* Indirect postincrement.  */
 778        if (opsize == OS_UNSIZED) {
 779            return NULL_QREG;
 780        }
 781        /* fallthru */
 782    case 2: /* Indirect register */
 783        return get_areg(s, reg0);
 784    case 4: /* Indirect predecrement.  */
 785        if (opsize == OS_UNSIZED) {
 786            return NULL_QREG;
 787        }
 788        reg = get_areg(s, reg0);
 789        tmp = mark_to_release(s, tcg_temp_new());
 790        if (reg0 == 7 && opsize == OS_BYTE &&
 791            m68k_feature(s->env, M68K_FEATURE_M68000)) {
 792            tcg_gen_subi_i32(tmp, reg, 2);
 793        } else {
 794            tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
 795        }
 796        return tmp;
 797    case 5: /* Indirect displacement.  */
 798        reg = get_areg(s, reg0);
 799        tmp = mark_to_release(s, tcg_temp_new());
 800        ext = read_im16(env, s);
 801        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
 802        return tmp;
 803    case 6: /* Indirect index + displacement.  */
 804        reg = get_areg(s, reg0);
 805        return gen_lea_indexed(env, s, reg);
 806    case 7: /* Other */
 807        switch (reg0) {
 808        case 0: /* Absolute short.  */
 809            offset = (int16_t)read_im16(env, s);
 810            return mark_to_release(s, tcg_const_i32(offset));
 811        case 1: /* Absolute long.  */
 812            offset = read_im32(env, s);
 813            return mark_to_release(s, tcg_const_i32(offset));
 814        case 2: /* pc displacement  */
 815            offset = s->pc;
 816            offset += (int16_t)read_im16(env, s);
 817            return mark_to_release(s, tcg_const_i32(offset));
 818        case 3: /* pc index+displacement.  */
 819            return gen_lea_indexed(env, s, NULL_QREG);
 820        case 4: /* Immediate.  */
 821        default:
 822            return NULL_QREG;
 823        }
 824    }
 825    /* Should never happen.  */
 826    return NULL_QREG;
 827}
 828
 829static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
 830                    int opsize)
 831{
 832    int mode = extract32(insn, 3, 3);
 833    int reg0 = REG(insn, 0);
 834    return gen_lea_mode(env, s, mode, reg0, opsize);
 835}
 836
 837/*
 838 * Generate code to load/store a value from/into an EA.  If WHAT is EA_STORE
 839 * this is a write, otherwise it is a read (EA_LOADS sign-extends, EA_LOADU
 840 * zero-extends).  ADDRP is non-null for read-modify-write operands.
 841 */
 842static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
 843                        int opsize, TCGv val, TCGv *addrp, ea_what what,
 844                        int index)
 845{
 846    TCGv reg, tmp, result;
 847    int32_t offset;
 848
 849    switch (mode) {
 850    case 0: /* Data register direct.  */
 851        reg = cpu_dregs[reg0];
 852        if (what == EA_STORE) {
 853            gen_partset_reg(opsize, reg, val);
 854            return store_dummy;
 855        } else {
 856            return gen_extend(s, reg, opsize, what == EA_LOADS);
 857        }
 858    case 1: /* Address register direct.  */
 859        reg = get_areg(s, reg0);
 860        if (what == EA_STORE) {
 861            tcg_gen_mov_i32(reg, val);
 862            return store_dummy;
 863        } else {
 864            return gen_extend(s, reg, opsize, what == EA_LOADS);
 865        }
 866    case 2: /* Indirect register */
 867        reg = get_areg(s, reg0);
 868        return gen_ldst(s, opsize, reg, val, what, index);
 869    case 3: /* Indirect postincrement.  */
 870        reg = get_areg(s, reg0);
 871        result = gen_ldst(s, opsize, reg, val, what, index);
 872        if (what == EA_STORE || !addrp) {
 873            TCGv tmp = tcg_temp_new();
 874            if (reg0 == 7 && opsize == OS_BYTE &&
 875                m68k_feature(s->env, M68K_FEATURE_M68000)) {
 876                tcg_gen_addi_i32(tmp, reg, 2);
 877            } else {
 878                tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
 879            }
 880            delay_set_areg(s, reg0, tmp, true);
 881        }
 882        return result;
 883    case 4: /* Indirect predecrement.  */
 884        if (addrp && what == EA_STORE) {
 885            tmp = *addrp;
 886        } else {
 887            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
 888            if (IS_NULL_QREG(tmp)) {
 889                return tmp;
 890            }
 891            if (addrp) {
 892                *addrp = tmp;
 893            }
 894        }
 895        result = gen_ldst(s, opsize, tmp, val, what, index);
 896        if (what == EA_STORE || !addrp) {
 897            delay_set_areg(s, reg0, tmp, false);
 898        }
 899        return result;
 900    case 5: /* Indirect displacement.  */
 901    case 6: /* Indirect index + displacement.  */
 902    do_indirect:
 903        if (addrp && what == EA_STORE) {
 904            tmp = *addrp;
 905        } else {
 906            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
 907            if (IS_NULL_QREG(tmp)) {
 908                return tmp;
 909            }
 910            if (addrp) {
 911                *addrp = tmp;
 912            }
 913        }
 914        return gen_ldst(s, opsize, tmp, val, what, index);
 915    case 7: /* Other */
 916        switch (reg0) {
 917        case 0: /* Absolute short.  */
 918        case 1: /* Absolute long.  */
 919        case 2: /* pc displacement  */
 920        case 3: /* pc index+displacement.  */
 921            goto do_indirect;
 922        case 4: /* Immediate.  */
 923            /* Sign extend values for consistency.  */
 924            switch (opsize) {
 925            case OS_BYTE:
 926                if (what == EA_LOADS) {
 927                    offset = (int8_t)read_im8(env, s);
 928                } else {
 929                    offset = read_im8(env, s);
 930                }
 931                break;
 932            case OS_WORD:
 933                if (what == EA_LOADS) {
 934                    offset = (int16_t)read_im16(env, s);
 935                } else {
 936                    offset = read_im16(env, s);
 937                }
 938                break;
 939            case OS_LONG:
 940                offset = read_im32(env, s);
 941                break;
 942            default:
 943                g_assert_not_reached();
 944            }
 945            return mark_to_release(s, tcg_const_i32(offset));
 946        default:
 947            return NULL_QREG;
 948        }
 949    }
 950    /* Should never happen.  */
 951    return NULL_QREG;
 952}
 953
 954static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
 955                   int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
 956{
 957    int mode = extract32(insn, 3, 3);
 958    int reg0 = REG(insn, 0);
 959    return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
 960}
 961
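    /* Return a pointer to FP register FREG within CPUM68KState.  */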
 962static TCGv_ptr gen_fp_ptr(int freg)
 963{
 964    TCGv_ptr fp = tcg_temp_new_ptr();
 965    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
 966    return fp;
 967}
 968
 969static TCGv_ptr gen_fp_result_ptr(void)
 970{
 971    TCGv_ptr fp = tcg_temp_new_ptr();
 972    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
 973    return fp;
 974}
 975
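    /*
     * Copy one FP register to another: the 16-bit sign/exponent word
     * (l.upper) and the 64-bit mantissa (l.lower).
     */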
 976static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
 977{
 978    TCGv t32;
 979    TCGv_i64 t64;
 980
 981    t32 = tcg_temp_new();
 982    tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
 983    tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
 984    tcg_temp_free(t32);
 985
 986    t64 = tcg_temp_new_i64();
 987    tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
 988    tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
 989    tcg_temp_free_i64(t64);
 990}
 991
 992static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
 993                        int index)
 994{
 995    TCGv tmp;
 996    TCGv_i64 t64;
 997
 998    t64 = tcg_temp_new_i64();
 999    tmp = tcg_temp_new();
1000    switch (opsize) {
1001    case OS_BYTE:
1002        tcg_gen_qemu_ld8s(tmp, addr, index);
1003        gen_helper_exts32(cpu_env, fp, tmp);
1004        break;
1005    case OS_WORD:
1006        tcg_gen_qemu_ld16s(tmp, addr, index);
1007        gen_helper_exts32(cpu_env, fp, tmp);
1008        break;
1009    case OS_LONG:
1010        tcg_gen_qemu_ld32u(tmp, addr, index);
1011        gen_helper_exts32(cpu_env, fp, tmp);
1012        break;
1013    case OS_SINGLE:
1014        tcg_gen_qemu_ld32u(tmp, addr, index);
1015        gen_helper_extf32(cpu_env, fp, tmp);
1016        break;
1017    case OS_DOUBLE:
1018        tcg_gen_qemu_ld64(t64, addr, index);
1019        gen_helper_extf64(cpu_env, fp, t64);
1020        break;
1021    case OS_EXTENDED:
1022        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1023            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1024            break;
1025        }
1026        tcg_gen_qemu_ld32u(tmp, addr, index);
1027        tcg_gen_shri_i32(tmp, tmp, 16);
1028        tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1029        tcg_gen_addi_i32(tmp, addr, 4);
1030        tcg_gen_qemu_ld64(t64, tmp, index);
1031        tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1032        break;
1033    case OS_PACKED:
1034        /*
1035         * unimplemented data type on 68040/ColdFire
1036         * FIXME if needed for another FPU
1037         */
1038        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1039        break;
1040    default:
1041        g_assert_not_reached();
1042    }
1043    tcg_temp_free(tmp);
1044    tcg_temp_free_i64(t64);
1045}
1046
1047static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1048                         int index)
1049{
1050    TCGv tmp;
1051    TCGv_i64 t64;
1052
1053    t64 = tcg_temp_new_i64();
1054    tmp = tcg_temp_new();
1055    switch (opsize) {
1056    case OS_BYTE:
1057        gen_helper_reds32(tmp, cpu_env, fp);
1058        tcg_gen_qemu_st8(tmp, addr, index);
1059        break;
1060    case OS_WORD:
1061        gen_helper_reds32(tmp, cpu_env, fp);
1062        tcg_gen_qemu_st16(tmp, addr, index);
1063        break;
1064    case OS_LONG:
1065        gen_helper_reds32(tmp, cpu_env, fp);
1066        tcg_gen_qemu_st32(tmp, addr, index);
1067        break;
1068    case OS_SINGLE:
1069        gen_helper_redf32(tmp, cpu_env, fp);
1070        tcg_gen_qemu_st32(tmp, addr, index);
1071        break;
1072    case OS_DOUBLE:
1073        gen_helper_redf64(t64, cpu_env, fp);
1074        tcg_gen_qemu_st64(t64, addr, index);
1075        break;
1076    case OS_EXTENDED:
1077        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1078            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1079            break;
1080        }
1081        tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1082        tcg_gen_shli_i32(tmp, tmp, 16);
1083        tcg_gen_qemu_st32(tmp, addr, index);
1084        tcg_gen_addi_i32(tmp, addr, 4);
1085        tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1086        tcg_gen_qemu_st64(t64, tmp, index);
1087        break;
1088    case OS_PACKED:
1089        /*
1090         * unimplemented data type on 68040/ColdFire
1091         * FIXME if needed for another FPU
1092         */
1093        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1094        break;
1095    default:
1096        g_assert_not_reached();
1097    }
1098    tcg_temp_free(tmp);
1099    tcg_temp_free_i64(t64);
1100}
1101
1102static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1103                        TCGv_ptr fp, ea_what what, int index)
1104{
1105    if (what == EA_STORE) {
1106        gen_store_fp(s, opsize, addr, fp, index);
1107    } else {
1108        gen_load_fp(s, opsize, addr, fp, index);
1109    }
1110}
1111
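    /*
     * FP analogue of gen_ea_mode(): move a value of size OPSIZE between the
     * effective address <mode, reg0> and the FP register pointed to by FP.
     * Returns 0 on success, -1 if the addressing mode is invalid.
     */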
1112static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1113                          int reg0, int opsize, TCGv_ptr fp, ea_what what,
1114                          int index)
1115{
1116    TCGv reg, addr, tmp;
1117    TCGv_i64 t64;
1118
1119    switch (mode) {
1120    case 0: /* Data register direct.  */
1121        reg = cpu_dregs[reg0];
1122        if (what == EA_STORE) {
1123            switch (opsize) {
1124            case OS_BYTE:
1125            case OS_WORD:
1126            case OS_LONG:
1127                gen_helper_reds32(reg, cpu_env, fp);
1128                break;
1129            case OS_SINGLE:
1130                gen_helper_redf32(reg, cpu_env, fp);
1131                break;
1132            default:
1133                g_assert_not_reached();
1134            }
1135        } else {
1136            tmp = tcg_temp_new();
1137            switch (opsize) {
1138            case OS_BYTE:
1139                tcg_gen_ext8s_i32(tmp, reg);
1140                gen_helper_exts32(cpu_env, fp, tmp);
1141                break;
1142            case OS_WORD:
1143                tcg_gen_ext16s_i32(tmp, reg);
1144                gen_helper_exts32(cpu_env, fp, tmp);
1145                break;
1146            case OS_LONG:
1147                gen_helper_exts32(cpu_env, fp, reg);
1148                break;
1149            case OS_SINGLE:
1150                gen_helper_extf32(cpu_env, fp, reg);
1151                break;
1152            default:
1153                g_assert_not_reached();
1154            }
1155            tcg_temp_free(tmp);
1156        }
1157        return 0;
1158    case 1: /* Address register direct.  */
1159        return -1;
1160    case 2: /* Indirect register */
1161        addr = get_areg(s, reg0);
1162        gen_ldst_fp(s, opsize, addr, fp, what, index);
1163        return 0;
1164    case 3: /* Indirect postincrement.  */
1165        addr = cpu_aregs[reg0];
1166        gen_ldst_fp(s, opsize, addr, fp, what, index);
1167        tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1168        return 0;
1169    case 4: /* Indirect predecrement.  */
1170        addr = gen_lea_mode(env, s, mode, reg0, opsize);
1171        if (IS_NULL_QREG(addr)) {
1172            return -1;
1173        }
1174        gen_ldst_fp(s, opsize, addr, fp, what, index);
1175        tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1176        return 0;
1177    case 5: /* Indirect displacement.  */
1178    case 6: /* Indirect index + displacement.  */
1179    do_indirect:
1180        addr = gen_lea_mode(env, s, mode, reg0, opsize);
1181        if (IS_NULL_QREG(addr)) {
1182            return -1;
1183        }
1184        gen_ldst_fp(s, opsize, addr, fp, what, index);
1185        return 0;
1186    case 7: /* Other */
1187        switch (reg0) {
1188        case 0: /* Absolute short.  */
1189        case 1: /* Absolute long.  */
1190        case 2: /* pc displacement  */
1191        case 3: /* pc index+displacement.  */
1192            goto do_indirect;
1193        case 4: /* Immediate.  */
1194            if (what == EA_STORE) {
1195                return -1;
1196            }
1197            switch (opsize) {
1198            case OS_BYTE:
1199                tmp = tcg_const_i32((int8_t)read_im8(env, s));
1200                gen_helper_exts32(cpu_env, fp, tmp);
1201                tcg_temp_free(tmp);
1202                break;
1203            case OS_WORD:
1204                tmp = tcg_const_i32((int16_t)read_im16(env, s));
1205                gen_helper_exts32(cpu_env, fp, tmp);
1206                tcg_temp_free(tmp);
1207                break;
1208            case OS_LONG:
1209                tmp = tcg_const_i32(read_im32(env, s));
1210                gen_helper_exts32(cpu_env, fp, tmp);
1211                tcg_temp_free(tmp);
1212                break;
1213            case OS_SINGLE:
1214                tmp = tcg_const_i32(read_im32(env, s));
1215                gen_helper_extf32(cpu_env, fp, tmp);
1216                tcg_temp_free(tmp);
1217                break;
1218            case OS_DOUBLE:
1219                t64 = tcg_const_i64(read_im64(env, s));
1220                gen_helper_extf64(cpu_env, fp, t64);
1221                tcg_temp_free_i64(t64);
1222                break;
1223            case OS_EXTENDED:
1224                if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1225                    gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1226                    break;
1227                }
1228                tmp = tcg_const_i32(read_im32(env, s) >> 16);
1229                tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1230                tcg_temp_free(tmp);
1231                t64 = tcg_const_i64(read_im64(env, s));
1232                tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1233                tcg_temp_free_i64(t64);
1234                break;
1235            case OS_PACKED:
1236                /*
1237                 * unimplemented data type on 68040/ColdFire
1238                 * FIXME if needed for another FPU
1239                 */
1240                gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1241                break;
1242            default:
1243                g_assert_not_reached();
1244            }
1245            return 0;
1246        default:
1247            return -1;
1248        }
1249    }
1250    return -1;
1251}
1252
1253static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1254                       int opsize, TCGv_ptr fp, ea_what what, int index)
1255{
1256    int mode = extract32(insn, 3, 3);
1257    int reg0 = REG(insn, 0);
1258    return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1259}
1260
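    /*
     * A decoded condition: it holds when (v1 tcond v2) is true.  g1/g2 mark
     * v1/v2 as global values that free_cond() must not free.
     */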
1261typedef struct {
1262    TCGCond tcond;
1263    bool g1;
1264    bool g2;
1265    TCGv v1;
1266    TCGv v2;
1267} DisasCompare;
1268
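    /*
     * Translate m68k condition code COND (0..15) into a DisasCompare,
     * exploiting the current CC_OP so that the flags need not be flushed
     * for the common cases.
     */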
1269static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1270{
1271    TCGv tmp, tmp2;
1272    TCGCond tcond;
1273    CCOp op = s->cc_op;
1274
1275    /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1276    if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1277        c->g1 = c->g2 = 1;
1278        c->v1 = QREG_CC_N;
1279        c->v2 = QREG_CC_V;
1280        switch (cond) {
1281        case 2: /* HI */
1282        case 3: /* LS */
1283            tcond = TCG_COND_LEU;
1284            goto done;
1285        case 4: /* CC */
1286        case 5: /* CS */
1287            tcond = TCG_COND_LTU;
1288            goto done;
1289        case 6: /* NE */
1290        case 7: /* EQ */
1291            tcond = TCG_COND_EQ;
1292            goto done;
1293        case 10: /* PL */
1294        case 11: /* MI */
1295            c->g1 = c->g2 = 0;
1296            c->v2 = tcg_const_i32(0);
1297            c->v1 = tmp = tcg_temp_new();
1298            tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1299            gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1300            /* fallthru */
1301        case 12: /* GE */
1302        case 13: /* LT */
1303            tcond = TCG_COND_LT;
1304            goto done;
1305        case 14: /* GT */
1306        case 15: /* LE */
1307            tcond = TCG_COND_LE;
1308            goto done;
1309        }
1310    }
1311
1312    c->g1 = 1;
1313    c->g2 = 0;
1314    c->v2 = tcg_const_i32(0);
1315
1316    switch (cond) {
1317    case 0: /* T */
1318    case 1: /* F */
1319        c->v1 = c->v2;
1320        tcond = TCG_COND_NEVER;
1321        goto done;
1322    case 14: /* GT (!(Z || (N ^ V))) */
1323    case 15: /* LE (Z || (N ^ V)) */
1324        /*
1325         * Logic operations clear V, which simplifies LE to (Z || N),
1326         * and since Z and N are co-located, this becomes a normal
1327         * comparison vs N.
1328         */
1329        if (op == CC_OP_LOGIC) {
1330            c->v1 = QREG_CC_N;
1331            tcond = TCG_COND_LE;
1332            goto done;
1333        }
1334        break;
1335    case 12: /* GE (!(N ^ V)) */
1336    case 13: /* LT (N ^ V) */
1337        /* Logic operations clear V, which simplifies this to N.  */
1338        if (op != CC_OP_LOGIC) {
1339            break;
1340        }
1341        /* fallthru */
1342    case 10: /* PL (!N) */
1343    case 11: /* MI (N) */
1344        /* Several cases represent N normally.  */
1345        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1346            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1347            op == CC_OP_LOGIC) {
1348            c->v1 = QREG_CC_N;
1349            tcond = TCG_COND_LT;
1350            goto done;
1351        }
1352        break;
1353    case 6: /* NE (!Z) */
1354    case 7: /* EQ (Z) */
1355        /* Some cases fold Z into N.  */
1356        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1357            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1358            op == CC_OP_LOGIC) {
1359            tcond = TCG_COND_EQ;
1360            c->v1 = QREG_CC_N;
1361            goto done;
1362        }
1363        break;
1364    case 4: /* CC (!C) */
1365    case 5: /* CS (C) */
1366        /* Some cases fold C into X.  */
1367        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1368            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1369            tcond = TCG_COND_NE;
1370            c->v1 = QREG_CC_X;
1371            goto done;
1372        }
1373        /* fallthru */
1374    case 8: /* VC (!V) */
1375    case 9: /* VS (V) */
1376        /* Logic operations clear V and C.  */
1377        if (op == CC_OP_LOGIC) {
1378            tcond = TCG_COND_NEVER;
1379            c->v1 = c->v2;
1380            goto done;
1381        }
1382        break;
1383    }
1384
1385    /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1386    gen_flush_flags(s);
1387
1388    switch (cond) {
1389    case 0: /* T */
1390    case 1: /* F */
1391    default:
1392        /* Invalid, or handled above.  */
1393        abort();
1394    case 2: /* HI (!C && !Z) -> !(C || Z) */
1395    case 3: /* LS (C || Z) */
1396        c->v1 = tmp = tcg_temp_new();
1397        c->g1 = 0;
1398        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1399        tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1400        tcond = TCG_COND_NE;
1401        break;
1402    case 4: /* CC (!C) */
1403    case 5: /* CS (C) */
1404        c->v1 = QREG_CC_C;
1405        tcond = TCG_COND_NE;
1406        break;
1407    case 6: /* NE (!Z) */
1408    case 7: /* EQ (Z) */
1409        c->v1 = QREG_CC_Z;
1410        tcond = TCG_COND_EQ;
1411        break;
1412    case 8: /* VC (!V) */
1413    case 9: /* VS (V) */
1414        c->v1 = QREG_CC_V;
1415        tcond = TCG_COND_LT;
1416        break;
1417    case 10: /* PL (!N) */
1418    case 11: /* MI (N) */
1419        c->v1 = QREG_CC_N;
1420        tcond = TCG_COND_LT;
1421        break;
1422    case 12: /* GE (!(N ^ V)) */
1423    case 13: /* LT (N ^ V) */
1424        c->v1 = tmp = tcg_temp_new();
1425        c->g1 = 0;
1426        tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1427        tcond = TCG_COND_LT;
1428        break;
1429    case 14: /* GT (!(Z || (N ^ V))) */
1430    case 15: /* LE (Z || (N ^ V)) */
1431        c->v1 = tmp = tcg_temp_new();
1432        c->g1 = 0;
1433        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1434        tcg_gen_neg_i32(tmp, tmp);
1435        tmp2 = tcg_temp_new();
1436        tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1437        tcg_gen_or_i32(tmp, tmp, tmp2);
1438        tcg_temp_free(tmp2);
1439        tcond = TCG_COND_LT;
1440        break;
1441    }
1442
1443 done:
1444    if ((cond & 1) == 0) {
1445        tcond = tcg_invert_cond(tcond);
1446    }
1447    c->tcond = tcond;
1448}
1449
1450static void free_cond(DisasCompare *c)
1451{
1452    if (!c->g1) {
1453        tcg_temp_free(c->v1);
1454    }
1455    if (!c->g2) {
1456        tcg_temp_free(c->v2);
1457    }
1458}
1459
1460static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1461{
1462    DisasCompare c;
1463
1464    gen_cc_cond(&c, s, cond);
1465    update_cc_op(s);
1466    tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1467    free_cond(&c);
1468}
1469
1470/* Force a TB lookup after an instruction that changes the CPU state.  */
1471static void gen_exit_tb(DisasContext *s)
1472{
1473    update_cc_op(s);
1474    tcg_gen_movi_i32(QREG_PC, s->pc);
1475    s->base.is_jmp = DISAS_EXIT;
1476}
1477
1478#define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1479        result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1480                        op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1481        if (IS_NULL_QREG(result)) {                                     \
1482            gen_addr_fault(s);                                          \
1483            return;                                                     \
1484        }                                                               \
1485    } while (0)
1486
1487#define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1488        TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1489                                EA_STORE, IS_USER(s));                  \
1490        if (IS_NULL_QREG(ea_result)) {                                  \
1491            gen_addr_fault(s);                                          \
1492            return;                                                     \
1493        }                                                               \
1494    } while (0)
1495
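    /*
     * In system mode, direct block chaining is only used when the branch
     * target is on the same guest page as the TB start or the current insn;
     * user-mode emulation always allows it.
     */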
1496static inline bool use_goto_tb(DisasContext *s, uint32_t dest)
1497{
1498#ifndef CONFIG_USER_ONLY
1499    return (s->base.pc_first & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)
1500        || (s->base.pc_next & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
1501#else
1502    return true;
1503#endif
1504}
1505
1506/* Generate a jump to an immediate address.  */
1507static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
1508{
1509    if (unlikely(s->base.singlestep_enabled)) {
1510        gen_exception(s, dest, EXCP_DEBUG);
1511    } else if (use_goto_tb(s, dest)) {
1512        tcg_gen_goto_tb(n);
1513        tcg_gen_movi_i32(QREG_PC, dest);
1514        tcg_gen_exit_tb(s->base.tb, n);
1515    } else {
1516        gen_jmp_im(s, dest);
1517        tcg_gen_exit_tb(NULL, 0);
1518    }
1519    s->base.is_jmp = DISAS_NORETURN;
1520}
1521
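    /*
     * Scc <EA>: set the destination byte to 0xff if the condition holds,
     * otherwise to 0x00.
     */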
1522DISAS_INSN(scc)
1523{
1524    DisasCompare c;
1525    int cond;
1526    TCGv tmp;
1527
1528    cond = (insn >> 8) & 0xf;
1529    gen_cc_cond(&c, s, cond);
1530
1531    tmp = tcg_temp_new();
1532    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1533    free_cond(&c);
1534
1535    tcg_gen_neg_i32(tmp, tmp);
1536    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1537    tcg_temp_free(tmp);
1538}
1539
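    /*
     * DBcc Dn,<disp>: if the condition holds, fall through; otherwise
     * decrement Dn.w and take the branch unless the counter reaches -1.
     */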
1540DISAS_INSN(dbcc)
1541{
1542    TCGLabel *l1;
1543    TCGv reg;
1544    TCGv tmp;
1545    int16_t offset;
1546    uint32_t base;
1547
1548    reg = DREG(insn, 0);
1549    base = s->pc;
1550    offset = (int16_t)read_im16(env, s);
1551    l1 = gen_new_label();
1552    gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1553
1554    tmp = tcg_temp_new();
1555    tcg_gen_ext16s_i32(tmp, reg);
1556    tcg_gen_addi_i32(tmp, tmp, -1);
1557    gen_partset_reg(OS_WORD, reg, tmp);
1558    tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1559    gen_jmp_tb(s, 1, base + offset);
1560    gen_set_label(l1);
1561    gen_jmp_tb(s, 0, s->pc);
1562}
1563
1564DISAS_INSN(undef_mac)
1565{
1566    gen_exception(s, s->base.pc_next, EXCP_LINEA);
1567}
1568
1569DISAS_INSN(undef_fpu)
1570{
1571    gen_exception(s, s->base.pc_next, EXCP_LINEF);
1572}
1573
1574DISAS_INSN(undef)
1575{
1576    /*
1577     * ??? This covers both instructions that are as yet unimplemented
1578     * for the 680x0 series and those that are implemented but actually
1579     * illegal for CPU32 or pre-68020.
1580     */
1581    qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1582                  insn, s->base.pc_next);
1583    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1584}
1585
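    /*
     * MULU.W/MULS.W <EA>,Dn: 16 x 16 -> 32 multiply; bit 8 of the insn
     * selects the signed form.
     */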
1586DISAS_INSN(mulw)
1587{
1588    TCGv reg;
1589    TCGv tmp;
1590    TCGv src;
1591    int sign;
1592
1593    sign = (insn & 0x100) != 0;
1594    reg = DREG(insn, 9);
1595    tmp = tcg_temp_new();
1596    if (sign)
1597        tcg_gen_ext16s_i32(tmp, reg);
1598    else
1599        tcg_gen_ext16u_i32(tmp, reg);
1600    SRC_EA(env, src, OS_WORD, sign, NULL);
1601    tcg_gen_mul_i32(tmp, tmp, src);
1602    tcg_gen_mov_i32(reg, tmp);
1603    gen_logic_cc(s, tmp, OS_LONG);
1604    tcg_temp_free(tmp);
1605}
1606
1607DISAS_INSN(divw)
1608{
1609    int sign;
1610    TCGv src;
1611    TCGv destr;
1612
1613    /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1614
1615    sign = (insn & 0x100) != 0;
1616
1617    /* dest.l / src.w */
1618
1619    SRC_EA(env, src, OS_WORD, sign, NULL);
1620    destr = tcg_const_i32(REG(insn, 9));
1621    if (sign) {
1622        gen_helper_divsw(cpu_env, destr, src);
1623    } else {
1624        gen_helper_divuw(cpu_env, destr, src);
1625    }
1626    tcg_temp_free(destr);
1627
1628    set_cc_op(s, CC_OP_FLAGS);
1629}
1630
1631DISAS_INSN(divl)
1632{
1633    TCGv num, reg, den;
1634    int sign;
1635    uint16_t ext;
1636
1637    ext = read_im16(env, s);
1638
1639    sign = (ext & 0x0800) != 0;
1640
1641    if (ext & 0x400) {
1642        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1643            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1644            return;
1645        }
1646
1647        /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1648
1649        SRC_EA(env, den, OS_LONG, 0, NULL);
1650        num = tcg_const_i32(REG(ext, 12));
1651        reg = tcg_const_i32(REG(ext, 0));
1652        if (sign) {
1653            gen_helper_divsll(cpu_env, num, reg, den);
1654        } else {
1655            gen_helper_divull(cpu_env, num, reg, den);
1656        }
1657        tcg_temp_free(reg);
1658        tcg_temp_free(num);
1659        set_cc_op(s, CC_OP_FLAGS);
1660        return;
1661    }
1662
1663    /* divX.l <EA>, Dq        32/32 -> 32q     */
1664    /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1665
1666    SRC_EA(env, den, OS_LONG, 0, NULL);
1667    num = tcg_const_i32(REG(ext, 12));
1668    reg = tcg_const_i32(REG(ext, 0));
1669    if (sign) {
1670        gen_helper_divsl(cpu_env, num, reg, den);
1671    } else {
1672        gen_helper_divul(cpu_env, num, reg, den);
1673    }
1674    tcg_temp_free(reg);
1675    tcg_temp_free(num);
1676
1677    set_cc_op(s, CC_OP_FLAGS);
1678}
1679
1680static void bcd_add(TCGv dest, TCGv src)
1681{
1682    TCGv t0, t1;
1683
1684    /*
1685     * dest10 = dest10 + src10 + X
1686     *
1687     *        t1 = src
1688     *        t2 = t1 + 0x066
1689     *        t3 = t2 + dest + X
1690     *        t4 = t2 ^ dest
1691     *        t5 = t3 ^ t4
1692     *        t6 = ~t5 & 0x110
1693     *        t7 = (t6 >> 2) | (t6 >> 3)
1694     *        return t3 - t7
1695     */
1696
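        /*
         * Worked example (X = 0): dest = 0x19, src = 0x27.
         *        t2 = 0x27 + 0x066                = 0x08d
         *        t3 = 0x08d + 0x19 + 0            = 0x0a6
         *        t4 = 0x08d ^ 0x19                = 0x094
         *        t5 = 0x0a6 ^ 0x094               = 0x032  (only the low digit carried)
         *        t6 = ~0x032 & 0x110              = 0x100
         *        t7 = (0x100 >> 2) | (0x100 >> 3) = 0x060
         *        t3 - t7                          = 0x046, i.e. BCD 19 + 27 = 46
         */
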
1697    /*
1698     * t1 = (src + 0x066) + dest + X
1699     *    = result with a possible excess 0x6 in each digit
1700     */
1701
1702    t0 = tcg_const_i32(0x066);
1703    tcg_gen_add_i32(t0, t0, src);
1704
1705    t1 = tcg_temp_new();
1706    tcg_gen_add_i32(t1, t0, dest);
1707    tcg_gen_add_i32(t1, t1, QREG_CC_X);
1708
1709    /* we will remove the excess 0x6 where there is no carry */
1710
1711    /*
1712     * t0 = (src + 0x0066) ^ dest
1713     *    = t1 without carries
1714     */
1715
1716    tcg_gen_xor_i32(t0, t0, dest);
1717
1718    /*
1719     * extract the carries
1720     * t0 = t0 ^ t1
1721     *    = only the carries
1722     */
1723
1724    tcg_gen_xor_i32(t0, t0, t1);
1725
1726    /*
1727     * generate 0x1 where there is no carry
1728     * and for each 0x10, generate a 0x6
1729     */
1730
1731    tcg_gen_shri_i32(t0, t0, 3);
1732    tcg_gen_not_i32(t0, t0);
1733    tcg_gen_andi_i32(t0, t0, 0x22);
1734    tcg_gen_add_i32(dest, t0, t0);
1735    tcg_gen_add_i32(dest, dest, t0);
1736    tcg_temp_free(t0);
1737
1738    /*
1739     * remove the excess 0x6
1740     * from digits that have not generated a carry
1741     */
1742
1743    tcg_gen_sub_i32(dest, t1, dest);
1744    tcg_temp_free(t1);
1745}
1746
1747static void bcd_sub(TCGv dest, TCGv src)
1748{
1749    TCGv t0, t1, t2;
1750
1751    /*
1752     *  dest10 = dest10 - src10 - X
1753     *         = bcd_add(dest + 1 - X, 0x199 - src)
1754     */
1755
1756    /* t0 = 0x066 + (0x199 - src) */
1757
1758    t0 = tcg_temp_new();
1759    tcg_gen_subfi_i32(t0, 0x1ff, src);
1760
1761    /* t1 = t0 + dest + 1 - X */
1762
1763    t1 = tcg_temp_new();
1764    tcg_gen_add_i32(t1, t0, dest);
1765    tcg_gen_addi_i32(t1, t1, 1);
1766    tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1767
1768    /* t2 = t0 ^ dest */
1769
1770    t2 = tcg_temp_new();
1771    tcg_gen_xor_i32(t2, t0, dest);
1772
1773    /* t0 = t1 ^ t2 */
1774
1775    tcg_gen_xor_i32(t0, t1, t2);
1776
1777    /*
1778     * t2 = ~t0 & 0x110
1779     * t0 = (t2 >> 2) | (t2 >> 3)
1780     *
1781     * to fit on 8bit operands, changed in:
1782     * to fit 8-bit operands, this is rewritten as:
1783     * t2 = ~(t0 >> 3) & 0x22
1784     * t0 = t2 + t2
1785     * t0 = t0 + t2
1786     */
1787
1788    tcg_gen_shri_i32(t2, t0, 3);
1789    tcg_gen_not_i32(t2, t2);
1790    tcg_gen_andi_i32(t2, t2, 0x22);
1791    tcg_gen_add_i32(t0, t2, t2);
1792    tcg_gen_add_i32(t0, t0, t2);
1793    tcg_temp_free(t2);
1794
1795    /* return t1 - t0 */
1796
1797    tcg_gen_sub_i32(dest, t1, t0);
1798    tcg_temp_free(t0);
1799    tcg_temp_free(t1);
1800}
1801
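/*
 * Set the flags from the 9-bit BCD result in VAL: C and X come from
 * bit 8, while Z is only ever cleared (never set), so that !Z stays
 * sticky across a chain of BCD operations.  N and V are left alone.
 */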
1802static void bcd_flags(TCGv val)
1803{
1804    tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1805    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1806
1807    tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1808
1809    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1810}
1811
1812DISAS_INSN(abcd_reg)
1813{
1814    TCGv src;
1815    TCGv dest;
1816
1817    gen_flush_flags(s); /* !Z is sticky */
1818
1819    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1820    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1821    bcd_add(dest, src);
1822    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1823
1824    bcd_flags(dest);
1825}
1826
1827DISAS_INSN(abcd_mem)
1828{
1829    TCGv src, dest, addr;
1830
1831    gen_flush_flags(s); /* !Z is sticky */
1832
1833    /* Indirect pre-decrement load (mode 4) */
1834
1835    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1836                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1837    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1838                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1839
1840    bcd_add(dest, src);
1841
1842    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1843                EA_STORE, IS_USER(s));
1844
1845    bcd_flags(dest);
1846}
1847
1848DISAS_INSN(sbcd_reg)
1849{
1850    TCGv src, dest;
1851
1852    gen_flush_flags(s); /* !Z is sticky */
1853
1854    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1855    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1856
1857    bcd_sub(dest, src);
1858
1859    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1860
1861    bcd_flags(dest);
1862}
1863
1864DISAS_INSN(sbcd_mem)
1865{
1866    TCGv src, dest, addr;
1867
1868    gen_flush_flags(s); /* !Z is sticky */
1869
1870    /* Indirect pre-decrement load (mode 4) */
1871
1872    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1873                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1874    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1875                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1876
1877    bcd_sub(dest, src);
1878
1879    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1880                EA_STORE, IS_USER(s));
1881
1882    bcd_flags(dest);
1883}
1884
1885DISAS_INSN(nbcd)
1886{
1887    TCGv src, dest;
1888    TCGv addr;
1889
1890    gen_flush_flags(s); /* !Z is sticky */
1891
1892    SRC_EA(env, src, OS_BYTE, 0, &addr);
1893
1894    dest = tcg_const_i32(0);
1895    bcd_sub(dest, src);
1896
1897    DEST_EA(env, insn, OS_BYTE, dest, &addr);
1898
1899    bcd_flags(dest);
1900
1901    tcg_temp_free(dest);
1902}
1903
1904DISAS_INSN(addsub)
1905{
1906    TCGv reg;
1907    TCGv dest;
1908    TCGv src;
1909    TCGv tmp;
1910    TCGv addr;
1911    int add;
1912    int opsize;
1913
1914    add = (insn & 0x4000) != 0;
1915    opsize = insn_opsize(insn);
1916    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1917    dest = tcg_temp_new();
1918    if (insn & 0x100) {
1919        SRC_EA(env, tmp, opsize, 1, &addr);
1920        src = reg;
1921    } else {
1922        tmp = reg;
1923        SRC_EA(env, src, opsize, 1, NULL);
1924    }
1925    if (add) {
1926        tcg_gen_add_i32(dest, tmp, src);
1927        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1928        set_cc_op(s, CC_OP_ADDB + opsize);
1929    } else {
1930        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1931        tcg_gen_sub_i32(dest, tmp, src);
1932        set_cc_op(s, CC_OP_SUBB + opsize);
1933    }
1934    gen_update_cc_add(dest, src, opsize);
1935    if (insn & 0x100) {
1936        DEST_EA(env, insn, opsize, dest, &addr);
1937    } else {
1938        gen_partset_reg(opsize, DREG(insn, 9), dest);
1939    }
1940    tcg_temp_free(dest);
1941}
1942
1943/* Reverse the order of the bits in REG.  */
1944DISAS_INSN(bitrev)
1945{
1946    TCGv reg;
1947    reg = DREG(insn, 0);
1948    gen_helper_bitrev(reg, reg);
1949}
1950
1951DISAS_INSN(bitop_reg)
1952{
1953    int opsize;
1954    int op;
1955    TCGv src1;
1956    TCGv src2;
1957    TCGv tmp;
1958    TCGv addr;
1959    TCGv dest;
1960
1961    if ((insn & 0x38) != 0)
1962        opsize = OS_BYTE;
1963    else
1964        opsize = OS_LONG;
1965    op = (insn >> 6) & 3;
1966    SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
1967
1968    gen_flush_flags(s);
1969    src2 = tcg_temp_new();
1970    if (opsize == OS_BYTE)
1971        tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1972    else
1973        tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1974
1975    tmp = tcg_const_i32(1);
1976    tcg_gen_shl_i32(tmp, tmp, src2);
1977    tcg_temp_free(src2);
1978
1979    tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1980
1981    dest = tcg_temp_new();
1982    switch (op) {
1983    case 1: /* bchg */
1984        tcg_gen_xor_i32(dest, src1, tmp);
1985        break;
1986    case 2: /* bclr */
1987        tcg_gen_andc_i32(dest, src1, tmp);
1988        break;
1989    case 3: /* bset */
1990        tcg_gen_or_i32(dest, src1, tmp);
1991        break;
1992    default: /* btst */
1993        break;
1994    }
1995    tcg_temp_free(tmp);
1996    if (op) {
1997        DEST_EA(env, insn, opsize, dest, &addr);
1998    }
1999    tcg_temp_free(dest);
2000}
2001
2002DISAS_INSN(sats)
2003{
2004    TCGv reg;
2005    reg = DREG(insn, 0);
2006    gen_flush_flags(s);
2007    gen_helper_sats(reg, reg, QREG_CC_V);
2008    gen_logic_cc(s, reg, OS_LONG);
2009}
2010
2011static void gen_push(DisasContext *s, TCGv val)
2012{
2013    TCGv tmp;
2014
2015    tmp = tcg_temp_new();
2016    tcg_gen_subi_i32(tmp, QREG_SP, 4);
2017    gen_store(s, OS_LONG, tmp, val, IS_USER(s));
2018    tcg_gen_mov_i32(QREG_SP, tmp);
2019    tcg_temp_free(tmp);
2020}
2021
2022static TCGv mreg(int reg)
2023{
2024    if (reg < 8) {
2025        /* Dx */
2026        return cpu_dregs[reg];
2027    }
2028    /* Ax */
2029    return cpu_aregs[reg & 7];
2030}
2031
2032DISAS_INSN(movem)
2033{
2034    TCGv addr, incr, tmp, r[16];
2035    int is_load = (insn & 0x0400) != 0;
2036    int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
2037    uint16_t mask = read_im16(env, s);
2038    int mode = extract32(insn, 3, 3);
2039    int reg0 = REG(insn, 0);
2040    int i;
2041
2042    tmp = cpu_aregs[reg0];
2043
2044    switch (mode) {
2045    case 0: /* data register direct */
2046    case 1: /* addr register direct */
2047    do_addr_fault:
2048        gen_addr_fault(s);
2049        return;
2050
2051    case 2: /* indirect */
2052        break;
2053
2054    case 3: /* indirect post-increment */
2055        if (!is_load) {
2056            /* post-increment is not allowed */
2057            goto do_addr_fault;
2058        }
2059        break;
2060
2061    case 4: /* indirect pre-decrement */
2062        if (is_load) {
2063            /* pre-decrement is not allowed */
2064            goto do_addr_fault;
2065        }
2066        /*
2067         * We want a bare copy of the address reg, without any pre-decrement
2068         * adjustment, as gen_lea would provide.
2069         */
2070        break;
2071
2072    default:
2073        tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2074        if (IS_NULL_QREG(tmp)) {
2075            goto do_addr_fault;
2076        }
2077        break;
2078    }
2079
2080    addr = tcg_temp_new();
2081    tcg_gen_mov_i32(addr, tmp);
2082    incr = tcg_const_i32(opsize_bytes(opsize));
2083
2084    if (is_load) {
2085        /* memory to register */
2086        for (i = 0; i < 16; i++) {
2087            if (mask & (1 << i)) {
2088                r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2089                tcg_gen_add_i32(addr, addr, incr);
2090            }
2091        }
2092        for (i = 0; i < 16; i++) {
2093            if (mask & (1 << i)) {
2094                tcg_gen_mov_i32(mreg(i), r[i]);
2095                tcg_temp_free(r[i]);
2096            }
2097        }
2098        if (mode == 3) {
2099            /* post-increment: movem (An)+,X */
2100            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2101        }
2102    } else {
2103        /* register to memory */
2104        if (mode == 4) {
2105            /* pre-decrement: movem X,-(An) */
2106            for (i = 15; i >= 0; i--) {
2107                if ((mask << i) & 0x8000) {
2108                    tcg_gen_sub_i32(addr, addr, incr);
2109                    if (reg0 + 8 == i &&
2110                        m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2111                        /*
2112                         * M68020+: if the addressing register is the
2113                         * register moved to memory, the value written
2114                         * is the initial value decremented by the size of
2115                         * the operation, regardless of how many actual
2116                         * stores have been performed until this point.
2117                         * M68000/M68010: the value is the initial value.
2118                         */
2119                        tmp = tcg_temp_new();
2120                        tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2121                        gen_store(s, opsize, addr, tmp, IS_USER(s));
2122                        tcg_temp_free(tmp);
2123                    } else {
2124                        gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2125                    }
2126                }
2127            }
2128            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2129        } else {
2130            for (i = 0; i < 16; i++) {
2131                if (mask & (1 << i)) {
2132                    gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2133                    tcg_gen_add_i32(addr, addr, incr);
2134                }
2135            }
2136        }
2137    }
2138
2139    tcg_temp_free(incr);
2140    tcg_temp_free(addr);
2141}
2142
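/*
 * MOVEP transfers a word or long word between a data register and
 * alternate bytes of memory (every other byte, most significant byte
 * first); it exists to drive 8-bit peripherals on a 16-bit bus.
 */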
2143DISAS_INSN(movep)
2144{
2145    uint8_t i;
2146    int16_t displ;
2147    TCGv reg;
2148    TCGv addr;
2149    TCGv abuf;
2150    TCGv dbuf;
2151
2152    displ = read_im16(env, s);
2153
2154    addr = AREG(insn, 0);
2155    reg = DREG(insn, 9);
2156
2157    abuf = tcg_temp_new();
2158    tcg_gen_addi_i32(abuf, addr, displ);
2159    dbuf = tcg_temp_new();
2160
2161    if (insn & 0x40) {
2162        i = 4;
2163    } else {
2164        i = 2;
2165    }
2166
2167    if (insn & 0x80) {
2168        for ( ; i > 0 ; i--) {
2169            tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2170            tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2171            if (i > 1) {
2172                tcg_gen_addi_i32(abuf, abuf, 2);
2173            }
2174        }
2175    } else {
2176        for ( ; i > 0 ; i--) {
2177            tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2178            tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2179            if (i > 1) {
2180                tcg_gen_addi_i32(abuf, abuf, 2);
2181            }
2182        }
2183    }
2184    tcg_temp_free(abuf);
2185    tcg_temp_free(dbuf);
2186}
2187
2188DISAS_INSN(bitop_im)
2189{
2190    int opsize;
2191    int op;
2192    TCGv src1;
2193    uint32_t mask;
2194    int bitnum;
2195    TCGv tmp;
2196    TCGv addr;
2197
2198    if ((insn & 0x38) != 0)
2199        opsize = OS_BYTE;
2200    else
2201        opsize = OS_LONG;
2202    op = (insn >> 6) & 3;
2203
2204    bitnum = read_im16(env, s);
2205    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2206        if (bitnum & 0xfe00) {
2207            disas_undef(env, s, insn);
2208            return;
2209        }
2210    } else {
2211        if (bitnum & 0xff00) {
2212            disas_undef(env, s, insn);
2213            return;
2214        }
2215    }
2216
2217    SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
2218
2219    gen_flush_flags(s);
2220    if (opsize == OS_BYTE)
2221        bitnum &= 7;
2222    else
2223        bitnum &= 31;
2224    mask = 1 << bitnum;
2225
2226    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2227
2228    if (op) {
2229        tmp = tcg_temp_new();
2230        switch (op) {
2231        case 1: /* bchg */
2232            tcg_gen_xori_i32(tmp, src1, mask);
2233            break;
2234        case 2: /* bclr */
2235            tcg_gen_andi_i32(tmp, src1, ~mask);
2236            break;
2237        case 3: /* bset */
2238            tcg_gen_ori_i32(tmp, src1, mask);
2239            break;
2240        default: /* btst */
2241            break;
2242        }
2243        DEST_EA(env, insn, opsize, tmp, &addr);
2244        tcg_temp_free(tmp);
2245    }
2246}
2247
2248static TCGv gen_get_ccr(DisasContext *s)
2249{
2250    TCGv dest;
2251
2252    update_cc_op(s);
2253    dest = tcg_temp_new();
2254    gen_helper_get_ccr(dest, cpu_env);
2255    return dest;
2256}
2257
2258static TCGv gen_get_sr(DisasContext *s)
2259{
2260    TCGv ccr;
2261    TCGv sr;
2262
2263    ccr = gen_get_ccr(s);
2264    sr = tcg_temp_new();
2265    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2266    tcg_gen_or_i32(sr, sr, ccr);
2267    tcg_temp_free(ccr);
2268    return sr;
2269}
2270
2271static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2272{
2273    if (ccr_only) {
2274        tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2275        tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2276        tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2277        tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2278        tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2279    } else {
2280        TCGv sr = tcg_const_i32(val);
2281        gen_helper_set_sr(cpu_env, sr);
2282        tcg_temp_free(sr);
2283    }
2284    set_cc_op(s, CC_OP_FLAGS);
2285}
2286
2287static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2288{
2289    if (ccr_only) {
2290        gen_helper_set_ccr(cpu_env, val);
2291    } else {
2292        gen_helper_set_sr(cpu_env, val);
2293    }
2294    set_cc_op(s, CC_OP_FLAGS);
2295}
2296
2297static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2298                           bool ccr_only)
2299{
2300    if ((insn & 0x3f) == 0x3c) {
2301        uint16_t val;
2302        val = read_im16(env, s);
2303        gen_set_sr_im(s, val, ccr_only);
2304    } else {
2305        TCGv src;
2306        SRC_EA(env, src, OS_WORD, 0, NULL);
2307        gen_set_sr(s, src, ccr_only);
2308    }
2309}
2310
2311DISAS_INSN(arith_im)
2312{
2313    int op;
2314    TCGv im;
2315    TCGv src1;
2316    TCGv dest;
2317    TCGv addr;
2318    int opsize;
2319    bool with_SR = ((insn & 0x3f) == 0x3c);
2320
2321    op = (insn >> 9) & 7;
2322    opsize = insn_opsize(insn);
2323    switch (opsize) {
2324    case OS_BYTE:
2325        im = tcg_const_i32((int8_t)read_im8(env, s));
2326        break;
2327    case OS_WORD:
2328        im = tcg_const_i32((int16_t)read_im16(env, s));
2329        break;
2330    case OS_LONG:
2331        im = tcg_const_i32(read_im32(env, s));
2332        break;
2333    default:
2334        g_assert_not_reached();
2335    }
2336
2337    if (with_SR) {
2338        /* SR/CCR can only be used with andi/eori/ori */
2339        if (op == 2 || op == 3 || op == 6) {
2340            disas_undef(env, s, insn);
2341            return;
2342        }
2343        switch (opsize) {
2344        case OS_BYTE:
2345            src1 = gen_get_ccr(s);
2346            break;
2347        case OS_WORD:
2348            if (IS_USER(s)) {
2349                gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2350                return;
2351            }
2352            src1 = gen_get_sr(s);
2353            break;
2354        default:
2355            /* OS_LONG; others already g_assert_not_reached.  */
2356            disas_undef(env, s, insn);
2357            return;
2358        }
2359    } else {
2360        SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2361    }
2362    dest = tcg_temp_new();
2363    switch (op) {
2364    case 0: /* ori */
2365        tcg_gen_or_i32(dest, src1, im);
2366        if (with_SR) {
2367            gen_set_sr(s, dest, opsize == OS_BYTE);
2368        } else {
2369            DEST_EA(env, insn, opsize, dest, &addr);
2370            gen_logic_cc(s, dest, opsize);
2371        }
2372        break;
2373    case 1: /* andi */
2374        tcg_gen_and_i32(dest, src1, im);
2375        if (with_SR) {
2376            gen_set_sr(s, dest, opsize == OS_BYTE);
2377        } else {
2378            DEST_EA(env, insn, opsize, dest, &addr);
2379            gen_logic_cc(s, dest, opsize);
2380        }
2381        break;
2382    case 2: /* subi */
2383        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2384        tcg_gen_sub_i32(dest, src1, im);
2385        gen_update_cc_add(dest, im, opsize);
2386        set_cc_op(s, CC_OP_SUBB + opsize);
2387        DEST_EA(env, insn, opsize, dest, &addr);
2388        break;
2389    case 3: /* addi */
2390        tcg_gen_add_i32(dest, src1, im);
2391        gen_update_cc_add(dest, im, opsize);
2392        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2393        set_cc_op(s, CC_OP_ADDB + opsize);
2394        DEST_EA(env, insn, opsize, dest, &addr);
2395        break;
2396    case 5: /* eori */
2397        tcg_gen_xor_i32(dest, src1, im);
2398        if (with_SR) {
2399            gen_set_sr(s, dest, opsize == OS_BYTE);
2400        } else {
2401            DEST_EA(env, insn, opsize, dest, &addr);
2402            gen_logic_cc(s, dest, opsize);
2403        }
2404        break;
2405    case 6: /* cmpi */
2406        gen_update_cc_cmp(s, src1, im, opsize);
2407        break;
2408    default:
2409        abort();
2410    }
2411    tcg_temp_free(im);
2412    tcg_temp_free(dest);
2413}
2414
2415DISAS_INSN(cas)
2416{
2417    int opsize;
2418    TCGv addr;
2419    uint16_t ext;
2420    TCGv load;
2421    TCGv cmp;
2422    MemOp opc;
2423
2424    switch ((insn >> 9) & 3) {
2425    case 1:
2426        opsize = OS_BYTE;
2427        opc = MO_SB;
2428        break;
2429    case 2:
2430        opsize = OS_WORD;
2431        opc = MO_TESW;
2432        break;
2433    case 3:
2434        opsize = OS_LONG;
2435        opc = MO_TESL;
2436        break;
2437    default:
2438        g_assert_not_reached();
2439    }
2440
2441    ext = read_im16(env, s);
2442
2443    /* cas Dc,Du,<EA> */
2444
2445    addr = gen_lea(env, s, insn, opsize);
2446    if (IS_NULL_QREG(addr)) {
2447        gen_addr_fault(s);
2448        return;
2449    }
2450
2451    cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2452
2453    /*
2454     * if  <EA> == Dc then
2455     *     <EA> = Du
2456     *     Dc = <EA> (because <EA> == Dc)
2457     * else
2458     *     Dc = <EA>
2459     */
2460
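    /*
     * The atomic cmpxchg returns the old memory value, so copying it
     * into Dc below implements both branches of the pseudo-code above.
     */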
2461    load = tcg_temp_new();
2462    tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2463                               IS_USER(s), opc);
2464    /* update flags before setting cmp to load */
2465    gen_update_cc_cmp(s, load, cmp, opsize);
2466    gen_partset_reg(opsize, DREG(ext, 0), load);
2467
2468    tcg_temp_free(load);
2469
2470    switch (extract32(insn, 3, 3)) {
2471    case 3: /* Indirect postincrement.  */
2472        tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2473        break;
2474    case 4: /* Indirect predecrement.  */
2475        tcg_gen_mov_i32(AREG(insn, 0), addr);
2476        break;
2477    }
2478}
2479
2480DISAS_INSN(cas2w)
2481{
2482    uint16_t ext1, ext2;
2483    TCGv addr1, addr2;
2484    TCGv regs;
2485
2486    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2487
2488    ext1 = read_im16(env, s);
2489
2490    if (ext1 & 0x8000) {
2491        /* Address Register */
2492        addr1 = AREG(ext1, 12);
2493    } else {
2494        /* Data Register */
2495        addr1 = DREG(ext1, 12);
2496    }
2497
2498    ext2 = read_im16(env, s);
2499    if (ext2 & 0x8000) {
2500        /* Address Register */
2501        addr2 = AREG(ext2, 12);
2502    } else {
2503        /* Data Register */
2504        addr2 = DREG(ext2, 12);
2505    }
2506
2507    /*
2508     * if (R1) == Dc1 && (R2) == Dc2 then
2509     *     (R1) = Du1
2510     *     (R2) = Du2
2511     * else
2512     *     Dc1 = (R1)
2513     *     Dc2 = (R2)
2514     */
2515
2516    regs = tcg_const_i32(REG(ext2, 6) |
2517                         (REG(ext1, 6) << 3) |
2518                         (REG(ext2, 0) << 6) |
2519                         (REG(ext1, 0) << 9));
2520    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2521        gen_helper_exit_atomic(cpu_env);
2522    } else {
2523        gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2524    }
2525    tcg_temp_free(regs);
2526
2527    /* Note that cas2w also assigns to env->cc_op.  */
2528    s->cc_op = CC_OP_CMPW;
2529    s->cc_op_synced = 1;
2530}
2531
2532DISAS_INSN(cas2l)
2533{
2534    uint16_t ext1, ext2;
2535    TCGv addr1, addr2, regs;
2536
2537    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2538
2539    ext1 = read_im16(env, s);
2540
2541    if (ext1 & 0x8000) {
2542        /* Address Register */
2543        addr1 = AREG(ext1, 12);
2544    } else {
2545        /* Data Register */
2546        addr1 = DREG(ext1, 12);
2547    }
2548
2549    ext2 = read_im16(env, s);
2550    if (ext2 & 0x8000) {
2551        /* Address Register */
2552        addr2 = AREG(ext2, 12);
2553    } else {
2554        /* Data Register */
2555        addr2 = DREG(ext2, 12);
2556    }
2557
2558    /*
2559     * if (R1) == Dc1 && (R2) == Dc2 then
2560     *     (R1) = Du1
2561     *     (R2) = Du2
2562     * else
2563     *     Dc1 = (R1)
2564     *     Dc2 = (R2)
2565     */
2566
2567    regs = tcg_const_i32(REG(ext2, 6) |
2568                         (REG(ext1, 6) << 3) |
2569                         (REG(ext2, 0) << 6) |
2570                         (REG(ext1, 0) << 9));
2571    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2572        gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2573    } else {
2574        gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2575    }
2576    tcg_temp_free(regs);
2577
2578    /* Note that cas2l also assigns to env->cc_op.  */
2579    s->cc_op = CC_OP_CMPL;
2580    s->cc_op_synced = 1;
2581}
2582
2583DISAS_INSN(byterev)
2584{
2585    TCGv reg;
2586
2587    reg = DREG(insn, 0);
2588    tcg_gen_bswap32_i32(reg, reg);
2589}
2590
2591DISAS_INSN(move)
2592{
2593    TCGv src;
2594    TCGv dest;
2595    int op;
2596    int opsize;
2597
2598    switch (insn >> 12) {
2599    case 1: /* move.b */
2600        opsize = OS_BYTE;
2601        break;
2602    case 2: /* move.l */
2603        opsize = OS_LONG;
2604        break;
2605    case 3: /* move.w */
2606        opsize = OS_WORD;
2607        break;
2608    default:
2609        abort();
2610    }
2611    SRC_EA(env, src, opsize, 1, NULL);
2612    op = (insn >> 6) & 7;
2613    if (op == 1) {
2614        /* movea */
2615        /* The value will already have been sign extended.  */
2616        dest = AREG(insn, 9);
2617        tcg_gen_mov_i32(dest, src);
2618    } else {
2619        /* normal move */
2620        uint16_t dest_ea;
2621        dest_ea = ((insn >> 9) & 7) | (op << 3);
2622        DEST_EA(env, dest_ea, opsize, src, NULL);
2623        /* This will be correct because loads sign extend.  */
2624        gen_logic_cc(s, src, opsize);
2625    }
2626}
2627
2628DISAS_INSN(negx)
2629{
2630    TCGv z;
2631    TCGv src;
2632    TCGv addr;
2633    int opsize;
2634
2635    opsize = insn_opsize(insn);
2636    SRC_EA(env, src, opsize, 1, &addr);
2637
2638    gen_flush_flags(s); /* compute old Z */
2639
2640    /*
2641     * Perform subtract with borrow.
2642     * (X, N) = -(src + X);
2643     */
2644
2645    z = tcg_const_i32(0);
2646    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2647    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2648    tcg_temp_free(z);
2649    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2650
2651    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2652
2653    /*
2654     * Compute signed-overflow for negation.  The normal formula for
2655     * subtraction is (res ^ src) & (src ^ dest), but with dest==0
2656     * this simplifies to res & src.
2657     */
2658
2659    tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2660
2661    /* Copy the rest of the results into place.  */
2662    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2663    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2664
2665    set_cc_op(s, CC_OP_FLAGS);
2666
2667    /* result is in QREG_CC_N */
2668
2669    DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2670}
2671
2672DISAS_INSN(lea)
2673{
2674    TCGv reg;
2675    TCGv tmp;
2676
2677    reg = AREG(insn, 9);
2678    tmp = gen_lea(env, s, insn, OS_LONG);
2679    if (IS_NULL_QREG(tmp)) {
2680        gen_addr_fault(s);
2681        return;
2682    }
2683    tcg_gen_mov_i32(reg, tmp);
2684}
2685
2686DISAS_INSN(clr)
2687{
2688    int opsize;
2689    TCGv zero;
2690
2691    zero = tcg_const_i32(0);
2692
2693    opsize = insn_opsize(insn);
2694    DEST_EA(env, insn, opsize, zero, NULL);
2695    gen_logic_cc(s, zero, opsize);
2696    tcg_temp_free(zero);
2697}
2698
2699DISAS_INSN(move_from_ccr)
2700{
2701    TCGv ccr;
2702
2703    ccr = gen_get_ccr(s);
2704    DEST_EA(env, insn, OS_WORD, ccr, NULL);
2705}
2706
2707DISAS_INSN(neg)
2708{
2709    TCGv src1;
2710    TCGv dest;
2711    TCGv addr;
2712    int opsize;
2713
2714    opsize = insn_opsize(insn);
2715    SRC_EA(env, src1, opsize, 1, &addr);
2716    dest = tcg_temp_new();
2717    tcg_gen_neg_i32(dest, src1);
2718    set_cc_op(s, CC_OP_SUBB + opsize);
2719    gen_update_cc_add(dest, src1, opsize);
2720    tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2721    DEST_EA(env, insn, opsize, dest, &addr);
2722    tcg_temp_free(dest);
2723}
2724
2725DISAS_INSN(move_to_ccr)
2726{
2727    gen_move_to_sr(env, s, insn, true);
2728}
2729
2730DISAS_INSN(not)
2731{
2732    TCGv src1;
2733    TCGv dest;
2734    TCGv addr;
2735    int opsize;
2736
2737    opsize = insn_opsize(insn);
2738    SRC_EA(env, src1, opsize, 1, &addr);
2739    dest = tcg_temp_new();
2740    tcg_gen_not_i32(dest, src1);
2741    DEST_EA(env, insn, opsize, dest, &addr);
2742    gen_logic_cc(s, dest, opsize);
2743}
2744
2745DISAS_INSN(swap)
2746{
2747    TCGv src1;
2748    TCGv src2;
2749    TCGv reg;
2750
2751    src1 = tcg_temp_new();
2752    src2 = tcg_temp_new();
2753    reg = DREG(insn, 0);
2754    tcg_gen_shli_i32(src1, reg, 16);
2755    tcg_gen_shri_i32(src2, reg, 16);
2756    tcg_gen_or_i32(reg, src1, src2);
2757    tcg_temp_free(src2);
2758    tcg_temp_free(src1);
2759    gen_logic_cc(s, reg, OS_LONG);
2760}
2761
2762DISAS_INSN(bkpt)
2763{
2764    gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2765}
2766
2767DISAS_INSN(pea)
2768{
2769    TCGv tmp;
2770
2771    tmp = gen_lea(env, s, insn, OS_LONG);
2772    if (IS_NULL_QREG(tmp)) {
2773        gen_addr_fault(s);
2774        return;
2775    }
2776    gen_push(s, tmp);
2777}
2778
2779DISAS_INSN(ext)
2780{
2781    int op;
2782    TCGv reg;
2783    TCGv tmp;
2784
2785    reg = DREG(insn, 0);
2786    op = (insn >> 6) & 7;
2787    tmp = tcg_temp_new();
2788    if (op == 3)
2789        tcg_gen_ext16s_i32(tmp, reg);
2790    else
2791        tcg_gen_ext8s_i32(tmp, reg);
2792    if (op == 2)
2793        gen_partset_reg(OS_WORD, reg, tmp);
2794    else
2795        tcg_gen_mov_i32(reg, tmp);
2796    gen_logic_cc(s, tmp, OS_LONG);
2797    tcg_temp_free(tmp);
2798}
2799
2800DISAS_INSN(tst)
2801{
2802    int opsize;
2803    TCGv tmp;
2804
2805    opsize = insn_opsize(insn);
2806    SRC_EA(env, tmp, opsize, 1, NULL);
2807    gen_logic_cc(s, tmp, opsize);
2808}
2809
2810DISAS_INSN(pulse)
2811{
2812    /* Implemented as a NOP.  */
2813}
2814
2815DISAS_INSN(illegal)
2816{
2817    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2818}
2819
2820/* ??? This should be atomic.  */
2821DISAS_INSN(tas)
2822{
2823    TCGv dest;
2824    TCGv src1;
2825    TCGv addr;
2826
2827    dest = tcg_temp_new();
2828    SRC_EA(env, src1, OS_BYTE, 1, &addr);
2829    gen_logic_cc(s, src1, OS_BYTE);
2830    tcg_gen_ori_i32(dest, src1, 0x80);
2831    DEST_EA(env, insn, OS_BYTE, dest, &addr);
2832    tcg_temp_free(dest);
2833}
2834
2835DISAS_INSN(mull)
2836{
2837    uint16_t ext;
2838    TCGv src1;
2839    int sign;
2840
2841    ext = read_im16(env, s);
2842
2843    sign = ext & 0x800;
2844
2845    if (ext & 0x400) {
2846        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2847            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2848            return;
2849        }
2850
2851        SRC_EA(env, src1, OS_LONG, 0, NULL);
2852
2853        if (sign) {
2854            tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2855        } else {
2856            tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2857        }
2858        /* if Dl == Dh, 68040 returns low word */
2859        tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2860        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2861        tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2862
2863        tcg_gen_movi_i32(QREG_CC_V, 0);
2864        tcg_gen_movi_i32(QREG_CC_C, 0);
2865
2866        set_cc_op(s, CC_OP_FLAGS);
2867        return;
2868    }
2869    SRC_EA(env, src1, OS_LONG, 0, NULL);
2870    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2871        tcg_gen_movi_i32(QREG_CC_C, 0);
2872        if (sign) {
2873            tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2874            /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2875            tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2876            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2877        } else {
2878            tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2879            /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2880            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2881        }
2882        tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2883        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2884
2885        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2886
2887        set_cc_op(s, CC_OP_FLAGS);
2888    } else {
2889        /*
2890         * The upper 32 bits of the product are discarded, so
2891         * muls.l and mulu.l are functionally equivalent.
2892         */
2893        tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2894        gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2895    }
2896}
2897
2898static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2899{
2900    TCGv reg;
2901    TCGv tmp;
2902
2903    reg = AREG(insn, 0);
2904    tmp = tcg_temp_new();
2905    tcg_gen_subi_i32(tmp, QREG_SP, 4);
2906    gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2907    if ((insn & 7) != 7) {
2908        tcg_gen_mov_i32(reg, tmp);
2909    }
2910    tcg_gen_addi_i32(QREG_SP, tmp, offset);
2911    tcg_temp_free(tmp);
2912}
2913
2914DISAS_INSN(link)
2915{
2916    int16_t offset;
2917
2918    offset = read_im16(env, s);
2919    gen_link(s, insn, offset);
2920}
2921
2922DISAS_INSN(linkl)
2923{
2924    int32_t offset;
2925
2926    offset = read_im32(env, s);
2927    gen_link(s, insn, offset);
2928}
2929
2930DISAS_INSN(unlk)
2931{
2932    TCGv src;
2933    TCGv reg;
2934    TCGv tmp;
2935
2936    src = tcg_temp_new();
2937    reg = AREG(insn, 0);
2938    tcg_gen_mov_i32(src, reg);
2939    tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2940    tcg_gen_mov_i32(reg, tmp);
2941    tcg_gen_addi_i32(QREG_SP, src, 4);
2942    tcg_temp_free(src);
2943    tcg_temp_free(tmp);
2944}
2945
2946#if defined(CONFIG_SOFTMMU)
2947DISAS_INSN(reset)
2948{
2949    if (IS_USER(s)) {
2950        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2951        return;
2952    }
2953
2954    gen_helper_reset(cpu_env);
2955}
2956#endif
2957
2958DISAS_INSN(nop)
2959{
2960}
2961
2962DISAS_INSN(rtd)
2963{
2964    TCGv tmp;
2965    int16_t offset = read_im16(env, s);
2966
2967    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2968    tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2969    gen_jmp(s, tmp);
2970}
2971
2972DISAS_INSN(rtr)
2973{
2974    TCGv tmp;
2975    TCGv ccr;
2976    TCGv sp;
2977
2978    sp = tcg_temp_new();
2979    ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
2980    tcg_gen_addi_i32(sp, QREG_SP, 2);
2981    tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
2982    tcg_gen_addi_i32(QREG_SP, sp, 4);
2983    tcg_temp_free(sp);
2984
2985    gen_set_sr(s, ccr, true);
2986    tcg_temp_free(ccr);
2987
2988    gen_jmp(s, tmp);
2989}
2990
2991DISAS_INSN(rts)
2992{
2993    TCGv tmp;
2994
2995    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2996    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
2997    gen_jmp(s, tmp);
2998}
2999
3000DISAS_INSN(jump)
3001{
3002    TCGv tmp;
3003
3004    /*
3005     * Load the target address first to ensure correct exception
3006     * behavior.
3007     */
3008    tmp = gen_lea(env, s, insn, OS_LONG);
3009    if (IS_NULL_QREG(tmp)) {
3010        gen_addr_fault(s);
3011        return;
3012    }
3013    if ((insn & 0x40) == 0) {
3014        /* jsr */
3015        gen_push(s, tcg_const_i32(s->pc));
3016    }
3017    gen_jmp(s, tmp);
3018}
3019
3020DISAS_INSN(addsubq)
3021{
3022    TCGv src;
3023    TCGv dest;
3024    TCGv val;
3025    int imm;
3026    TCGv addr;
3027    int opsize;
3028
3029    if ((insn & 070) == 010) {
3030        /* Operation on address register is always long.  */
3031        opsize = OS_LONG;
3032    } else {
3033        opsize = insn_opsize(insn);
3034    }
3035    SRC_EA(env, src, opsize, 1, &addr);
3036    imm = (insn >> 9) & 7;
3037    if (imm == 0) {
3038        imm = 8;
3039    }
3040    val = tcg_const_i32(imm);
3041    dest = tcg_temp_new();
3042    tcg_gen_mov_i32(dest, src);
3043    if ((insn & 0x38) == 0x08) {
3044        /*
3045         * Don't update condition codes if the destination is an
3046         * address register.
3047         */
3048        if (insn & 0x0100) {
3049            tcg_gen_sub_i32(dest, dest, val);
3050        } else {
3051            tcg_gen_add_i32(dest, dest, val);
3052        }
3053    } else {
3054        if (insn & 0x0100) {
3055            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3056            tcg_gen_sub_i32(dest, dest, val);
3057            set_cc_op(s, CC_OP_SUBB + opsize);
3058        } else {
3059            tcg_gen_add_i32(dest, dest, val);
3060            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3061            set_cc_op(s, CC_OP_ADDB + opsize);
3062        }
3063        gen_update_cc_add(dest, val, opsize);
3064    }
3065    tcg_temp_free(val);
3066    DEST_EA(env, insn, opsize, dest, &addr);
3067    tcg_temp_free(dest);
3068}
3069
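/*
 * TPF (trap false) never traps; all that is needed is to skip over
 * its optional extension words.
 */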
3070DISAS_INSN(tpf)
3071{
3072    switch (insn & 7) {
3073    case 2: /* One extension word.  */
3074        s->pc += 2;
3075        break;
3076    case 3: /* Two extension words.  */
3077        s->pc += 4;
3078        break;
3079    case 4: /* No extension words.  */
3080        break;
3081    default:
3082        disas_undef(env, s, insn);
3083    }
3084}
3085
3086DISAS_INSN(branch)
3087{
3088    int32_t offset;
3089    uint32_t base;
3090    int op;
3091
3092    base = s->pc;
3093    op = (insn >> 8) & 0xf;
3094    offset = (int8_t)insn;
3095    if (offset == 0) {
3096        offset = (int16_t)read_im16(env, s);
3097    } else if (offset == -1) {
3098        offset = read_im32(env, s);
3099    }
3100    if (op == 1) {
3101        /* bsr */
3102        gen_push(s, tcg_const_i32(s->pc));
3103    }
3104    if (op > 1) {
3105        /* Bcc */
3106        TCGLabel *l1 = gen_new_label();
3107        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3108        gen_jmp_tb(s, 1, base + offset);
3109        gen_set_label(l1);
3110        gen_jmp_tb(s, 0, s->pc);
3111    } else {
3112        /* Unconditional branch.  */
3113        update_cc_op(s);
3114        gen_jmp_tb(s, 0, base + offset);
3115    }
3116}
3117
3118DISAS_INSN(moveq)
3119{
3120    tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3121    gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3122}
3123
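/*
 * mvz/mvs (ColdFire): move a byte or word into a data register with
 * zero or sign extension to 32 bits.
 */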
3124DISAS_INSN(mvzs)
3125{
3126    int opsize;
3127    TCGv src;
3128    TCGv reg;
3129
3130    if (insn & 0x40)
3131        opsize = OS_WORD;
3132    else
3133        opsize = OS_BYTE;
3134    SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3135    reg = DREG(insn, 9);
3136    tcg_gen_mov_i32(reg, src);
3137    gen_logic_cc(s, src, opsize);
3138}
3139
3140DISAS_INSN(or)
3141{
3142    TCGv reg;
3143    TCGv dest;
3144    TCGv src;
3145    TCGv addr;
3146    int opsize;
3147
3148    opsize = insn_opsize(insn);
3149    reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3150    dest = tcg_temp_new();
3151    if (insn & 0x100) {
3152        SRC_EA(env, src, opsize, 0, &addr);
3153        tcg_gen_or_i32(dest, src, reg);
3154        DEST_EA(env, insn, opsize, dest, &addr);
3155    } else {
3156        SRC_EA(env, src, opsize, 0, NULL);
3157        tcg_gen_or_i32(dest, src, reg);
3158        gen_partset_reg(opsize, DREG(insn, 9), dest);
3159    }
3160    gen_logic_cc(s, dest, opsize);
3161    tcg_temp_free(dest);
3162}
3163
3164DISAS_INSN(suba)
3165{
3166    TCGv src;
3167    TCGv reg;
3168
3169    SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3170    reg = AREG(insn, 9);
3171    tcg_gen_sub_i32(reg, reg, src);
3172}
3173
3174static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3175{
3176    TCGv tmp;
3177
3178    gen_flush_flags(s); /* compute old Z */
3179
3180    /*
3181     * Perform subtract with borrow.
3182     * (X, N) = dest - (src + X);
3183     */
3184
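    /*
     * The add2/sub2 pair below evaluates dest - (src + X) as a 64-bit
     * value; the borrow lands in the high word and is masked down to a
     * single bit for X.
     */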
3185    tmp = tcg_const_i32(0);
3186    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3187    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3188    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3189    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3190
3191    /* Compute signed-overflow for subtract.  */
3192
3193    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3194    tcg_gen_xor_i32(tmp, dest, src);
3195    tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3196    tcg_temp_free(tmp);
3197
3198    /* Copy the rest of the results into place.  */
3199    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3200    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3201
3202    set_cc_op(s, CC_OP_FLAGS);
3203
3204    /* result is in QREG_CC_N */
3205}
3206
3207DISAS_INSN(subx_reg)
3208{
3209    TCGv dest;
3210    TCGv src;
3211    int opsize;
3212
3213    opsize = insn_opsize(insn);
3214
3215    src = gen_extend(s, DREG(insn, 0), opsize, 1);
3216    dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3217
3218    gen_subx(s, src, dest, opsize);
3219
3220    gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3221}
3222
3223DISAS_INSN(subx_mem)
3224{
3225    TCGv src;
3226    TCGv addr_src;
3227    TCGv dest;
3228    TCGv addr_dest;
3229    int opsize;
3230
3231    opsize = insn_opsize(insn);
3232
3233    addr_src = AREG(insn, 0);
3234    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3235    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3236
3237    addr_dest = AREG(insn, 9);
3238    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3239    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3240
3241    gen_subx(s, src, dest, opsize);
3242
3243    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3244
3245    tcg_temp_free(dest);
3246    tcg_temp_free(src);
3247}
3248
3249DISAS_INSN(mov3q)
3250{
3251    TCGv src;
3252    int val;
3253
3254    val = (insn >> 9) & 7;
3255    if (val == 0)
3256        val = -1;
3257    src = tcg_const_i32(val);
3258    gen_logic_cc(s, src, OS_LONG);
3259    DEST_EA(env, insn, OS_LONG, src, NULL);
3260    tcg_temp_free(src);
3261}
3262
3263DISAS_INSN(cmp)
3264{
3265    TCGv src;
3266    TCGv reg;
3267    int opsize;
3268
3269    opsize = insn_opsize(insn);
3270    SRC_EA(env, src, opsize, 1, NULL);
3271    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3272    gen_update_cc_cmp(s, reg, src, opsize);
3273}
3274
3275DISAS_INSN(cmpa)
3276{
3277    int opsize;
3278    TCGv src;
3279    TCGv reg;
3280
3281    if (insn & 0x100) {
3282        opsize = OS_LONG;
3283    } else {
3284        opsize = OS_WORD;
3285    }
3286    SRC_EA(env, src, opsize, 1, NULL);
3287    reg = AREG(insn, 9);
3288    gen_update_cc_cmp(s, reg, src, OS_LONG);
3289}
3290
3291DISAS_INSN(cmpm)
3292{
3293    int opsize = insn_opsize(insn);
3294    TCGv src, dst;
3295
3296    /* Post-increment load (mode 3) from Ay.  */
3297    src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3298                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3299    /* Post-increment load (mode 3) from Ax.  */
3300    dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3301                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3302
3303    gen_update_cc_cmp(s, dst, src, opsize);
3304}
3305
3306DISAS_INSN(eor)
3307{
3308    TCGv src;
3309    TCGv dest;
3310    TCGv addr;
3311    int opsize;
3312
3313    opsize = insn_opsize(insn);
3314
3315    SRC_EA(env, src, opsize, 0, &addr);
3316    dest = tcg_temp_new();
3317    tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3318    gen_logic_cc(s, dest, opsize);
3319    DEST_EA(env, insn, opsize, dest, &addr);
3320    tcg_temp_free(dest);
3321}
3322
3323static void do_exg(TCGv reg1, TCGv reg2)
3324{
3325    TCGv temp = tcg_temp_new();
3326    tcg_gen_mov_i32(temp, reg1);
3327    tcg_gen_mov_i32(reg1, reg2);
3328    tcg_gen_mov_i32(reg2, temp);
3329    tcg_temp_free(temp);
3330}
3331
3332DISAS_INSN(exg_dd)
3333{
3334    /* exchange Dx and Dy */
3335    do_exg(DREG(insn, 9), DREG(insn, 0));
3336}
3337
3338DISAS_INSN(exg_aa)
3339{
3340    /* exchange Ax and Ay */
3341    do_exg(AREG(insn, 9), AREG(insn, 0));
3342}
3343
3344DISAS_INSN(exg_da)
3345{
3346    /* exchange Dx and Ay */
3347    do_exg(DREG(insn, 9), AREG(insn, 0));
3348}
3349
3350DISAS_INSN(and)
3351{
3352    TCGv src;
3353    TCGv reg;
3354    TCGv dest;
3355    TCGv addr;
3356    int opsize;
3357
3358    dest = tcg_temp_new();
3359
3360    opsize = insn_opsize(insn);
3361    reg = DREG(insn, 9);
3362    if (insn & 0x100) {
3363        SRC_EA(env, src, opsize, 0, &addr);
3364        tcg_gen_and_i32(dest, src, reg);
3365        DEST_EA(env, insn, opsize, dest, &addr);
3366    } else {
3367        SRC_EA(env, src, opsize, 0, NULL);
3368        tcg_gen_and_i32(dest, src, reg);
3369        gen_partset_reg(opsize, reg, dest);
3370    }
3371    gen_logic_cc(s, dest, opsize);
3372    tcg_temp_free(dest);
3373}
3374
3375DISAS_INSN(adda)
3376{
3377    TCGv src;
3378    TCGv reg;
3379
3380    SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3381    reg = AREG(insn, 9);
3382    tcg_gen_add_i32(reg, reg, src);
3383}
3384
3385static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3386{
3387    TCGv tmp;
3388
3389    gen_flush_flags(s); /* compute old Z */
3390
3391    /*
3392     * Perform addition with carry.
3393     * (X, N) = src + dest + X;
3394     */
3395
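    /*
     * The two add2 ops below form the 64-bit sum dest + X + src, so
     * QREG_CC_X ends up holding the carry out of bit 31.
     */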
3396    tmp = tcg_const_i32(0);
3397    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3398    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3399    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3400
3401    /* Compute signed-overflow for addition.  */
3402
3403    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3404    tcg_gen_xor_i32(tmp, dest, src);
3405    tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3406    tcg_temp_free(tmp);
3407
3408    /* Copy the rest of the results into place.  */
3409    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3410    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3411
3412    set_cc_op(s, CC_OP_FLAGS);
3413
3414    /* result is in QREG_CC_N */
3415}
3416
3417DISAS_INSN(addx_reg)
3418{
3419    TCGv dest;
3420    TCGv src;
3421    int opsize;
3422
3423    opsize = insn_opsize(insn);
3424
3425    dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3426    src = gen_extend(s, DREG(insn, 0), opsize, 1);
3427
3428    gen_addx(s, src, dest, opsize);
3429
3430    gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3431}
3432
3433DISAS_INSN(addx_mem)
3434{
3435    TCGv src;
3436    TCGv addr_src;
3437    TCGv dest;
3438    TCGv addr_dest;
3439    int opsize;
3440
3441    opsize = insn_opsize(insn);
3442
3443    addr_src = AREG(insn, 0);
3444    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3445    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3446
3447    addr_dest = AREG(insn, 9);
3448    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3449    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3450
3451    gen_addx(s, src, dest, opsize);
3452
3453    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3454
3455    tcg_temp_free(dest);
3456    tcg_temp_free(src);
3457}
3458
3459static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3460{
3461    int count = (insn >> 9) & 7;
3462    int logical = insn & 8;
3463    int left = insn & 0x100;
3464    int bits = opsize_bytes(opsize) * 8;
3465    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3466
3467    if (count == 0) {
3468        count = 8;
3469    }
3470
3471    tcg_gen_movi_i32(QREG_CC_V, 0);
3472    if (left) {
3473        tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3474        tcg_gen_shli_i32(QREG_CC_N, reg, count);
3475
3476        /*
3477         * Note that ColdFire always clears V (done above),
3478         * while M68000 sets it if the most significant bit is changed at
3479         * any time during the shift operation.
3480         */
3481        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3482            /* if shift count >= bits, V is (reg != 0) */
3483            if (count >= bits) {
3484                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3485            } else {
3486                TCGv t0 = tcg_temp_new();
3487                tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3488                tcg_gen_sari_i32(t0, reg, bits - count - 1);
3489                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3490                tcg_temp_free(t0);
3491            }
3492            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3493        }
3494    } else {
3495        tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3496        if (logical) {
3497            tcg_gen_shri_i32(QREG_CC_N, reg, count);
3498        } else {
3499            tcg_gen_sari_i32(QREG_CC_N, reg, count);
3500        }
3501    }
3502
3503    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3504    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3505    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3506    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3507
3508    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3509    set_cc_op(s, CC_OP_FLAGS);
3510}
3511
3512static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3513{
3514    int logical = insn & 8;
3515    int left = insn & 0x100;
3516    int bits = opsize_bytes(opsize) * 8;
3517    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3518    TCGv s32;
3519    TCGv_i64 t64, s64;
3520
3521    t64 = tcg_temp_new_i64();
3522    s64 = tcg_temp_new_i64();
3523    s32 = tcg_temp_new();
3524
3525    /*
3526     * Note that m68k truncates the shift count modulo 64, not 32.
3527     * In addition, a 64-bit shift makes it easy to find "the last
3528     * bit shifted out", for the carry flag.
3529     */
3530    tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3531    tcg_gen_extu_i32_i64(s64, s32);
3532    tcg_gen_extu_i32_i64(t64, reg);
3533
3534    /* Optimistically set V=0.  Also used as a zero source below.  */
3535    tcg_gen_movi_i32(QREG_CC_V, 0);
3536    if (left) {
3537        tcg_gen_shl_i64(t64, t64, s64);
3538
3539        if (opsize == OS_LONG) {
3540            tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3541            /* Note that C=0 if shift count is 0, and we get that for free.  */
3542        } else {
3543            TCGv zero = tcg_const_i32(0);
3544            tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3545            tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3546            tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3547                                s32, zero, zero, QREG_CC_C);
3548            tcg_temp_free(zero);
3549        }
3550        tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3551
3552        /* X = C, but only if the shift count was non-zero.  */
3553        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3554                            QREG_CC_C, QREG_CC_X);
3555
3556        /*
3557         * M68000 sets V if the most significant bit is changed at
3558         * any time during the shift operation.  Do this via creating
3559         * an extension of the sign bit, comparing, and discarding
3560         * the bits below the sign bit.  I.e.
3561         *     int64_t s = (intN_t)reg;
3562         *     int64_t t = (int64_t)(intN_t)reg << count;
3563         *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3564         */
3565        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3566            TCGv_i64 tt = tcg_const_i64(32);
3567            /* if shift is greater than 32, use 32 */
3568            tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3569            tcg_temp_free_i64(tt);
3570            /* Sign extend the input to 64 bits; re-do the shift.  */
3571            tcg_gen_ext_i32_i64(t64, reg);
3572            tcg_gen_shl_i64(s64, t64, s64);
3573            /* Clear all bits that are unchanged.  */
3574            tcg_gen_xor_i64(t64, t64, s64);
3575            /* Ignore the bits below the sign bit.  */
3576            tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3577            /* If any bits remain set, we have overflow.  */
3578            tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3579            tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3580            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3581        }
3582    } else {
3583        tcg_gen_shli_i64(t64, t64, 32);
3584        if (logical) {
3585            tcg_gen_shr_i64(t64, t64, s64);
3586        } else {
3587            tcg_gen_sar_i64(t64, t64, s64);
3588        }
3589        tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3590
3591        /* Note that C=0 if shift count is 0, and we get that for free.  */
3592        tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3593
3594        /* X = C, but only if the shift count was non-zero.  */
3595        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3596                            QREG_CC_C, QREG_CC_X);
3597    }
3598    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3599    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3600
3601    tcg_temp_free(s32);
3602    tcg_temp_free_i64(s64);
3603    tcg_temp_free_i64(t64);
3604
3605    /* Write back the result.  */
3606    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3607    set_cc_op(s, CC_OP_FLAGS);
3608}
3609
3610DISAS_INSN(shift8_im)
3611{
3612    shift_im(s, insn, OS_BYTE);
3613}
3614
3615DISAS_INSN(shift16_im)
3616{
3617    shift_im(s, insn, OS_WORD);
3618}
3619
3620DISAS_INSN(shift_im)
3621{
3622    shift_im(s, insn, OS_LONG);
3623}
3624
3625DISAS_INSN(shift8_reg)
3626{
3627    shift_reg(s, insn, OS_BYTE);
3628}
3629
3630DISAS_INSN(shift16_reg)
3631{
3632    shift_reg(s, insn, OS_WORD);
3633}
3634
3635DISAS_INSN(shift_reg)
3636{
3637    shift_reg(s, insn, OS_LONG);
3638}
3639
3640DISAS_INSN(shift_mem)
3641{
3642    int logical = insn & 8;
3643    int left = insn & 0x100;
3644    TCGv src;
3645    TCGv addr;
3646
3647    SRC_EA(env, src, OS_WORD, !logical, &addr);
3648    tcg_gen_movi_i32(QREG_CC_V, 0);
3649    if (left) {
3650        tcg_gen_shri_i32(QREG_CC_C, src, 15);
3651        tcg_gen_shli_i32(QREG_CC_N, src, 1);
3652
3653        /*
3654         * Note that ColdFire always clears V,
3655         * while M68000 sets it if the most significant bit is changed at
3656         * any time during the shift operation.
3657         */
3658        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3659            src = gen_extend(s, src, OS_WORD, 1);
3660            tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3661        }
3662    } else {
3663        tcg_gen_mov_i32(QREG_CC_C, src);
3664        if (logical) {
3665            tcg_gen_shri_i32(QREG_CC_N, src, 1);
3666        } else {
3667            tcg_gen_sari_i32(QREG_CC_N, src, 1);
3668        }
3669    }
3670
3671    gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3672    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3673    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3674    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3675
3676    DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3677    set_cc_op(s, CC_OP_FLAGS);
3678}
3679
3680static void rotate(TCGv reg, TCGv shift, int left, int size)
3681{
3682    switch (size) {
3683    case 8:
3684        /* Replicate the 8-bit input so that a 32-bit rotate works.  */
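        /*
         * e.g. 0xAB becomes 0xABABABAB, so after the 32-bit rotate the
         * low byte always holds a correctly rotated copy of the input.
         */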
3685        tcg_gen_ext8u_i32(reg, reg);
3686        tcg_gen_muli_i32(reg, reg, 0x01010101);
3687        goto do_long;
3688    case 16:
3689        /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3690        tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3691        goto do_long;
3692    do_long:
3693    default:
3694        if (left) {
3695            tcg_gen_rotl_i32(reg, reg, shift);
3696        } else {
3697            tcg_gen_rotr_i32(reg, reg, shift);
3698        }
3699    }
3700
3701    /* compute flags */
3702
3703    switch (size) {
3704    case 8:
3705        tcg_gen_ext8s_i32(reg, reg);
3706        break;
3707    case 16:
3708        tcg_gen_ext16s_i32(reg, reg);
3709        break;
3710    default:
3711        break;
3712    }
3713
3714    /* QREG_CC_X is not affected */
3715
3716    tcg_gen_mov_i32(QREG_CC_N, reg);
3717    tcg_gen_mov_i32(QREG_CC_Z, reg);
3718
3719    if (left) {
3720        tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3721    } else {
3722        tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3723    }
3724
3725    tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3726}
3727
3728static void rotate_x_flags(TCGv reg, TCGv X, int size)
3729{
3730    switch (size) {
3731    case 8:
3732        tcg_gen_ext8s_i32(reg, reg);
3733        break;
3734    case 16:
3735        tcg_gen_ext16s_i32(reg, reg);
3736        break;
3737    default:
3738        break;
3739    }
3740    tcg_gen_mov_i32(QREG_CC_N, reg);
3741    tcg_gen_mov_i32(QREG_CC_Z, reg);
3742    tcg_gen_mov_i32(QREG_CC_X, X);
3743    tcg_gen_mov_i32(QREG_CC_C, X);
3744    tcg_gen_movi_i32(QREG_CC_V, 0);
3745}
3746
3747/* Result of rotate_x() is valid if 0 <= shift <= size */
3748static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3749{
3750    TCGv X, shl, shr, shx, sz, zero;
3751
3752    sz = tcg_const_i32(size);
3753
3754    shr = tcg_temp_new();
3755    shl = tcg_temp_new();
3756    shx = tcg_temp_new();
3757    if (left) {
3758        tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3759        tcg_gen_movi_i32(shr, size + 1);
3760        tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3761        tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3762        /* shx = shx < 0 ? size : shx; */
3763        zero = tcg_const_i32(0);
3764        tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3765        tcg_temp_free(zero);
3766    } else {
3767        tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3768        tcg_gen_movi_i32(shl, size + 1);
3769        tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3770        tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3771    }
3772    tcg_temp_free_i32(sz);
3773
3774    /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
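    /*
     * For example, a ROXL.B by 1 (left, size 8, shift 1) gives shl = 1,
     * shr = 8 and shx = 0, i.e. reg = (reg << 1) | X, and the new X
     * extracted below is the old bit 7.
     */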
3775
3776    tcg_gen_shl_i32(shl, reg, shl);
3777    tcg_gen_shr_i32(shr, reg, shr);
3778    tcg_gen_or_i32(reg, shl, shr);
3779    tcg_temp_free(shl);
3780    tcg_temp_free(shr);
3781    tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3782    tcg_gen_or_i32(reg, reg, shx);
3783    tcg_temp_free(shx);
3784
3785    /* X = (reg >> size) & 1 */
3786
3787    X = tcg_temp_new();
3788    tcg_gen_extract_i32(X, reg, size, 1);
3789
3790    return X;
3791}
3792
3793/* Result of rotate32_x() is valid if 0 <= shift < 33 */
3794static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3795{
3796    TCGv_i64 t0, shift64;
3797    TCGv X, lo, hi, zero;
3798
3799    shift64 = tcg_temp_new_i64();
3800    tcg_gen_extu_i32_i64(shift64, shift);
3801
3802    t0 = tcg_temp_new_i64();
3803
3804    X = tcg_temp_new();
3805    lo = tcg_temp_new();
3806    hi = tcg_temp_new();
3807
3808    if (left) {
3809        /* create [reg:X:..] */
3810
3811        tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3812        tcg_gen_concat_i32_i64(t0, lo, reg);
3813
3814        /* rotate */
3815
3816        tcg_gen_rotl_i64(t0, t0, shift64);
3817        tcg_temp_free_i64(shift64);
3818
3819        /* result is [reg:..:reg:X] */
3820
3821        tcg_gen_extr_i64_i32(lo, hi, t0);
3822        tcg_gen_andi_i32(X, lo, 1);
3823
3824        tcg_gen_shri_i32(lo, lo, 1);
3825    } else {
3826        /* create [..:X:reg] */
3827
3828        tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3829
3830        tcg_gen_rotr_i64(t0, t0, shift64);
3831        tcg_temp_free_i64(shift64);
3832
3833        /* result is value: [X:reg:..:reg] */
3834
3835        tcg_gen_extr_i64_i32(lo, hi, t0);
3836
3837        /* extract X */
3838
3839        tcg_gen_shri_i32(X, hi, 31);
3840
3841        /* extract result */
3842
3843        tcg_gen_shli_i32(hi, hi, 1);
3844    }
3845    tcg_temp_free_i64(t0);
3846    tcg_gen_or_i32(lo, lo, hi);
3847    tcg_temp_free(hi);
3848
3849    /* if shift == 0, register and X are not affected */
3850
3851    zero = tcg_const_i32(0);
3852    tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3853    tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3854    tcg_temp_free(zero);
3855    tcg_temp_free(lo);
3856
3857    return X;
3858}
3859
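/*
 * Rotate instructions with an immediate count, long operand:
 * bits 11-9 hold the count (0 encodes 8), bit 8 the direction
 * (left if set) and bit 3 selects a plain rotate rather than a
 * rotate through X.
 */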
3860DISAS_INSN(rotate_im)
3861{
3862    TCGv shift;
3863    int tmp;
3864    int left = (insn & 0x100);
3865
3866    tmp = (insn >> 9) & 7;
3867    if (tmp == 0) {
3868        tmp = 8;
3869    }
3870
3871    shift = tcg_const_i32(tmp);
3872    if (insn & 8) {
3873        rotate(DREG(insn, 0), shift, left, 32);
3874    } else {
3875        TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3876        rotate_x_flags(DREG(insn, 0), X, 32);
3877        tcg_temp_free(X);
3878    }
3879    tcg_temp_free(shift);
3880
3881    set_cc_op(s, CC_OP_FLAGS);
3882}
3883
3884DISAS_INSN(rotate8_im)
3885{
3886    int left = (insn & 0x100);
3887    TCGv reg;
3888    TCGv shift;
3889    int tmp;
3890
3891    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3892
3893    tmp = (insn >> 9) & 7;
3894    if (tmp == 0) {
3895        tmp = 8;
3896    }
3897
3898    shift = tcg_const_i32(tmp);
3899    if (insn & 8) {
3900        rotate(reg, shift, left, 8);
3901    } else {
3902        TCGv X = rotate_x(reg, shift, left, 8);
3903        rotate_x_flags(reg, X, 8);
3904        tcg_temp_free(X);
3905    }
3906    tcg_temp_free(shift);
3907    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3908    set_cc_op(s, CC_OP_FLAGS);
3909}
3910
3911DISAS_INSN(rotate16_im)
3912{
3913    int left = (insn & 0x100);
3914    TCGv reg;
3915    TCGv shift;
3916    int tmp;
3917
3918    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3919    tmp = (insn >> 9) & 7;
3920    if (tmp == 0) {
3921        tmp = 8;
3922    }
3923
3924    shift = tcg_const_i32(tmp);
3925    if (insn & 8) {
3926        rotate(reg, shift, left, 16);
3927    } else {
3928        TCGv X = rotate_x(reg, shift, left, 16);
3929        rotate_x_flags(reg, X, 16);
3930        tcg_temp_free(X);
3931    }
3932    tcg_temp_free(shift);
3933    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3934    set_cc_op(s, CC_OP_FLAGS);
3935}
3936
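/*
 * Rotates with the count taken from a data register, long operand.
 * The raw count is reduced modulo 64; a plain rotate then uses
 * count mod 32 and clears C when the count is zero, while a rotate
 * through X reduces the count modulo 33 (the byte and word variants
 * below use moduli 9 and 17).
 */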
3937DISAS_INSN(rotate_reg)
3938{
3939    TCGv reg;
3940    TCGv src;
3941    TCGv t0, t1;
3942    int left = (insn & 0x100);
3943
3944    reg = DREG(insn, 0);
3945    src = DREG(insn, 9);
3946    /* shift in [0..63] */
3947    t0 = tcg_temp_new();
3948    tcg_gen_andi_i32(t0, src, 63);
3949    t1 = tcg_temp_new_i32();
3950    if (insn & 8) {
3951        tcg_gen_andi_i32(t1, src, 31);
3952        rotate(reg, t1, left, 32);
3953        /* if shift == 0, clear C */
3954        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3955                            t0, QREG_CC_V /* 0 */,
3956                            QREG_CC_V /* 0 */, QREG_CC_C);
3957    } else {
3958        TCGv X;
3959        /* modulo 33 */
3960        tcg_gen_movi_i32(t1, 33);
3961        tcg_gen_remu_i32(t1, t0, t1);
3962        X = rotate32_x(DREG(insn, 0), t1, left);
3963        rotate_x_flags(DREG(insn, 0), X, 32);
3964        tcg_temp_free(X);
3965    }
3966    tcg_temp_free(t1);
3967    tcg_temp_free(t0);
3968    set_cc_op(s, CC_OP_FLAGS);
3969}
3970
3971DISAS_INSN(rotate8_reg)
3972{
3973    TCGv reg;
3974    TCGv src;
3975    TCGv t0, t1;
3976    int left = (insn & 0x100);
3977
3978    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3979    src = DREG(insn, 9);
3980    /* shift in [0..63] */
3981    t0 = tcg_temp_new_i32();
3982    tcg_gen_andi_i32(t0, src, 63);
3983    t1 = tcg_temp_new_i32();
3984    if (insn & 8) {
3985        tcg_gen_andi_i32(t1, src, 7);
3986        rotate(reg, t1, left, 8);
3987        /* if shift == 0, clear C */
3988        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3989                            t0, QREG_CC_V /* 0 */,
3990                            QREG_CC_V /* 0 */, QREG_CC_C);
3991    } else {
3992        TCGv X;
3993        /* modulo 9 */
3994        tcg_gen_movi_i32(t1, 9);
3995        tcg_gen_remu_i32(t1, t0, t1);
3996        X = rotate_x(reg, t1, left, 8);
3997        rotate_x_flags(reg, X, 8);
3998        tcg_temp_free(X);
3999    }
4000    tcg_temp_free(t1);
4001    tcg_temp_free(t0);
4002    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
4003    set_cc_op(s, CC_OP_FLAGS);
4004}
4005
4006DISAS_INSN(rotate16_reg)
4007{
4008    TCGv reg;
4009    TCGv src;
4010    TCGv t0, t1;
4011    int left = (insn & 0x100);
4012
4013    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
4014    src = DREG(insn, 9);
4015    /* shift in [0..63] */
4016    t0 = tcg_temp_new_i32();
4017    tcg_gen_andi_i32(t0, src, 63);
4018    t1 = tcg_temp_new_i32();
4019    if (insn & 8) {
4020        tcg_gen_andi_i32(t1, src, 15);
4021        rotate(reg, t1, left, 16);
4022        /* if shift == 0, clear C */
4023        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
4024                            t0, QREG_CC_V /* 0 */,
4025                            QREG_CC_V /* 0 */, QREG_CC_C);
4026    } else {
4027        TCGv X;
4028        /* modulo 17 */
4029        tcg_gen_movi_i32(t1, 17);
4030        tcg_gen_remu_i32(t1, t0, t1);
4031        X = rotate_x(reg, t1, left, 16);
4032        rotate_x_flags(reg, X, 16);
4033        tcg_temp_free(X);
4034    }
4035    tcg_temp_free(t1);
4036    tcg_temp_free(t0);
4037    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
4038    set_cc_op(s, CC_OP_FLAGS);
4039}
4040
4041DISAS_INSN(rotate_mem)
4042{
4043    TCGv src;
4044    TCGv addr;
4045    TCGv shift;
4046    int left = (insn & 0x100);
4047
4048    SRC_EA(env, src, OS_WORD, 0, &addr);
4049
4050    shift = tcg_const_i32(1);
4051    if (insn & 0x0200) {
4052        rotate(src, shift, left, 16);
4053    } else {
4054        TCGv X = rotate_x(src, shift, left, 16);
4055        rotate_x_flags(src, X, 16);
4056        tcg_temp_free(X);
4057    }
4058    tcg_temp_free(shift);
4059    DEST_EA(env, insn, OS_WORD, src, &addr);
4060    set_cc_op(s, CC_OP_FLAGS);
4061}
4062
4063DISAS_INSN(bfext_reg)
4064{
4065    int ext = read_im16(env, s);
4066    int is_sign = insn & 0x200;
4067    TCGv src = DREG(insn, 0);
4068    TCGv dst = DREG(ext, 12);
4069    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4070    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4071    int pos = 32 - ofs - len;        /* little bit-endian */
4072    TCGv tmp = tcg_temp_new();
4073    TCGv shift;
4074
4075    /*
4076     * In general, we're going to rotate the field so that it's at the
4077     * top of the word and then right-shift by the complement of the
4078     * width to extend the field.
4079     */
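    /*
     * e.g. ofs == 8, len == 8 on 0x12345678: rotating left by 8
     * gives 0x34567812 and shifting right by 32 - 8 == 24 leaves
     * 0x34, the same field that (s)extract produces directly in the
     * non-wrapping immediate case below.
     */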
4080    if (ext & 0x20) {
4081        /* Variable width.  */
4082        if (ext & 0x800) {
4083            /* Variable offset.  */
4084            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4085            tcg_gen_rotl_i32(tmp, src, tmp);
4086        } else {
4087            tcg_gen_rotli_i32(tmp, src, ofs);
4088        }
4089
4090        shift = tcg_temp_new();
4091        tcg_gen_neg_i32(shift, DREG(ext, 0));
4092        tcg_gen_andi_i32(shift, shift, 31);
4093        tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
4094        if (is_sign) {
4095            tcg_gen_mov_i32(dst, QREG_CC_N);
4096        } else {
4097            tcg_gen_shr_i32(dst, tmp, shift);
4098        }
4099        tcg_temp_free(shift);
4100    } else {
4101        /* Immediate width.  */
4102        if (ext & 0x800) {
4103            /* Variable offset */
4104            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4105            tcg_gen_rotl_i32(tmp, src, tmp);
4106            src = tmp;
4107            pos = 32 - len;
4108        } else {
4109            /*
4110             * Immediate offset.  If the field doesn't wrap around the
4111             * end of the word, rely on (s)extract completely.
4112             */
4113            if (pos < 0) {
4114                tcg_gen_rotli_i32(tmp, src, ofs);
4115                src = tmp;
4116                pos = 32 - len;
4117            }
4118        }
4119
4120        tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
4121        if (is_sign) {
4122            tcg_gen_mov_i32(dst, QREG_CC_N);
4123        } else {
4124            tcg_gen_extract_i32(dst, src, pos, len);
4125        }
4126    }
4127
4128    tcg_temp_free(tmp);
4129    set_cc_op(s, CC_OP_LOGIC);
4130}
4131
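/*
 * Bit-field extract from memory.  The width and offset come either
 * from the extension word immediates or from data registers and are
 * passed to the bfexts/bfextu helpers; the unsigned helper returns a
 * 64-bit pair that is split into the extracted value (low half) and
 * the value used for N/Z (high half).
 */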
4132DISAS_INSN(bfext_mem)
4133{
4134    int ext = read_im16(env, s);
4135    int is_sign = insn & 0x200;
4136    TCGv dest = DREG(ext, 12);
4137    TCGv addr, len, ofs;
4138
4139    addr = gen_lea(env, s, insn, OS_UNSIZED);
4140    if (IS_NULL_QREG(addr)) {
4141        gen_addr_fault(s);
4142        return;
4143    }
4144
4145    if (ext & 0x20) {
4146        len = DREG(ext, 0);
4147    } else {
4148        len = tcg_const_i32(extract32(ext, 0, 5));
4149    }
4150    if (ext & 0x800) {
4151        ofs = DREG(ext, 6);
4152    } else {
4153        ofs = tcg_const_i32(extract32(ext, 6, 5));
4154    }
4155
4156    if (is_sign) {
4157        gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4158        tcg_gen_mov_i32(QREG_CC_N, dest);
4159    } else {
4160        TCGv_i64 tmp = tcg_temp_new_i64();
4161        gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4162        tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4163        tcg_temp_free_i64(tmp);
4164    }
4165    set_cc_op(s, CC_OP_LOGIC);
4166
4167    if (!(ext & 0x20)) {
4168        tcg_temp_free(len);
4169    }
4170    if (!(ext & 0x800)) {
4171        tcg_temp_free(ofs);
4172    }
4173}
4174
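/*
 * Bit-field operations on a data register.  The field is left-aligned
 * into QREG_CC_N for the condition codes and mask is built with zeros
 * over the field, so BFCHG/BFCLR/BFSET reduce to eqv/and/orc with the
 * mask; BFFFO additionally hands the offset and length to its helper
 * and BFTST only sets the flags.
 */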
4175DISAS_INSN(bfop_reg)
4176{
4177    int ext = read_im16(env, s);
4178    TCGv src = DREG(insn, 0);
4179    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4180    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4181    TCGv mask, tofs, tlen;
4182
4183    tofs = NULL;
4184    tlen = NULL;
4185    if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4186        tofs = tcg_temp_new();
4187        tlen = tcg_temp_new();
4188    }
4189
4190    if ((ext & 0x820) == 0) {
4191        /* Immediate width and offset.  */
4192        uint32_t maski = 0x7fffffffu >> (len - 1);
4193        if (ofs + len <= 32) {
4194            tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4195        } else {
4196            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4197        }
4198        tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4199        mask = tcg_const_i32(ror32(maski, ofs));
4200        if (tofs) {
4201            tcg_gen_movi_i32(tofs, ofs);
4202            tcg_gen_movi_i32(tlen, len);
4203        }
4204    } else {
4205        TCGv tmp = tcg_temp_new();
4206        if (ext & 0x20) {
4207            /* Variable width */
4208            tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4209            tcg_gen_andi_i32(tmp, tmp, 31);
4210            mask = tcg_const_i32(0x7fffffffu);
4211            tcg_gen_shr_i32(mask, mask, tmp);
4212            if (tlen) {
4213                tcg_gen_addi_i32(tlen, tmp, 1);
4214            }
4215        } else {
4216            /* Immediate width */
4217            mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4218            if (tlen) {
4219                tcg_gen_movi_i32(tlen, len);
4220            }
4221        }
4222        if (ext & 0x800) {
4223            /* Variable offset */
4224            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4225            tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4226            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4227            tcg_gen_rotr_i32(mask, mask, tmp);
4228            if (tofs) {
4229                tcg_gen_mov_i32(tofs, tmp);
4230            }
4231        } else {
4232            /* Immediate offset (and variable width) */
4233            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4234            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4235            tcg_gen_rotri_i32(mask, mask, ofs);
4236            if (tofs) {
4237                tcg_gen_movi_i32(tofs, ofs);
4238            }
4239        }
4240        tcg_temp_free(tmp);
4241    }
4242    set_cc_op(s, CC_OP_LOGIC);
4243
4244    switch (insn & 0x0f00) {
4245    case 0x0a00: /* bfchg */
4246        tcg_gen_eqv_i32(src, src, mask);
4247        break;
4248    case 0x0c00: /* bfclr */
4249        tcg_gen_and_i32(src, src, mask);
4250        break;
4251    case 0x0d00: /* bfffo */
4252        gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4253        tcg_temp_free(tlen);
4254        tcg_temp_free(tofs);
4255        break;
4256    case 0x0e00: /* bfset */
4257        tcg_gen_orc_i32(src, src, mask);
4258        break;
4259    case 0x0800: /* bftst */
4260        /* flags already set; no other work to do.  */
4261        break;
4262    default:
4263        g_assert_not_reached();
4264    }
4265    tcg_temp_free(mask);
4266}
4267
4268DISAS_INSN(bfop_mem)
4269{
4270    int ext = read_im16(env, s);
4271    TCGv addr, len, ofs;
4272    TCGv_i64 t64;
4273
4274    addr = gen_lea(env, s, insn, OS_UNSIZED);
4275    if (IS_NULL_QREG(addr)) {
4276        gen_addr_fault(s);
4277        return;
4278    }
4279
4280    if (ext & 0x20) {
4281        len = DREG(ext, 0);
4282    } else {
4283        len = tcg_const_i32(extract32(ext, 0, 5));
4284    }
4285    if (ext & 0x800) {
4286        ofs = DREG(ext, 6);
4287    } else {
4288        ofs = tcg_const_i32(extract32(ext, 6, 5));
4289    }
4290
4291    switch (insn & 0x0f00) {
4292    case 0x0a00: /* bfchg */
4293        gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4294        break;
4295    case 0x0c00: /* bfclr */
4296        gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4297        break;
4298    case 0x0d00: /* bfffo */
4299        t64 = tcg_temp_new_i64();
4300        gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4301        tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4302        tcg_temp_free_i64(t64);
4303        break;
4304    case 0x0e00: /* bfset */
4305        gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4306        break;
4307    case 0x0800: /* bftst */
4308        gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4309        break;
4310    default:
4311        g_assert_not_reached();
4312    }
4313    set_cc_op(s, CC_OP_LOGIC);
4314
4315    if (!(ext & 0x20)) {
4316        tcg_temp_free(len);
4317    }
4318    if (!(ext & 0x800)) {
4319        tcg_temp_free(ofs);
4320    }
4321}
4322
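/*
 * Bit-field insert into a data register.  N and Z are taken from the
 * source value shifted so that the field's most significant bit is in
 * bit 31; the insertion itself uses deposit when the immediate field
 * does not wrap, otherwise a rotated mask and rotated source are
 * combined with the destination.
 */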
4323DISAS_INSN(bfins_reg)
4324{
4325    int ext = read_im16(env, s);
4326    TCGv dst = DREG(insn, 0);
4327    TCGv src = DREG(ext, 12);
4328    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4329    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4330    int pos = 32 - ofs - len;        /* little bit-endian */
4331    TCGv tmp;
4332
4333    tmp = tcg_temp_new();
4334
4335    if (ext & 0x20) {
4336        /* Variable width */
4337        tcg_gen_neg_i32(tmp, DREG(ext, 0));
4338        tcg_gen_andi_i32(tmp, tmp, 31);
4339        tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4340    } else {
4341        /* Immediate width */
4342        tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4343    }
4344    set_cc_op(s, CC_OP_LOGIC);
4345
4346    /* Immediate width and offset */
4347    if ((ext & 0x820) == 0) {
4348        /* Check for suitability for deposit.  */
4349        if (pos >= 0) {
4350            tcg_gen_deposit_i32(dst, dst, src, pos, len);
4351        } else {
4352            uint32_t maski = -2U << (len - 1);
4353            uint32_t roti = (ofs + len) & 31;
4354            tcg_gen_andi_i32(tmp, src, ~maski);
4355            tcg_gen_rotri_i32(tmp, tmp, roti);
4356            tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4357            tcg_gen_or_i32(dst, dst, tmp);
4358        }
4359    } else {
4360        TCGv mask = tcg_temp_new();
4361        TCGv rot = tcg_temp_new();
4362
4363        if (ext & 0x20) {
4364            /* Variable width */
4365            tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4366            tcg_gen_andi_i32(rot, rot, 31);
4367            tcg_gen_movi_i32(mask, -2);
4368            tcg_gen_shl_i32(mask, mask, rot);
4369            tcg_gen_mov_i32(rot, DREG(ext, 0));
4370            tcg_gen_andc_i32(tmp, src, mask);
4371        } else {
4372            /* Immediate width (variable offset) */
4373            uint32_t maski = -2U << (len - 1);
4374            tcg_gen_andi_i32(tmp, src, ~maski);
4375            tcg_gen_movi_i32(mask, maski);
4376            tcg_gen_movi_i32(rot, len & 31);
4377        }
4378        if (ext & 0x800) {
4379            /* Variable offset */
4380            tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4381        } else {
4382            /* Immediate offset (variable width) */
4383            tcg_gen_addi_i32(rot, rot, ofs);
4384        }
4385        tcg_gen_andi_i32(rot, rot, 31);
4386        tcg_gen_rotr_i32(mask, mask, rot);
4387        tcg_gen_rotr_i32(tmp, tmp, rot);
4388        tcg_gen_and_i32(dst, dst, mask);
4389        tcg_gen_or_i32(dst, dst, tmp);
4390
4391        tcg_temp_free(rot);
4392        tcg_temp_free(mask);
4393    }
4394    tcg_temp_free(tmp);
4395}
4396
4397DISAS_INSN(bfins_mem)
4398{
4399    int ext = read_im16(env, s);
4400    TCGv src = DREG(ext, 12);
4401    TCGv addr, len, ofs;
4402
4403    addr = gen_lea(env, s, insn, OS_UNSIZED);
4404    if (IS_NULL_QREG(addr)) {
4405        gen_addr_fault(s);
4406        return;
4407    }
4408
4409    if (ext & 0x20) {
4410        len = DREG(ext, 0);
4411    } else {
4412        len = tcg_const_i32(extract32(ext, 0, 5));
4413    }
4414    if (ext & 0x800) {
4415        ofs = DREG(ext, 6);
4416    } else {
4417        ofs = tcg_const_i32(extract32(ext, 6, 5));
4418    }
4419
4420    gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4421    set_cc_op(s, CC_OP_LOGIC);
4422
4423    if (!(ext & 0x20)) {
4424        tcg_temp_free(len);
4425    }
4426    if (!(ext & 0x800)) {
4427        tcg_temp_free(ofs);
4428    }
4429}
4430
4431DISAS_INSN(ff1)
4432{
4433    TCGv reg;
4434    reg = DREG(insn, 0);
4435    gen_logic_cc(s, reg, OS_LONG);
4436    gen_helper_ff1(reg, reg);
4437}
4438
4439DISAS_INSN(chk)
4440{
4441    TCGv src, reg;
4442    int opsize;
4443
4444    switch ((insn >> 7) & 3) {
4445    case 3:
4446        opsize = OS_WORD;
4447        break;
4448    case 2:
4449        if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4450            opsize = OS_LONG;
4451            break;
4452        }
4453        /* fallthru */
4454    default:
4455        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4456        return;
4457    }
4458    SRC_EA(env, src, opsize, 1, NULL);
4459    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4460
4461    gen_flush_flags(s);
4462    gen_helper_chk(cpu_env, reg, src);
4463}
4464
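/*
 * CHK2: the lower and upper bounds are loaded from consecutive memory
 * locations (opsize bytes apart), the compared register is an address
 * register used as-is or a sign-extended data register, and the chk2
 * helper performs the bounds check with the flags already flushed.
 * The CMP2 form (extension word bit 11 clear) is treated as illegal.
 */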
4465DISAS_INSN(chk2)
4466{
4467    uint16_t ext;
4468    TCGv addr1, addr2, bound1, bound2, reg;
4469    int opsize;
4470
4471    switch ((insn >> 9) & 3) {
4472    case 0:
4473        opsize = OS_BYTE;
4474        break;
4475    case 1:
4476        opsize = OS_WORD;
4477        break;
4478    case 2:
4479        opsize = OS_LONG;
4480        break;
4481    default:
4482        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4483        return;
4484    }
4485
4486    ext = read_im16(env, s);
4487    if ((ext & 0x0800) == 0) {
4488        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4489        return;
4490    }
4491
4492    addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4493    addr2 = tcg_temp_new();
4494    tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4495
4496    bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4497    tcg_temp_free(addr1);
4498    bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4499    tcg_temp_free(addr2);
4500
4501    reg = tcg_temp_new();
4502    if (ext & 0x8000) {
4503        tcg_gen_mov_i32(reg, AREG(ext, 12));
4504    } else {
4505        gen_ext(reg, DREG(ext, 12), opsize, 1);
4506    }
4507
4508    gen_flush_flags(s);
4509    gen_helper_chk2(cpu_env, reg, bound1, bound2);
4510    tcg_temp_free(reg);
4511    tcg_temp_free(bound1);
4512    tcg_temp_free(bound2);
4513}
4514
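/*
 * Copy one 16-byte line for MOVE16: both addresses are aligned down
 * to a 16-byte boundary and the data is transferred as two 64-bit
 * loads followed by two 64-bit stores.
 */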
4515static void m68k_copy_line(TCGv dst, TCGv src, int index)
4516{
4517    TCGv addr;
4518    TCGv_i64 t0, t1;
4519
4520    addr = tcg_temp_new();
4521
4522    t0 = tcg_temp_new_i64();
4523    t1 = tcg_temp_new_i64();
4524
4525    tcg_gen_andi_i32(addr, src, ~15);
4526    tcg_gen_qemu_ld64(t0, addr, index);
4527    tcg_gen_addi_i32(addr, addr, 8);
4528    tcg_gen_qemu_ld64(t1, addr, index);
4529
4530    tcg_gen_andi_i32(addr, dst, ~15);
4531    tcg_gen_qemu_st64(t0, addr, index);
4532    tcg_gen_addi_i32(addr, addr, 8);
4533    tcg_gen_qemu_st64(t1, addr, index);
4534
4535    tcg_temp_free_i64(t0);
4536    tcg_temp_free_i64(t1);
4537    tcg_temp_free(addr);
4538}
4539
4540DISAS_INSN(move16_reg)
4541{
4542    int index = IS_USER(s);
4543    TCGv tmp;
4544    uint16_t ext;
4545
4546    ext = read_im16(env, s);
4547    if ((ext & (1 << 15)) == 0) {
4548        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4549    }
4550
4551    m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4552
4553    /* Ax can be Ay, so save Ay before incrementing Ax */
4554    tmp = tcg_temp_new();
4555    tcg_gen_mov_i32(tmp, AREG(ext, 12));
4556    tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4557    tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4558    tcg_temp_free(tmp);
4559}
4560
4561DISAS_INSN(move16_mem)
4562{
4563    int index = IS_USER(s);
4564    TCGv reg, addr;
4565
4566    reg = AREG(insn, 0);
4567    addr = tcg_const_i32(read_im32(env, s));
4568
4569    if ((insn >> 3) & 1) {
4570        /* MOVE16 (xxx).L, (Ay) */
4571        m68k_copy_line(reg, addr, index);
4572    } else {
4573        /* MOVE16 (Ay), (xxx).L */
4574        m68k_copy_line(addr, reg, index);
4575    }
4576
4577    tcg_temp_free(addr);
4578
4579    if (((insn >> 3) & 2) == 0) {
4580        /* (Ay)+ */
4581        tcg_gen_addi_i32(reg, reg, 16);
4582    }
4583}
4584
4585DISAS_INSN(strldsr)
4586{
4587    uint16_t ext;
4588    uint32_t addr;
4589
4590    addr = s->pc - 2;
4591    ext = read_im16(env, s);
4592    if (ext != 0x46FC) {
4593        gen_exception(s, addr, EXCP_ILLEGAL);
4594        return;
4595    }
4596    ext = read_im16(env, s);
4597    if (IS_USER(s) || (ext & SR_S) == 0) {
4598        gen_exception(s, addr, EXCP_PRIVILEGE);
4599        return;
4600    }
4601    gen_push(s, gen_get_sr(s));
4602    gen_set_sr_im(s, ext, 0);
4603}
4604
4605DISAS_INSN(move_from_sr)
4606{
4607    TCGv sr;
4608
4609    if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4610        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4611        return;
4612    }
4613    sr = gen_get_sr(s);
4614    DEST_EA(env, insn, OS_WORD, sr, NULL);
4615}
4616
4617#if defined(CONFIG_SOFTMMU)
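/*
 * MOVES: move to or from the alternate address space.  The extension
 * word supplies the register and direction; loads use SFC_INDEX and
 * stores use DFC_INDEX so the access is performed in the address
 * space selected by SFC/DFC, and (An)+ / -(An) writeback is applied
 * after the transfer.
 */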
4618DISAS_INSN(moves)
4619{
4620    int opsize;
4621    uint16_t ext;
4622    TCGv reg;
4623    TCGv addr;
4624    int extend;
4625
4626    if (IS_USER(s)) {
4627        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4628        return;
4629    }
4630
4631    ext = read_im16(env, s);
4632
4633    opsize = insn_opsize(insn);
4634
4635    if (ext & 0x8000) {
4636        /* address register */
4637        reg = AREG(ext, 12);
4638        extend = 1;
4639    } else {
4640        /* data register */
4641        reg = DREG(ext, 12);
4642        extend = 0;
4643    }
4644
4645    addr = gen_lea(env, s, insn, opsize);
4646    if (IS_NULL_QREG(addr)) {
4647        gen_addr_fault(s);
4648        return;
4649    }
4650
4651    if (ext & 0x0800) {
4652        /* from reg to ea */
4653        gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4654    } else {
4655        /* from ea to reg */
4656        TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4657        if (extend) {
4658            gen_ext(reg, tmp, opsize, 1);
4659        } else {
4660            gen_partset_reg(opsize, reg, tmp);
4661        }
4662        tcg_temp_free(tmp);
4663    }
4664    switch (extract32(insn, 3, 3)) {
4665    case 3: /* Indirect postincrement.  */
4666        tcg_gen_addi_i32(AREG(insn, 0), addr,
4667                         REG(insn, 0) == 7 && opsize == OS_BYTE
4668                         ? 2
4669                         : opsize_bytes(opsize));
4670        break;
4671    case 4: /* Indirect predecrement.  */
4672        tcg_gen_mov_i32(AREG(insn, 0), addr);
4673        break;
4674    }
4675}
4676
4677DISAS_INSN(move_to_sr)
4678{
4679    if (IS_USER(s)) {
4680        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4681        return;
4682    }
4683    gen_move_to_sr(env, s, insn, false);
4684    gen_exit_tb(s);
4685}
4686
4687DISAS_INSN(move_from_usp)
4688{
4689    if (IS_USER(s)) {
4690        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4691        return;
4692    }
4693    tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4694                   offsetof(CPUM68KState, sp[M68K_USP]));
4695}
4696
4697DISAS_INSN(move_to_usp)
4698{
4699    if (IS_USER(s)) {
4700        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4701        return;
4702    }
4703    tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4704                   offsetof(CPUM68KState, sp[M68K_USP]));
4705}
4706
4707DISAS_INSN(halt)
4708{
4709    if (IS_USER(s)) {
4710        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4711        return;
4712    }
4713
4714    gen_exception(s, s->pc, EXCP_HALT_INSN);
4715}
4716
4717DISAS_INSN(stop)
4718{
4719    uint16_t ext;
4720
4721    if (IS_USER(s)) {
4722        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4723        return;
4724    }
4725
4726    ext = read_im16(env, s);
4727
4728    gen_set_sr_im(s, ext, 0);
4729    tcg_gen_movi_i32(cpu_halted, 1);
4730    gen_exception(s, s->pc, EXCP_HLT);
4731}
4732
4733DISAS_INSN(rte)
4734{
4735    if (IS_USER(s)) {
4736        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4737        return;
4738    }
4739    gen_exception(s, s->base.pc_next, EXCP_RTE);
4740}
4741
4742DISAS_INSN(cf_movec)
4743{
4744    uint16_t ext;
4745    TCGv reg;
4746
4747    if (IS_USER(s)) {
4748        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4749        return;
4750    }
4751
4752    ext = read_im16(env, s);
4753
4754    if (ext & 0x8000) {
4755        reg = AREG(ext, 12);
4756    } else {
4757        reg = DREG(ext, 12);
4758    }
4759    gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4760    gen_exit_tb(s);
4761}
4762
4763DISAS_INSN(m68k_movec)
4764{
4765    uint16_t ext;
4766    TCGv reg;
4767
4768    if (IS_USER(s)) {
4769        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4770        return;
4771    }
4772
4773    ext = read_im16(env, s);
4774
4775    if (ext & 0x8000) {
4776        reg = AREG(ext, 12);
4777    } else {
4778        reg = DREG(ext, 12);
4779    }
4780    if (insn & 1) {
4781        gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4782    } else {
4783        gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4784    }
4785    gen_exit_tb(s);
4786}
4787
4788DISAS_INSN(intouch)
4789{
4790    if (IS_USER(s)) {
4791        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4792        return;
4793    }
4794    /* ICache fetch.  Implement as no-op.  */
4795}
4796
4797DISAS_INSN(cpushl)
4798{
4799    if (IS_USER(s)) {
4800        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4801        return;
4802    }
4803    /* Cache push/invalidate.  Implement as no-op.  */
4804}
4805
4806DISAS_INSN(cpush)
4807{
4808    if (IS_USER(s)) {
4809        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4810        return;
4811    }
4812    /* Cache push/invalidate.  Implement as no-op.  */
4813}
4814
4815DISAS_INSN(cinv)
4816{
4817    if (IS_USER(s)) {
4818        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4819        return;
4820    }
4821    /* Invalidate cache line.  Implement as no-op.  */
4822}
4823
4824#if defined(CONFIG_SOFTMMU)
4825DISAS_INSN(pflush)
4826{
4827    TCGv opmode;
4828
4829    if (IS_USER(s)) {
4830        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4831        return;
4832    }
4833
4834    opmode = tcg_const_i32((insn >> 3) & 3);
4835    gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4836    tcg_temp_free(opmode);
4837}
4838
4839DISAS_INSN(ptest)
4840{
4841    TCGv is_read;
4842
4843    if (IS_USER(s)) {
4844        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4845        return;
4846    }
4847    is_read = tcg_const_i32((insn >> 5) & 1);
4848    gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4849    tcg_temp_free(is_read);
4850}
4851#endif
4852
4853DISAS_INSN(wddata)
4854{
4855    gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4856}
4857
4858DISAS_INSN(wdebug)
4859{
4860    if (IS_USER(s)) {
4861        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4862        return;
4863    }
4864    /* TODO: Implement wdebug.  */
4865    cpu_abort(env_cpu(env), "WDEBUG not implemented");
4866}
4867#endif
4868
4869DISAS_INSN(trap)
4870{
4871    gen_exception(s, s->base.pc_next, EXCP_TRAP0 + (insn & 0xf));
4872}
4873
4874static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4875{
4876    switch (reg) {
4877    case M68K_FPIAR:
4878        tcg_gen_movi_i32(res, 0);
4879        break;
4880    case M68K_FPSR:
4881        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4882        break;
4883    case M68K_FPCR:
4884        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4885        break;
4886    }
4887}
4888
4889static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4890{
4891    switch (reg) {
4892    case M68K_FPIAR:
4893        break;
4894    case M68K_FPSR:
4895        tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4896        break;
4897    case M68K_FPCR:
4898        gen_helper_set_fpcr(cpu_env, val);
4899        break;
4900    }
4901}
4902
4903static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4904{
4905    int index = IS_USER(s);
4906    TCGv tmp;
4907
4908    tmp = tcg_temp_new();
4909    gen_load_fcr(s, tmp, reg);
4910    tcg_gen_qemu_st32(tmp, addr, index);
4911    tcg_temp_free(tmp);
4912}
4913
4914static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4915{
4916    int index = IS_USER(s);
4917    TCGv tmp;
4918
4919    tmp = tcg_temp_new();
4920    tcg_gen_qemu_ld32u(tmp, addr, index);
4921    gen_store_fcr(s, tmp, reg);
4922    tcg_temp_free(tmp);
4923}
4924
4925
4926static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4927                             uint32_t insn, uint32_t ext)
4928{
4929    int mask = (ext >> 10) & 7;
4930    int is_write = (ext >> 13) & 1;
4931    int mode = extract32(insn, 3, 3);
4932    int i;
4933    TCGv addr, tmp;
4934
4935    switch (mode) {
4936    case 0: /* Dn */
4937        if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4938            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4939            return;
4940        }
4941        if (is_write) {
4942            gen_load_fcr(s, DREG(insn, 0), mask);
4943        } else {
4944            gen_store_fcr(s, DREG(insn, 0), mask);
4945        }
4946        return;
4947    case 1: /* An, only with FPIAR */
4948        if (mask != M68K_FPIAR) {
4949            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4950            return;
4951        }
4952        if (is_write) {
4953            gen_load_fcr(s, AREG(insn, 0), mask);
4954        } else {
4955            gen_store_fcr(s, AREG(insn, 0), mask);
4956        }
4957        return;
4958    case 7: /* Immediate */
4959        if (REG(insn, 0) == 4) {
4960            if (is_write ||
4961                (mask != M68K_FPIAR && mask != M68K_FPSR &&
4962                 mask != M68K_FPCR)) {
4963                gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4964                return;
4965            }
4966            tmp = tcg_const_i32(read_im32(env, s));
4967            gen_store_fcr(s, tmp, mask);
4968            tcg_temp_free(tmp);
4969            return;
4970        }
4971        break;
4972    default:
4973        break;
4974    }
4975
4976    tmp = gen_lea(env, s, insn, OS_LONG);
4977    if (IS_NULL_QREG(tmp)) {
4978        gen_addr_fault(s);
4979        return;
4980    }
4981
4982    addr = tcg_temp_new();
4983    tcg_gen_mov_i32(addr, tmp);
4984
4985    /*
4986     * mask:
4987     *
4988     * 0b100 Floating-Point Control Register
4989     * 0b010 Floating-Point Status Register
4990     * 0b001 Floating-Point Instruction Address Register
4991     *
4992     */
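    /*
     * The loops below visit the three mask bits, transferring one
     * longword per selected register and stepping addr between
     * transfers; the predecrement store form walks downwards and,
     * like (An)+, writes the final address back to An.
     */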
4993
4994    if (is_write && mode == 4) {
4995        for (i = 2; i >= 0; i--, mask >>= 1) {
4996            if (mask & 1) {
4997                gen_qemu_store_fcr(s, addr, 1 << i);
4998                if (mask != 1) {
4999                    tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
5000                }
5001            }
5002        }
5003        tcg_gen_mov_i32(AREG(insn, 0), addr);
5004    } else {
5005        for (i = 0; i < 3; i++, mask >>= 1) {
5006            if (mask & 1) {
5007                if (is_write) {
5008                    gen_qemu_store_fcr(s, addr, 1 << i);
5009                } else {
5010                    gen_qemu_load_fcr(s, addr, 1 << i);
5011                }
5012                if (mask != 1 || mode == 3) {
5013                    tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
5014                }
5015            }
5016        }
5017        if (mode == 3) {
5018            tcg_gen_mov_i32(AREG(insn, 0), addr);
5019        }
5020    }
5021    tcg_temp_free_i32(addr);
5022}
5023
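/*
 * FMOVEM of the floating-point data registers.  The register list is
 * either static (low byte of the extension word) or dynamic (taken
 * from a data register); the transfers themselves are done by helpers
 * specialised for predecrement stores and postincrement loads/stores,
 * and the updated address is written back to An for the -(An) and
 * (An)+ modes.
 */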
5024static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
5025                          uint32_t insn, uint32_t ext)
5026{
5027    int opsize;
5028    TCGv addr, tmp;
5029    int mode = (ext >> 11) & 0x3;
5030    int is_load = ((ext & 0x2000) == 0);
5031
5032    if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
5033        opsize = OS_EXTENDED;
5034    } else {
5035        opsize = OS_DOUBLE;  /* FIXME */
5036    }
5037
5038    addr = gen_lea(env, s, insn, opsize);
5039    if (IS_NULL_QREG(addr)) {
5040        gen_addr_fault(s);
5041        return;
5042    }
5043
5044    tmp = tcg_temp_new();
5045    if (mode & 0x1) {
5046        /* Dynamic register list */
5047        tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
5048    } else {
5049        /* Static register list */
5050        tcg_gen_movi_i32(tmp, ext & 0xff);
5051    }
5052
5053    if (!is_load && (mode & 2) == 0) {
5054        /*
5055         * predecrement addressing mode
5056         * only available to store register to memory
5057         */
5058        if (opsize == OS_EXTENDED) {
5059            gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
5060        } else {
5061            gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
5062        }
5063    } else {
5064        /* postincrement addressing mode */
5065        if (opsize == OS_EXTENDED) {
5066            if (is_load) {
5067                gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
5068            } else {
5069                gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
5070            }
5071        } else {
5072            if (is_load) {
5073                gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
5074            } else {
5075                gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
5076            }
5077        }
5078    }
5079    if ((insn & 070) == 030 || (insn & 070) == 040) {
5080        tcg_gen_mov_i32(AREG(insn, 0), tmp);
5081    }
5082    tcg_temp_free(tmp);
5083}
5084
5085/*
5086 * ??? FP exceptions are not implemented.  Most exceptions are deferred until
5087 * immediately before the next FP instruction is executed.
5088 */
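/*
 * The extension word's bits 15-13 select the operation class: 0 and 2
 * fall through to the arithmetic path (register vs. EA source, with 2
 * also covering fmovecr), 3 is fmove to <ea>, 4/5 move to/from the
 * control registers and 6/7 are fmovem; for arithmetic, opmode in
 * bits 6-0 picks the operation.
 */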
5089DISAS_INSN(fpu)
5090{
5091    uint16_t ext;
5092    int opmode;
5093    int opsize;
5094    TCGv_ptr cpu_src, cpu_dest;
5095
5096    ext = read_im16(env, s);
5097    opmode = ext & 0x7f;
5098    switch ((ext >> 13) & 7) {
5099    case 0:
5100        break;
5101    case 1:
5102        goto undef;
5103    case 2:
5104        if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5105            /* fmovecr */
5106            TCGv rom_offset = tcg_const_i32(opmode);
5107            cpu_dest = gen_fp_ptr(REG(ext, 7));
5108            gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5109            tcg_temp_free_ptr(cpu_dest);
5110            tcg_temp_free(rom_offset);
5111            return;
5112        }
5113        break;
5114    case 3: /* fmove out */
5115        cpu_src = gen_fp_ptr(REG(ext, 7));
5116        opsize = ext_opsize(ext, 10);
5117        if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5118                      EA_STORE, IS_USER(s)) == -1) {
5119            gen_addr_fault(s);
5120        }
5121        gen_helper_ftst(cpu_env, cpu_src);
5122        tcg_temp_free_ptr(cpu_src);
5123        return;
5124    case 4: /* fmove to control register.  */
5125    case 5: /* fmove from control register.  */
5126        gen_op_fmove_fcr(env, s, insn, ext);
5127        return;
5128    case 6: /* fmovem */
5129    case 7:
5130        if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5131            goto undef;
5132        }
5133        gen_op_fmovem(env, s, insn, ext);
5134        return;
5135    }
5136    if (ext & (1 << 14)) {
5137        /* Source effective address.  */
5138        opsize = ext_opsize(ext, 10);
5139        cpu_src = gen_fp_result_ptr();
5140        if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5141                      EA_LOADS, IS_USER(s)) == -1) {
5142            gen_addr_fault(s);
5143            return;
5144        }
5145    } else {
5146        /* Source register.  */
5147        opsize = OS_EXTENDED;
5148        cpu_src = gen_fp_ptr(REG(ext, 10));
5149    }
5150    cpu_dest = gen_fp_ptr(REG(ext, 7));
5151    switch (opmode) {
5152    case 0: /* fmove */
5153        gen_fp_move(cpu_dest, cpu_src);
5154        break;
5155    case 0x40: /* fsmove */
5156        gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5157        break;
5158    case 0x44: /* fdmove */
5159        gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5160        break;
5161    case 1: /* fint */
5162        gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5163        break;
5164    case 2: /* fsinh */
5165        gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5166        break;
5167    case 3: /* fintrz */
5168        gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5169        break;
5170    case 4: /* fsqrt */
5171        gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5172        break;
5173    case 0x41: /* fssqrt */
5174        gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5175        break;
5176    case 0x45: /* fdsqrt */
5177        gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5178        break;
5179    case 0x06: /* flognp1 */
5180        gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5181        break;
5182    case 0x08: /* fetoxm1 */
5183        gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5184        break;
5185    case 0x09: /* ftanh */
5186        gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5187        break;
5188    case 0x0a: /* fatan */
5189        gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5190        break;
5191    case 0x0c: /* fasin */
5192        gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5193        break;
5194    case 0x0d: /* fatanh */
5195        gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5196        break;
5197    case 0x0e: /* fsin */
5198        gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5199        break;
5200    case 0x0f: /* ftan */
5201        gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5202        break;
5203    case 0x10: /* fetox */
5204        gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5205        break;
5206    case 0x11: /* ftwotox */
5207        gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5208        break;
5209    case 0x12: /* ftentox */
5210        gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5211        break;
5212    case 0x14: /* flogn */
5213        gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5214        break;
5215    case 0x15: /* flog10 */
5216        gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5217        break;
5218    case 0x16: /* flog2 */
5219        gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5220        break;
5221    case 0x18: /* fabs */
5222        gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5223        break;
5224    case 0x58: /* fsabs */
5225        gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5226        break;
5227    case 0x5c: /* fdabs */
5228        gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5229        break;
5230    case 0x19: /* fcosh */
5231        gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5232        break;
5233    case 0x1a: /* fneg */
5234        gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5235        break;
5236    case 0x5a: /* fsneg */
5237        gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5238        break;
5239    case 0x5e: /* fdneg */
5240        gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5241        break;
5242    case 0x1c: /* facos */
5243        gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5244        break;
5245    case 0x1d: /* fcos */
5246        gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5247        break;
5248    case 0x1e: /* fgetexp */
5249        gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5250        break;
5251    case 0x1f: /* fgetman */
5252        gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5253        break;
5254    case 0x20: /* fdiv */
5255        gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5256        break;
5257    case 0x60: /* fsdiv */
5258        gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5259        break;
5260    case 0x64: /* fddiv */
5261        gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5262        break;
5263    case 0x21: /* fmod */
5264        gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5265        break;
5266    case 0x22: /* fadd */
5267        gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5268        break;
5269    case 0x62: /* fsadd */
5270        gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5271        break;
5272    case 0x66: /* fdadd */
5273        gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5274        break;
5275    case 0x23: /* fmul */
5276        gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5277        break;
5278    case 0x63: /* fsmul */
5279        gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5280        break;
5281    case 0x67: /* fdmul */
5282        gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5283        break;
5284    case 0x24: /* fsgldiv */
5285        gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5286        break;
5287    case 0x25: /* frem */
5288        gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5289        break;
5290    case 0x26: /* fscale */
5291        gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5292        break;
5293    case 0x27: /* fsglmul */
5294        gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5295        break;
5296    case 0x28: /* fsub */
5297        gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5298        break;
5299    case 0x68: /* fssub */
5300        gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5301        break;
5302    case 0x6c: /* fdsub */
5303        gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5304        break;
5305    case 0x30: case 0x31: case 0x32:
5306    case 0x33: case 0x34: case 0x35:
5307    case 0x36: case 0x37: {
5308            TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5309            gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5310            tcg_temp_free_ptr(cpu_dest2);
5311        }
5312        break;
5313    case 0x38: /* fcmp */
5314        gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5315        return;
5316    case 0x3a: /* ftst */
5317        gen_helper_ftst(cpu_env, cpu_src);
5318        return;
5319    default:
5320        goto undef;
5321    }
5322    tcg_temp_free_ptr(cpu_src);
5323    gen_helper_ftst(cpu_env, cpu_dest);
5324    tcg_temp_free_ptr(cpu_dest);
5325    return;
5326undef:
5327    /* FIXME: Is this right for offset addressing modes?  */
5328    s->pc -= 2;
5329    disas_undef_fpu(env, s, insn);
5330}
5331
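/*
 * Convert an FPU conditional (cond 0-31) into a DisasCompare by
 * combining the relevant FPSR condition-code bits (N, Z, A/NaN) into
 * c->v1 and testing the result against zero.  The signalling
 * predicates 16-31 are handled like their non-signalling counterparts
 * 0-15, since BSUN is not raised (see the TODO below).
 */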
5332static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5333{
5334    TCGv fpsr;
5335
5336    c->g1 = 1;
5337    c->v2 = tcg_const_i32(0);
5338    c->g2 = 0;
5339    /* TODO: Raise BSUN exception.  */
5340    fpsr = tcg_temp_new();
5341    gen_load_fcr(s, fpsr, M68K_FPSR);
5342    switch (cond) {
5343    case 0:  /* False */
5344    case 16: /* Signaling False */
5345        c->v1 = c->v2;
5346        c->tcond = TCG_COND_NEVER;
5347        break;
5348    case 1:  /* EQual Z */
5349    case 17: /* Signaling EQual Z */
5350        c->v1 = tcg_temp_new();
5351        c->g1 = 0;
5352        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5353        c->tcond = TCG_COND_NE;
5354        break;
5355    case 2:  /* Ordered Greater Than !(A || Z || N) */
5356    case 18: /* Greater Than !(A || Z || N) */
5357        c->v1 = tcg_temp_new();
5358        c->g1 = 0;
5359        tcg_gen_andi_i32(c->v1, fpsr,
5360                         FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5361        c->tcond = TCG_COND_EQ;
5362        break;
5363    case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5364    case 19: /* Greater than or Equal Z || !(A || N) */
5365        c->v1 = tcg_temp_new();
5366        c->g1 = 0;
5367        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5368        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5369        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5370        tcg_gen_or_i32(c->v1, c->v1, fpsr);
5371        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5372        c->tcond = TCG_COND_NE;
5373        break;
5374    case 4:  /* Ordered Less Than !(!N || A || Z) */
5375    case 20: /* Less Than !(!N || A || Z) */
5376        c->v1 = tcg_temp_new();
5377        c->g1 = 0;
5378        tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5379        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5380        c->tcond = TCG_COND_EQ;
5381        break;
5382    case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5383    case 21: /* Less than or Equal Z || (N && !A) */
5384        c->v1 = tcg_temp_new();
5385        c->g1 = 0;
5386        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5387        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5388        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5389        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5390        c->tcond = TCG_COND_NE;
5391        break;
5392    case 6:  /* Ordered Greater or Less than !(A || Z) */
5393    case 22: /* Greater or Less than !(A || Z) */
5394        c->v1 = tcg_temp_new();
5395        c->g1 = 0;
5396        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5397        c->tcond = TCG_COND_EQ;
5398        break;
5399    case 7:  /* Ordered !A */
5400    case 23: /* Greater, Less or Equal !A */
5401        c->v1 = tcg_temp_new();
5402        c->g1 = 0;
5403        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5404        c->tcond = TCG_COND_EQ;
5405        break;
5406    case 8:  /* Unordered A */
5407    case 24: /* Not Greater, Less or Equal A */
5408        c->v1 = tcg_temp_new();
5409        c->g1 = 0;
5410        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5411        c->tcond = TCG_COND_NE;
5412        break;
5413    case 9:  /* Unordered or Equal A || Z */
5414    case 25: /* Not Greater or Less than A || Z */
5415        c->v1 = tcg_temp_new();
5416        c->g1 = 0;
5417        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5418        c->tcond = TCG_COND_NE;
5419        break;
5420    case 10: /* Unordered or Greater Than A || !(N || Z) */
5421    case 26: /* Not Less or Equal A || !(N || Z) */
5422        c->v1 = tcg_temp_new();
5423        c->g1 = 0;
5424        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5425        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5426        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5427        tcg_gen_or_i32(c->v1, c->v1, fpsr);
5428        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5429        c->tcond = TCG_COND_NE;
5430        break;
5431    case 11: /* Unordered or Greater or Equal A || Z || !N */
5432    case 27: /* Not Less Than A || Z || !N */
5433        c->v1 = tcg_temp_new();
5434        c->g1 = 0;
5435        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5436        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5437        c->tcond = TCG_COND_NE;
5438        break;
5439    case 12: /* Unordered or Less Than A || (N && !Z) */
5440    case 28: /* Not Greater than or Equal A || (N && !Z) */
5441        c->v1 = tcg_temp_new();
5442        c->g1 = 0;
5443        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5444        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5445        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5446        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5447        c->tcond = TCG_COND_NE;
5448        break;
5449    case 13: /* Unordered or Less or Equal A || Z || N */
5450    case 29: /* Not Greater Than A || Z || N */
5451        c->v1 = tcg_temp_new();
5452        c->g1 = 0;
5453        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5454        c->tcond = TCG_COND_NE;
5455        break;
5456    case 14: /* Not Equal !Z */
5457    case 30: /* Signaling Not Equal !Z */
5458        c->v1 = tcg_temp_new();
5459        c->g1 = 0;
5460        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5461        c->tcond = TCG_COND_EQ;
5462        break;
5463    case 15: /* True */
5464    case 31: /* Signaling True */
5465        c->v1 = c->v2;
5466        c->tcond = TCG_COND_ALWAYS;
5467        break;
5468    }
5469    tcg_temp_free(fpsr);
5470}
5471
5472static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5473{
5474    DisasCompare c;
5475
5476    gen_fcc_cond(&c, s, cond);
5477    update_cc_op(s);
5478    tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5479    free_cond(&c);
5480}
5481
5482DISAS_INSN(fbcc)
5483{
5484    uint32_t offset;
5485    uint32_t base;
5486    TCGLabel *l1;
5487
5488    base = s->pc;
5489    offset = (int16_t)read_im16(env, s);
5490    if (insn & (1 << 6)) {
5491        offset = (offset << 16) | read_im16(env, s);
5492    }
5493
5494    l1 = gen_new_label();
5495    update_cc_op(s);
5496    gen_fjmpcc(s, insn & 0x3f, l1);
5497    gen_jmp_tb(s, 0, s->pc);
5498    gen_set_label(l1);
5499    gen_jmp_tb(s, 1, base + offset);
5500}
5501
5502DISAS_INSN(fscc)
5503{
5504    DisasCompare c;
5505    int cond;
5506    TCGv tmp;
5507    uint16_t ext;
5508
5509    ext = read_im16(env, s);
5510    cond = ext & 0x3f;
5511    gen_fcc_cond(&c, s, cond);
5512
5513    tmp = tcg_temp_new();
5514    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5515    free_cond(&c);
5516
5517    tcg_gen_neg_i32(tmp, tmp);
5518    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5519    tcg_temp_free(tmp);
5520}
5521
5522#if defined(CONFIG_SOFTMMU)
5523DISAS_INSN(frestore)
5524{
5525    TCGv addr;
5526
5527    if (IS_USER(s)) {
5528        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5529        return;
5530    }
5531    if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5532        SRC_EA(env, addr, OS_LONG, 0, NULL);
5533        /* FIXME: check the state frame */
5534    } else {
5535        disas_undef(env, s, insn);
5536    }
5537}
5538
5539DISAS_INSN(fsave)
5540{
5541    if (IS_USER(s)) {
5542        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5543        return;
5544    }
5545
5546    if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5547        /* always write IDLE */
5548        TCGv idle = tcg_const_i32(0x41000000);
5549        DEST_EA(env, insn, OS_LONG, idle, NULL);
5550        tcg_temp_free(idle);
5551    } else {
5552        disas_undef(env, s, insn);
5553    }
5554}
5555#endif
5556
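/*
 * Pick the upper or lower 16-bit half of a MAC operand: in fractional
 * mode (MACSR_FI) the half is placed in the upper 16 bits, in signed
 * integer mode (MACSR_SU) it is sign-extended, otherwise it is
 * zero-extended.
 */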
5557static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5558{
5559    TCGv tmp = tcg_temp_new();
5560    if (s->env->macsr & MACSR_FI) {
5561        if (upper)
5562            tcg_gen_andi_i32(tmp, val, 0xffff0000);
5563        else
5564            tcg_gen_shli_i32(tmp, val, 16);
5565    } else if (s->env->macsr & MACSR_SU) {
5566        if (upper)
5567            tcg_gen_sari_i32(tmp, val, 16);
5568        else
5569            tcg_gen_ext16s_i32(tmp, val);
5570    } else {
5571        if (upper)
5572            tcg_gen_shri_i32(tmp, val, 16);
5573        else
5574            tcg_gen_ext16u_i32(tmp, val);
5575    }
5576    return tmp;
5577}
5578
5579static void gen_mac_clear_flags(void)
5580{
5581    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5582                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5583}
5584
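/*
 * MAC and its subtract form: the two 16- or 32-bit operands are
 * multiplied into s->mactmp (with an optional shift by the scale
 * factor in the extension word), added to or subtracted from the
 * selected accumulator and then saturated according to the MACSR
 * mode.  The "MAC with load" encodings additionally load a longword
 * through QREG_MAC_MASK into a register, with (An)+ / -(An)
 * writeback; the EMAC_B dual-accumulate form repeats the accumulate
 * on a second accumulator.
 */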
5585DISAS_INSN(mac)
5586{
5587    TCGv rx;
5588    TCGv ry;
5589    uint16_t ext;
5590    int acc;
5591    TCGv tmp;
5592    TCGv addr;
5593    TCGv loadval;
5594    int dual;
5595    TCGv saved_flags;
5596
5597    if (!s->done_mac) {
5598        s->mactmp = tcg_temp_new_i64();
5599        s->done_mac = 1;
5600    }
5601
5602    ext = read_im16(env, s);
5603
5604    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5605    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5606    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5607        disas_undef(env, s, insn);
5608        return;
5609    }
5610    if (insn & 0x30) {
5611        /* MAC with load.  */
5612        tmp = gen_lea(env, s, insn, OS_LONG);
5613        addr = tcg_temp_new();
5614        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5615        /*
5616         * Load the value now to ensure correct exception behavior.
5617         * Perform writeback after reading the MAC inputs.
5618         */
5619        loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5620
5621        acc ^= 1;
5622        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5623        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5624    } else {
5625        loadval = addr = NULL_QREG;
5626        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5627        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5628    }
5629
5630    gen_mac_clear_flags();
5631#if 0
5632    l1 = -1;
5633    /* Disabled because conditional branches clobber temporary vars.  */
5634    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5635        /* Skip the multiply if we know we will ignore it.  */
5636        l1 = gen_new_label();
5637        tmp = tcg_temp_new();
5638        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5639        gen_op_jmp_nz32(tmp, l1);
5640    }
5641#endif
5642
5643    if ((ext & 0x0800) == 0) {
5644        /* Word.  */
5645        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5646        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5647    }
5648    if (s->env->macsr & MACSR_FI) {
5649        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5650    } else {
5651        if (s->env->macsr & MACSR_SU)
5652            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5653        else
5654            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
5655        switch ((ext >> 9) & 3) {
5656        case 1:
5657            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5658            break;
5659        case 3:
5660            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5661            break;
5662        }
5663    }
5664
5665    if (dual) {
5666        /* Save the overflow flag from the multiply.  */
5667        saved_flags = tcg_temp_new();
5668        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5669    } else {
5670        saved_flags = NULL_QREG;
5671    }
5672
5673#if 0
5674    /* Disabled because conditional branches clobber temporary vars.  */
5675    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5676        /* Skip the accumulate if the value is already saturated.  */
5677        l1 = gen_new_label();
5678        tmp = tcg_temp_new();
5679        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5680        gen_op_jmp_nz32(tmp, l1);
5681    }
5682#endif
5683
5684    if (insn & 0x100)
5685        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5686    else
5687        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5688
5689    if (s->env->macsr & MACSR_FI)
5690        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5691    else if (s->env->macsr & MACSR_SU)
5692        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5693    else
5694        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5695
5696#if 0
5697    /* Disabled because conditional branches clobber temporary vars.  */
5698    if (l1 != -1)
5699        gen_set_label(l1);
5700#endif
5701
5702    if (dual) {
5703        /* Dual accumulate variant.  */
5704        acc = (ext >> 2) & 3;
5705        /* Restore the overflow flag from the multiply.  */
5706        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5707#if 0
5708        /* Disabled because conditional branches clobber temporary vars.  */
5709        if ((s->env->macsr & MACSR_OMC) != 0) {
5710            /* Skip the accumulate if the value is already saturated.  */
5711            l1 = gen_new_label();
5712            tmp = tcg_temp_new();
5713            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5714            gen_op_jmp_nz32(tmp, l1);
5715        }
5716#endif
5717        if (ext & 2)
5718            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5719        else
5720            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5721        if (s->env->macsr & MACSR_FI)
5722            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5723        else if (s->env->macsr & MACSR_SU)
5724            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5725        else
5726            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5727#if 0
5728        /* Disabled because conditional branches clobber temporary vars.  */
5729        if (l1 != -1)
5730            gen_set_label(l1);
5731#endif
5732    }
5733    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5734
5735    if (insn & 0x30) {
5736        TCGv rw;
5737        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5738        tcg_gen_mov_i32(rw, loadval);
5739        /*
5740         * FIXME: Should address writeback happen with the masked or
5741         * unmasked value?
5742         */
5743        switch ((insn >> 3) & 7) {
5744        case 3: /* Post-increment.  */
5745            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5746            break;
5747        case 4: /* Pre-decrement.  */
5748            tcg_gen_mov_i32(AREG(insn, 0), addr);
5749        }
5750        tcg_temp_free(loadval);
5751    }
5752}
5753
5754DISAS_INSN(from_mac)
5755{
5756    TCGv rx;
5757    TCGv_i64 acc;
5758    int accnum;
5759
5760    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5761    accnum = (insn >> 9) & 3;
5762    acc = MACREG(accnum);
5763    if (s->env->macsr & MACSR_FI) {
5764        gen_helper_get_macf(rx, cpu_env, acc);
5765    } else if ((s->env->macsr & MACSR_OMC) == 0) {
5766        tcg_gen_extrl_i64_i32(rx, acc);
5767    } else if (s->env->macsr & MACSR_SU) {
5768        gen_helper_get_macs(rx, acc);
5769    } else {
5770        gen_helper_get_macu(rx, acc);
5771    }
5772    if (insn & 0x40) {
5773        tcg_gen_movi_i64(acc, 0);
5774        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5775    }
5776}
5777
5778DISAS_INSN(move_mac)
5779{
5780    /* FIXME: This can be done without a helper.  */
5781    int src;
5782    TCGv dest;
5783    src = insn & 3;
5784    dest = tcg_const_i32((insn >> 9) & 3);
5785    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5786    gen_mac_clear_flags();
5787    gen_helper_mac_set_flags(cpu_env, dest);
5788}
5789
5790DISAS_INSN(from_macsr)
5791{
5792    TCGv reg;
5793
5794    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5795    tcg_gen_mov_i32(reg, QREG_MACSR);
5796}
5797
5798DISAS_INSN(from_mask)
5799{
5800    TCGv reg;
5801    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5802    tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5803}
5804
5805DISAS_INSN(from_mext)
5806{
5807    TCGv reg;
5808    TCGv acc;
5809    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
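    /*
     * Opcode bit 10 selects the extension words for the ACC2/ACC3 pair
     * rather than ACC0/ACC1.
     */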
5810    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5811    if (s->env->macsr & MACSR_FI)
5812        gen_helper_get_mac_extf(reg, cpu_env, acc);
5813    else
5814        gen_helper_get_mac_exti(reg, cpu_env, acc);
5815}
5816
5817DISAS_INSN(macsr_to_ccr)
5818{
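    /* Only the low nibble of MACSR (its condition-code flags) is copied
       into the CCR.  */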
5819    TCGv tmp = tcg_temp_new();
5820    tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5821    gen_helper_set_sr(cpu_env, tmp);
5822    tcg_temp_free(tmp);
5823    set_cc_op(s, CC_OP_FLAGS);
5824}
5825
5826DISAS_INSN(to_mac)
5827{
5828    TCGv_i64 acc;
5829    TCGv val;
5830    int accnum;
5831    accnum = (insn >> 9) & 3;
5832    acc = MACREG(accnum);
5833    SRC_EA(env, val, OS_LONG, 0, NULL);
5834    if (s->env->macsr & MACSR_FI) {
5835        tcg_gen_ext_i32_i64(acc, val);
5836        tcg_gen_shli_i64(acc, acc, 8);
5837    } else if (s->env->macsr & MACSR_SU) {
5838        tcg_gen_ext_i32_i64(acc, val);
5839    } else {
5840        tcg_gen_extu_i32_i64(acc, val);
5841    }
5842    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5843    gen_mac_clear_flags();
5844    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5845}
5846
5847DISAS_INSN(to_macsr)
5848{
5849    TCGv val;
5850    SRC_EA(env, val, OS_LONG, 0, NULL);
5851    gen_helper_set_macsr(cpu_env, val);
5852    gen_exit_tb(s);
5853}
5854
5855DISAS_INSN(to_mask)
5856{
5857    TCGv val;
5858    SRC_EA(env, val, OS_LONG, 0, NULL);
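    /*
     * The address mask is only 16 bits wide; the upper half of MAC_MASK
     * always reads back as all ones.
     */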
5859    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5860}
5861
5862DISAS_INSN(to_mext)
5863{
5864    TCGv val;
5865    TCGv acc;
5866    SRC_EA(env, val, OS_LONG, 0, NULL);
5867    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5868    if (s->env->macsr & MACSR_FI)
5869        gen_helper_set_mac_extf(cpu_env, val, acc);
5870    else if (s->env->macsr & MACSR_SU)
5871        gen_helper_set_mac_exts(cpu_env, val, acc);
5872    else
5873        gen_helper_set_mac_extu(cpu_env, val, acc);
5874}
5875
5876static disas_proc opcode_table[65536];
5877
5878static void
5879register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
5880{
5881  int i;
5882  int from;
5883  int to;
5884
5885  /* Sanity check.  All set bits must be included in the mask.  */
5886  if (opcode & ~mask) {
5887      fprintf(stderr,
5888              "qemu internal error: bogus opcode definition %04x/%04x\n",
5889              opcode, mask);
5890      abort();
5891  }
5892  /*
5893   * This could probably be cleverer.  For now just optimize the case where
5894   * the top bits are known.
5895   */
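  /*
   * Example: BASE(link, 4e50, fff8).  The first clear mask bit is
   * 0x0004, so i ends up as 0x0008 and entries 4e50..4e57 (LINK A0-A7)
   * are filled directly.  For a sparser mask such as f1c0 the loop
   * instead walks a whole 0x1000-entry block and lets the mask test
   * below pick out the matching slots.
   */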
5896  /* Find the first zero bit in the mask.  */
5897  i = 0x8000;
5898  while ((i & mask) != 0)
5899      i >>= 1;
5900  /* Iterate over all combinations of this and lower bits.  */
5901  if (i == 0)
5902      i = 1;
5903  else
5904      i <<= 1;
5905  from = opcode & ~(i - 1);
5906  to = from + i;
5907  for (i = from; i < to; i++) {
5908      if ((i & mask) == opcode)
5909          opcode_table[i] = proc;
5910  }
5911}
5912
5913/*
5914 * Register m68k opcode handlers.  Order is important.
5915 * Later insns override earlier ones.
5916 */
5917void register_m68k_insns (CPUM68KState *env)
5918{
5919    /*
5920     * Build the opcode table only once to avoid
5921     * multithreading issues.
5922     */
5923    if (opcode_table[0] != NULL) {
5924        return;
5925    }
5926
5927    /*
5928     * Use BASE() for instructions available on both
5929     * CF_ISA_A and M68000.
5930     */
5931#define BASE(name, opcode, mask) \
5932    register_opcode(disas_##name, 0x##opcode, 0x##mask)
5933#define INSN(name, opcode, mask, feature) do { \
5934    if (m68k_feature(env, M68K_FEATURE_##feature)) \
5935        BASE(name, opcode, mask); \
5936    } while(0)
5937    BASE(undef,     0000, 0000);
5938    INSN(arith_im,  0080, fff8, CF_ISA_A);
5939    INSN(arith_im,  0000, ff00, M68000);
5940    INSN(chk2,      00c0, f9c0, CHK2);
5941    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
5942    BASE(bitop_reg, 0100, f1c0);
5943    BASE(bitop_reg, 0140, f1c0);
5944    BASE(bitop_reg, 0180, f1c0);
5945    BASE(bitop_reg, 01c0, f1c0);
5946    INSN(movep,     0108, f138, MOVEP);
5947    INSN(arith_im,  0280, fff8, CF_ISA_A);
5948    INSN(arith_im,  0200, ff00, M68000);
5949    INSN(undef,     02c0, ffc0, M68000);
5950    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
5951    INSN(arith_im,  0480, fff8, CF_ISA_A);
5952    INSN(arith_im,  0400, ff00, M68000);
5953    INSN(undef,     04c0, ffc0, M68000);
5954    INSN(arith_im,  0600, ff00, M68000);
5955    INSN(undef,     06c0, ffc0, M68000);
5956    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
5957    INSN(arith_im,  0680, fff8, CF_ISA_A);
5958    INSN(arith_im,  0c00, ff38, CF_ISA_A);
5959    INSN(arith_im,  0c00, ff00, M68000);
5960    BASE(bitop_im,  0800, ffc0);
5961    BASE(bitop_im,  0840, ffc0);
5962    BASE(bitop_im,  0880, ffc0);
5963    BASE(bitop_im,  08c0, ffc0);
5964    INSN(arith_im,  0a80, fff8, CF_ISA_A);
5965    INSN(arith_im,  0a00, ff00, M68000);
5966#if defined(CONFIG_SOFTMMU)
5967    INSN(moves,     0e00, ff00, M68000);
5968#endif
5969    INSN(cas,       0ac0, ffc0, CAS);
5970    INSN(cas,       0cc0, ffc0, CAS);
5971    INSN(cas,       0ec0, ffc0, CAS);
5972    INSN(cas2w,     0cfc, ffff, CAS);
5973    INSN(cas2l,     0efc, ffff, CAS);
5974    BASE(move,      1000, f000);
5975    BASE(move,      2000, f000);
5976    BASE(move,      3000, f000);
5977    INSN(chk,       4000, f040, M68000);
5978    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
5979    INSN(negx,      4080, fff8, CF_ISA_A);
5980    INSN(negx,      4000, ff00, M68000);
5981    INSN(undef,     40c0, ffc0, M68000);
5982    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
5983    INSN(move_from_sr, 40c0, ffc0, M68000);
5984    BASE(lea,       41c0, f1c0);
5985    BASE(clr,       4200, ff00);
5986    BASE(undef,     42c0, ffc0);
5987    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
5988    INSN(move_from_ccr, 42c0, ffc0, M68000);
5989    INSN(neg,       4480, fff8, CF_ISA_A);
5990    INSN(neg,       4400, ff00, M68000);
5991    INSN(undef,     44c0, ffc0, M68000);
5992    BASE(move_to_ccr, 44c0, ffc0);
5993    INSN(not,       4680, fff8, CF_ISA_A);
5994    INSN(not,       4600, ff00, M68000);
5995#if defined(CONFIG_SOFTMMU)
5996    BASE(move_to_sr, 46c0, ffc0);
5997#endif
5998    INSN(nbcd,      4800, ffc0, M68000);
5999    INSN(linkl,     4808, fff8, M68000);
6000    BASE(pea,       4840, ffc0);
6001    BASE(swap,      4840, fff8);
6002    INSN(bkpt,      4848, fff8, BKPT);
6003    INSN(movem,     48d0, fbf8, CF_ISA_A);
6004    INSN(movem,     48e8, fbf8, CF_ISA_A);
6005    INSN(movem,     4880, fb80, M68000);
6006    BASE(ext,       4880, fff8);
6007    BASE(ext,       48c0, fff8);
6008    BASE(ext,       49c0, fff8);
6009    BASE(tst,       4a00, ff00);
6010    INSN(tas,       4ac0, ffc0, CF_ISA_B);
6011    INSN(tas,       4ac0, ffc0, M68000);
6012#if defined(CONFIG_SOFTMMU)
6013    INSN(halt,      4ac8, ffff, CF_ISA_A);
6014#endif
6015    INSN(pulse,     4acc, ffff, CF_ISA_A);
6016    BASE(illegal,   4afc, ffff);
6017    INSN(mull,      4c00, ffc0, CF_ISA_A);
6018    INSN(mull,      4c00, ffc0, LONG_MULDIV);
6019    INSN(divl,      4c40, ffc0, CF_ISA_A);
6020    INSN(divl,      4c40, ffc0, LONG_MULDIV);
6021    INSN(sats,      4c80, fff8, CF_ISA_B);
6022    BASE(trap,      4e40, fff0);
6023    BASE(link,      4e50, fff8);
6024    BASE(unlk,      4e58, fff8);
6025#if defined(CONFIG_SOFTMMU)
6026    INSN(move_to_usp, 4e60, fff8, USP);
6027    INSN(move_from_usp, 4e68, fff8, USP);
6028    INSN(reset,     4e70, ffff, M68000);
6029    BASE(stop,      4e72, ffff);
6030    BASE(rte,       4e73, ffff);
6031    INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
6032    INSN(m68k_movec, 4e7a, fffe, MOVEC);
6033#endif
6034    BASE(nop,       4e71, ffff);
6035    INSN(rtd,       4e74, ffff, RTD);
6036    BASE(rts,       4e75, ffff);
6037    INSN(rtr,       4e77, ffff, M68000);
6038    BASE(jump,      4e80, ffc0);
6039    BASE(jump,      4ec0, ffc0);
6040    INSN(addsubq,   5000, f080, M68000);
6041    BASE(addsubq,   5080, f0c0);
6042    INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
6043    INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
6044    INSN(dbcc,      50c8, f0f8, M68000);
6045    INSN(tpf,       51f8, fff8, CF_ISA_A);
6046
6047    /* Branch instructions.  */
6048    BASE(branch,    6000, f000);
6049    /* Disable long branch instructions, then add back the ones we want.  */
6050    BASE(undef,     60ff, f0ff); /* All long branches.  */
6051    INSN(branch,    60ff, f0ff, CF_ISA_B);
6052    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
6053    INSN(branch,    60ff, ffff, BRAL);
6054    INSN(branch,    60ff, f0ff, BCCL);
6055
6056    BASE(moveq,     7000, f100);
6057    INSN(mvzs,      7100, f100, CF_ISA_B);
6058    BASE(or,        8000, f000);
6059    BASE(divw,      80c0, f0c0);
6060    INSN(sbcd_reg,  8100, f1f8, M68000);
6061    INSN(sbcd_mem,  8108, f1f8, M68000);
6062    BASE(addsub,    9000, f000);
6063    INSN(undef,     90c0, f0c0, CF_ISA_A);
6064    INSN(subx_reg,  9180, f1f8, CF_ISA_A);
6065    INSN(subx_reg,  9100, f138, M68000);
6066    INSN(subx_mem,  9108, f138, M68000);
6067    INSN(suba,      91c0, f1c0, CF_ISA_A);
6068    INSN(suba,      90c0, f0c0, M68000);
6069
6070    BASE(undef_mac, a000, f000);
6071    INSN(mac,       a000, f100, CF_EMAC);
6072    INSN(from_mac,  a180, f9b0, CF_EMAC);
6073    INSN(move_mac,  a110, f9fc, CF_EMAC);
6074    INSN(from_macsr,a980, f9f0, CF_EMAC);
6075    INSN(from_mask, ad80, fff0, CF_EMAC);
6076    INSN(from_mext, ab80, fbf0, CF_EMAC);
6077    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
6078    INSN(to_mac,    a100, f9c0, CF_EMAC);
6079    INSN(to_macsr,  a900, ffc0, CF_EMAC);
6080    INSN(to_mext,   ab00, fbc0, CF_EMAC);
6081    INSN(to_mask,   ad00, ffc0, CF_EMAC);
6082
6083    INSN(mov3q,     a140, f1c0, CF_ISA_B);
6084    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
6085    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
6086    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
6087    INSN(cmp,       b080, f1c0, CF_ISA_A);
6088    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
6089    INSN(cmp,       b000, f100, M68000);
6090    INSN(eor,       b100, f100, M68000);
6091    INSN(cmpm,      b108, f138, M68000);
6092    INSN(cmpa,      b0c0, f0c0, M68000);
6093    INSN(eor,       b180, f1c0, CF_ISA_A);
6094    BASE(and,       c000, f000);
6095    INSN(exg_dd,    c140, f1f8, M68000);
6096    INSN(exg_aa,    c148, f1f8, M68000);
6097    INSN(exg_da,    c188, f1f8, M68000);
6098    BASE(mulw,      c0c0, f0c0);
6099    INSN(abcd_reg,  c100, f1f8, M68000);
6100    INSN(abcd_mem,  c108, f1f8, M68000);
6101    BASE(addsub,    d000, f000);
6102    INSN(undef,     d0c0, f0c0, CF_ISA_A);
6103    INSN(addx_reg,  d180, f1f8, CF_ISA_A);
6104    INSN(addx_reg,  d100, f138, M68000);
6105    INSN(addx_mem,  d108, f138, M68000);
6106    INSN(adda,      d1c0, f1c0, CF_ISA_A);
6107    INSN(adda,      d0c0, f0c0, M68000);
6108    INSN(shift_im,  e080, f0f0, CF_ISA_A);
6109    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
6110    INSN(shift8_im, e000, f0f0, M68000);
6111    INSN(shift16_im, e040, f0f0, M68000);
6112    INSN(shift_im,  e080, f0f0, M68000);
6113    INSN(shift8_reg, e020, f0f0, M68000);
6114    INSN(shift16_reg, e060, f0f0, M68000);
6115    INSN(shift_reg, e0a0, f0f0, M68000);
6116    INSN(shift_mem, e0c0, fcc0, M68000);
6117    INSN(rotate_im, e090, f0f0, M68000);
6118    INSN(rotate8_im, e010, f0f0, M68000);
6119    INSN(rotate16_im, e050, f0f0, M68000);
6120    INSN(rotate_reg, e0b0, f0f0, M68000);
6121    INSN(rotate8_reg, e030, f0f0, M68000);
6122    INSN(rotate16_reg, e070, f0f0, M68000);
6123    INSN(rotate_mem, e4c0, fcc0, M68000);
6124    INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
6125    INSN(bfext_reg, e9c0, fdf8, BITFIELD);
6126    INSN(bfins_mem, efc0, ffc0, BITFIELD);
6127    INSN(bfins_reg, efc0, fff8, BITFIELD);
6128    INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
6129    INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
6130    INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
6131    INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
6132    INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
6133    INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
6134    INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
6135    INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
6136    INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
6137    INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
6138    BASE(undef_fpu, f000, f000);
6139    INSN(fpu,       f200, ffc0, CF_FPU);
6140    INSN(fbcc,      f280, ffc0, CF_FPU);
6141    INSN(fpu,       f200, ffc0, FPU);
6142    INSN(fscc,      f240, ffc0, FPU);
6143    INSN(fbcc,      f280, ff80, FPU);
6144#if defined(CONFIG_SOFTMMU)
6145    INSN(frestore,  f340, ffc0, CF_FPU);
6146    INSN(fsave,     f300, ffc0, CF_FPU);
6147    INSN(frestore,  f340, ffc0, FPU);
6148    INSN(fsave,     f300, ffc0, FPU);
6149    INSN(intouch,   f340, ffc0, CF_ISA_A);
6150    INSN(cpushl,    f428, ff38, CF_ISA_A);
6151    INSN(cpush,     f420, ff20, M68040);
6152    INSN(cinv,      f400, ff20, M68040);
6153    INSN(pflush,    f500, ffe0, M68040);
6154    INSN(ptest,     f548, ffd8, M68040);
6155    INSN(wddata,    fb00, ff00, CF_ISA_A);
6156    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6157#endif
6158    INSN(move16_mem, f600, ffe0, M68040);
6159    INSN(move16_reg, f620, fff8, M68040);
6160#undef INSN
6161}
6162
6163static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6164{
6165    DisasContext *dc = container_of(dcbase, DisasContext, base);
6166    CPUM68KState *env = cpu->env_ptr;
6167
6168    dc->env = env;
6169    dc->pc = dc->base.pc_first;
6170    dc->cc_op = CC_OP_DYNAMIC;
6171    dc->cc_op_synced = 1;
6172    dc->done_mac = 0;
6173    dc->writeback_mask = 0;
6174    init_release_array(dc);
6175}
6176
6177static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6178{
6179}
6180
6181static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6182{
6183    DisasContext *dc = container_of(dcbase, DisasContext, base);
6184    tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6185}
6186
6187static bool m68k_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
6188                                     const CPUBreakpoint *bp)
6189{
6190    DisasContext *dc = container_of(dcbase, DisasContext, base);
6191
6192    gen_exception(dc, dc->base.pc_next, EXCP_DEBUG);
6193    /*
6194     * The address covered by the breakpoint must be included in
6195     * [tb->pc, tb->pc + tb->size) in order for it to be
6196     * properly cleared -- thus we increment the PC here so that
6197     * the logic setting tb->size below does the right thing.
6198     */
6199    dc->base.pc_next += 2;
6200
6201    return true;
6202}
6203
6204static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6205{
6206    DisasContext *dc = container_of(dcbase, DisasContext, base);
6207    CPUM68KState *env = cpu->env_ptr;
6208    uint16_t insn = read_im16(env, dc);
6209
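    /*
     * Dispatch on the raw 16-bit opcode.  register_m68k_insns() filled
     * every entry of opcode_table, so unimplemented patterns land in
     * disas_undef rather than a NULL pointer.
     */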
6210    opcode_table[insn](env, dc, insn);
6211    do_writebacks(dc);
6212    do_release(dc);
6213
6214    dc->base.pc_next = dc->pc;
6215
6216    if (dc->base.is_jmp == DISAS_NEXT) {
6217        /*
6218         * Stop translation when the next insn might touch a new page.
6219         * This ensures that prefetch aborts at the right place.
6220         *
6221         * We cannot determine the size of the next insn without
6222         * completely decoding it.  However, the maximum insn size
6223         * is 32 bytes, so end if we do not have that much remaining.
6224         * This may produce several small TBs at the end of each page,
6225         * but they will all be linked with goto_tb.
6226         *
6227         * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6228         * smaller than MC68020's.
6229         */
6230        target_ulong start_page_offset
6231            = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6232
6233        if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6234            dc->base.is_jmp = DISAS_TOO_MANY;
6235        }
6236    }
6237}
6238
6239static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6240{
6241    DisasContext *dc = container_of(dcbase, DisasContext, base);
6242
6243    switch (dc->base.is_jmp) {
6244    case DISAS_NORETURN:
6245        break;
6246    case DISAS_TOO_MANY:
6247        update_cc_op(dc);
6248        if (dc->base.singlestep_enabled) {
6249            tcg_gen_movi_i32(QREG_PC, dc->pc);
6250            gen_raise_exception(EXCP_DEBUG);
6251        } else {
6252            gen_jmp_tb(dc, 0, dc->pc);
6253        }
6254        break;
6255    case DISAS_JUMP:
6256        /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6257        if (dc->base.singlestep_enabled) {
6258            gen_raise_exception(EXCP_DEBUG);
6259        } else {
6260            tcg_gen_lookup_and_goto_ptr();
6261        }
6262        break;
6263    case DISAS_EXIT:
6264        /*
6265         * We updated CC_OP and PC in gen_exit_tb, but also modified
6266         * other state that may require returning to the main loop.
6267         */
6268        if (dc->base.singlestep_enabled) {
6269            gen_raise_exception(EXCP_DEBUG);
6270        } else {
6271            tcg_gen_exit_tb(NULL, 0);
6272        }
6273        break;
6274    default:
6275        g_assert_not_reached();
6276    }
6277}
6278
6279static void m68k_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
6280{
6281    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
6282    log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
6283}
6284
6285static const TranslatorOps m68k_tr_ops = {
6286    .init_disas_context = m68k_tr_init_disas_context,
6287    .tb_start           = m68k_tr_tb_start,
6288    .insn_start         = m68k_tr_insn_start,
6289    .breakpoint_check   = m68k_tr_breakpoint_check,
6290    .translate_insn     = m68k_tr_translate_insn,
6291    .tb_stop            = m68k_tr_tb_stop,
6292    .disas_log          = m68k_tr_disas_log,
6293};
6294
6295void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
6296{
6297    DisasContext dc;
6298    translator_loop(&m68k_tr_ops, &dc.base, cpu, tb, max_insns);
6299}
6300
6301static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6302{
6303    floatx80 a = { .high = high, .low = low };
6304    union {
6305        float64 f64;
6306        double d;
6307    } u;
6308
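    /*
     * Round to float64 and reinterpret the bits as a host double; this
     * is only used for the human-readable column of the register dump.
     */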
6309    u.f64 = floatx80_to_float64(a, &env->fp_status);
6310    return u.d;
6311}
6312
6313void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6314{
6315    M68kCPU *cpu = M68K_CPU(cs);
6316    CPUM68KState *env = &cpu->env;
6317    int i;
6318    uint16_t sr;
6319    for (i = 0; i < 8; i++) {
6320        qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6321                     "F%d = %04x %016"PRIx64"  (%12g)\n",
6322                     i, env->dregs[i], i, env->aregs[i],
6323                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6324                     floatx80_to_double(env, env->fregs[i].l.upper,
6325                                        env->fregs[i].l.lower));
6326    }
6327    qemu_fprintf(f, "PC = %08x   ", env->pc);
6328    sr = env->sr | cpu_m68k_get_ccr(env);
6329    qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6330                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6331                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6332                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6333                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6334                 (sr & CCF_C) ? 'C' : '-');
6335    qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6336                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6337                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6338                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6339                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6340    qemu_fprintf(f, "\n                                "
6341                 "FPCR =     %04x ", env->fpcr);
6342    switch (env->fpcr & FPCR_PREC_MASK) {
6343    case FPCR_PREC_X:
6344        qemu_fprintf(f, "X ");
6345        break;
6346    case FPCR_PREC_S:
6347        qemu_fprintf(f, "S ");
6348        break;
6349    case FPCR_PREC_D:
6350        qemu_fprintf(f, "D ");
6351        break;
6352    }
6353    switch (env->fpcr & FPCR_RND_MASK) {
6354    case FPCR_RND_N:
6355        qemu_fprintf(f, "RN ");
6356        break;
6357    case FPCR_RND_Z:
6358        qemu_fprintf(f, "RZ ");
6359        break;
6360    case FPCR_RND_M:
6361        qemu_fprintf(f, "RM ");
6362        break;
6363    case FPCR_RND_P:
6364        qemu_fprintf(f, "RP ");
6365        break;
6366    }
6367    qemu_fprintf(f, "\n");
6368#ifdef CONFIG_SOFTMMU
6369    qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6370                 env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6371                 env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6372                 env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6373    qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6374    qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6375    qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6376                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6377    qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6378                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6379                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6380    qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6381                 env->mmu.mmusr, env->mmu.ar);
6382#endif
6383}
6384
6385void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6386                          target_ulong *data)
6387{
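    /*
     * data[] holds the values recorded by tcg_gen_insn_start() in
     * m68k_tr_insn_start(): data[0] is the insn's PC and data[1] its
     * cc_op.  CC_OP_DYNAMIC means env->cc_op was already up to date at
     * that point and must not be overwritten.
     */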
6388    int cc_op = data[1];
6389    env->pc = data[0];
6390    if (cc_op != CC_OP_DYNAMIC) {
6391        env->cc_op = cc_op;
6392    }
6393}
6394