qemu/target/m68k/translate.c
   1/*
   2 *  m68k translation
   3 *
   4 *  Copyright (c) 2005-2007 CodeSourcery
   5 *  Written by Paul Brook
   6 *
   7 * This library is free software; you can redistribute it and/or
   8 * modify it under the terms of the GNU Lesser General Public
   9 * License as published by the Free Software Foundation; either
  10 * version 2.1 of the License, or (at your option) any later version.
  11 *
  12 * This library is distributed in the hope that it will be useful,
  13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  15 * Lesser General Public License for more details.
  16 *
  17 * You should have received a copy of the GNU Lesser General Public
  18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
  19 */
  20
  21#include "qemu/osdep.h"
  22#include "cpu.h"
  23#include "disas/disas.h"
  24#include "exec/exec-all.h"
  25#include "tcg/tcg-op.h"
  26#include "qemu/log.h"
  27#include "qemu/qemu-print.h"
  28#include "exec/cpu_ldst.h"
  29#include "exec/translator.h"
  30
  31#include "exec/helper-proto.h"
  32#include "exec/helper-gen.h"
  33
  34#include "exec/log.h"
  35#include "fpu/softfloat.h"
  36
  37
  38//#define DEBUG_DISPATCH 1
  39
  40#define DEFO32(name, offset) static TCGv QREG_##name;
  41#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
  42#include "qregs.def"
  43#undef DEFO32
  44#undef DEFO64
  45
  46static TCGv_i32 cpu_halted;
  47static TCGv_i32 cpu_exception_index;
  48
  49static char cpu_reg_names[2 * 8 * 3 + 5 * 4];
  50static TCGv cpu_dregs[8];
  51static TCGv cpu_aregs[8];
  52static TCGv_i64 cpu_macc[4];
  53
  54#define REG(insn, pos)  (((insn) >> (pos)) & 7)
  55#define DREG(insn, pos) cpu_dregs[REG(insn, pos)]
  56#define AREG(insn, pos) get_areg(s, REG(insn, pos))
  57#define MACREG(acc)     cpu_macc[acc]
  58#define QREG_SP         get_areg(s, 7)
  59
  60static TCGv NULL_QREG;
  61#define IS_NULL_QREG(t) (t == NULL_QREG)
  62/* Used to distinguish stores from bad addressing modes.  */
  63static TCGv store_dummy;
  64
  65#include "exec/gen-icount.h"
  66
  67void m68k_tcg_init(void)
  68{
  69    char *p;
  70    int i;
  71
  72#define DEFO32(name, offset) \
  73    QREG_##name = tcg_global_mem_new_i32(cpu_env, \
  74        offsetof(CPUM68KState, offset), #name);
  75#define DEFO64(name, offset) \
  76    QREG_##name = tcg_global_mem_new_i64(cpu_env, \
  77        offsetof(CPUM68KState, offset), #name);
  78#include "qregs.def"
  79#undef DEFO32
  80#undef DEFO64
  81
  82    cpu_halted = tcg_global_mem_new_i32(cpu_env,
  83                                        -offsetof(M68kCPU, env) +
  84                                        offsetof(CPUState, halted), "HALTED");
  85    cpu_exception_index = tcg_global_mem_new_i32(cpu_env,
  86                                                 -offsetof(M68kCPU, env) +
  87                                                 offsetof(CPUState, exception_index),
  88                                                 "EXCEPTION");
  89
  90    p = cpu_reg_names;
  91    for (i = 0; i < 8; i++) {
  92        sprintf(p, "D%d", i);
  93        cpu_dregs[i] = tcg_global_mem_new(cpu_env,
  94                                          offsetof(CPUM68KState, dregs[i]), p);
  95        p += 3;
  96        sprintf(p, "A%d", i);
  97        cpu_aregs[i] = tcg_global_mem_new(cpu_env,
  98                                          offsetof(CPUM68KState, aregs[i]), p);
  99        p += 3;
 100    }
 101    for (i = 0; i < 4; i++) {
 102        sprintf(p, "ACC%d", i);
 103        cpu_macc[i] = tcg_global_mem_new_i64(cpu_env,
 104                                         offsetof(CPUM68KState, macc[i]), p);
 105        p += 5;
 106    }
 107
 108    NULL_QREG = tcg_global_mem_new(cpu_env, -4, "NULL");
 109    store_dummy = tcg_global_mem_new(cpu_env, -8, "NULL");
 110}
 111
 112/* internal defines */
 113typedef struct DisasContext {
 114    DisasContextBase base;
 115    CPUM68KState *env;
 116    target_ulong pc;
 117    target_ulong pc_prev;
 118    CCOp cc_op; /* Current CC operation */
 119    int cc_op_synced;
 120    TCGv_i64 mactmp;
 121    int done_mac;
 122    int writeback_mask;
 123    TCGv writeback[8];
 124#define MAX_TO_RELEASE 8
 125    int release_count;
 126    TCGv release[MAX_TO_RELEASE];
 127    bool ss_active;
 128} DisasContext;
 129
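/*
 * Temporaries allocated while translating one insn can be registered with
 * mark_to_release() and are then freed in one go by do_release().
 */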
 130static void init_release_array(DisasContext *s)
 131{
 132#ifdef CONFIG_DEBUG_TCG
 133    memset(s->release, 0, sizeof(s->release));
 134#endif
 135    s->release_count = 0;
 136}
 137
 138static void do_release(DisasContext *s)
 139{
 140    int i;
 141    for (i = 0; i < s->release_count; i++) {
 142        tcg_temp_free(s->release[i]);
 143    }
 144    init_release_array(s);
 145}
 146
 147static TCGv mark_to_release(DisasContext *s, TCGv tmp)
 148{
 149    g_assert(s->release_count < MAX_TO_RELEASE);
 150    return s->release[s->release_count++] = tmp;
 151}
 152
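/* Return address register REGNO, or its pending writeback value if any. */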
 153static TCGv get_areg(DisasContext *s, unsigned regno)
 154{
 155    if (s->writeback_mask & (1 << regno)) {
 156        return s->writeback[regno];
 157    } else {
 158        return cpu_aregs[regno];
 159    }
 160}
 161
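/*
 * Queue a new value for address register REGNO.  The architectural register
 * is only updated by do_writebacks() at the end of the insn, so a fault in
 * the middle leaves the register with its original contents.
 */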
 162static void delay_set_areg(DisasContext *s, unsigned regno,
 163                           TCGv val, bool give_temp)
 164{
 165    if (s->writeback_mask & (1 << regno)) {
 166        if (give_temp) {
 167            tcg_temp_free(s->writeback[regno]);
 168            s->writeback[regno] = val;
 169        } else {
 170            tcg_gen_mov_i32(s->writeback[regno], val);
 171        }
 172    } else {
 173        s->writeback_mask |= 1 << regno;
 174        if (give_temp) {
 175            s->writeback[regno] = val;
 176        } else {
 177            TCGv tmp = tcg_temp_new();
 178            s->writeback[regno] = tmp;
 179            tcg_gen_mov_i32(tmp, val);
 180        }
 181    }
 182}
 183
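/* Commit all address register updates queued by delay_set_areg(). */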
 184static void do_writebacks(DisasContext *s)
 185{
 186    unsigned mask = s->writeback_mask;
 187    if (mask) {
 188        s->writeback_mask = 0;
 189        do {
 190            unsigned regno = ctz32(mask);
 191            tcg_gen_mov_i32(cpu_aregs[regno], s->writeback[regno]);
 192            tcg_temp_free(s->writeback[regno]);
 193            mask &= mask - 1;
 194        } while (mask);
 195    }
 196}
 197
 198/* is_jmp field values */
 199#define DISAS_JUMP      DISAS_TARGET_0 /* only pc was modified dynamically */
 200#define DISAS_EXIT      DISAS_TARGET_1 /* cpu state was modified dynamically */
 201
 202#if defined(CONFIG_USER_ONLY)
 203#define IS_USER(s) 1
 204#else
 205#define IS_USER(s)   (!(s->base.tb->flags & TB_FLAGS_MSR_S))
 206#define SFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_SFC_S) ? \
 207                      MMU_KERNEL_IDX : MMU_USER_IDX)
 208#define DFC_INDEX(s) ((s->base.tb->flags & TB_FLAGS_DFC_S) ? \
 209                      MMU_KERNEL_IDX : MMU_USER_IDX)
 210#endif
 211
 212typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
 213
 214#ifdef DEBUG_DISPATCH
 215#define DISAS_INSN(name)                                                \
 216    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
 217                                  uint16_t insn);                       \
 218    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
 219                             uint16_t insn)                             \
 220    {                                                                   \
 221        qemu_log("Dispatch " #name "\n");                               \
 222        real_disas_##name(env, s, insn);                                \
 223    }                                                                   \
 224    static void real_disas_##name(CPUM68KState *env, DisasContext *s,   \
 225                                  uint16_t insn)
 226#else
 227#define DISAS_INSN(name)                                                \
 228    static void disas_##name(CPUM68KState *env, DisasContext *s,        \
 229                             uint16_t insn)
 230#endif
 231
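/* For each CC_OP, the set of CCF_* flags kept live in the QREG_CC_* fields. */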
 232static const uint8_t cc_op_live[CC_OP_NB] = {
 233    [CC_OP_DYNAMIC] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
 234    [CC_OP_FLAGS] = CCF_C | CCF_V | CCF_Z | CCF_N | CCF_X,
 235    [CC_OP_ADDB ... CC_OP_ADDL] = CCF_X | CCF_N | CCF_V,
 236    [CC_OP_SUBB ... CC_OP_SUBL] = CCF_X | CCF_N | CCF_V,
 237    [CC_OP_CMPB ... CC_OP_CMPL] = CCF_X | CCF_N | CCF_V,
 238    [CC_OP_LOGIC] = CCF_X | CCF_N
 239};
 240
 241static void set_cc_op(DisasContext *s, CCOp op)
 242{
 243    CCOp old_op = s->cc_op;
 244    int dead;
 245
 246    if (old_op == op) {
 247        return;
 248    }
 249    s->cc_op = op;
 250    s->cc_op_synced = 0;
 251
 252    /*
 253     * Discard CC computation that will no longer be used.
 254     * Note that X and N are never dead.
 255     */
 256    dead = cc_op_live[old_op] & ~cc_op_live[op];
 257    if (dead & CCF_C) {
 258        tcg_gen_discard_i32(QREG_CC_C);
 259    }
 260    if (dead & CCF_Z) {
 261        tcg_gen_discard_i32(QREG_CC_Z);
 262    }
 263    if (dead & CCF_V) {
 264        tcg_gen_discard_i32(QREG_CC_V);
 265    }
 266}
 267
 268/* Update the CPU env CC_OP state.  */
 269static void update_cc_op(DisasContext *s)
 270{
 271    if (!s->cc_op_synced) {
 272        s->cc_op_synced = 1;
 273        tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
 274    }
 275}
 276
 277/* Generate a jump to an immediate address.  */
 278static void gen_jmp_im(DisasContext *s, uint32_t dest)
 279{
 280    update_cc_op(s);
 281    tcg_gen_movi_i32(QREG_PC, dest);
 282    s->base.is_jmp = DISAS_JUMP;
 283}
 284
 285/* Generate a jump to the address in qreg DEST.  */
 286static void gen_jmp(DisasContext *s, TCGv dest)
 287{
 288    update_cc_op(s);
 289    tcg_gen_mov_i32(QREG_PC, dest);
 290    s->base.is_jmp = DISAS_JUMP;
 291}
 292
 293static void gen_raise_exception(int nr)
 294{
 295    TCGv_i32 tmp;
 296
 297    tmp = tcg_const_i32(nr);
 298    gen_helper_raise_exception(cpu_env, tmp);
 299    tcg_temp_free_i32(tmp);
 300}
 301
 302static void gen_raise_exception_format2(DisasContext *s, int nr,
 303                                        target_ulong this_pc)
 304{
 305    /*
 306     * Pass the address of the insn to the exception handler,
 307     * for recording in the Format $2 (6-word) stack frame.
 308     * Re-use mmu.ar for the purpose, since that's only valid
 309     * after tlb_fill.
 310     */
 311    tcg_gen_st_i32(tcg_constant_i32(this_pc), cpu_env,
 312                   offsetof(CPUM68KState, mmu.ar));
 313    gen_raise_exception(nr);
 314    s->base.is_jmp = DISAS_NORETURN;
 315}
 316
 317static void gen_exception(DisasContext *s, uint32_t dest, int nr)
 318{
 319    update_cc_op(s);
 320    tcg_gen_movi_i32(QREG_PC, dest);
 321
 322    gen_raise_exception(nr);
 323
 324    s->base.is_jmp = DISAS_NORETURN;
 325}
 326
 327static inline void gen_addr_fault(DisasContext *s)
 328{
 329    gen_exception(s, s->base.pc_next, EXCP_ADDRESS);
 330}
 331
 332/*
 333 * Generate a load from the specified address.  Narrow values are
 334 *  sign extended to full register width.
 335 */
 336static inline TCGv gen_load(DisasContext *s, int opsize, TCGv addr,
 337                            int sign, int index)
 338{
 339    TCGv tmp;
 340    tmp = tcg_temp_new_i32();
 341    switch(opsize) {
 342    case OS_BYTE:
 343        if (sign)
 344            tcg_gen_qemu_ld8s(tmp, addr, index);
 345        else
 346            tcg_gen_qemu_ld8u(tmp, addr, index);
 347        break;
 348    case OS_WORD:
 349        if (sign)
 350            tcg_gen_qemu_ld16s(tmp, addr, index);
 351        else
 352            tcg_gen_qemu_ld16u(tmp, addr, index);
 353        break;
 354    case OS_LONG:
 355        tcg_gen_qemu_ld32u(tmp, addr, index);
 356        break;
 357    default:
 358        g_assert_not_reached();
 359    }
 360    return tmp;
 361}
 362
 363/* Generate a store.  */
 364static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val,
 365                             int index)
 366{
 367    switch(opsize) {
 368    case OS_BYTE:
 369        tcg_gen_qemu_st8(val, addr, index);
 370        break;
 371    case OS_WORD:
 372        tcg_gen_qemu_st16(val, addr, index);
 373        break;
 374    case OS_LONG:
 375        tcg_gen_qemu_st32(val, addr, index);
 376        break;
 377    default:
 378        g_assert_not_reached();
 379    }
 380}
 381
 382typedef enum {
 383    EA_STORE,
 384    EA_LOADU,
 385    EA_LOADS
 386} ea_what;
 387
 388/*
 389 * Generate a zero-extended load if WHAT is EA_LOADU, a sign-extended load
 390 * if WHAT is EA_LOADS, otherwise generate a store.
 391 */
 392static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
 393                     ea_what what, int index)
 394{
 395    if (what == EA_STORE) {
 396        gen_store(s, opsize, addr, val, index);
 397        return store_dummy;
 398    } else {
 399        return mark_to_release(s, gen_load(s, opsize, addr,
 400                                           what == EA_LOADS, index));
 401    }
 402}
 403
 404/* Read a 16-bit immediate constant */
 405static inline uint16_t read_im16(CPUM68KState *env, DisasContext *s)
 406{
 407    uint16_t im;
 408    im = translator_lduw(env, &s->base, s->pc);
 409    s->pc += 2;
 410    return im;
 411}
 412
 413/* Read an 8-bit immediate constant */
 414static inline uint8_t read_im8(CPUM68KState *env, DisasContext *s)
 415{
 416    return read_im16(env, s);
 417}
 418
 419/* Read a 32-bit immediate constant.  */
 420static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
 421{
 422    uint32_t im;
 423    im = read_im16(env, s) << 16;
 424    im |= 0xffff & read_im16(env, s);
 425    return im;
 426}
 427
 428/* Read a 64-bit immediate constant.  */
 429static inline uint64_t read_im64(CPUM68KState *env, DisasContext *s)
 430{
 431    uint64_t im;
 432    im = (uint64_t)read_im32(env, s) << 32;
 433    im |= (uint64_t)read_im32(env, s);
 434    return im;
 435}
 436
 437/* Calculate an address index.  */
 438static TCGv gen_addr_index(DisasContext *s, uint16_t ext, TCGv tmp)
 439{
 440    TCGv add;
 441    int scale;
 442
 443    add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
 444    if ((ext & 0x800) == 0) {
 445        tcg_gen_ext16s_i32(tmp, add);
 446        add = tmp;
 447    }
 448    scale = (ext >> 9) & 3;
 449    if (scale != 0) {
 450        tcg_gen_shli_i32(tmp, add, scale);
 451        add = tmp;
 452    }
 453    return add;
 454}
 455
 456/*
 457 * Handle a base + index + displacement effective address.
 458 * A NULL_QREG base means pc-relative.
 459 */
 460static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
 461{
 462    uint32_t offset;
 463    uint16_t ext;
 464    TCGv add;
 465    TCGv tmp;
 466    uint32_t bd, od;
 467
 468    offset = s->pc;
 469    ext = read_im16(env, s);
 470
 471    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
 472        return NULL_QREG;
 473
 474    if (m68k_feature(s->env, M68K_FEATURE_M68000) &&
 475        !m68k_feature(s->env, M68K_FEATURE_SCALED_INDEX)) {
 476        ext &= ~(3 << 9);
 477    }
 478
 479    if (ext & 0x100) {
 480        /* full extension word format */
 481        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
 482            return NULL_QREG;
 483
 484        if ((ext & 0x30) > 0x10) {
 485            /* base displacement */
 486            if ((ext & 0x30) == 0x20) {
 487                bd = (int16_t)read_im16(env, s);
 488            } else {
 489                bd = read_im32(env, s);
 490            }
 491        } else {
 492            bd = 0;
 493        }
 494        tmp = mark_to_release(s, tcg_temp_new());
 495        if ((ext & 0x44) == 0) {
 496            /* pre-index */
 497            add = gen_addr_index(s, ext, tmp);
 498        } else {
 499            add = NULL_QREG;
 500        }
 501        if ((ext & 0x80) == 0) {
 502            /* base not suppressed */
 503            if (IS_NULL_QREG(base)) {
 504                base = mark_to_release(s, tcg_const_i32(offset + bd));
 505                bd = 0;
 506            }
 507            if (!IS_NULL_QREG(add)) {
 508                tcg_gen_add_i32(tmp, add, base);
 509                add = tmp;
 510            } else {
 511                add = base;
 512            }
 513        }
 514        if (!IS_NULL_QREG(add)) {
 515            if (bd != 0) {
 516                tcg_gen_addi_i32(tmp, add, bd);
 517                add = tmp;
 518            }
 519        } else {
 520            add = mark_to_release(s, tcg_const_i32(bd));
 521        }
 522        if ((ext & 3) != 0) {
 523            /* memory indirect */
 524            base = mark_to_release(s, gen_load(s, OS_LONG, add, 0, IS_USER(s)));
 525            if ((ext & 0x44) == 4) {
 526                add = gen_addr_index(s, ext, tmp);
 527                tcg_gen_add_i32(tmp, add, base);
 528                add = tmp;
 529            } else {
 530                add = base;
 531            }
 532            if ((ext & 3) > 1) {
 533                /* outer displacement */
 534                if ((ext & 3) == 2) {
 535                    od = (int16_t)read_im16(env, s);
 536                } else {
 537                    od = read_im32(env, s);
 538                }
 539            } else {
 540                od = 0;
 541            }
 542            if (od != 0) {
 543                tcg_gen_addi_i32(tmp, add, od);
 544                add = tmp;
 545            }
 546        }
 547    } else {
 548        /* brief extension word format */
 549        tmp = mark_to_release(s, tcg_temp_new());
 550        add = gen_addr_index(s, ext, tmp);
 551        if (!IS_NULL_QREG(base)) {
 552            tcg_gen_add_i32(tmp, add, base);
 553            if ((int8_t)ext)
 554                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
 555        } else {
 556            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
 557        }
 558        add = tmp;
 559    }
 560    return add;
 561}
 562
 563/* Sign or zero extend a value.  */
 564
 565static inline void gen_ext(TCGv res, TCGv val, int opsize, int sign)
 566{
 567    switch (opsize) {
 568    case OS_BYTE:
 569        if (sign) {
 570            tcg_gen_ext8s_i32(res, val);
 571        } else {
 572            tcg_gen_ext8u_i32(res, val);
 573        }
 574        break;
 575    case OS_WORD:
 576        if (sign) {
 577            tcg_gen_ext16s_i32(res, val);
 578        } else {
 579            tcg_gen_ext16u_i32(res, val);
 580        }
 581        break;
 582    case OS_LONG:
 583        tcg_gen_mov_i32(res, val);
 584        break;
 585    default:
 586        g_assert_not_reached();
 587    }
 588}
 589
 590/* Evaluate all the CC flags.  */
 591
 592static void gen_flush_flags(DisasContext *s)
 593{
 594    TCGv t0, t1;
 595
 596    switch (s->cc_op) {
 597    case CC_OP_FLAGS:
 598        return;
 599
 600    case CC_OP_ADDB:
 601    case CC_OP_ADDW:
 602    case CC_OP_ADDL:
 603        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
 604        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 605        /* Compute signed overflow for addition.  */
 606        t0 = tcg_temp_new();
 607        t1 = tcg_temp_new();
 608        tcg_gen_sub_i32(t0, QREG_CC_N, QREG_CC_V);
 609        gen_ext(t0, t0, s->cc_op - CC_OP_ADDB, 1);
 610        tcg_gen_xor_i32(t1, QREG_CC_N, QREG_CC_V);
 611        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
 612        tcg_temp_free(t0);
 613        tcg_gen_andc_i32(QREG_CC_V, t1, QREG_CC_V);
 614        tcg_temp_free(t1);
 615        break;
 616
 617    case CC_OP_SUBB:
 618    case CC_OP_SUBW:
 619    case CC_OP_SUBL:
 620        tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
 621        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 622        /* Compute signed overflow for subtraction.  */
 623        t0 = tcg_temp_new();
 624        t1 = tcg_temp_new();
 625        tcg_gen_add_i32(t0, QREG_CC_N, QREG_CC_V);
 626        gen_ext(t0, t0, s->cc_op - CC_OP_SUBB, 1);
 627        tcg_gen_xor_i32(t1, QREG_CC_N, t0);
 628        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, t0);
 629        tcg_temp_free(t0);
 630        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t1);
 631        tcg_temp_free(t1);
 632        break;
 633
 634    case CC_OP_CMPB:
 635    case CC_OP_CMPW:
 636    case CC_OP_CMPL:
 637        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_C, QREG_CC_N, QREG_CC_V);
 638        tcg_gen_sub_i32(QREG_CC_Z, QREG_CC_N, QREG_CC_V);
 639        gen_ext(QREG_CC_Z, QREG_CC_Z, s->cc_op - CC_OP_CMPB, 1);
 640        /* Compute signed overflow for subtraction.  */
 641        t0 = tcg_temp_new();
 642        tcg_gen_xor_i32(t0, QREG_CC_Z, QREG_CC_N);
 643        tcg_gen_xor_i32(QREG_CC_V, QREG_CC_V, QREG_CC_N);
 644        tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, t0);
 645        tcg_temp_free(t0);
 646        tcg_gen_mov_i32(QREG_CC_N, QREG_CC_Z);
 647        break;
 648
 649    case CC_OP_LOGIC:
 650        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
 651        tcg_gen_movi_i32(QREG_CC_C, 0);
 652        tcg_gen_movi_i32(QREG_CC_V, 0);
 653        break;
 654
 655    case CC_OP_DYNAMIC:
 656        gen_helper_flush_flags(cpu_env, QREG_CC_OP);
 657        s->cc_op_synced = 1;
 658        break;
 659
 660    default:
 661        t0 = tcg_const_i32(s->cc_op);
 662        gen_helper_flush_flags(cpu_env, t0);
 663        tcg_temp_free(t0);
 664        s->cc_op_synced = 1;
 665        break;
 666    }
 667
 668    /* Note that flush_flags also assigns to env->cc_op.  */
 669    s->cc_op = CC_OP_FLAGS;
 670}
 671
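/*
 * As gen_ext(), but returns the result in a temporary marked for release
 * (or VAL itself when OPSIZE is OS_LONG).
 */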
 672static inline TCGv gen_extend(DisasContext *s, TCGv val, int opsize, int sign)
 673{
 674    TCGv tmp;
 675
 676    if (opsize == OS_LONG) {
 677        tmp = val;
 678    } else {
 679        tmp = mark_to_release(s, tcg_temp_new());
 680        gen_ext(tmp, val, opsize, sign);
 681    }
 682
 683    return tmp;
 684}
 685
 686static void gen_logic_cc(DisasContext *s, TCGv val, int opsize)
 687{
 688    gen_ext(QREG_CC_N, val, opsize, 1);
 689    set_cc_op(s, CC_OP_LOGIC);
 690}
 691
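/* Record DEST/SRC for a deferred compare and select the matching CC_OP_CMPx. */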
 692static void gen_update_cc_cmp(DisasContext *s, TCGv dest, TCGv src, int opsize)
 693{
 694    tcg_gen_mov_i32(QREG_CC_N, dest);
 695    tcg_gen_mov_i32(QREG_CC_V, src);
 696    set_cc_op(s, CC_OP_CMPB + opsize);
 697}
 698
 699static void gen_update_cc_add(TCGv dest, TCGv src, int opsize)
 700{
 701    gen_ext(QREG_CC_N, dest, opsize, 1);
 702    tcg_gen_mov_i32(QREG_CC_V, src);
 703}
 704
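/* Size in bytes of a memory operand of the given OS_* size. */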
 705static inline int opsize_bytes(int opsize)
 706{
 707    switch (opsize) {
 708    case OS_BYTE: return 1;
 709    case OS_WORD: return 2;
 710    case OS_LONG: return 4;
 711    case OS_SINGLE: return 4;
 712    case OS_DOUBLE: return 8;
 713    case OS_EXTENDED: return 12;
 714    case OS_PACKED: return 12;
 715    default:
 716        g_assert_not_reached();
 717    }
 718}
 719
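/* Decode the standard 2-bit size field in bits 7:6 of an instruction word. */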
 720static inline int insn_opsize(int insn)
 721{
 722    switch ((insn >> 6) & 3) {
 723    case 0: return OS_BYTE;
 724    case 1: return OS_WORD;
 725    case 2: return OS_LONG;
 726    default:
 727        g_assert_not_reached();
 728    }
 729}
 730
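/* Decode the 3-bit FPU operand format field at bit POS of an extension word. */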
 731static inline int ext_opsize(int ext, int pos)
 732{
 733    switch ((ext >> pos) & 7) {
 734    case 0: return OS_LONG;
 735    case 1: return OS_SINGLE;
 736    case 2: return OS_EXTENDED;
 737    case 3: return OS_PACKED;
 738    case 4: return OS_WORD;
 739    case 5: return OS_DOUBLE;
 740    case 6: return OS_BYTE;
 741    default:
 742        g_assert_not_reached();
 743    }
 744}
 745
 746/*
 747 * Assign value to a register.  If the width is less than the register width
 748 * only the low part of the register is set.
 749 */
 750static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
 751{
 752    TCGv tmp;
 753    switch (opsize) {
 754    case OS_BYTE:
 755        tcg_gen_andi_i32(reg, reg, 0xffffff00);
 756        tmp = tcg_temp_new();
 757        tcg_gen_ext8u_i32(tmp, val);
 758        tcg_gen_or_i32(reg, reg, tmp);
 759        tcg_temp_free(tmp);
 760        break;
 761    case OS_WORD:
 762        tcg_gen_andi_i32(reg, reg, 0xffff0000);
 763        tmp = tcg_temp_new();
 764        tcg_gen_ext16u_i32(tmp, val);
 765        tcg_gen_or_i32(reg, reg, tmp);
 766        tcg_temp_free(tmp);
 767        break;
 768    case OS_LONG:
 769    case OS_SINGLE:
 770        tcg_gen_mov_i32(reg, val);
 771        break;
 772    default:
 773        g_assert_not_reached();
 774    }
 775}
 776
 777/*
 778 * Generate code for an "effective address".  Does not adjust the base
 779 * register for autoincrement addressing modes.
 780 */
 781static TCGv gen_lea_mode(CPUM68KState *env, DisasContext *s,
 782                         int mode, int reg0, int opsize)
 783{
 784    TCGv reg;
 785    TCGv tmp;
 786    uint16_t ext;
 787    uint32_t offset;
 788
 789    switch (mode) {
 790    case 0: /* Data register direct.  */
 791    case 1: /* Address register direct.  */
 792        return NULL_QREG;
 793    case 3: /* Indirect postincrement.  */
 794        if (opsize == OS_UNSIZED) {
 795            return NULL_QREG;
 796        }
 797        /* fallthru */
 798    case 2: /* Indirect register */
 799        return get_areg(s, reg0);
 800    case 4: /* Indirect predecrement.  */
 801        if (opsize == OS_UNSIZED) {
 802            return NULL_QREG;
 803        }
 804        reg = get_areg(s, reg0);
 805        tmp = mark_to_release(s, tcg_temp_new());
 806        if (reg0 == 7 && opsize == OS_BYTE &&
 807            m68k_feature(s->env, M68K_FEATURE_M68000)) {
 808            tcg_gen_subi_i32(tmp, reg, 2);
 809        } else {
 810            tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
 811        }
 812        return tmp;
 813    case 5: /* Indirect displacement.  */
 814        reg = get_areg(s, reg0);
 815        tmp = mark_to_release(s, tcg_temp_new());
 816        ext = read_im16(env, s);
 817        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
 818        return tmp;
 819    case 6: /* Indirect index + displacement.  */
 820        reg = get_areg(s, reg0);
 821        return gen_lea_indexed(env, s, reg);
 822    case 7: /* Other */
 823        switch (reg0) {
 824        case 0: /* Absolute short.  */
 825            offset = (int16_t)read_im16(env, s);
 826            return mark_to_release(s, tcg_const_i32(offset));
 827        case 1: /* Absolute long.  */
 828            offset = read_im32(env, s);
 829            return mark_to_release(s, tcg_const_i32(offset));
 830        case 2: /* pc displacement  */
 831            offset = s->pc;
 832            offset += (int16_t)read_im16(env, s);
 833            return mark_to_release(s, tcg_const_i32(offset));
 834        case 3: /* pc index+displacement.  */
 835            return gen_lea_indexed(env, s, NULL_QREG);
 836        case 4: /* Immediate.  */
 837        default:
 838            return NULL_QREG;
 839        }
 840    }
 841    /* Should never happen.  */
 842    return NULL_QREG;
 843}
 844
 845static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
 846                    int opsize)
 847{
 848    int mode = extract32(insn, 3, 3);
 849    int reg0 = REG(insn, 0);
 850    return gen_lea_mode(env, s, mode, reg0, opsize);
 851}
 852
 853/*
 854 * Generate code to load/store a value from/into an EA.  If WHAT is
 855 * EA_STORE this is a write, otherwise it is a read (EA_LOADS sign-extends,
 856 * EA_LOADU zero-extends).  ADDRP is non-null for read-write operands.
 857 */
 858static TCGv gen_ea_mode(CPUM68KState *env, DisasContext *s, int mode, int reg0,
 859                        int opsize, TCGv val, TCGv *addrp, ea_what what,
 860                        int index)
 861{
 862    TCGv reg, tmp, result;
 863    int32_t offset;
 864
 865    switch (mode) {
 866    case 0: /* Data register direct.  */
 867        reg = cpu_dregs[reg0];
 868        if (what == EA_STORE) {
 869            gen_partset_reg(opsize, reg, val);
 870            return store_dummy;
 871        } else {
 872            return gen_extend(s, reg, opsize, what == EA_LOADS);
 873        }
 874    case 1: /* Address register direct.  */
 875        reg = get_areg(s, reg0);
 876        if (what == EA_STORE) {
 877            tcg_gen_mov_i32(reg, val);
 878            return store_dummy;
 879        } else {
 880            return gen_extend(s, reg, opsize, what == EA_LOADS);
 881        }
 882    case 2: /* Indirect register */
 883        reg = get_areg(s, reg0);
 884        return gen_ldst(s, opsize, reg, val, what, index);
 885    case 3: /* Indirect postincrement.  */
 886        reg = get_areg(s, reg0);
 887        result = gen_ldst(s, opsize, reg, val, what, index);
 888        if (what == EA_STORE || !addrp) {
 889            TCGv tmp = tcg_temp_new();
 890            if (reg0 == 7 && opsize == OS_BYTE &&
 891                m68k_feature(s->env, M68K_FEATURE_M68000)) {
 892                tcg_gen_addi_i32(tmp, reg, 2);
 893            } else {
 894                tcg_gen_addi_i32(tmp, reg, opsize_bytes(opsize));
 895            }
 896            delay_set_areg(s, reg0, tmp, true);
 897        }
 898        return result;
 899    case 4: /* Indirect predecrement.  */
 900        if (addrp && what == EA_STORE) {
 901            tmp = *addrp;
 902        } else {
 903            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
 904            if (IS_NULL_QREG(tmp)) {
 905                return tmp;
 906            }
 907            if (addrp) {
 908                *addrp = tmp;
 909            }
 910        }
 911        result = gen_ldst(s, opsize, tmp, val, what, index);
 912        if (what == EA_STORE || !addrp) {
 913            delay_set_areg(s, reg0, tmp, false);
 914        }
 915        return result;
 916    case 5: /* Indirect displacement.  */
 917    case 6: /* Indirect index + displacement.  */
 918    do_indirect:
 919        if (addrp && what == EA_STORE) {
 920            tmp = *addrp;
 921        } else {
 922            tmp = gen_lea_mode(env, s, mode, reg0, opsize);
 923            if (IS_NULL_QREG(tmp)) {
 924                return tmp;
 925            }
 926            if (addrp) {
 927                *addrp = tmp;
 928            }
 929        }
 930        return gen_ldst(s, opsize, tmp, val, what, index);
 931    case 7: /* Other */
 932        switch (reg0) {
 933        case 0: /* Absolute short.  */
 934        case 1: /* Absolute long.  */
 935        case 2: /* pc displacement  */
 936        case 3: /* pc index+displacement.  */
 937            goto do_indirect;
 938        case 4: /* Immediate.  */
 939            /* Sign extend values for consistency.  */
 940            switch (opsize) {
 941            case OS_BYTE:
 942                if (what == EA_LOADS) {
 943                    offset = (int8_t)read_im8(env, s);
 944                } else {
 945                    offset = read_im8(env, s);
 946                }
 947                break;
 948            case OS_WORD:
 949                if (what == EA_LOADS) {
 950                    offset = (int16_t)read_im16(env, s);
 951                } else {
 952                    offset = read_im16(env, s);
 953                }
 954                break;
 955            case OS_LONG:
 956                offset = read_im32(env, s);
 957                break;
 958            default:
 959                g_assert_not_reached();
 960            }
 961            return mark_to_release(s, tcg_const_i32(offset));
 962        default:
 963            return NULL_QREG;
 964        }
 965    }
 966    /* Should never happen.  */
 967    return NULL_QREG;
 968}
 969
 970static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
 971                   int opsize, TCGv val, TCGv *addrp, ea_what what, int index)
 972{
 973    int mode = extract32(insn, 3, 3);
 974    int reg0 = REG(insn, 0);
 975    return gen_ea_mode(env, s, mode, reg0, opsize, val, addrp, what, index);
 976}
 977
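/* Return a pointer to floating-point register FREG within the CPU state. */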
 978static TCGv_ptr gen_fp_ptr(int freg)
 979{
 980    TCGv_ptr fp = tcg_temp_new_ptr();
 981    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fregs[freg]));
 982    return fp;
 983}
 984
 985static TCGv_ptr gen_fp_result_ptr(void)
 986{
 987    TCGv_ptr fp = tcg_temp_new_ptr();
 988    tcg_gen_addi_ptr(fp, cpu_env, offsetof(CPUM68KState, fp_result));
 989    return fp;
 990}
 991
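/* Copy an FPReg (16-bit sign/exponent word plus 64-bit mantissa) from SRC to DEST. */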
 992static void gen_fp_move(TCGv_ptr dest, TCGv_ptr src)
 993{
 994    TCGv t32;
 995    TCGv_i64 t64;
 996
 997    t32 = tcg_temp_new();
 998    tcg_gen_ld16u_i32(t32, src, offsetof(FPReg, l.upper));
 999    tcg_gen_st16_i32(t32, dest, offsetof(FPReg, l.upper));
1000    tcg_temp_free(t32);
1001
1002    t64 = tcg_temp_new_i64();
1003    tcg_gen_ld_i64(t64, src, offsetof(FPReg, l.lower));
1004    tcg_gen_st_i64(t64, dest, offsetof(FPReg, l.lower));
1005    tcg_temp_free_i64(t64);
1006}
1007
1008static void gen_load_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1009                        int index)
1010{
1011    TCGv tmp;
1012    TCGv_i64 t64;
1013
1014    t64 = tcg_temp_new_i64();
1015    tmp = tcg_temp_new();
1016    switch (opsize) {
1017    case OS_BYTE:
1018        tcg_gen_qemu_ld8s(tmp, addr, index);
1019        gen_helper_exts32(cpu_env, fp, tmp);
1020        break;
1021    case OS_WORD:
1022        tcg_gen_qemu_ld16s(tmp, addr, index);
1023        gen_helper_exts32(cpu_env, fp, tmp);
1024        break;
1025    case OS_LONG:
1026        tcg_gen_qemu_ld32u(tmp, addr, index);
1027        gen_helper_exts32(cpu_env, fp, tmp);
1028        break;
1029    case OS_SINGLE:
1030        tcg_gen_qemu_ld32u(tmp, addr, index);
1031        gen_helper_extf32(cpu_env, fp, tmp);
1032        break;
1033    case OS_DOUBLE:
1034        tcg_gen_qemu_ld64(t64, addr, index);
1035        gen_helper_extf64(cpu_env, fp, t64);
1036        break;
1037    case OS_EXTENDED:
1038        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1039            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1040            break;
1041        }
1042        tcg_gen_qemu_ld32u(tmp, addr, index);
1043        tcg_gen_shri_i32(tmp, tmp, 16);
1044        tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1045        tcg_gen_addi_i32(tmp, addr, 4);
1046        tcg_gen_qemu_ld64(t64, tmp, index);
1047        tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1048        break;
1049    case OS_PACKED:
1050        /*
1051         * unimplemented data type on 68040/ColdFire
1052         * FIXME if needed for another FPU
1053         */
1054        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1055        break;
1056    default:
1057        g_assert_not_reached();
1058    }
1059    tcg_temp_free(tmp);
1060    tcg_temp_free_i64(t64);
1061}
1062
1063static void gen_store_fp(DisasContext *s, int opsize, TCGv addr, TCGv_ptr fp,
1064                         int index)
1065{
1066    TCGv tmp;
1067    TCGv_i64 t64;
1068
1069    t64 = tcg_temp_new_i64();
1070    tmp = tcg_temp_new();
1071    switch (opsize) {
1072    case OS_BYTE:
1073        gen_helper_reds32(tmp, cpu_env, fp);
1074        tcg_gen_qemu_st8(tmp, addr, index);
1075        break;
1076    case OS_WORD:
1077        gen_helper_reds32(tmp, cpu_env, fp);
1078        tcg_gen_qemu_st16(tmp, addr, index);
1079        break;
1080    case OS_LONG:
1081        gen_helper_reds32(tmp, cpu_env, fp);
1082        tcg_gen_qemu_st32(tmp, addr, index);
1083        break;
1084    case OS_SINGLE:
1085        gen_helper_redf32(tmp, cpu_env, fp);
1086        tcg_gen_qemu_st32(tmp, addr, index);
1087        break;
1088    case OS_DOUBLE:
1089        gen_helper_redf64(t64, cpu_env, fp);
1090        tcg_gen_qemu_st64(t64, addr, index);
1091        break;
1092    case OS_EXTENDED:
1093        if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1094            gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1095            break;
1096        }
1097        tcg_gen_ld16u_i32(tmp, fp, offsetof(FPReg, l.upper));
1098        tcg_gen_shli_i32(tmp, tmp, 16);
1099        tcg_gen_qemu_st32(tmp, addr, index);
1100        tcg_gen_addi_i32(tmp, addr, 4);
1101        tcg_gen_ld_i64(t64, fp, offsetof(FPReg, l.lower));
1102        tcg_gen_qemu_st64(t64, tmp, index);
1103        break;
1104    case OS_PACKED:
1105        /*
1106         * unimplemented data type on 68040/ColdFire
1107         * FIXME if needed for another FPU
1108         */
1109        gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1110        break;
1111    default:
1112        g_assert_not_reached();
1113    }
1114    tcg_temp_free(tmp);
1115    tcg_temp_free_i64(t64);
1116}
1117
1118static void gen_ldst_fp(DisasContext *s, int opsize, TCGv addr,
1119                        TCGv_ptr fp, ea_what what, int index)
1120{
1121    if (what == EA_STORE) {
1122        gen_store_fp(s, opsize, addr, fp, index);
1123    } else {
1124        gen_load_fp(s, opsize, addr, fp, index);
1125    }
1126}
1127
1128static int gen_ea_mode_fp(CPUM68KState *env, DisasContext *s, int mode,
1129                          int reg0, int opsize, TCGv_ptr fp, ea_what what,
1130                          int index)
1131{
1132    TCGv reg, addr, tmp;
1133    TCGv_i64 t64;
1134
1135    switch (mode) {
1136    case 0: /* Data register direct.  */
1137        reg = cpu_dregs[reg0];
1138        if (what == EA_STORE) {
1139            switch (opsize) {
1140            case OS_BYTE:
1141            case OS_WORD:
1142            case OS_LONG:
1143                gen_helper_reds32(reg, cpu_env, fp);
1144                break;
1145            case OS_SINGLE:
1146                gen_helper_redf32(reg, cpu_env, fp);
1147                break;
1148            default:
1149                g_assert_not_reached();
1150            }
1151        } else {
1152            tmp = tcg_temp_new();
1153            switch (opsize) {
1154            case OS_BYTE:
1155                tcg_gen_ext8s_i32(tmp, reg);
1156                gen_helper_exts32(cpu_env, fp, tmp);
1157                break;
1158            case OS_WORD:
1159                tcg_gen_ext16s_i32(tmp, reg);
1160                gen_helper_exts32(cpu_env, fp, tmp);
1161                break;
1162            case OS_LONG:
1163                gen_helper_exts32(cpu_env, fp, reg);
1164                break;
1165            case OS_SINGLE:
1166                gen_helper_extf32(cpu_env, fp, reg);
1167                break;
1168            default:
1169                g_assert_not_reached();
1170            }
1171            tcg_temp_free(tmp);
1172        }
1173        return 0;
1174    case 1: /* Address register direct.  */
1175        return -1;
1176    case 2: /* Indirect register */
1177        addr = get_areg(s, reg0);
1178        gen_ldst_fp(s, opsize, addr, fp, what, index);
1179        return 0;
1180    case 3: /* Indirect postincrement.  */
1181        addr = cpu_aregs[reg0];
1182        gen_ldst_fp(s, opsize, addr, fp, what, index);
1183        tcg_gen_addi_i32(addr, addr, opsize_bytes(opsize));
1184        return 0;
1185    case 4: /* Indirect predecrement.  */
1186        addr = gen_lea_mode(env, s, mode, reg0, opsize);
1187        if (IS_NULL_QREG(addr)) {
1188            return -1;
1189        }
1190        gen_ldst_fp(s, opsize, addr, fp, what, index);
1191        tcg_gen_mov_i32(cpu_aregs[reg0], addr);
1192        return 0;
1193    case 5: /* Indirect displacement.  */
1194    case 6: /* Indirect index + displacement.  */
1195    do_indirect:
1196        addr = gen_lea_mode(env, s, mode, reg0, opsize);
1197        if (IS_NULL_QREG(addr)) {
1198            return -1;
1199        }
1200        gen_ldst_fp(s, opsize, addr, fp, what, index);
1201        return 0;
1202    case 7: /* Other */
1203        switch (reg0) {
1204        case 0: /* Absolute short.  */
1205        case 1: /* Absolute long.  */
1206        case 2: /* pc displacement  */
1207        case 3: /* pc index+displacement.  */
1208            goto do_indirect;
1209        case 4: /* Immediate.  */
1210            if (what == EA_STORE) {
1211                return -1;
1212            }
1213            switch (opsize) {
1214            case OS_BYTE:
1215                tmp = tcg_const_i32((int8_t)read_im8(env, s));
1216                gen_helper_exts32(cpu_env, fp, tmp);
1217                tcg_temp_free(tmp);
1218                break;
1219            case OS_WORD:
1220                tmp = tcg_const_i32((int16_t)read_im16(env, s));
1221                gen_helper_exts32(cpu_env, fp, tmp);
1222                tcg_temp_free(tmp);
1223                break;
1224            case OS_LONG:
1225                tmp = tcg_const_i32(read_im32(env, s));
1226                gen_helper_exts32(cpu_env, fp, tmp);
1227                tcg_temp_free(tmp);
1228                break;
1229            case OS_SINGLE:
1230                tmp = tcg_const_i32(read_im32(env, s));
1231                gen_helper_extf32(cpu_env, fp, tmp);
1232                tcg_temp_free(tmp);
1233                break;
1234            case OS_DOUBLE:
1235                t64 = tcg_const_i64(read_im64(env, s));
1236                gen_helper_extf64(cpu_env, fp, t64);
1237                tcg_temp_free_i64(t64);
1238                break;
1239            case OS_EXTENDED:
1240                if (m68k_feature(s->env, M68K_FEATURE_CF_FPU)) {
1241                    gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1242                    break;
1243                }
1244                tmp = tcg_const_i32(read_im32(env, s) >> 16);
1245                tcg_gen_st16_i32(tmp, fp, offsetof(FPReg, l.upper));
1246                tcg_temp_free(tmp);
1247                t64 = tcg_const_i64(read_im64(env, s));
1248                tcg_gen_st_i64(t64, fp, offsetof(FPReg, l.lower));
1249                tcg_temp_free_i64(t64);
1250                break;
1251            case OS_PACKED:
1252                /*
1253                 * unimplemented data type on 68040/ColdFire
1254                 * FIXME if needed for another FPU
1255                 */
1256                gen_exception(s, s->base.pc_next, EXCP_FP_UNIMP);
1257                break;
1258            default:
1259                g_assert_not_reached();
1260            }
1261            return 0;
1262        default:
1263            return -1;
1264        }
1265    }
1266    return -1;
1267}
1268
1269static int gen_ea_fp(CPUM68KState *env, DisasContext *s, uint16_t insn,
1270                       int opsize, TCGv_ptr fp, ea_what what, int index)
1271{
1272    int mode = extract32(insn, 3, 3);
1273    int reg0 = REG(insn, 0);
1274    return gen_ea_mode_fp(env, s, mode, reg0, opsize, fp, what, index);
1275}
1276
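/*
 * A condition resolved to a TCG comparison: TCOND applied to V1 and V2.
 * G1/G2 mark V1/V2 as globals that free_cond() must not free.
 */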
1277typedef struct {
1278    TCGCond tcond;
1279    bool g1;
1280    bool g2;
1281    TCGv v1;
1282    TCGv v2;
1283} DisasCompare;
1284
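/* Translate m68k condition code COND (0..15) into a DisasCompare. */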
1285static void gen_cc_cond(DisasCompare *c, DisasContext *s, int cond)
1286{
1287    TCGv tmp, tmp2;
1288    TCGCond tcond;
1289    CCOp op = s->cc_op;
1290
1291    /* The CC_OP_CMP form can handle most normal comparisons directly.  */
1292    if (op == CC_OP_CMPB || op == CC_OP_CMPW || op == CC_OP_CMPL) {
1293        c->g1 = c->g2 = 1;
1294        c->v1 = QREG_CC_N;
1295        c->v2 = QREG_CC_V;
1296        switch (cond) {
1297        case 2: /* HI */
1298        case 3: /* LS */
1299            tcond = TCG_COND_LEU;
1300            goto done;
1301        case 4: /* CC */
1302        case 5: /* CS */
1303            tcond = TCG_COND_LTU;
1304            goto done;
1305        case 6: /* NE */
1306        case 7: /* EQ */
1307            tcond = TCG_COND_EQ;
1308            goto done;
1309        case 10: /* PL */
1310        case 11: /* MI */
1311            c->g1 = c->g2 = 0;
1312            c->v2 = tcg_const_i32(0);
1313            c->v1 = tmp = tcg_temp_new();
1314            tcg_gen_sub_i32(tmp, QREG_CC_N, QREG_CC_V);
1315            gen_ext(tmp, tmp, op - CC_OP_CMPB, 1);
1316            /* fallthru */
1317        case 12: /* GE */
1318        case 13: /* LT */
1319            tcond = TCG_COND_LT;
1320            goto done;
1321        case 14: /* GT */
1322        case 15: /* LE */
1323            tcond = TCG_COND_LE;
1324            goto done;
1325        }
1326    }
1327
1328    c->g1 = 1;
1329    c->g2 = 0;
1330    c->v2 = tcg_const_i32(0);
1331
1332    switch (cond) {
1333    case 0: /* T */
1334    case 1: /* F */
1335        c->v1 = c->v2;
1336        tcond = TCG_COND_NEVER;
1337        goto done;
1338    case 14: /* GT (!(Z || (N ^ V))) */
1339    case 15: /* LE (Z || (N ^ V)) */
1340        /*
1341         * Logic operations clear V, which simplifies LE to (Z || N),
1342         * and since Z and N are co-located, this becomes a normal
1343         * comparison vs N.
1344         */
1345        if (op == CC_OP_LOGIC) {
1346            c->v1 = QREG_CC_N;
1347            tcond = TCG_COND_LE;
1348            goto done;
1349        }
1350        break;
1351    case 12: /* GE (!(N ^ V)) */
1352    case 13: /* LT (N ^ V) */
1353        /* Logic operations clear V, which simplifies this to N.  */
1354        if (op != CC_OP_LOGIC) {
1355            break;
1356        }
1357        /* fallthru */
1358    case 10: /* PL (!N) */
1359    case 11: /* MI (N) */
1360        /* Several cases represent N normally.  */
1361        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1362            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1363            op == CC_OP_LOGIC) {
1364            c->v1 = QREG_CC_N;
1365            tcond = TCG_COND_LT;
1366            goto done;
1367        }
1368        break;
1369    case 6: /* NE (!Z) */
1370    case 7: /* EQ (Z) */
1371        /* Some cases fold Z into N.  */
1372        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1373            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL ||
1374            op == CC_OP_LOGIC) {
1375            tcond = TCG_COND_EQ;
1376            c->v1 = QREG_CC_N;
1377            goto done;
1378        }
1379        break;
1380    case 4: /* CC (!C) */
1381    case 5: /* CS (C) */
1382        /* Some cases fold C into X.  */
1383        if (op == CC_OP_ADDB || op == CC_OP_ADDW || op == CC_OP_ADDL ||
1384            op == CC_OP_SUBB || op == CC_OP_SUBW || op == CC_OP_SUBL) {
1385            tcond = TCG_COND_NE;
1386            c->v1 = QREG_CC_X;
1387            goto done;
1388        }
1389        /* fallthru */
1390    case 8: /* VC (!V) */
1391    case 9: /* VS (V) */
1392        /* Logic operations clear V and C.  */
1393        if (op == CC_OP_LOGIC) {
1394            tcond = TCG_COND_NEVER;
1395            c->v1 = c->v2;
1396            goto done;
1397        }
1398        break;
1399    }
1400
1401    /* Otherwise, flush flag state to CC_OP_FLAGS.  */
1402    gen_flush_flags(s);
1403
1404    switch (cond) {
1405    case 0: /* T */
1406    case 1: /* F */
1407    default:
1408        /* Invalid, or handled above.  */
1409        abort();
1410    case 2: /* HI (!C && !Z) -> !(C || Z)*/
1411    case 3: /* LS (C || Z) */
1412        c->v1 = tmp = tcg_temp_new();
1413        c->g1 = 0;
1414        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1415        tcg_gen_or_i32(tmp, tmp, QREG_CC_C);
1416        tcond = TCG_COND_NE;
1417        break;
1418    case 4: /* CC (!C) */
1419    case 5: /* CS (C) */
1420        c->v1 = QREG_CC_C;
1421        tcond = TCG_COND_NE;
1422        break;
1423    case 6: /* NE (!Z) */
1424    case 7: /* EQ (Z) */
1425        c->v1 = QREG_CC_Z;
1426        tcond = TCG_COND_EQ;
1427        break;
1428    case 8: /* VC (!V) */
1429    case 9: /* VS (V) */
1430        c->v1 = QREG_CC_V;
1431        tcond = TCG_COND_LT;
1432        break;
1433    case 10: /* PL (!N) */
1434    case 11: /* MI (N) */
1435        c->v1 = QREG_CC_N;
1436        tcond = TCG_COND_LT;
1437        break;
1438    case 12: /* GE (!(N ^ V)) */
1439    case 13: /* LT (N ^ V) */
1440        c->v1 = tmp = tcg_temp_new();
1441        c->g1 = 0;
1442        tcg_gen_xor_i32(tmp, QREG_CC_N, QREG_CC_V);
1443        tcond = TCG_COND_LT;
1444        break;
1445    case 14: /* GT (!(Z || (N ^ V))) */
1446    case 15: /* LE (Z || (N ^ V)) */
1447        c->v1 = tmp = tcg_temp_new();
1448        c->g1 = 0;
1449        tcg_gen_setcond_i32(TCG_COND_EQ, tmp, QREG_CC_Z, c->v2);
1450        tcg_gen_neg_i32(tmp, tmp);
1451        tmp2 = tcg_temp_new();
1452        tcg_gen_xor_i32(tmp2, QREG_CC_N, QREG_CC_V);
1453        tcg_gen_or_i32(tmp, tmp, tmp2);
1454        tcg_temp_free(tmp2);
1455        tcond = TCG_COND_LT;
1456        break;
1457    }
1458
1459 done:
1460    if ((cond & 1) == 0) {
1461        tcond = tcg_invert_cond(tcond);
1462    }
1463    c->tcond = tcond;
1464}
1465
1466static void free_cond(DisasCompare *c)
1467{
1468    if (!c->g1) {
1469        tcg_temp_free(c->v1);
1470    }
1471    if (!c->g2) {
1472        tcg_temp_free(c->v2);
1473    }
1474}
1475
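/* Emit a branch to L1 that is taken when condition COND holds. */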
1476static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
1477{
1478    DisasCompare c;
1479
1480    gen_cc_cond(&c, s, cond);
1481    update_cc_op(s);
1482    tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
1483    free_cond(&c);
1484}
1485
1486/* Force a TB lookup after an instruction that changes the CPU state.  */
1487static void gen_exit_tb(DisasContext *s)
1488{
1489    update_cc_op(s);
1490    tcg_gen_movi_i32(QREG_PC, s->pc);
1491    s->base.is_jmp = DISAS_EXIT;
1492}
1493
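/*
 * Wrappers around gen_ea() for DISAS_INSN handlers: load from or store to
 * the EA encoded in INSN, bailing out with an address fault on an invalid
 * addressing mode.
 */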
1494#define SRC_EA(env, result, opsize, op_sign, addrp) do {                \
1495        result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp,         \
1496                        op_sign ? EA_LOADS : EA_LOADU, IS_USER(s));     \
1497        if (IS_NULL_QREG(result)) {                                     \
1498            gen_addr_fault(s);                                          \
1499            return;                                                     \
1500        }                                                               \
1501    } while (0)
1502
1503#define DEST_EA(env, insn, opsize, val, addrp) do {                     \
1504        TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp,       \
1505                                EA_STORE, IS_USER(s));                  \
1506        if (IS_NULL_QREG(ea_result)) {                                  \
1507            gen_addr_fault(s);                                          \
1508            return;                                                     \
1509        }                                                               \
1510    } while (0)
1511
1512/* Generate a jump to an immediate address, chaining to the next TB when possible.  */
1513static void gen_jmp_tb(DisasContext *s, int n, target_ulong dest,
1514                       target_ulong src)
1515{
1516    if (unlikely(s->ss_active)) {
1517        update_cc_op(s);
1518        tcg_gen_movi_i32(QREG_PC, dest);
1519        gen_raise_exception_format2(s, EXCP_TRACE, src);
1520    } else if (translator_use_goto_tb(&s->base, dest)) {
1521        tcg_gen_goto_tb(n);
1522        tcg_gen_movi_i32(QREG_PC, dest);
1523        tcg_gen_exit_tb(s->base.tb, n);
1524    } else {
1525        gen_jmp_im(s, dest);
1526        tcg_gen_exit_tb(NULL, 0);
1527    }
1528    s->base.is_jmp = DISAS_NORETURN;
1529}
1530
1531DISAS_INSN(scc)
1532{
1533    DisasCompare c;
1534    int cond;
1535    TCGv tmp;
1536
1537    cond = (insn >> 8) & 0xf;
1538    gen_cc_cond(&c, s, cond);
1539
1540    tmp = tcg_temp_new();
1541    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
1542    free_cond(&c);
1543
1544    tcg_gen_neg_i32(tmp, tmp);
1545    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
1546    tcg_temp_free(tmp);
1547}
1548
1549DISAS_INSN(dbcc)
1550{
1551    TCGLabel *l1;
1552    TCGv reg;
1553    TCGv tmp;
1554    int16_t offset;
1555    uint32_t base;
1556
1557    reg = DREG(insn, 0);
1558    base = s->pc;
1559    offset = (int16_t)read_im16(env, s);
1560    l1 = gen_new_label();
1561    gen_jmpcc(s, (insn >> 8) & 0xf, l1);
1562
1563    tmp = tcg_temp_new();
1564    tcg_gen_ext16s_i32(tmp, reg);
1565    tcg_gen_addi_i32(tmp, tmp, -1);
1566    gen_partset_reg(OS_WORD, reg, tmp);
1567    tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, -1, l1);
1568    gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
1569    gen_set_label(l1);
1570    gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
1571}
1572
1573DISAS_INSN(undef_mac)
1574{
1575    gen_exception(s, s->base.pc_next, EXCP_LINEA);
1576}
1577
1578DISAS_INSN(undef_fpu)
1579{
1580    gen_exception(s, s->base.pc_next, EXCP_LINEF);
1581}
1582
1583DISAS_INSN(undef)
1584{
1585    /*
1586     * ??? This covers both instructions that are as yet unimplemented
1587     * for the 680x0 series, and those that are implemented but
1588     * actually illegal for CPU32 or pre-68020.
1589     */
1590    qemu_log_mask(LOG_UNIMP, "Illegal instruction: %04x @ %08x\n",
1591                  insn, s->base.pc_next);
1592    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1593}
1594
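/* mulu.w/muls.w <EA>,Dn : 16 x 16 -> 32 */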
1595DISAS_INSN(mulw)
1596{
1597    TCGv reg;
1598    TCGv tmp;
1599    TCGv src;
1600    int sign;
1601
1602    sign = (insn & 0x100) != 0;
1603    reg = DREG(insn, 9);
1604    tmp = tcg_temp_new();
1605    if (sign)
1606        tcg_gen_ext16s_i32(tmp, reg);
1607    else
1608        tcg_gen_ext16u_i32(tmp, reg);
1609    SRC_EA(env, src, OS_WORD, sign, NULL);
1610    tcg_gen_mul_i32(tmp, tmp, src);
1611    tcg_gen_mov_i32(reg, tmp);
1612    gen_logic_cc(s, tmp, OS_LONG);
1613    tcg_temp_free(tmp);
1614}
1615
1616DISAS_INSN(divw)
1617{
1618    int sign;
1619    TCGv src;
1620    TCGv destr;
1621    TCGv ilen;
1622
1623    /* divX.w <EA>,Dn    32/16 -> 16r:16q */
1624
1625    sign = (insn & 0x100) != 0;
1626
1627    /* dest.l / src.w */
1628
1629    SRC_EA(env, src, OS_WORD, sign, NULL);
1630    destr = tcg_constant_i32(REG(insn, 9));
1631    ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1632    if (sign) {
1633        gen_helper_divsw(cpu_env, destr, src, ilen);
1634    } else {
1635        gen_helper_divuw(cpu_env, destr, src, ilen);
1636    }
1637
1638    set_cc_op(s, CC_OP_FLAGS);
1639}
1640
1641DISAS_INSN(divl)
1642{
1643    TCGv num, reg, den, ilen;
1644    int sign;
1645    uint16_t ext;
1646
1647    ext = read_im16(env, s);
1648
1649    sign = (ext & 0x0800) != 0;
1650
1651    if (ext & 0x400) {
1652        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
1653            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
1654            return;
1655        }
1656
1657        /* divX.l <EA>, Dr:Dq    64/32 -> 32r:32q */
1658
1659        SRC_EA(env, den, OS_LONG, 0, NULL);
1660        num = tcg_constant_i32(REG(ext, 12));
1661        reg = tcg_constant_i32(REG(ext, 0));
1662        ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1663        if (sign) {
1664            gen_helper_divsll(cpu_env, num, reg, den, ilen);
1665        } else {
1666            gen_helper_divull(cpu_env, num, reg, den, ilen);
1667        }
1668        set_cc_op(s, CC_OP_FLAGS);
1669        return;
1670    }
1671
1672    /* divX.l <EA>, Dq        32/32 -> 32q     */
1673    /* divXl.l <EA>, Dr:Dq    32/32 -> 32r:32q */
1674
1675    SRC_EA(env, den, OS_LONG, 0, NULL);
1676    num = tcg_constant_i32(REG(ext, 12));
1677    reg = tcg_constant_i32(REG(ext, 0));
1678    ilen = tcg_constant_i32(s->pc - s->base.pc_next);
1679    if (sign) {
1680        gen_helper_divsl(cpu_env, num, reg, den, ilen);
1681    } else {
1682        gen_helper_divul(cpu_env, num, reg, den, ilen);
1683    }
1684
1685    set_cc_op(s, CC_OP_FLAGS);
1686}
1687
1688static void bcd_add(TCGv dest, TCGv src)
1689{
1690    TCGv t0, t1;
1691
1692    /*
1693     * dest10 = dest10 + src10 + X
1694     *
1695     *        t1 = src
1696     *        t2 = t1 + 0x066
1697     *        t3 = t2 + dest + X
1698     *        t4 = t2 ^ dest
1699     *        t5 = t3 ^ t4
1700     *        t6 = ~t5 & 0x110
1701     *        t7 = (t6 >> 2) | (t6 >> 3)
1702     *        return t3 - t7
1703     */
1704
1705    /*
1706     * t1 = (src + 0x066) + dest + X
1707     *    = result, possibly with an excess 0x6 in digits that did not carry
1708     */
1709
1710    t0 = tcg_const_i32(0x066);
1711    tcg_gen_add_i32(t0, t0, src);
1712
1713    t1 = tcg_temp_new();
1714    tcg_gen_add_i32(t1, t0, dest);
1715    tcg_gen_add_i32(t1, t1, QREG_CC_X);
1716
1717    /* we will remove the excess 0x6 where there is no carry */
1718
1719    /*
1720     * t0 = (src + 0x0066) ^ dest
1721     *    = t1 without carries
1722     */
1723
1724    tcg_gen_xor_i32(t0, t0, dest);
1725
1726    /*
1727     * extract the carries
1728     * t0 = t0 ^ t1
1729     *    = only the carries
1730     */
1731
1732    tcg_gen_xor_i32(t0, t0, t1);
1733
1734    /*
1735     * generate 0x1 where there is no carry
1736     * and for each 0x10, generate a 0x6
1737     */
1738
1739    tcg_gen_shri_i32(t0, t0, 3);
1740    tcg_gen_not_i32(t0, t0);
1741    tcg_gen_andi_i32(t0, t0, 0x22);
1742    tcg_gen_add_i32(dest, t0, t0);
1743    tcg_gen_add_i32(dest, dest, t0);
1744    tcg_temp_free(t0);
1745
1746    /*
1747     * remove the excess 0x6
1748     * from digits that have not generated a carry
1749     */
1750
1751    tcg_gen_sub_i32(dest, t1, dest);
1752    tcg_temp_free(t1);
1753}
1754
1755static void bcd_sub(TCGv dest, TCGv src)
1756{
1757    TCGv t0, t1, t2;
1758
1759    /*
1760     *  dest10 = dest10 - src10 - X
1761     *         = bcd_add(dest + 1 - X, 0x199 - src)
1762     */
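    /*
     * 0x199 - src is 0x100 plus the digit-wise nine's complement of src
     * (0x99 - src never borrows between nibbles for valid BCD digits);
     * the extra 0x100 makes bit 8 of the final sum report the borrow.
     *
     * Worked example (illustrative): dest = 0x25, src = 0x47, X = 0
     *
     *        t0 = 0x066 + (0x199 - 0x47) = 0x1b8
     *        t1 = 0x1b8 + 0x25 + 1 - 0   = 0x1de
     *        excess-6 correction         = 0x066
     *        t1 - 0x066                  = 0x178
     *
     * i.e. BCD 25 - 47 = 78 with the borrow in bit 8.
     */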
1763
1764    /* t0 = 0x066 + (0x199 - src) */
1765
1766    t0 = tcg_temp_new();
1767    tcg_gen_subfi_i32(t0, 0x1ff, src);
1768
1769    /* t1 = t0 + dest + 1 - X */
1770
1771    t1 = tcg_temp_new();
1772    tcg_gen_add_i32(t1, t0, dest);
1773    tcg_gen_addi_i32(t1, t1, 1);
1774    tcg_gen_sub_i32(t1, t1, QREG_CC_X);
1775
1776    /* t2 = t0 ^ dest */
1777
1778    t2 = tcg_temp_new();
1779    tcg_gen_xor_i32(t2, t0, dest);
1780
1781    /* t0 = t1 ^ t2 */
1782
1783    tcg_gen_xor_i32(t0, t1, t2);
1784
1785    /*
1786     * t2 = ~t0 & 0x110
1787     * t0 = (t2 >> 2) | (t2 >> 3)
1788     *
1789     * to fit in 8-bit operands, this is rewritten as:
1790     *
1791     * t2 = ~(t0 >> 3) & 0x22
1792     * t0 = t2 + t2
1793     * t0 = t0 + t2
1794     */
1795
1796    tcg_gen_shri_i32(t2, t0, 3);
1797    tcg_gen_not_i32(t2, t2);
1798    tcg_gen_andi_i32(t2, t2, 0x22);
1799    tcg_gen_add_i32(t0, t2, t2);
1800    tcg_gen_add_i32(t0, t0, t2);
1801    tcg_temp_free(t2);
1802
1803    /* return t1 - t0 */
1804
1805    tcg_gen_sub_i32(dest, t1, t0);
1806    tcg_temp_free(t0);
1807    tcg_temp_free(t1);
1808}
1809
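/*
 * Set the flags for a BCD result in VAL: Z is cleared if the low byte is
 * non-zero and left unchanged otherwise (the sticky !Z rule), while the
 * decimal carry/borrow in bit 8 is copied into both C and X.
 */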
1810static void bcd_flags(TCGv val)
1811{
1812    tcg_gen_andi_i32(QREG_CC_C, val, 0x0ff);
1813    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_C);
1814
1815    tcg_gen_extract_i32(QREG_CC_C, val, 8, 1);
1816
1817    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
1818}
1819
1820DISAS_INSN(abcd_reg)
1821{
1822    TCGv src;
1823    TCGv dest;
1824
1825    gen_flush_flags(s); /* !Z is sticky */
1826
1827    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1828    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1829    bcd_add(dest, src);
1830    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1831
1832    bcd_flags(dest);
1833}
1834
1835DISAS_INSN(abcd_mem)
1836{
1837    TCGv src, dest, addr;
1838
1839    gen_flush_flags(s); /* !Z is sticky */
1840
1841    /* Indirect pre-decrement load (mode 4) */
1842
1843    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1844                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1845    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1846                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1847
1848    bcd_add(dest, src);
1849
1850    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1851                EA_STORE, IS_USER(s));
1852
1853    bcd_flags(dest);
1854}
1855
1856DISAS_INSN(sbcd_reg)
1857{
1858    TCGv src, dest;
1859
1860    gen_flush_flags(s); /* !Z is sticky */
1861
1862    src = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
1863    dest = gen_extend(s, DREG(insn, 9), OS_BYTE, 0);
1864
1865    bcd_sub(dest, src);
1866
1867    gen_partset_reg(OS_BYTE, DREG(insn, 9), dest);
1868
1869    bcd_flags(dest);
1870}
1871
1872DISAS_INSN(sbcd_mem)
1873{
1874    TCGv src, dest, addr;
1875
1876    gen_flush_flags(s); /* !Z is sticky */
1877
1878    /* Indirect pre-decrement load (mode 4) */
1879
1880    src = gen_ea_mode(env, s, 4, REG(insn, 0), OS_BYTE,
1881                      NULL_QREG, NULL, EA_LOADU, IS_USER(s));
1882    dest = gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE,
1883                       NULL_QREG, &addr, EA_LOADU, IS_USER(s));
1884
1885    bcd_sub(dest, src);
1886
1887    gen_ea_mode(env, s, 4, REG(insn, 9), OS_BYTE, dest, &addr,
1888                EA_STORE, IS_USER(s));
1889
1890    bcd_flags(dest);
1891}
1892
1893DISAS_INSN(nbcd)
1894{
1895    TCGv src, dest;
1896    TCGv addr;
1897
1898    gen_flush_flags(s); /* !Z is sticky */
1899
1900    SRC_EA(env, src, OS_BYTE, 0, &addr);
1901
1902    dest = tcg_const_i32(0);
1903    bcd_sub(dest, src);
1904
1905    DEST_EA(env, insn, OS_BYTE, dest, &addr);
1906
1907    bcd_flags(dest);
1908
1909    tcg_temp_free(dest);
1910}
1911
1912DISAS_INSN(addsub)
1913{
1914    TCGv reg;
1915    TCGv dest;
1916    TCGv src;
1917    TCGv tmp;
1918    TCGv addr;
1919    int add;
1920    int opsize;
1921
1922    add = (insn & 0x4000) != 0;
1923    opsize = insn_opsize(insn);
1924    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
1925    dest = tcg_temp_new();
1926    if (insn & 0x100) {
1927        SRC_EA(env, tmp, opsize, 1, &addr);
1928        src = reg;
1929    } else {
1930        tmp = reg;
1931        SRC_EA(env, src, opsize, 1, NULL);
1932    }
1933    if (add) {
1934        tcg_gen_add_i32(dest, tmp, src);
1935        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, src);
1936        set_cc_op(s, CC_OP_ADDB + opsize);
1937    } else {
1938        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, tmp, src);
1939        tcg_gen_sub_i32(dest, tmp, src);
1940        set_cc_op(s, CC_OP_SUBB + opsize);
1941    }
1942    gen_update_cc_add(dest, src, opsize);
1943    if (insn & 0x100) {
1944        DEST_EA(env, insn, opsize, dest, &addr);
1945    } else {
1946        gen_partset_reg(opsize, DREG(insn, 9), dest);
1947    }
1948    tcg_temp_free(dest);
1949}
1950
1951/* Reverse the order of the bits in REG.  */
1952DISAS_INSN(bitrev)
1953{
1954    TCGv reg;
1955    reg = DREG(insn, 0);
1956    gen_helper_bitrev(reg, reg);
1957}
1958
1959DISAS_INSN(bitop_reg)
1960{
1961    int opsize;
1962    int op;
1963    TCGv src1;
1964    TCGv src2;
1965    TCGv tmp;
1966    TCGv addr;
1967    TCGv dest;
1968
1969    if ((insn & 0x38) != 0)
1970        opsize = OS_BYTE;
1971    else
1972        opsize = OS_LONG;
1973    op = (insn >> 6) & 3;
1974    SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
1975
1976    gen_flush_flags(s);
1977    src2 = tcg_temp_new();
1978    if (opsize == OS_BYTE)
1979        tcg_gen_andi_i32(src2, DREG(insn, 9), 7);
1980    else
1981        tcg_gen_andi_i32(src2, DREG(insn, 9), 31);
1982
1983    tmp = tcg_const_i32(1);
1984    tcg_gen_shl_i32(tmp, tmp, src2);
1985    tcg_temp_free(src2);
1986
1987    tcg_gen_and_i32(QREG_CC_Z, src1, tmp);
1988
1989    dest = tcg_temp_new();
1990    switch (op) {
1991    case 1: /* bchg */
1992        tcg_gen_xor_i32(dest, src1, tmp);
1993        break;
1994    case 2: /* bclr */
1995        tcg_gen_andc_i32(dest, src1, tmp);
1996        break;
1997    case 3: /* bset */
1998        tcg_gen_or_i32(dest, src1, tmp);
1999        break;
2000    default: /* btst */
2001        break;
2002    }
2003    tcg_temp_free(tmp);
2004    if (op) {
2005        DEST_EA(env, insn, opsize, dest, &addr);
2006    }
2007    tcg_temp_free(dest);
2008}
2009
2010DISAS_INSN(sats)
2011{
2012    TCGv reg;
2013    reg = DREG(insn, 0);
2014    gen_flush_flags(s);
2015    gen_helper_sats(reg, reg, QREG_CC_V);
2016    gen_logic_cc(s, reg, OS_LONG);
2017}
2018
2019static void gen_push(DisasContext *s, TCGv val)
2020{
2021    TCGv tmp;
2022
2023    tmp = tcg_temp_new();
2024    tcg_gen_subi_i32(tmp, QREG_SP, 4);
2025    gen_store(s, OS_LONG, tmp, val, IS_USER(s));
2026    tcg_gen_mov_i32(QREG_SP, tmp);
2027    tcg_temp_free(tmp);
2028}
2029
2030static TCGv mreg(int reg)
2031{
2032    if (reg < 8) {
2033        /* Dx */
2034        return cpu_dregs[reg];
2035    }
2036    /* Ax */
2037    return cpu_aregs[reg & 7];
2038}
2039
2040DISAS_INSN(movem)
2041{
2042    TCGv addr, incr, tmp, r[16];
2043    int is_load = (insn & 0x0400) != 0;
2044    int opsize = (insn & 0x40) != 0 ? OS_LONG : OS_WORD;
2045    uint16_t mask = read_im16(env, s);
2046    int mode = extract32(insn, 3, 3);
2047    int reg0 = REG(insn, 0);
2048    int i;
2049
2050    tmp = cpu_aregs[reg0];
2051
2052    switch (mode) {
2053    case 0: /* data register direct */
2054    case 1: /* addr register direct */
2055    do_addr_fault:
2056        gen_addr_fault(s);
2057        return;
2058
2059    case 2: /* indirect */
2060        break;
2061
2062    case 3: /* indirect post-increment */
2063        if (!is_load) {
2064            /* post-increment is not allowed */
2065            goto do_addr_fault;
2066        }
2067        break;
2068
2069    case 4: /* indirect pre-decrement */
2070        if (is_load) {
2071            /* pre-decrement is not allowed */
2072            goto do_addr_fault;
2073        }
2074        /*
2075         * We want a bare copy of the address reg, without any pre-decrement
2076         * adjustment, as gen_lea would provide.
2077         */
2078        break;
2079
2080    default:
2081        tmp = gen_lea_mode(env, s, mode, reg0, opsize);
2082        if (IS_NULL_QREG(tmp)) {
2083            goto do_addr_fault;
2084        }
2085        break;
2086    }
2087
2088    addr = tcg_temp_new();
2089    tcg_gen_mov_i32(addr, tmp);
2090    incr = tcg_const_i32(opsize_bytes(opsize));
2091
2092    if (is_load) {
2093        /* memory to register */
2094        for (i = 0; i < 16; i++) {
2095            if (mask & (1 << i)) {
2096                r[i] = gen_load(s, opsize, addr, 1, IS_USER(s));
2097                tcg_gen_add_i32(addr, addr, incr);
2098            }
2099        }
2100        for (i = 0; i < 16; i++) {
2101            if (mask & (1 << i)) {
2102                tcg_gen_mov_i32(mreg(i), r[i]);
2103                tcg_temp_free(r[i]);
2104            }
2105        }
2106        if (mode == 3) {
2107            /* post-increment: movem (An)+,X */
2108            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2109        }
2110    } else {
2111        /* register to memory */
2112        if (mode == 4) {
2113            /* pre-decrement: movem X,-(An) */
2114            for (i = 15; i >= 0; i--) {
2115                if ((mask << i) & 0x8000) {
2116                    tcg_gen_sub_i32(addr, addr, incr);
2117                    if (reg0 + 8 == i &&
2118                        m68k_feature(s->env, M68K_FEATURE_EXT_FULL)) {
2119                        /*
2120                         * M68020+: if the addressing register is the
2121                         * register moved to memory, the value written
2122                         * is the initial value decremented by the size of
2123                         * the operation, regardless of how many actual
2124                         * stores have been performed until this point.
2125                         * M68000/M68010: the value is the initial value.
2126                         */
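                        /*
                         * e.g. (illustrative) with A2 = 0x1000,
                         * "movem.w a2,-(a2)" stores 0x0ffe here on a
                         * 68020+ but 0x1000 on a 68000/68010; A2 ends
                         * up as 0x0ffe either way.
                         */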
2127                        tmp = tcg_temp_new();
2128                        tcg_gen_sub_i32(tmp, cpu_aregs[reg0], incr);
2129                        gen_store(s, opsize, addr, tmp, IS_USER(s));
2130                        tcg_temp_free(tmp);
2131                    } else {
2132                        gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2133                    }
2134                }
2135            }
2136            tcg_gen_mov_i32(cpu_aregs[reg0], addr);
2137        } else {
2138            for (i = 0; i < 16; i++) {
2139                if (mask & (1 << i)) {
2140                    gen_store(s, opsize, addr, mreg(i), IS_USER(s));
2141                    tcg_gen_add_i32(addr, addr, incr);
2142                }
2143            }
2144        }
2145    }
2146
2147    tcg_temp_free(incr);
2148    tcg_temp_free(addr);
2149}
2150
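/*
 * movep: transfer a word or long between a data register and alternate
 * bytes of memory (every other byte, most significant byte first),
 * starting at (An) + d16.  Historically used to talk to 8-bit peripherals
 * on a 16-bit bus.
 */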
2151DISAS_INSN(movep)
2152{
2153    uint8_t i;
2154    int16_t displ;
2155    TCGv reg;
2156    TCGv addr;
2157    TCGv abuf;
2158    TCGv dbuf;
2159
2160    displ = read_im16(env, s);
2161
2162    addr = AREG(insn, 0);
2163    reg = DREG(insn, 9);
2164
2165    abuf = tcg_temp_new();
2166    tcg_gen_addi_i32(abuf, addr, displ);
2167    dbuf = tcg_temp_new();
2168
2169    if (insn & 0x40) {
2170        i = 4;
2171    } else {
2172        i = 2;
2173    }
2174
2175    if (insn & 0x80) {
2176        for ( ; i > 0 ; i--) {
2177            tcg_gen_shri_i32(dbuf, reg, (i - 1) * 8);
2178            tcg_gen_qemu_st8(dbuf, abuf, IS_USER(s));
2179            if (i > 1) {
2180                tcg_gen_addi_i32(abuf, abuf, 2);
2181            }
2182        }
2183    } else {
2184        for ( ; i > 0 ; i--) {
2185            tcg_gen_qemu_ld8u(dbuf, abuf, IS_USER(s));
2186            tcg_gen_deposit_i32(reg, reg, dbuf, (i - 1) * 8, 8);
2187            if (i > 1) {
2188                tcg_gen_addi_i32(abuf, abuf, 2);
2189            }
2190        }
2191    }
2192    tcg_temp_free(abuf);
2193    tcg_temp_free(dbuf);
2194}
2195
2196DISAS_INSN(bitop_im)
2197{
2198    int opsize;
2199    int op;
2200    TCGv src1;
2201    uint32_t mask;
2202    int bitnum;
2203    TCGv tmp;
2204    TCGv addr;
2205
2206    if ((insn & 0x38) != 0)
2207        opsize = OS_BYTE;
2208    else
2209        opsize = OS_LONG;
2210    op = (insn >> 6) & 3;
2211
2212    bitnum = read_im16(env, s);
2213    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2214        if (bitnum & 0xfe00) {
2215            disas_undef(env, s, insn);
2216            return;
2217        }
2218    } else {
2219        if (bitnum & 0xff00) {
2220            disas_undef(env, s, insn);
2221            return;
2222        }
2223    }
2224
2225    SRC_EA(env, src1, opsize, 0, op ? &addr : NULL);
2226
2227    gen_flush_flags(s);
2228    if (opsize == OS_BYTE)
2229        bitnum &= 7;
2230    else
2231        bitnum &= 31;
2232    mask = 1 << bitnum;
2233
2234    tcg_gen_andi_i32(QREG_CC_Z, src1, mask);
2235
2236    if (op) {
2237        tmp = tcg_temp_new();
2238        switch (op) {
2239        case 1: /* bchg */
2240            tcg_gen_xori_i32(tmp, src1, mask);
2241            break;
2242        case 2: /* bclr */
2243            tcg_gen_andi_i32(tmp, src1, ~mask);
2244            break;
2245        case 3: /* bset */
2246            tcg_gen_ori_i32(tmp, src1, mask);
2247            break;
2248        default: /* btst */
2249            break;
2250        }
2251        DEST_EA(env, insn, opsize, tmp, &addr);
2252        tcg_temp_free(tmp);
2253    }
2254}
2255
2256static TCGv gen_get_ccr(DisasContext *s)
2257{
2258    TCGv dest;
2259
2260    update_cc_op(s);
2261    dest = tcg_temp_new();
2262    gen_helper_get_ccr(dest, cpu_env);
2263    return dest;
2264}
2265
2266static TCGv gen_get_sr(DisasContext *s)
2267{
2268    TCGv ccr;
2269    TCGv sr;
2270
2271    ccr = gen_get_ccr(s);
2272    sr = tcg_temp_new();
2273    tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
2274    tcg_gen_or_i32(sr, sr, ccr);
2275    tcg_temp_free(ccr);
2276    return sr;
2277}
2278
2279static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
2280{
2281    if (ccr_only) {
2282        tcg_gen_movi_i32(QREG_CC_C, val & CCF_C ? 1 : 0);
2283        tcg_gen_movi_i32(QREG_CC_V, val & CCF_V ? -1 : 0);
2284        tcg_gen_movi_i32(QREG_CC_Z, val & CCF_Z ? 0 : 1);
2285        tcg_gen_movi_i32(QREG_CC_N, val & CCF_N ? -1 : 0);
2286        tcg_gen_movi_i32(QREG_CC_X, val & CCF_X ? 1 : 0);
2287    } else {
2288        TCGv sr = tcg_const_i32(val);
2289        gen_helper_set_sr(cpu_env, sr);
2290        tcg_temp_free(sr);
2291    }
2292    set_cc_op(s, CC_OP_FLAGS);
2293}
2294
2295static void gen_set_sr(DisasContext *s, TCGv val, int ccr_only)
2296{
2297    if (ccr_only) {
2298        gen_helper_set_ccr(cpu_env, val);
2299    } else {
2300        gen_helper_set_sr(cpu_env, val);
2301    }
2302    set_cc_op(s, CC_OP_FLAGS);
2303}
2304
2305static void gen_move_to_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
2306                           bool ccr_only)
2307{
2308    if ((insn & 0x3f) == 0x3c) {
2309        uint16_t val;
2310        val = read_im16(env, s);
2311        gen_set_sr_im(s, val, ccr_only);
2312    } else {
2313        TCGv src;
2314        SRC_EA(env, src, OS_WORD, 0, NULL);
2315        gen_set_sr(s, src, ccr_only);
2316    }
2317}
2318
2319DISAS_INSN(arith_im)
2320{
2321    int op;
2322    TCGv im;
2323    TCGv src1;
2324    TCGv dest;
2325    TCGv addr;
2326    int opsize;
2327    bool with_SR = ((insn & 0x3f) == 0x3c);
2328
2329    op = (insn >> 9) & 7;
2330    opsize = insn_opsize(insn);
2331    switch (opsize) {
2332    case OS_BYTE:
2333        im = tcg_const_i32((int8_t)read_im8(env, s));
2334        break;
2335    case OS_WORD:
2336        im = tcg_const_i32((int16_t)read_im16(env, s));
2337        break;
2338    case OS_LONG:
2339        im = tcg_const_i32(read_im32(env, s));
2340        break;
2341    default:
2342        g_assert_not_reached();
2343    }
2344
2345    if (with_SR) {
2346        /* SR/CCR can only be used with andi/eori/ori */
2347        if (op == 2 || op == 3 || op == 6) {
2348            disas_undef(env, s, insn);
2349            return;
2350        }
2351        switch (opsize) {
2352        case OS_BYTE:
2353            src1 = gen_get_ccr(s);
2354            break;
2355        case OS_WORD:
2356            if (IS_USER(s)) {
2357                gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2358                return;
2359            }
2360            src1 = gen_get_sr(s);
2361            break;
2362        default:
2363            /* OS_LONG; any other size already hit g_assert_not_reached above.  */
2364            disas_undef(env, s, insn);
2365            return;
2366        }
2367    } else {
2368        SRC_EA(env, src1, opsize, 1, (op == 6) ? NULL : &addr);
2369    }
2370    dest = tcg_temp_new();
2371    switch (op) {
2372    case 0: /* ori */
2373        tcg_gen_or_i32(dest, src1, im);
2374        if (with_SR) {
2375            gen_set_sr(s, dest, opsize == OS_BYTE);
2376        } else {
2377            DEST_EA(env, insn, opsize, dest, &addr);
2378            gen_logic_cc(s, dest, opsize);
2379        }
2380        break;
2381    case 1: /* andi */
2382        tcg_gen_and_i32(dest, src1, im);
2383        if (with_SR) {
2384            gen_set_sr(s, dest, opsize == OS_BYTE);
2385        } else {
2386            DEST_EA(env, insn, opsize, dest, &addr);
2387            gen_logic_cc(s, dest, opsize);
2388        }
2389        break;
2390    case 2: /* subi */
2391        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, src1, im);
2392        tcg_gen_sub_i32(dest, src1, im);
2393        gen_update_cc_add(dest, im, opsize);
2394        set_cc_op(s, CC_OP_SUBB + opsize);
2395        DEST_EA(env, insn, opsize, dest, &addr);
2396        break;
2397    case 3: /* addi */
2398        tcg_gen_add_i32(dest, src1, im);
2399        gen_update_cc_add(dest, im, opsize);
2400        tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, im);
2401        set_cc_op(s, CC_OP_ADDB + opsize);
2402        DEST_EA(env, insn, opsize, dest, &addr);
2403        break;
2404    case 5: /* eori */
2405        tcg_gen_xor_i32(dest, src1, im);
2406        if (with_SR) {
2407            gen_set_sr(s, dest, opsize == OS_BYTE);
2408        } else {
2409            DEST_EA(env, insn, opsize, dest, &addr);
2410            gen_logic_cc(s, dest, opsize);
2411        }
2412        break;
2413    case 6: /* cmpi */
2414        gen_update_cc_cmp(s, src1, im, opsize);
2415        break;
2416    default:
2417        abort();
2418    }
2419    tcg_temp_free(im);
2420    tcg_temp_free(dest);
2421}
2422
2423DISAS_INSN(cas)
2424{
2425    int opsize;
2426    TCGv addr;
2427    uint16_t ext;
2428    TCGv load;
2429    TCGv cmp;
2430    MemOp opc;
2431
2432    switch ((insn >> 9) & 3) {
2433    case 1:
2434        opsize = OS_BYTE;
2435        opc = MO_SB;
2436        break;
2437    case 2:
2438        opsize = OS_WORD;
2439        opc = MO_TESW;
2440        break;
2441    case 3:
2442        opsize = OS_LONG;
2443        opc = MO_TESL;
2444        break;
2445    default:
2446        g_assert_not_reached();
2447    }
2448
2449    ext = read_im16(env, s);
2450
2451    /* cas Dc,Du,<EA> */
2452
2453    addr = gen_lea(env, s, insn, opsize);
2454    if (IS_NULL_QREG(addr)) {
2455        gen_addr_fault(s);
2456        return;
2457    }
2458
2459    cmp = gen_extend(s, DREG(ext, 0), opsize, 1);
2460
2461    /*
2462     * if  <EA> == Dc then
2463     *     <EA> = Du
2464     *     Dc = <EA> (because <EA> == Dc)
2465     * else
2466     *     Dc = <EA>
2467     */
2468
2469    load = tcg_temp_new();
2470    tcg_gen_atomic_cmpxchg_i32(load, addr, cmp, DREG(ext, 6),
2471                               IS_USER(s), opc);
2472    /* update flags before Dc is overwritten with the loaded value */
2473    gen_update_cc_cmp(s, load, cmp, opsize);
2474    gen_partset_reg(opsize, DREG(ext, 0), load);
2475
2476    tcg_temp_free(load);
2477
2478    switch (extract32(insn, 3, 3)) {
2479    case 3: /* Indirect postincrement.  */
2480        tcg_gen_addi_i32(AREG(insn, 0), addr, opsize_bytes(opsize));
2481        break;
2482    case 4: /* Indirect predecrement.  */
2483        tcg_gen_mov_i32(AREG(insn, 0), addr);
2484        break;
2485    }
2486}
2487
2488DISAS_INSN(cas2w)
2489{
2490    uint16_t ext1, ext2;
2491    TCGv addr1, addr2;
2492    TCGv regs;
2493
2494    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2495
2496    ext1 = read_im16(env, s);
2497
2498    if (ext1 & 0x8000) {
2499        /* Address Register */
2500        addr1 = AREG(ext1, 12);
2501    } else {
2502        /* Data Register */
2503        addr1 = DREG(ext1, 12);
2504    }
2505
2506    ext2 = read_im16(env, s);
2507    if (ext2 & 0x8000) {
2508        /* Address Register */
2509        addr2 = AREG(ext2, 12);
2510    } else {
2511        /* Data Register */
2512        addr2 = DREG(ext2, 12);
2513    }
2514
2515    /*
2516     * if (R1) == Dc1 && (R2) == Dc2 then
2517     *     (R1) = Du1
2518     *     (R2) = Du2
2519     * else
2520     *     Dc1 = (R1)
2521     *     Dc2 = (R2)
2522     */
2523
2524    regs = tcg_const_i32(REG(ext2, 6) |
2525                         (REG(ext1, 6) << 3) |
2526                         (REG(ext2, 0) << 6) |
2527                         (REG(ext1, 0) << 9));
2528    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2529        gen_helper_exit_atomic(cpu_env);
2530    } else {
2531        gen_helper_cas2w(cpu_env, regs, addr1, addr2);
2532    }
2533    tcg_temp_free(regs);
2534
2535    /* Note that the cas2w helper has already assigned to env->cc_op.  */
2536    s->cc_op = CC_OP_CMPW;
2537    s->cc_op_synced = 1;
2538}
2539
2540DISAS_INSN(cas2l)
2541{
2542    uint16_t ext1, ext2;
2543    TCGv addr1, addr2, regs;
2544
2545    /* cas2 Dc1:Dc2,Du1:Du2,(Rn1):(Rn2) */
2546
2547    ext1 = read_im16(env, s);
2548
2549    if (ext1 & 0x8000) {
2550        /* Address Register */
2551        addr1 = AREG(ext1, 12);
2552    } else {
2553        /* Data Register */
2554        addr1 = DREG(ext1, 12);
2555    }
2556
2557    ext2 = read_im16(env, s);
2558    if (ext2 & 0x8000) {
2559        /* Address Register */
2560        addr2 = AREG(ext2, 12);
2561    } else {
2562        /* Data Register */
2563        addr2 = DREG(ext2, 12);
2564    }
2565
2566    /*
2567     * if (R1) == Dc1 && (R2) == Dc2 then
2568     *     (R1) = Du1
2569     *     (R2) = Du2
2570     * else
2571     *     Dc1 = (R1)
2572     *     Dc2 = (R2)
2573     */
2574
2575    regs = tcg_const_i32(REG(ext2, 6) |
2576                         (REG(ext1, 6) << 3) |
2577                         (REG(ext2, 0) << 6) |
2578                         (REG(ext1, 0) << 9));
2579    if (tb_cflags(s->base.tb) & CF_PARALLEL) {
2580        gen_helper_cas2l_parallel(cpu_env, regs, addr1, addr2);
2581    } else {
2582        gen_helper_cas2l(cpu_env, regs, addr1, addr2);
2583    }
2584    tcg_temp_free(regs);
2585
2586    /* Note that the cas2l helper has already assigned to env->cc_op.  */
2587    s->cc_op = CC_OP_CMPL;
2588    s->cc_op_synced = 1;
2589}
2590
2591DISAS_INSN(byterev)
2592{
2593    TCGv reg;
2594
2595    reg = DREG(insn, 0);
2596    tcg_gen_bswap32_i32(reg, reg);
2597}
2598
2599DISAS_INSN(move)
2600{
2601    TCGv src;
2602    TCGv dest;
2603    int op;
2604    int opsize;
2605
2606    switch (insn >> 12) {
2607    case 1: /* move.b */
2608        opsize = OS_BYTE;
2609        break;
2610    case 2: /* move.l */
2611        opsize = OS_LONG;
2612        break;
2613    case 3: /* move.w */
2614        opsize = OS_WORD;
2615        break;
2616    default:
2617        abort();
2618    }
2619    SRC_EA(env, src, opsize, 1, NULL);
2620    op = (insn >> 6) & 7;
2621    if (op == 1) {
2622        /* movea */
2623        /* The value will already have been sign extended.  */
2624        dest = AREG(insn, 9);
2625        tcg_gen_mov_i32(dest, src);
2626    } else {
2627        /* normal move */
2628        uint16_t dest_ea;
2629        dest_ea = ((insn >> 9) & 7) | (op << 3);
2630        DEST_EA(env, dest_ea, opsize, src, NULL);
2631        /* This will be correct because loads sign extend.  */
2632        gen_logic_cc(s, src, opsize);
2633    }
2634}
2635
2636DISAS_INSN(negx)
2637{
2638    TCGv z;
2639    TCGv src;
2640    TCGv addr;
2641    int opsize;
2642
2643    opsize = insn_opsize(insn);
2644    SRC_EA(env, src, opsize, 1, &addr);
2645
2646    gen_flush_flags(s); /* compute old Z */
2647
2648    /*
2649     * Perform subtract with borrow.
2650     * (X, N) =  -(src + X);
2651     */
2652
2653    z = tcg_const_i32(0);
2654    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, z, QREG_CC_X, z);
2655    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, z, z, QREG_CC_N, QREG_CC_X);
2656    tcg_temp_free(z);
2657    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
2658
2659    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
2660
2661    /*
2662     * Compute signed-overflow for negation.  The normal formula for
2663     * subtraction is (res ^ dest) & (dest ^ src), but with dest==0
2664     * this simplifies to res & src.
2665     */
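    /*
     * e.g. (illustrative, long operand) src = 0x80000000 with X = 0:
     * the result is also 0x80000000, so res & src has the sign bit set
     * and V is flagged; representable results leave it clear.
     */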
2666
2667    tcg_gen_and_i32(QREG_CC_V, QREG_CC_N, src);
2668
2669    /* Copy the rest of the results into place.  */
2670    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
2671    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
2672
2673    set_cc_op(s, CC_OP_FLAGS);
2674
2675    /* result is in QREG_CC_N */
2676
2677    DEST_EA(env, insn, opsize, QREG_CC_N, &addr);
2678}
2679
2680DISAS_INSN(lea)
2681{
2682    TCGv reg;
2683    TCGv tmp;
2684
2685    reg = AREG(insn, 9);
2686    tmp = gen_lea(env, s, insn, OS_LONG);
2687    if (IS_NULL_QREG(tmp)) {
2688        gen_addr_fault(s);
2689        return;
2690    }
2691    tcg_gen_mov_i32(reg, tmp);
2692}
2693
2694DISAS_INSN(clr)
2695{
2696    int opsize;
2697    TCGv zero;
2698
2699    zero = tcg_const_i32(0);
2700
2701    opsize = insn_opsize(insn);
2702    DEST_EA(env, insn, opsize, zero, NULL);
2703    gen_logic_cc(s, zero, opsize);
2704    tcg_temp_free(zero);
2705}
2706
2707DISAS_INSN(move_from_ccr)
2708{
2709    TCGv ccr;
2710
2711    ccr = gen_get_ccr(s);
2712    DEST_EA(env, insn, OS_WORD, ccr, NULL);
2713}
2714
2715DISAS_INSN(neg)
2716{
2717    TCGv src1;
2718    TCGv dest;
2719    TCGv addr;
2720    int opsize;
2721
2722    opsize = insn_opsize(insn);
2723    SRC_EA(env, src1, opsize, 1, &addr);
2724    dest = tcg_temp_new();
2725    tcg_gen_neg_i32(dest, src1);
2726    set_cc_op(s, CC_OP_SUBB + opsize);
2727    gen_update_cc_add(dest, src1, opsize);
2728    tcg_gen_setcondi_i32(TCG_COND_NE, QREG_CC_X, dest, 0);
2729    DEST_EA(env, insn, opsize, dest, &addr);
2730    tcg_temp_free(dest);
2731}
2732
2733DISAS_INSN(move_to_ccr)
2734{
2735    gen_move_to_sr(env, s, insn, true);
2736}
2737
2738DISAS_INSN(not)
2739{
2740    TCGv src1;
2741    TCGv dest;
2742    TCGv addr;
2743    int opsize;
2744
2745    opsize = insn_opsize(insn);
2746    SRC_EA(env, src1, opsize, 1, &addr);
2747    dest = tcg_temp_new();
2748    tcg_gen_not_i32(dest, src1);
2749    DEST_EA(env, insn, opsize, dest, &addr);
2750    gen_logic_cc(s, dest, opsize);
2751}
2752
2753DISAS_INSN(swap)
2754{
2755    TCGv src1;
2756    TCGv src2;
2757    TCGv reg;
2758
2759    src1 = tcg_temp_new();
2760    src2 = tcg_temp_new();
2761    reg = DREG(insn, 0);
2762    tcg_gen_shli_i32(src1, reg, 16);
2763    tcg_gen_shri_i32(src2, reg, 16);
2764    tcg_gen_or_i32(reg, src1, src2);
2765    tcg_temp_free(src2);
2766    tcg_temp_free(src1);
2767    gen_logic_cc(s, reg, OS_LONG);
2768}
2769
2770DISAS_INSN(bkpt)
2771{
2772    gen_exception(s, s->base.pc_next, EXCP_DEBUG);
2773}
2774
2775DISAS_INSN(pea)
2776{
2777    TCGv tmp;
2778
2779    tmp = gen_lea(env, s, insn, OS_LONG);
2780    if (IS_NULL_QREG(tmp)) {
2781        gen_addr_fault(s);
2782        return;
2783    }
2784    gen_push(s, tmp);
2785}
2786
2787DISAS_INSN(ext)
2788{
2789    int op;
2790    TCGv reg;
2791    TCGv tmp;
2792
2793    reg = DREG(insn, 0);
2794    op = (insn >> 6) & 7;
2795    tmp = tcg_temp_new();
2796    if (op == 3)
2797        tcg_gen_ext16s_i32(tmp, reg);
2798    else
2799        tcg_gen_ext8s_i32(tmp, reg);
2800    if (op == 2)
2801        gen_partset_reg(OS_WORD, reg, tmp);
2802    else
2803        tcg_gen_mov_i32(reg, tmp);
2804    gen_logic_cc(s, tmp, OS_LONG);
2805    tcg_temp_free(tmp);
2806}
2807
2808DISAS_INSN(tst)
2809{
2810    int opsize;
2811    TCGv tmp;
2812
2813    opsize = insn_opsize(insn);
2814    SRC_EA(env, tmp, opsize, 1, NULL);
2815    gen_logic_cc(s, tmp, opsize);
2816}
2817
2818DISAS_INSN(pulse)
2819{
2820    /* Implemented as a NOP.  */
2821}
2822
2823DISAS_INSN(illegal)
2824{
2825    gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2826}
2827
2828/* ??? This should be atomic.  */
2829DISAS_INSN(tas)
2830{
2831    TCGv dest;
2832    TCGv src1;
2833    TCGv addr;
2834
2835    dest = tcg_temp_new();
2836    SRC_EA(env, src1, OS_BYTE, 1, &addr);
2837    gen_logic_cc(s, src1, OS_BYTE);
2838    tcg_gen_ori_i32(dest, src1, 0x80);
2839    DEST_EA(env, insn, OS_BYTE, dest, &addr);
2840    tcg_temp_free(dest);
2841}
2842
2843DISAS_INSN(mull)
2844{
2845    uint16_t ext;
2846    TCGv src1;
2847    int sign;
2848
2849    ext = read_im16(env, s);
2850
2851    sign = ext & 0x800;
2852
2853    if (ext & 0x400) {
2854        if (!m68k_feature(s->env, M68K_FEATURE_QUAD_MULDIV)) {
2855            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
2856            return;
2857        }
2858
2859        SRC_EA(env, src1, OS_LONG, 0, NULL);
2860
2861        if (sign) {
2862            tcg_gen_muls2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2863        } else {
2864            tcg_gen_mulu2_i32(QREG_CC_Z, QREG_CC_N, src1, DREG(ext, 12));
2865        }
2866        /* if Dl == Dh, 68040 returns low word */
2867        tcg_gen_mov_i32(DREG(ext, 0), QREG_CC_N);
2868        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_Z);
2869        tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N);
2870
2871        tcg_gen_movi_i32(QREG_CC_V, 0);
2872        tcg_gen_movi_i32(QREG_CC_C, 0);
2873
2874        set_cc_op(s, CC_OP_FLAGS);
2875        return;
2876    }
2877    SRC_EA(env, src1, OS_LONG, 0, NULL);
2878    if (m68k_feature(s->env, M68K_FEATURE_M68000)) {
2879        tcg_gen_movi_i32(QREG_CC_C, 0);
2880        if (sign) {
2881            tcg_gen_muls2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2882            /* QREG_CC_V is -(QREG_CC_V != (QREG_CC_N >> 31)) */
2883            tcg_gen_sari_i32(QREG_CC_Z, QREG_CC_N, 31);
2884            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_Z);
2885        } else {
2886            tcg_gen_mulu2_i32(QREG_CC_N, QREG_CC_V, src1, DREG(ext, 12));
2887            /* QREG_CC_V is -(QREG_CC_V != 0), use QREG_CC_C as 0 */
2888            tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, QREG_CC_C);
2889        }
2890        tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
2891        tcg_gen_mov_i32(DREG(ext, 12), QREG_CC_N);
2892
2893        tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
2894
2895        set_cc_op(s, CC_OP_FLAGS);
2896    } else {
2897        /*
2898         * The upper 32 bits of the product are discarded, so
2899         * muls.l and mulu.l are functionally equivalent.
2900         */
2901        tcg_gen_mul_i32(DREG(ext, 12), src1, DREG(ext, 12));
2902        gen_logic_cc(s, DREG(ext, 12), OS_LONG);
2903    }
2904}
2905
2906static void gen_link(DisasContext *s, uint16_t insn, int32_t offset)
2907{
2908    TCGv reg;
2909    TCGv tmp;
2910
2911    reg = AREG(insn, 0);
2912    tmp = tcg_temp_new();
2913    tcg_gen_subi_i32(tmp, QREG_SP, 4);
2914    gen_store(s, OS_LONG, tmp, reg, IS_USER(s));
2915    if ((insn & 7) != 7) {
2916        tcg_gen_mov_i32(reg, tmp);
2917    }
2918    tcg_gen_addi_i32(QREG_SP, tmp, offset);
2919    tcg_temp_free(tmp);
2920}
2921
2922DISAS_INSN(link)
2923{
2924    int16_t offset;
2925
2926    offset = read_im16(env, s);
2927    gen_link(s, insn, offset);
2928}
2929
2930DISAS_INSN(linkl)
2931{
2932    int32_t offset;
2933
2934    offset = read_im32(env, s);
2935    gen_link(s, insn, offset);
2936}
2937
2938DISAS_INSN(unlk)
2939{
2940    TCGv src;
2941    TCGv reg;
2942    TCGv tmp;
2943
2944    src = tcg_temp_new();
2945    reg = AREG(insn, 0);
2946    tcg_gen_mov_i32(src, reg);
2947    tmp = gen_load(s, OS_LONG, src, 0, IS_USER(s));
2948    tcg_gen_mov_i32(reg, tmp);
2949    tcg_gen_addi_i32(QREG_SP, src, 4);
2950    tcg_temp_free(src);
2951    tcg_temp_free(tmp);
2952}
2953
2954#if defined(CONFIG_SOFTMMU)
2955DISAS_INSN(reset)
2956{
2957    if (IS_USER(s)) {
2958        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
2959        return;
2960    }
2961
2962    gen_helper_reset(cpu_env);
2963}
2964#endif
2965
2966DISAS_INSN(nop)
2967{
2968}
2969
2970DISAS_INSN(rtd)
2971{
2972    TCGv tmp;
2973    int16_t offset = read_im16(env, s);
2974
2975    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
2976    tcg_gen_addi_i32(QREG_SP, QREG_SP, offset + 4);
2977    gen_jmp(s, tmp);
2978}
2979
2980DISAS_INSN(rtr)
2981{
2982    TCGv tmp;
2983    TCGv ccr;
2984    TCGv sp;
2985
2986    sp = tcg_temp_new();
2987    ccr = gen_load(s, OS_WORD, QREG_SP, 0, IS_USER(s));
2988    tcg_gen_addi_i32(sp, QREG_SP, 2);
2989    tmp = gen_load(s, OS_LONG, sp, 0, IS_USER(s));
2990    tcg_gen_addi_i32(QREG_SP, sp, 4);
2991    tcg_temp_free(sp);
2992
2993    gen_set_sr(s, ccr, true);
2994    tcg_temp_free(ccr);
2995
2996    gen_jmp(s, tmp);
2997}
2998
2999DISAS_INSN(rts)
3000{
3001    TCGv tmp;
3002
3003    tmp = gen_load(s, OS_LONG, QREG_SP, 0, IS_USER(s));
3004    tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
3005    gen_jmp(s, tmp);
3006}
3007
3008DISAS_INSN(jump)
3009{
3010    TCGv tmp;
3011
3012    /*
3013     * Load the target address first to ensure correct exception
3014     * behavior.
3015     */
3016    tmp = gen_lea(env, s, insn, OS_LONG);
3017    if (IS_NULL_QREG(tmp)) {
3018        gen_addr_fault(s);
3019        return;
3020    }
3021    if ((insn & 0x40) == 0) {
3022        /* jsr */
3023        gen_push(s, tcg_const_i32(s->pc));
3024    }
3025    gen_jmp(s, tmp);
3026}
3027
3028DISAS_INSN(addsubq)
3029{
3030    TCGv src;
3031    TCGv dest;
3032    TCGv val;
3033    int imm;
3034    TCGv addr;
3035    int opsize;
3036
3037    if ((insn & 070) == 010) {
3038        /* Operation on address register is always long.  */
3039        opsize = OS_LONG;
3040    } else {
3041        opsize = insn_opsize(insn);
3042    }
3043    SRC_EA(env, src, opsize, 1, &addr);
3044    imm = (insn >> 9) & 7;
3045    if (imm == 0) {
3046        imm = 8;
3047    }
3048    val = tcg_const_i32(imm);
3049    dest = tcg_temp_new();
3050    tcg_gen_mov_i32(dest, src);
3051    if ((insn & 0x38) == 0x08) {
3052        /*
3053         * Don't update condition codes if the destination is an
3054         * address register.
3055         */
3056        if (insn & 0x0100) {
3057            tcg_gen_sub_i32(dest, dest, val);
3058        } else {
3059            tcg_gen_add_i32(dest, dest, val);
3060        }
3061    } else {
3062        if (insn & 0x0100) {
3063            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3064            tcg_gen_sub_i32(dest, dest, val);
3065            set_cc_op(s, CC_OP_SUBB + opsize);
3066        } else {
3067            tcg_gen_add_i32(dest, dest, val);
3068            tcg_gen_setcond_i32(TCG_COND_LTU, QREG_CC_X, dest, val);
3069            set_cc_op(s, CC_OP_ADDB + opsize);
3070        }
3071        gen_update_cc_add(dest, val, opsize);
3072    }
3073    tcg_temp_free(val);
3074    DEST_EA(env, insn, opsize, dest, &addr);
3075    tcg_temp_free(dest);
3076}
3077
3078DISAS_INSN(branch)
3079{
3080    int32_t offset;
3081    uint32_t base;
3082    int op;
3083
3084    base = s->pc;
3085    op = (insn >> 8) & 0xf;
3086    offset = (int8_t)insn;
3087    if (offset == 0) {
3088        offset = (int16_t)read_im16(env, s);
3089    } else if (offset == -1) {
3090        offset = read_im32(env, s);
3091    }
3092    if (op == 1) {
3093        /* bsr */
3094        gen_push(s, tcg_const_i32(s->pc));
3095    }
3096    if (op > 1) {
3097        /* Bcc */
3098        TCGLabel *l1 = gen_new_label();
3099        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
3100        gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
3101        gen_set_label(l1);
3102        gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
3103    } else {
3104        /* Unconditional branch.  */
3105        update_cc_op(s);
3106        gen_jmp_tb(s, 0, base + offset, s->base.pc_next);
3107    }
3108}
3109
3110DISAS_INSN(moveq)
3111{
3112    tcg_gen_movi_i32(DREG(insn, 9), (int8_t)insn);
3113    gen_logic_cc(s, DREG(insn, 9), OS_LONG);
3114}
3115
3116DISAS_INSN(mvzs)
3117{
3118    int opsize;
3119    TCGv src;
3120    TCGv reg;
3121
3122    if (insn & 0x40)
3123        opsize = OS_WORD;
3124    else
3125        opsize = OS_BYTE;
3126    SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
3127    reg = DREG(insn, 9);
3128    tcg_gen_mov_i32(reg, src);
3129    gen_logic_cc(s, src, opsize);
3130}
3131
3132DISAS_INSN(or)
3133{
3134    TCGv reg;
3135    TCGv dest;
3136    TCGv src;
3137    TCGv addr;
3138    int opsize;
3139
3140    opsize = insn_opsize(insn);
3141    reg = gen_extend(s, DREG(insn, 9), opsize, 0);
3142    dest = tcg_temp_new();
3143    if (insn & 0x100) {
3144        SRC_EA(env, src, opsize, 0, &addr);
3145        tcg_gen_or_i32(dest, src, reg);
3146        DEST_EA(env, insn, opsize, dest, &addr);
3147    } else {
3148        SRC_EA(env, src, opsize, 0, NULL);
3149        tcg_gen_or_i32(dest, src, reg);
3150        gen_partset_reg(opsize, DREG(insn, 9), dest);
3151    }
3152    gen_logic_cc(s, dest, opsize);
3153    tcg_temp_free(dest);
3154}
3155
3156DISAS_INSN(suba)
3157{
3158    TCGv src;
3159    TCGv reg;
3160
3161    SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3162    reg = AREG(insn, 9);
3163    tcg_gen_sub_i32(reg, reg, src);
3164}
3165
3166static inline void gen_subx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3167{
3168    TCGv tmp;
3169
3170    gen_flush_flags(s); /* compute old Z */
3171
3172    /*
3173     * Perform subtract with borrow.
3174     * (X, N) = dest - (src + X);
3175     */
3176
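    /*
     * The two double-word ops below first form (X:N) = src + X and then
     * compute (X:N) = dest - (X:N); once masked to bit 0 below, X is 1
     * exactly when dest < src + X unsigned, i.e. the borrow.
     */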
3177    tmp = tcg_const_i32(0);
3178    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, src, tmp, QREG_CC_X, tmp);
3179    tcg_gen_sub2_i32(QREG_CC_N, QREG_CC_X, dest, tmp, QREG_CC_N, QREG_CC_X);
3180    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3181    tcg_gen_andi_i32(QREG_CC_X, QREG_CC_X, 1);
3182
3183    /* Compute signed-overflow for subtract.  */
3184
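    /*
     * V = (res ^ dest) & (dest ^ src): set when the operands have
     * different signs and the result's sign differs from dest's.
     */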
3185    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, dest);
3186    tcg_gen_xor_i32(tmp, dest, src);
3187    tcg_gen_and_i32(QREG_CC_V, QREG_CC_V, tmp);
3188    tcg_temp_free(tmp);
3189
3190    /* Copy the rest of the results into place.  */
3191    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3192    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3193
3194    set_cc_op(s, CC_OP_FLAGS);
3195
3196    /* result is in QREG_CC_N */
3197}
3198
3199DISAS_INSN(subx_reg)
3200{
3201    TCGv dest;
3202    TCGv src;
3203    int opsize;
3204
3205    opsize = insn_opsize(insn);
3206
3207    src = gen_extend(s, DREG(insn, 0), opsize, 1);
3208    dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3209
3210    gen_subx(s, src, dest, opsize);
3211
3212    gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3213}
3214
3215DISAS_INSN(subx_mem)
3216{
3217    TCGv src;
3218    TCGv addr_src;
3219    TCGv dest;
3220    TCGv addr_dest;
3221    int opsize;
3222
3223    opsize = insn_opsize(insn);
3224
3225    addr_src = AREG(insn, 0);
3226    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3227    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3228
3229    addr_dest = AREG(insn, 9);
3230    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3231    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3232
3233    gen_subx(s, src, dest, opsize);
3234
3235    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3236
3237    tcg_temp_free(dest);
3238    tcg_temp_free(src);
3239}
3240
3241DISAS_INSN(mov3q)
3242{
3243    TCGv src;
3244    int val;
3245
3246    val = (insn >> 9) & 7;
3247    if (val == 0)
3248        val = -1;
3249    src = tcg_const_i32(val);
3250    gen_logic_cc(s, src, OS_LONG);
3251    DEST_EA(env, insn, OS_LONG, src, NULL);
3252    tcg_temp_free(src);
3253}
3254
3255DISAS_INSN(cmp)
3256{
3257    TCGv src;
3258    TCGv reg;
3259    int opsize;
3260
3261    opsize = insn_opsize(insn);
3262    SRC_EA(env, src, opsize, 1, NULL);
3263    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
3264    gen_update_cc_cmp(s, reg, src, opsize);
3265}
3266
3267DISAS_INSN(cmpa)
3268{
3269    int opsize;
3270    TCGv src;
3271    TCGv reg;
3272
3273    if (insn & 0x100) {
3274        opsize = OS_LONG;
3275    } else {
3276        opsize = OS_WORD;
3277    }
3278    SRC_EA(env, src, opsize, 1, NULL);
3279    reg = AREG(insn, 9);
3280    gen_update_cc_cmp(s, reg, src, OS_LONG);
3281}
3282
3283DISAS_INSN(cmpm)
3284{
3285    int opsize = insn_opsize(insn);
3286    TCGv src, dst;
3287
3288    /* Post-increment load (mode 3) from Ay.  */
3289    src = gen_ea_mode(env, s, 3, REG(insn, 0), opsize,
3290                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3291    /* Post-increment load (mode 3) from Ax.  */
3292    dst = gen_ea_mode(env, s, 3, REG(insn, 9), opsize,
3293                      NULL_QREG, NULL, EA_LOADS, IS_USER(s));
3294
3295    gen_update_cc_cmp(s, dst, src, opsize);
3296}
3297
3298DISAS_INSN(eor)
3299{
3300    TCGv src;
3301    TCGv dest;
3302    TCGv addr;
3303    int opsize;
3304
3305    opsize = insn_opsize(insn);
3306
3307    SRC_EA(env, src, opsize, 0, &addr);
3308    dest = tcg_temp_new();
3309    tcg_gen_xor_i32(dest, src, DREG(insn, 9));
3310    gen_logic_cc(s, dest, opsize);
3311    DEST_EA(env, insn, opsize, dest, &addr);
3312    tcg_temp_free(dest);
3313}
3314
3315static void do_exg(TCGv reg1, TCGv reg2)
3316{
3317    TCGv temp = tcg_temp_new();
3318    tcg_gen_mov_i32(temp, reg1);
3319    tcg_gen_mov_i32(reg1, reg2);
3320    tcg_gen_mov_i32(reg2, temp);
3321    tcg_temp_free(temp);
3322}
3323
3324DISAS_INSN(exg_dd)
3325{
3326    /* exchange Dx and Dy */
3327    do_exg(DREG(insn, 9), DREG(insn, 0));
3328}
3329
3330DISAS_INSN(exg_aa)
3331{
3332    /* exchange Ax and Ay */
3333    do_exg(AREG(insn, 9), AREG(insn, 0));
3334}
3335
3336DISAS_INSN(exg_da)
3337{
3338    /* exchange Dx and Ay */
3339    do_exg(DREG(insn, 9), AREG(insn, 0));
3340}
3341
3342DISAS_INSN(and)
3343{
3344    TCGv src;
3345    TCGv reg;
3346    TCGv dest;
3347    TCGv addr;
3348    int opsize;
3349
3350    dest = tcg_temp_new();
3351
3352    opsize = insn_opsize(insn);
3353    reg = DREG(insn, 9);
3354    if (insn & 0x100) {
3355        SRC_EA(env, src, opsize, 0, &addr);
3356        tcg_gen_and_i32(dest, src, reg);
3357        DEST_EA(env, insn, opsize, dest, &addr);
3358    } else {
3359        SRC_EA(env, src, opsize, 0, NULL);
3360        tcg_gen_and_i32(dest, src, reg);
3361        gen_partset_reg(opsize, reg, dest);
3362    }
3363    gen_logic_cc(s, dest, opsize);
3364    tcg_temp_free(dest);
3365}
3366
3367DISAS_INSN(adda)
3368{
3369    TCGv src;
3370    TCGv reg;
3371
3372    SRC_EA(env, src, (insn & 0x100) ? OS_LONG : OS_WORD, 1, NULL);
3373    reg = AREG(insn, 9);
3374    tcg_gen_add_i32(reg, reg, src);
3375}
3376
3377static inline void gen_addx(DisasContext *s, TCGv src, TCGv dest, int opsize)
3378{
3379    TCGv tmp;
3380
3381    gen_flush_flags(s); /* compute old Z */
3382
3383    /*
3384     * Perform addition with carry.
3385     * (X, N) = src + dest + X;
3386     */
3387
3388    tmp = tcg_const_i32(0);
3389    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_X, tmp, dest, tmp);
3390    tcg_gen_add2_i32(QREG_CC_N, QREG_CC_X, QREG_CC_N, QREG_CC_X, src, tmp);
3391    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3392
3393    /* Compute signed-overflow for addition.  */
3394
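    /*
     * V = (res ^ src) & ~(dest ^ src): set when both operands have the
     * same sign and the result's sign differs from it.
     */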
3395    tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3396    tcg_gen_xor_i32(tmp, dest, src);
3397    tcg_gen_andc_i32(QREG_CC_V, QREG_CC_V, tmp);
3398    tcg_temp_free(tmp);
3399
3400    /* Copy the rest of the results into place.  */
3401    tcg_gen_or_i32(QREG_CC_Z, QREG_CC_Z, QREG_CC_N); /* !Z is sticky */
3402    tcg_gen_mov_i32(QREG_CC_C, QREG_CC_X);
3403
3404    set_cc_op(s, CC_OP_FLAGS);
3405
3406    /* result is in QREG_CC_N */
3407}
3408
3409DISAS_INSN(addx_reg)
3410{
3411    TCGv dest;
3412    TCGv src;
3413    int opsize;
3414
3415    opsize = insn_opsize(insn);
3416
3417    dest = gen_extend(s, DREG(insn, 9), opsize, 1);
3418    src = gen_extend(s, DREG(insn, 0), opsize, 1);
3419
3420    gen_addx(s, src, dest, opsize);
3421
3422    gen_partset_reg(opsize, DREG(insn, 9), QREG_CC_N);
3423}
3424
3425DISAS_INSN(addx_mem)
3426{
3427    TCGv src;
3428    TCGv addr_src;
3429    TCGv dest;
3430    TCGv addr_dest;
3431    int opsize;
3432
3433    opsize = insn_opsize(insn);
3434
3435    addr_src = AREG(insn, 0);
3436    tcg_gen_subi_i32(addr_src, addr_src, opsize_bytes(opsize));
3437    src = gen_load(s, opsize, addr_src, 1, IS_USER(s));
3438
3439    addr_dest = AREG(insn, 9);
3440    tcg_gen_subi_i32(addr_dest, addr_dest, opsize_bytes(opsize));
3441    dest = gen_load(s, opsize, addr_dest, 1, IS_USER(s));
3442
3443    gen_addx(s, src, dest, opsize);
3444
3445    gen_store(s, opsize, addr_dest, QREG_CC_N, IS_USER(s));
3446
3447    tcg_temp_free(dest);
3448    tcg_temp_free(src);
3449}
3450
3451static inline void shift_im(DisasContext *s, uint16_t insn, int opsize)
3452{
3453    int count = (insn >> 9) & 7;
3454    int logical = insn & 8;
3455    int left = insn & 0x100;
3456    int bits = opsize_bytes(opsize) * 8;
3457    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3458
3459    if (count == 0) {
3460        count = 8;
3461    }
3462
3463    tcg_gen_movi_i32(QREG_CC_V, 0);
3464    if (left) {
3465        tcg_gen_shri_i32(QREG_CC_C, reg, bits - count);
3466        tcg_gen_shli_i32(QREG_CC_N, reg, count);
3467
3468        /*
3469         * Note that ColdFire always clears V (done above),
3470         * while M68000 sets V if the most significant bit is changed at
3471         * any time during the shift operation.
3472         */
3473        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3474            /* if shift count >= bits, V is (reg != 0) */
3475            if (count >= bits) {
3476                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, reg, QREG_CC_V);
3477            } else {
3478                TCGv t0 = tcg_temp_new();
3479                tcg_gen_sari_i32(QREG_CC_V, reg, bits - 1);
3480                tcg_gen_sari_i32(t0, reg, bits - count - 1);
3481                tcg_gen_setcond_i32(TCG_COND_NE, QREG_CC_V, QREG_CC_V, t0);
3482                tcg_temp_free(t0);
3483            }
3484            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3485        }
3486    } else {
3487        tcg_gen_shri_i32(QREG_CC_C, reg, count - 1);
3488        if (logical) {
3489            tcg_gen_shri_i32(QREG_CC_N, reg, count);
3490        } else {
3491            tcg_gen_sari_i32(QREG_CC_N, reg, count);
3492        }
3493    }
3494
3495    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3496    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3497    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3498    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3499
3500    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3501    set_cc_op(s, CC_OP_FLAGS);
3502}
3503
3504static inline void shift_reg(DisasContext *s, uint16_t insn, int opsize)
3505{
3506    int logical = insn & 8;
3507    int left = insn & 0x100;
3508    int bits = opsize_bytes(opsize) * 8;
3509    TCGv reg = gen_extend(s, DREG(insn, 0), opsize, !logical);
3510    TCGv s32;
3511    TCGv_i64 t64, s64;
3512
3513    t64 = tcg_temp_new_i64();
3514    s64 = tcg_temp_new_i64();
3515    s32 = tcg_temp_new();
3516
3517    /*
3518     * Note that m68k truncates the shift count modulo 64, not 32.
3519     * In addition, a 64-bit shift makes it easy to find "the last
3520     * bit shifted out", for the carry flag.
3521     */
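    /*
     * e.g. (illustrative) lsl.l/lsr.l with a count of 36 in Dx really
     * shifts by 36, leaving a zero result with C = X = 0, rather than
     * acting as a shift by 4.
     */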
3522    tcg_gen_andi_i32(s32, DREG(insn, 9), 63);
3523    tcg_gen_extu_i32_i64(s64, s32);
3524    tcg_gen_extu_i32_i64(t64, reg);
3525
3526    /* Optimistically set V=0.  Also used as a zero source below.  */
3527    tcg_gen_movi_i32(QREG_CC_V, 0);
3528    if (left) {
3529        tcg_gen_shl_i64(t64, t64, s64);
3530
3531        if (opsize == OS_LONG) {
3532            tcg_gen_extr_i64_i32(QREG_CC_N, QREG_CC_C, t64);
3533            /* Note that C=0 if shift count is 0, and we get that for free.  */
3534        } else {
3535            TCGv zero = tcg_const_i32(0);
3536            tcg_gen_extrl_i64_i32(QREG_CC_N, t64);
3537            tcg_gen_shri_i32(QREG_CC_C, QREG_CC_N, bits);
3538            tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3539                                s32, zero, zero, QREG_CC_C);
3540            tcg_temp_free(zero);
3541        }
3542        tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3543
3544        /* X = C, but only if the shift count was non-zero.  */
3545        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3546                            QREG_CC_C, QREG_CC_X);
3547
3548        /*
3549         * M68000 sets V if the most significant bit is changed at
3550         * any time during the shift operation.  Do this by creating
3551         * an extension of the sign bit, comparing, and discarding
3552         * the bits below the sign bit.  I.e.
3553         *     int64_t s = (intN_t)reg;
3554         *     int64_t t = (int64_t)(intN_t)reg << count;
3555         *     V = ((s ^ t) & (-1 << (bits - 1))) != 0
3556         */
3557        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3558            TCGv_i64 tt = tcg_const_i64(32);
3559            /* if shift is greater than 32, use 32 */
3560            tcg_gen_movcond_i64(TCG_COND_GT, s64, s64, tt, tt, s64);
3561            tcg_temp_free_i64(tt);
3562            /* Sign extend the input to 64 bits; re-do the shift.  */
3563            tcg_gen_ext_i32_i64(t64, reg);
3564            tcg_gen_shl_i64(s64, t64, s64);
3565            /* Clear all bits that are unchanged.  */
3566            tcg_gen_xor_i64(t64, t64, s64);
3567            /* Ignore the bits below the sign bit.  */
3568            tcg_gen_andi_i64(t64, t64, -1ULL << (bits - 1));
3569            /* If any bits remain set, we have overflow.  */
3570            tcg_gen_setcondi_i64(TCG_COND_NE, t64, t64, 0);
3571            tcg_gen_extrl_i64_i32(QREG_CC_V, t64);
3572            tcg_gen_neg_i32(QREG_CC_V, QREG_CC_V);
3573        }
3574    } else {
3575        tcg_gen_shli_i64(t64, t64, 32);
3576        if (logical) {
3577            tcg_gen_shr_i64(t64, t64, s64);
3578        } else {
3579            tcg_gen_sar_i64(t64, t64, s64);
3580        }
3581        tcg_gen_extr_i64_i32(QREG_CC_C, QREG_CC_N, t64);
3582
3583        /* Note that C=0 if shift count is 0, and we get that for free.  */
3584        tcg_gen_shri_i32(QREG_CC_C, QREG_CC_C, 31);
3585
3586        /* X = C, but only if the shift count was non-zero.  */
3587        tcg_gen_movcond_i32(TCG_COND_NE, QREG_CC_X, s32, QREG_CC_V,
3588                            QREG_CC_C, QREG_CC_X);
3589    }
3590    gen_ext(QREG_CC_N, QREG_CC_N, opsize, 1);
3591    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3592
3593    tcg_temp_free(s32);
3594    tcg_temp_free_i64(s64);
3595    tcg_temp_free_i64(t64);
3596
3597    /* Write back the result.  */
3598    gen_partset_reg(opsize, DREG(insn, 0), QREG_CC_N);
3599    set_cc_op(s, CC_OP_FLAGS);
3600}
3601
3602DISAS_INSN(shift8_im)
3603{
3604    shift_im(s, insn, OS_BYTE);
3605}
3606
3607DISAS_INSN(shift16_im)
3608{
3609    shift_im(s, insn, OS_WORD);
3610}
3611
3612DISAS_INSN(shift_im)
3613{
3614    shift_im(s, insn, OS_LONG);
3615}
3616
3617DISAS_INSN(shift8_reg)
3618{
3619    shift_reg(s, insn, OS_BYTE);
3620}
3621
3622DISAS_INSN(shift16_reg)
3623{
3624    shift_reg(s, insn, OS_WORD);
3625}
3626
3627DISAS_INSN(shift_reg)
3628{
3629    shift_reg(s, insn, OS_LONG);
3630}
3631
3632DISAS_INSN(shift_mem)
3633{
3634    int logical = insn & 8;
3635    int left = insn & 0x100;
3636    TCGv src;
3637    TCGv addr;
3638
3639    SRC_EA(env, src, OS_WORD, !logical, &addr);
3640    tcg_gen_movi_i32(QREG_CC_V, 0);
3641    if (left) {
3642        tcg_gen_shri_i32(QREG_CC_C, src, 15);
3643        tcg_gen_shli_i32(QREG_CC_N, src, 1);
3644
3645        /*
3646         * Note that ColdFire always clears V,
3647         * while M68000 sets V if the most significant bit is changed at
3648         * any time during the shift operation.
3649         */
3650        if (!logical && m68k_feature(s->env, M68K_FEATURE_M68000)) {
3651            src = gen_extend(s, src, OS_WORD, 1);
3652            tcg_gen_xor_i32(QREG_CC_V, QREG_CC_N, src);
3653        }
3654    } else {
3655        tcg_gen_mov_i32(QREG_CC_C, src);
3656        if (logical) {
3657            tcg_gen_shri_i32(QREG_CC_N, src, 1);
3658        } else {
3659            tcg_gen_sari_i32(QREG_CC_N, src, 1);
3660        }
3661    }
3662
3663    gen_ext(QREG_CC_N, QREG_CC_N, OS_WORD, 1);
3664    tcg_gen_andi_i32(QREG_CC_C, QREG_CC_C, 1);
3665    tcg_gen_mov_i32(QREG_CC_Z, QREG_CC_N);
3666    tcg_gen_mov_i32(QREG_CC_X, QREG_CC_C);
3667
3668    DEST_EA(env, insn, OS_WORD, QREG_CC_N, &addr);
3669    set_cc_op(s, CC_OP_FLAGS);
3670}
3671
3672static void rotate(TCGv reg, TCGv shift, int left, int size)
3673{
3674    switch (size) {
3675    case 8:
3676        /* Replicate the 8-bit input so that a 32-bit rotate works.  */
3677        tcg_gen_ext8u_i32(reg, reg);
3678        tcg_gen_muli_i32(reg, reg, 0x01010101);
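        /*
         * e.g. 0xd2 becomes 0xd2d2d2d2, so rotating the 32-bit value by
         * any count leaves the correctly rotated byte in bits 7:0.
         */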
3679        goto do_long;
3680    case 16:
3681        /* Replicate the 16-bit input so that a 32-bit rotate works.  */
3682        tcg_gen_deposit_i32(reg, reg, reg, 16, 16);
3683        goto do_long;
3684    do_long:
3685    default:
3686        if (left) {
3687            tcg_gen_rotl_i32(reg, reg, shift);
3688        } else {
3689            tcg_gen_rotr_i32(reg, reg, shift);
3690        }
3691    }
3692
3693    /* compute flags */
3694
3695    switch (size) {
3696    case 8:
3697        tcg_gen_ext8s_i32(reg, reg);
3698        break;
3699    case 16:
3700        tcg_gen_ext16s_i32(reg, reg);
3701        break;
3702    default:
3703        break;
3704    }
3705
3706    /* QREG_CC_X is not affected */
3707
3708    tcg_gen_mov_i32(QREG_CC_N, reg);
3709    tcg_gen_mov_i32(QREG_CC_Z, reg);
3710
3711    if (left) {
3712        tcg_gen_andi_i32(QREG_CC_C, reg, 1);
3713    } else {
3714        tcg_gen_shri_i32(QREG_CC_C, reg, 31);
3715    }
3716
3717    tcg_gen_movi_i32(QREG_CC_V, 0); /* always cleared */
3718}
3719
3720static void rotate_x_flags(TCGv reg, TCGv X, int size)
3721{
3722    switch (size) {
3723    case 8:
3724        tcg_gen_ext8s_i32(reg, reg);
3725        break;
3726    case 16:
3727        tcg_gen_ext16s_i32(reg, reg);
3728        break;
3729    default:
3730        break;
3731    }
3732    tcg_gen_mov_i32(QREG_CC_N, reg);
3733    tcg_gen_mov_i32(QREG_CC_Z, reg);
3734    tcg_gen_mov_i32(QREG_CC_X, X);
3735    tcg_gen_mov_i32(QREG_CC_C, X);
3736    tcg_gen_movi_i32(QREG_CC_V, 0);
3737}
3738
3739/* Result of rotate_x() is valid if 0 <= shift <= size */
3740static TCGv rotate_x(TCGv reg, TCGv shift, int left, int size)
3741{
3742    TCGv X, shl, shr, shx, sz, zero;
3743
3744    sz = tcg_const_i32(size);
3745
3746    shr = tcg_temp_new();
3747    shl = tcg_temp_new();
3748    shx = tcg_temp_new();
3749    if (left) {
3750        tcg_gen_mov_i32(shl, shift);      /* shl = shift */
3751        tcg_gen_movi_i32(shr, size + 1);
3752        tcg_gen_sub_i32(shr, shr, shift); /* shr = size + 1 - shift */
3753        tcg_gen_subi_i32(shx, shift, 1);  /* shx = shift - 1 */
3754        /* shx = shx < 0 ? size : shx; */
3755        zero = tcg_const_i32(0);
3756        tcg_gen_movcond_i32(TCG_COND_LT, shx, shx, zero, sz, shx);
3757        tcg_temp_free(zero);
3758    } else {
3759        tcg_gen_mov_i32(shr, shift);      /* shr = shift */
3760        tcg_gen_movi_i32(shl, size + 1);
3761        tcg_gen_sub_i32(shl, shl, shift); /* shl = size + 1 - shift */
3762        tcg_gen_sub_i32(shx, sz, shift); /* shx = size - shift */
3763    }
3764    tcg_temp_free_i32(sz);
3765
3766    /* reg = (reg << shl) | (reg >> shr) | (x << shx); */
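    /*
     * Worked example (illustrative): roxl.b #3 with reg = 0xc1, X = 1
     * gives shl = 3, shr = 6, shx = 2, so reg = 0x608 | 0x03 | 0x04 =
     * 0x60f: the new X is bit 8 (0), the byte result is 0x0f, and the
     * stray high bits are discarded when the caller writes the result
     * back at its operand size.
     */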
3767
3768    tcg_gen_shl_i32(shl, reg, shl);
3769    tcg_gen_shr_i32(shr, reg, shr);
3770    tcg_gen_or_i32(reg, shl, shr);
3771    tcg_temp_free(shl);
3772    tcg_temp_free(shr);
3773    tcg_gen_shl_i32(shx, QREG_CC_X, shx);
3774    tcg_gen_or_i32(reg, reg, shx);
3775    tcg_temp_free(shx);
3776
3777    /* X = (reg >> size) & 1 */
3778
3779    X = tcg_temp_new();
3780    tcg_gen_extract_i32(X, reg, size, 1);
3781
3782    return X;
3783}
3784
3785/* Result of rotate32_x() is valid if 0 <= shift < 33 */
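    /*
     * The 33-bit rotate of the operand together with X is implemented
     * with a single 64-bit rotate: the value and X are packed into a
     * 64-bit temporary, rotated, and the result and the new X are then
     * re-extracted.
     */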
3786static TCGv rotate32_x(TCGv reg, TCGv shift, int left)
3787{
3788    TCGv_i64 t0, shift64;
3789    TCGv X, lo, hi, zero;
3790
3791    shift64 = tcg_temp_new_i64();
3792    tcg_gen_extu_i32_i64(shift64, shift);
3793
3794    t0 = tcg_temp_new_i64();
3795
3796    X = tcg_temp_new();
3797    lo = tcg_temp_new();
3798    hi = tcg_temp_new();
3799
3800    if (left) {
3801        /* create [reg:X:..] */
3802
3803        tcg_gen_shli_i32(lo, QREG_CC_X, 31);
3804        tcg_gen_concat_i32_i64(t0, lo, reg);
3805
3806        /* rotate */
3807
3808        tcg_gen_rotl_i64(t0, t0, shift64);
3809        tcg_temp_free_i64(shift64);
3810
3811        /* result is [reg:..:reg:X] */
3812
3813        tcg_gen_extr_i64_i32(lo, hi, t0);
3814        tcg_gen_andi_i32(X, lo, 1);
3815
3816        tcg_gen_shri_i32(lo, lo, 1);
3817    } else {
3818        /* create [..:X:reg] */
3819
3820        tcg_gen_concat_i32_i64(t0, reg, QREG_CC_X);
3821
3822        tcg_gen_rotr_i64(t0, t0, shift64);
3823        tcg_temp_free_i64(shift64);
3824
3825        /* result is value: [X:reg:..:reg] */
3826
3827        tcg_gen_extr_i64_i32(lo, hi, t0);
3828
3829        /* extract X */
3830
3831        tcg_gen_shri_i32(X, hi, 31);
3832
3833        /* extract result */
3834
3835        tcg_gen_shli_i32(hi, hi, 1);
3836    }
3837    tcg_temp_free_i64(t0);
3838    tcg_gen_or_i32(lo, lo, hi);
3839    tcg_temp_free(hi);
3840
3841    /* if shift == 0, register and X are not affected */
3842
3843    zero = tcg_const_i32(0);
3844    tcg_gen_movcond_i32(TCG_COND_EQ, X, shift, zero, QREG_CC_X, X);
3845    tcg_gen_movcond_i32(TCG_COND_EQ, reg, shift, zero, reg, lo);
3846    tcg_temp_free(zero);
3847    tcg_temp_free(lo);
3848
3849    return X;
3850}
3851
3852DISAS_INSN(rotate_im)
3853{
3854    TCGv shift;
3855    int tmp;
3856    int left = (insn & 0x100);
3857
3858    tmp = (insn >> 9) & 7;
3859    if (tmp == 0) {
3860        tmp = 8;
3861    }
3862
3863    shift = tcg_const_i32(tmp);
3864    if (insn & 8) {
3865        rotate(DREG(insn, 0), shift, left, 32);
3866    } else {
3867        TCGv X = rotate32_x(DREG(insn, 0), shift, left);
3868        rotate_x_flags(DREG(insn, 0), X, 32);
3869        tcg_temp_free(X);
3870    }
3871    tcg_temp_free(shift);
3872
3873    set_cc_op(s, CC_OP_FLAGS);
3874}
3875
3876DISAS_INSN(rotate8_im)
3877{
3878    int left = (insn & 0x100);
3879    TCGv reg;
3880    TCGv shift;
3881    int tmp;
3882
3883    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3884
3885    tmp = (insn >> 9) & 7;
3886    if (tmp == 0) {
3887        tmp = 8;
3888    }
3889
3890    shift = tcg_const_i32(tmp);
3891    if (insn & 8) {
3892        rotate(reg, shift, left, 8);
3893    } else {
3894        TCGv X = rotate_x(reg, shift, left, 8);
3895        rotate_x_flags(reg, X, 8);
3896        tcg_temp_free(X);
3897    }
3898    tcg_temp_free(shift);
3899    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3900    set_cc_op(s, CC_OP_FLAGS);
3901}
3902
3903DISAS_INSN(rotate16_im)
3904{
3905    int left = (insn & 0x100);
3906    TCGv reg;
3907    TCGv shift;
3908    int tmp;
3909
3910    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
3911    tmp = (insn >> 9) & 7;
3912    if (tmp == 0) {
3913        tmp = 8;
3914    }
3915
3916    shift = tcg_const_i32(tmp);
3917    if (insn & 8) {
3918        rotate(reg, shift, left, 16);
3919    } else {
3920        TCGv X = rotate_x(reg, shift, left, 16);
3921        rotate_x_flags(reg, X, 16);
3922        tcg_temp_free(X);
3923    }
3924    tcg_temp_free(shift);
3925    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
3926    set_cc_op(s, CC_OP_FLAGS);
3927}
3928
3929DISAS_INSN(rotate_reg)
3930{
3931    TCGv reg;
3932    TCGv src;
3933    TCGv t0, t1;
3934    int left = (insn & 0x100);
3935
3936    reg = DREG(insn, 0);
3937    src = DREG(insn, 9);
3938    /* shift in [0..63] */
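        /*
         * ROL/ROR use the count modulo 32 but must clear C when the 6-bit
         * count is zero; ROXL/ROXR use the count modulo 33, since X takes
         * part in the rotation.
         */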
3939    t0 = tcg_temp_new();
3940    tcg_gen_andi_i32(t0, src, 63);
3941    t1 = tcg_temp_new_i32();
3942    if (insn & 8) {
3943        tcg_gen_andi_i32(t1, src, 31);
3944        rotate(reg, t1, left, 32);
3945        /* if shift == 0, clear C */
3946        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3947                            t0, QREG_CC_V /* 0 */,
3948                            QREG_CC_V /* 0 */, QREG_CC_C);
3949    } else {
3950        TCGv X;
3951        /* modulo 33 */
3952        tcg_gen_movi_i32(t1, 33);
3953        tcg_gen_remu_i32(t1, t0, t1);
3954        X = rotate32_x(DREG(insn, 0), t1, left);
3955        rotate_x_flags(DREG(insn, 0), X, 32);
3956        tcg_temp_free(X);
3957    }
3958    tcg_temp_free(t1);
3959    tcg_temp_free(t0);
3960    set_cc_op(s, CC_OP_FLAGS);
3961}
3962
3963DISAS_INSN(rotate8_reg)
3964{
3965    TCGv reg;
3966    TCGv src;
3967    TCGv t0, t1;
3968    int left = (insn & 0x100);
3969
3970    reg = gen_extend(s, DREG(insn, 0), OS_BYTE, 0);
3971    src = DREG(insn, 9);
3972    /* shift in [0..63] */
3973    t0 = tcg_temp_new_i32();
3974    tcg_gen_andi_i32(t0, src, 63);
3975    t1 = tcg_temp_new_i32();
3976    if (insn & 8) {
3977        tcg_gen_andi_i32(t1, src, 7);
3978        rotate(reg, t1, left, 8);
3979        /* if shift == 0, clear C */
3980        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
3981                            t0, QREG_CC_V /* 0 */,
3982                            QREG_CC_V /* 0 */, QREG_CC_C);
3983    } else {
3984        TCGv X;
3985        /* modulo 9 */
3986        tcg_gen_movi_i32(t1, 9);
3987        tcg_gen_remu_i32(t1, t0, t1);
3988        X = rotate_x(reg, t1, left, 8);
3989        rotate_x_flags(reg, X, 8);
3990        tcg_temp_free(X);
3991    }
3992    tcg_temp_free(t1);
3993    tcg_temp_free(t0);
3994    gen_partset_reg(OS_BYTE, DREG(insn, 0), reg);
3995    set_cc_op(s, CC_OP_FLAGS);
3996}
3997
3998DISAS_INSN(rotate16_reg)
3999{
4000    TCGv reg;
4001    TCGv src;
4002    TCGv t0, t1;
4003    int left = (insn & 0x100);
4004
4005    reg = gen_extend(s, DREG(insn, 0), OS_WORD, 0);
4006    src = DREG(insn, 9);
4007    /* shift in [0..63] */
4008    t0 = tcg_temp_new_i32();
4009    tcg_gen_andi_i32(t0, src, 63);
4010    t1 = tcg_temp_new_i32();
4011    if (insn & 8) {
4012        tcg_gen_andi_i32(t1, src, 15);
4013        rotate(reg, t1, left, 16);
4014        /* if shift == 0, clear C */
4015        tcg_gen_movcond_i32(TCG_COND_EQ, QREG_CC_C,
4016                            t0, QREG_CC_V /* 0 */,
4017                            QREG_CC_V /* 0 */, QREG_CC_C);
4018    } else {
4019        TCGv X;
4020        /* modulo 17 */
4021        tcg_gen_movi_i32(t1, 17);
4022        tcg_gen_remu_i32(t1, t0, t1);
4023        X = rotate_x(reg, t1, left, 16);
4024        rotate_x_flags(reg, X, 16);
4025        tcg_temp_free(X);
4026    }
4027    tcg_temp_free(t1);
4028    tcg_temp_free(t0);
4029    gen_partset_reg(OS_WORD, DREG(insn, 0), reg);
4030    set_cc_op(s, CC_OP_FLAGS);
4031}
4032
4033DISAS_INSN(rotate_mem)
4034{
4035    TCGv src;
4036    TCGv addr;
4037    TCGv shift;
4038    int left = (insn & 0x100);
4039
4040    SRC_EA(env, src, OS_WORD, 0, &addr);
4041
4042    shift = tcg_const_i32(1);
4043    if (insn & 0x0200) {
4044        rotate(src, shift, left, 16);
4045    } else {
4046        TCGv X = rotate_x(src, shift, left, 16);
4047        rotate_x_flags(src, X, 16);
4048        tcg_temp_free(X);
4049    }
4050    tcg_temp_free(shift);
4051    DEST_EA(env, insn, OS_WORD, src, &addr);
4052    set_cc_op(s, CC_OP_FLAGS);
4053}
4054
4055DISAS_INSN(bfext_reg)
4056{
4057    int ext = read_im16(env, s);
4058    int is_sign = insn & 0x200;
4059    TCGv src = DREG(insn, 0);
4060    TCGv dst = DREG(ext, 12);
4061    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4062    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4063    int pos = 32 - ofs - len;        /* little bit-endian */
4064    TCGv tmp = tcg_temp_new();
4065    TCGv shift;
4066
4067    /*
4068     * In general, we're going to rotate the field so that it's at the
4069     * top of the word and then right-shift by the complement of the
4070     * width to extend the field.
4071     */
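        /*
         * For example, ofs = 4 and len = 8 select bits 27..20: rotating
         * left by 4 moves the field to bits 31..24, and a right shift by
         * 32 - 8 = 24 (arithmetic or logical) yields the sign- or
         * zero-extended result.
         */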
4072    if (ext & 0x20) {
4073        /* Variable width.  */
4074        if (ext & 0x800) {
4075            /* Variable offset.  */
4076            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4077            tcg_gen_rotl_i32(tmp, src, tmp);
4078        } else {
4079            tcg_gen_rotli_i32(tmp, src, ofs);
4080        }
4081
4082        shift = tcg_temp_new();
4083        tcg_gen_neg_i32(shift, DREG(ext, 0));
4084        tcg_gen_andi_i32(shift, shift, 31);
4085        tcg_gen_sar_i32(QREG_CC_N, tmp, shift);
4086        if (is_sign) {
4087            tcg_gen_mov_i32(dst, QREG_CC_N);
4088        } else {
4089            tcg_gen_shr_i32(dst, tmp, shift);
4090        }
4091        tcg_temp_free(shift);
4092    } else {
4093        /* Immediate width.  */
4094        if (ext & 0x800) {
4095            /* Variable offset */
4096            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4097            tcg_gen_rotl_i32(tmp, src, tmp);
4098            src = tmp;
4099            pos = 32 - len;
4100        } else {
4101            /*
4102             * Immediate offset.  If the field doesn't wrap around the
4103             * end of the word, rely on (s)extract completely.
4104             */
4105            if (pos < 0) {
4106                tcg_gen_rotli_i32(tmp, src, ofs);
4107                src = tmp;
4108                pos = 32 - len;
4109            }
4110        }
4111
4112        tcg_gen_sextract_i32(QREG_CC_N, src, pos, len);
4113        if (is_sign) {
4114            tcg_gen_mov_i32(dst, QREG_CC_N);
4115        } else {
4116            tcg_gen_extract_i32(dst, src, pos, len);
4117        }
4118    }
4119
4120    tcg_temp_free(tmp);
4121    set_cc_op(s, CC_OP_LOGIC);
4122}
4123
4124DISAS_INSN(bfext_mem)
4125{
4126    int ext = read_im16(env, s);
4127    int is_sign = insn & 0x200;
4128    TCGv dest = DREG(ext, 12);
4129    TCGv addr, len, ofs;
4130
4131    addr = gen_lea(env, s, insn, OS_UNSIZED);
4132    if (IS_NULL_QREG(addr)) {
4133        gen_addr_fault(s);
4134        return;
4135    }
4136
4137    if (ext & 0x20) {
4138        len = DREG(ext, 0);
4139    } else {
4140        len = tcg_const_i32(extract32(ext, 0, 5));
4141    }
4142    if (ext & 0x800) {
4143        ofs = DREG(ext, 6);
4144    } else {
4145        ofs = tcg_const_i32(extract32(ext, 6, 5));
4146    }
4147
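        /*
         * The signed helper returns the sign-extended field, which also
         * serves as CC_N.  The unsigned helper packs the zero-extended
         * field into the low half and the value for CC_N into the high
         * half of a 64-bit result.
         */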
4148    if (is_sign) {
4149        gen_helper_bfexts_mem(dest, cpu_env, addr, ofs, len);
4150        tcg_gen_mov_i32(QREG_CC_N, dest);
4151    } else {
4152        TCGv_i64 tmp = tcg_temp_new_i64();
4153        gen_helper_bfextu_mem(tmp, cpu_env, addr, ofs, len);
4154        tcg_gen_extr_i64_i32(dest, QREG_CC_N, tmp);
4155        tcg_temp_free_i64(tmp);
4156    }
4157    set_cc_op(s, CC_OP_LOGIC);
4158
4159    if (!(ext & 0x20)) {
4160        tcg_temp_free(len);
4161    }
4162    if (!(ext & 0x800)) {
4163        tcg_temp_free(ofs);
4164    }
4165}
4166
4167DISAS_INSN(bfop_reg)
4168{
4169    int ext = read_im16(env, s);
4170    TCGv src = DREG(insn, 0);
4171    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4172    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4173    TCGv mask, tofs, tlen;
4174
4175    tofs = NULL;
4176    tlen = NULL;
4177    if ((insn & 0x0f00) == 0x0d00) { /* bfffo */
4178        tofs = tcg_temp_new();
4179        tlen = tcg_temp_new();
4180    }
4181
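        /*
         * Below, QREG_CC_N is set to the field shifted or rotated to the
         * top of the word (for the N and Z flags), and "mask" is built
         * with zeros over the field and ones elsewhere, rotated into
         * place for the operations that follow.
         */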
4182    if ((ext & 0x820) == 0) {
4183        /* Immediate width and offset.  */
4184        uint32_t maski = 0x7fffffffu >> (len - 1);
4185        if (ofs + len <= 32) {
4186            tcg_gen_shli_i32(QREG_CC_N, src, ofs);
4187        } else {
4188            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4189        }
4190        tcg_gen_andi_i32(QREG_CC_N, QREG_CC_N, ~maski);
4191        mask = tcg_const_i32(ror32(maski, ofs));
4192        if (tofs) {
4193            tcg_gen_movi_i32(tofs, ofs);
4194            tcg_gen_movi_i32(tlen, len);
4195        }
4196    } else {
4197        TCGv tmp = tcg_temp_new();
4198        if (ext & 0x20) {
4199            /* Variable width */
4200            tcg_gen_subi_i32(tmp, DREG(ext, 0), 1);
4201            tcg_gen_andi_i32(tmp, tmp, 31);
4202            mask = tcg_const_i32(0x7fffffffu);
4203            tcg_gen_shr_i32(mask, mask, tmp);
4204            if (tlen) {
4205                tcg_gen_addi_i32(tlen, tmp, 1);
4206            }
4207        } else {
4208            /* Immediate width */
4209            mask = tcg_const_i32(0x7fffffffu >> (len - 1));
4210            if (tlen) {
4211                tcg_gen_movi_i32(tlen, len);
4212            }
4213        }
4214        if (ext & 0x800) {
4215            /* Variable offset */
4216            tcg_gen_andi_i32(tmp, DREG(ext, 6), 31);
4217            tcg_gen_rotl_i32(QREG_CC_N, src, tmp);
4218            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4219            tcg_gen_rotr_i32(mask, mask, tmp);
4220            if (tofs) {
4221                tcg_gen_mov_i32(tofs, tmp);
4222            }
4223        } else {
4224            /* Immediate offset (and variable width) */
4225            tcg_gen_rotli_i32(QREG_CC_N, src, ofs);
4226            tcg_gen_andc_i32(QREG_CC_N, QREG_CC_N, mask);
4227            tcg_gen_rotri_i32(mask, mask, ofs);
4228            if (tofs) {
4229                tcg_gen_movi_i32(tofs, ofs);
4230            }
4231        }
4232        tcg_temp_free(tmp);
4233    }
4234    set_cc_op(s, CC_OP_LOGIC);
4235
4236    switch (insn & 0x0f00) {
4237    case 0x0a00: /* bfchg */
4238        tcg_gen_eqv_i32(src, src, mask);
4239        break;
4240    case 0x0c00: /* bfclr */
4241        tcg_gen_and_i32(src, src, mask);
4242        break;
4243    case 0x0d00: /* bfffo */
4244        gen_helper_bfffo_reg(DREG(ext, 12), QREG_CC_N, tofs, tlen);
4245        tcg_temp_free(tlen);
4246        tcg_temp_free(tofs);
4247        break;
4248    case 0x0e00: /* bfset */
4249        tcg_gen_orc_i32(src, src, mask);
4250        break;
4251    case 0x0800: /* bftst */
4252        /* flags already set; no other work to do.  */
4253        break;
4254    default:
4255        g_assert_not_reached();
4256    }
4257    tcg_temp_free(mask);
4258}
4259
4260DISAS_INSN(bfop_mem)
4261{
4262    int ext = read_im16(env, s);
4263    TCGv addr, len, ofs;
4264    TCGv_i64 t64;
4265
4266    addr = gen_lea(env, s, insn, OS_UNSIZED);
4267    if (IS_NULL_QREG(addr)) {
4268        gen_addr_fault(s);
4269        return;
4270    }
4271
4272    if (ext & 0x20) {
4273        len = DREG(ext, 0);
4274    } else {
4275        len = tcg_const_i32(extract32(ext, 0, 5));
4276    }
4277    if (ext & 0x800) {
4278        ofs = DREG(ext, 6);
4279    } else {
4280        ofs = tcg_const_i32(extract32(ext, 6, 5));
4281    }
4282
4283    switch (insn & 0x0f00) {
4284    case 0x0a00: /* bfchg */
4285        gen_helper_bfchg_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4286        break;
4287    case 0x0c00: /* bfclr */
4288        gen_helper_bfclr_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4289        break;
4290    case 0x0d00: /* bfffo */
4291        t64 = tcg_temp_new_i64();
4292        gen_helper_bfffo_mem(t64, cpu_env, addr, ofs, len);
4293        tcg_gen_extr_i64_i32(DREG(ext, 12), QREG_CC_N, t64);
4294        tcg_temp_free_i64(t64);
4295        break;
4296    case 0x0e00: /* bfset */
4297        gen_helper_bfset_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4298        break;
4299    case 0x0800: /* bftst */
4300        gen_helper_bfexts_mem(QREG_CC_N, cpu_env, addr, ofs, len);
4301        break;
4302    default:
4303        g_assert_not_reached();
4304    }
4305    set_cc_op(s, CC_OP_LOGIC);
4306
4307    if (!(ext & 0x20)) {
4308        tcg_temp_free(len);
4309    }
4310    if (!(ext & 0x800)) {
4311        tcg_temp_free(ofs);
4312    }
4313}
4314
4315DISAS_INSN(bfins_reg)
4316{
4317    int ext = read_im16(env, s);
4318    TCGv dst = DREG(insn, 0);
4319    TCGv src = DREG(ext, 12);
4320    int len = ((extract32(ext, 0, 5) - 1) & 31) + 1;
4321    int ofs = extract32(ext, 6, 5);  /* big bit-endian */
4322    int pos = 32 - ofs - len;        /* little bit-endian */
4323    TCGv tmp;
4324
4325    tmp = tcg_temp_new();
4326
4327    if (ext & 0x20) {
4328        /* Variable width */
4329        tcg_gen_neg_i32(tmp, DREG(ext, 0));
4330        tcg_gen_andi_i32(tmp, tmp, 31);
4331        tcg_gen_shl_i32(QREG_CC_N, src, tmp);
4332    } else {
4333        /* Immediate width */
4334        tcg_gen_shli_i32(QREG_CC_N, src, 32 - len);
4335    }
4336    set_cc_op(s, CC_OP_LOGIC);
4337
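        /*
         * When both width and offset are immediate and the field does not
         * wrap around the word, a plain deposit suffices.  Otherwise the
         * low "len" bits of the source are rotated into position and
         * merged into the destination under a mask rotated the same way.
         */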
4338    /* Immediate width and offset */
4339    if ((ext & 0x820) == 0) {
4340        /* Check for suitability for deposit.  */
4341        if (pos >= 0) {
4342            tcg_gen_deposit_i32(dst, dst, src, pos, len);
4343        } else {
4344            uint32_t maski = -2U << (len - 1);
4345            uint32_t roti = (ofs + len) & 31;
4346            tcg_gen_andi_i32(tmp, src, ~maski);
4347            tcg_gen_rotri_i32(tmp, tmp, roti);
4348            tcg_gen_andi_i32(dst, dst, ror32(maski, roti));
4349            tcg_gen_or_i32(dst, dst, tmp);
4350        }
4351    } else {
4352        TCGv mask = tcg_temp_new();
4353        TCGv rot = tcg_temp_new();
4354
4355        if (ext & 0x20) {
4356            /* Variable width */
4357            tcg_gen_subi_i32(rot, DREG(ext, 0), 1);
4358            tcg_gen_andi_i32(rot, rot, 31);
4359            tcg_gen_movi_i32(mask, -2);
4360            tcg_gen_shl_i32(mask, mask, rot);
4361            tcg_gen_mov_i32(rot, DREG(ext, 0));
4362            tcg_gen_andc_i32(tmp, src, mask);
4363        } else {
4364            /* Immediate width (variable offset) */
4365            uint32_t maski = -2U << (len - 1);
4366            tcg_gen_andi_i32(tmp, src, ~maski);
4367            tcg_gen_movi_i32(mask, maski);
4368            tcg_gen_movi_i32(rot, len & 31);
4369        }
4370        if (ext & 0x800) {
4371            /* Variable offset */
4372            tcg_gen_add_i32(rot, rot, DREG(ext, 6));
4373        } else {
4374            /* Immediate offset (variable width) */
4375            tcg_gen_addi_i32(rot, rot, ofs);
4376        }
4377        tcg_gen_andi_i32(rot, rot, 31);
4378        tcg_gen_rotr_i32(mask, mask, rot);
4379        tcg_gen_rotr_i32(tmp, tmp, rot);
4380        tcg_gen_and_i32(dst, dst, mask);
4381        tcg_gen_or_i32(dst, dst, tmp);
4382
4383        tcg_temp_free(rot);
4384        tcg_temp_free(mask);
4385    }
4386    tcg_temp_free(tmp);
4387}
4388
4389DISAS_INSN(bfins_mem)
4390{
4391    int ext = read_im16(env, s);
4392    TCGv src = DREG(ext, 12);
4393    TCGv addr, len, ofs;
4394
4395    addr = gen_lea(env, s, insn, OS_UNSIZED);
4396    if (IS_NULL_QREG(addr)) {
4397        gen_addr_fault(s);
4398        return;
4399    }
4400
4401    if (ext & 0x20) {
4402        len = DREG(ext, 0);
4403    } else {
4404        len = tcg_const_i32(extract32(ext, 0, 5));
4405    }
4406    if (ext & 0x800) {
4407        ofs = DREG(ext, 6);
4408    } else {
4409        ofs = tcg_const_i32(extract32(ext, 6, 5));
4410    }
4411
4412    gen_helper_bfins_mem(QREG_CC_N, cpu_env, addr, src, ofs, len);
4413    set_cc_op(s, CC_OP_LOGIC);
4414
4415    if (!(ext & 0x20)) {
4416        tcg_temp_free(len);
4417    }
4418    if (!(ext & 0x800)) {
4419        tcg_temp_free(ofs);
4420    }
4421}
4422
4423DISAS_INSN(ff1)
4424{
4425    TCGv reg;
4426    reg = DREG(insn, 0);
4427    gen_logic_cc(s, reg, OS_LONG);
4428    gen_helper_ff1(reg, reg);
4429}
4430
4431DISAS_INSN(chk)
4432{
4433    TCGv src, reg;
4434    int opsize;
4435
4436    switch ((insn >> 7) & 3) {
4437    case 3:
4438        opsize = OS_WORD;
4439        break;
4440    case 2:
4441        if (m68k_feature(env, M68K_FEATURE_CHK2)) {
4442            opsize = OS_LONG;
4443            break;
4444        }
4445        /* fallthru */
4446    default:
4447        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4448        return;
4449    }
4450    SRC_EA(env, src, opsize, 1, NULL);
4451    reg = gen_extend(s, DREG(insn, 9), opsize, 1);
4452
4453    gen_flush_flags(s);
4454    gen_helper_chk(cpu_env, reg, src);
4455}
4456
4457DISAS_INSN(chk2)
4458{
4459    uint16_t ext;
4460    TCGv addr1, addr2, bound1, bound2, reg;
4461    int opsize;
4462
4463    switch ((insn >> 9) & 3) {
4464    case 0:
4465        opsize = OS_BYTE;
4466        break;
4467    case 1:
4468        opsize = OS_WORD;
4469        break;
4470    case 2:
4471        opsize = OS_LONG;
4472        break;
4473    default:
4474        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4475        return;
4476    }
4477
4478    ext = read_im16(env, s);
4479    if ((ext & 0x0800) == 0) {
4480        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4481        return;
4482    }
4483
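        /*
         * The effective address points to a pair of bounds: the lower
         * bound at the address itself, the upper bound opsize_bytes()
         * beyond it.
         */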
4484    addr1 = gen_lea(env, s, insn, OS_UNSIZED);
4485    addr2 = tcg_temp_new();
4486    tcg_gen_addi_i32(addr2, addr1, opsize_bytes(opsize));
4487
4488    bound1 = gen_load(s, opsize, addr1, 1, IS_USER(s));
4489    tcg_temp_free(addr1);
4490    bound2 = gen_load(s, opsize, addr2, 1, IS_USER(s));
4491    tcg_temp_free(addr2);
4492
4493    reg = tcg_temp_new();
4494    if (ext & 0x8000) {
4495        tcg_gen_mov_i32(reg, AREG(ext, 12));
4496    } else {
4497        gen_ext(reg, DREG(ext, 12), opsize, 1);
4498    }
4499
4500    gen_flush_flags(s);
4501    gen_helper_chk2(cpu_env, reg, bound1, bound2);
4502    tcg_temp_free(reg);
4503    tcg_temp_free(bound1);
4504    tcg_temp_free(bound2);
4505}
4506
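    /*
     * Copy one 16-byte-aligned line for MOVE16: both addresses are forced
     * to 16-byte alignment and the line is transferred as two 64-bit
     * loads followed by two 64-bit stores.
     */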
4507static void m68k_copy_line(TCGv dst, TCGv src, int index)
4508{
4509    TCGv addr;
4510    TCGv_i64 t0, t1;
4511
4512    addr = tcg_temp_new();
4513
4514    t0 = tcg_temp_new_i64();
4515    t1 = tcg_temp_new_i64();
4516
4517    tcg_gen_andi_i32(addr, src, ~15);
4518    tcg_gen_qemu_ld64(t0, addr, index);
4519    tcg_gen_addi_i32(addr, addr, 8);
4520    tcg_gen_qemu_ld64(t1, addr, index);
4521
4522    tcg_gen_andi_i32(addr, dst, ~15);
4523    tcg_gen_qemu_st64(t0, addr, index);
4524    tcg_gen_addi_i32(addr, addr, 8);
4525    tcg_gen_qemu_st64(t1, addr, index);
4526
4527    tcg_temp_free_i64(t0);
4528    tcg_temp_free_i64(t1);
4529    tcg_temp_free(addr);
4530}
4531
4532DISAS_INSN(move16_reg)
4533{
4534    int index = IS_USER(s);
4535    TCGv tmp;
4536    uint16_t ext;
4537
4538    ext = read_im16(env, s);
4539    if ((ext & (1 << 15)) == 0) {
4540        gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4541    }
4542
4543    m68k_copy_line(AREG(ext, 12), AREG(insn, 0), index);
4544
4545    /* Ax can be Ay, so save Ay before incrementing Ax */
4546    tmp = tcg_temp_new();
4547    tcg_gen_mov_i32(tmp, AREG(ext, 12));
4548    tcg_gen_addi_i32(AREG(insn, 0), AREG(insn, 0), 16);
4549    tcg_gen_addi_i32(AREG(ext, 12), tmp, 16);
4550    tcg_temp_free(tmp);
4551}
4552
4553DISAS_INSN(move16_mem)
4554{
4555    int index = IS_USER(s);
4556    TCGv reg, addr;
4557
4558    reg = AREG(insn, 0);
4559    addr = tcg_const_i32(read_im32(env, s));
4560
4561    if ((insn >> 3) & 1) {
4562        /* MOVE16 (xxx).L, (Ay) */
4563        m68k_copy_line(reg, addr, index);
4564    } else {
4565        /* MOVE16 (Ay), (xxx).L */
4566        m68k_copy_line(addr, reg, index);
4567    }
4568
4569    tcg_temp_free(addr);
4570
4571    if (((insn >> 3) & 2) == 0) {
4572        /* (Ay)+ */
4573        tcg_gen_addi_i32(reg, reg, 16);
4574    }
4575}
4576
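    /*
     * Store/load the status register: the next word must be 0x46fc (the
     * MOVE-to-SR opcode).  The old SR is pushed onto the stack and the
     * immediate word that follows is loaded into SR.
     */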
4577DISAS_INSN(strldsr)
4578{
4579    uint16_t ext;
4580    uint32_t addr;
4581
4582    addr = s->pc - 2;
4583    ext = read_im16(env, s);
4584    if (ext != 0x46FC) {
4585        gen_exception(s, addr, EXCP_ILLEGAL);
4586        return;
4587    }
4588    ext = read_im16(env, s);
4589    if (IS_USER(s) || (ext & SR_S) == 0) {
4590        gen_exception(s, addr, EXCP_PRIVILEGE);
4591        return;
4592    }
4593    gen_push(s, gen_get_sr(s));
4594    gen_set_sr_im(s, ext, 0);
4595}
4596
4597DISAS_INSN(move_from_sr)
4598{
4599    TCGv sr;
4600
4601    if (IS_USER(s) && !m68k_feature(env, M68K_FEATURE_M68000)) {
4602        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4603        return;
4604    }
4605    sr = gen_get_sr(s);
4606    DEST_EA(env, insn, OS_WORD, sr, NULL);
4607}
4608
4609#if defined(CONFIG_SOFTMMU)
4610DISAS_INSN(moves)
4611{
4612    int opsize;
4613    uint16_t ext;
4614    TCGv reg;
4615    TCGv addr;
4616    int extend;
4617
4618    if (IS_USER(s)) {
4619        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4620        return;
4621    }
4622
4623    ext = read_im16(env, s);
4624
4625    opsize = insn_opsize(insn);
4626
4627    if (ext & 0x8000) {
4628        /* address register */
4629        reg = AREG(ext, 12);
4630        extend = 1;
4631    } else {
4632        /* data register */
4633        reg = DREG(ext, 12);
4634        extend = 0;
4635    }
4636
4637    addr = gen_lea(env, s, insn, opsize);
4638    if (IS_NULL_QREG(addr)) {
4639        gen_addr_fault(s);
4640        return;
4641    }
4642
4643    if (ext & 0x0800) {
4644        /* from reg to ea */
4645        gen_store(s, opsize, addr, reg, DFC_INDEX(s));
4646    } else {
4647        /* from ea to reg */
4648        TCGv tmp = gen_load(s, opsize, addr, 0, SFC_INDEX(s));
4649        if (extend) {
4650            gen_ext(reg, tmp, opsize, 1);
4651        } else {
4652            gen_partset_reg(opsize, reg, tmp);
4653        }
4654        tcg_temp_free(tmp);
4655    }
4656    switch (extract32(insn, 3, 3)) {
4657    case 3: /* Indirect postincrement.  */
4658        tcg_gen_addi_i32(AREG(insn, 0), addr,
4659                         REG(insn, 0) == 7 && opsize == OS_BYTE
4660                         ? 2
4661                         : opsize_bytes(opsize));
4662        break;
4663    case 4: /* Indirect predecrement.  */
4664        tcg_gen_mov_i32(AREG(insn, 0), addr);
4665        break;
4666    }
4667}
4668
4669DISAS_INSN(move_to_sr)
4670{
4671    if (IS_USER(s)) {
4672        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4673        return;
4674    }
4675    gen_move_to_sr(env, s, insn, false);
4676    gen_exit_tb(s);
4677}
4678
4679DISAS_INSN(move_from_usp)
4680{
4681    if (IS_USER(s)) {
4682        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4683        return;
4684    }
4685    tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
4686                   offsetof(CPUM68KState, sp[M68K_USP]));
4687}
4688
4689DISAS_INSN(move_to_usp)
4690{
4691    if (IS_USER(s)) {
4692        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4693        return;
4694    }
4695    tcg_gen_st_i32(AREG(insn, 0), cpu_env,
4696                   offsetof(CPUM68KState, sp[M68K_USP]));
4697}
4698
4699DISAS_INSN(halt)
4700{
4701    if (IS_USER(s)) {
4702        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4703        return;
4704    }
4705
4706    gen_exception(s, s->pc, EXCP_HALT_INSN);
4707}
4708
4709DISAS_INSN(stop)
4710{
4711    uint16_t ext;
4712
4713    if (IS_USER(s)) {
4714        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4715        return;
4716    }
4717
4718    ext = read_im16(env, s);
4719
4720    gen_set_sr_im(s, ext, 0);
4721    tcg_gen_movi_i32(cpu_halted, 1);
4722    gen_exception(s, s->pc, EXCP_HLT);
4723}
4724
4725DISAS_INSN(rte)
4726{
4727    if (IS_USER(s)) {
4728        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4729        return;
4730    }
4731    gen_exception(s, s->base.pc_next, EXCP_RTE);
4732}
4733
4734DISAS_INSN(cf_movec)
4735{
4736    uint16_t ext;
4737    TCGv reg;
4738
4739    if (IS_USER(s)) {
4740        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4741        return;
4742    }
4743
4744    ext = read_im16(env, s);
4745
4746    if (ext & 0x8000) {
4747        reg = AREG(ext, 12);
4748    } else {
4749        reg = DREG(ext, 12);
4750    }
4751    gen_helper_cf_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4752    gen_exit_tb(s);
4753}
4754
4755DISAS_INSN(m68k_movec)
4756{
4757    uint16_t ext;
4758    TCGv reg;
4759
4760    if (IS_USER(s)) {
4761        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4762        return;
4763    }
4764
4765    ext = read_im16(env, s);
4766
4767    if (ext & 0x8000) {
4768        reg = AREG(ext, 12);
4769    } else {
4770        reg = DREG(ext, 12);
4771    }
4772    if (insn & 1) {
4773        gen_helper_m68k_movec_to(cpu_env, tcg_const_i32(ext & 0xfff), reg);
4774    } else {
4775        gen_helper_m68k_movec_from(reg, cpu_env, tcg_const_i32(ext & 0xfff));
4776    }
4777    gen_exit_tb(s);
4778}
4779
4780DISAS_INSN(intouch)
4781{
4782    if (IS_USER(s)) {
4783        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4784        return;
4785    }
4786    /* ICache fetch.  Implement as no-op.  */
4787}
4788
4789DISAS_INSN(cpushl)
4790{
4791    if (IS_USER(s)) {
4792        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4793        return;
4794    }
4795    /* Cache push/invalidate.  Implement as no-op.  */
4796}
4797
4798DISAS_INSN(cpush)
4799{
4800    if (IS_USER(s)) {
4801        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4802        return;
4803    }
4804    /* Cache push/invalidate.  Implement as no-op.  */
4805}
4806
4807DISAS_INSN(cinv)
4808{
4809    if (IS_USER(s)) {
4810        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4811        return;
4812    }
4813    /* Invalidate cache line.  Implement as no-op.  */
4814}
4815
4816#if defined(CONFIG_SOFTMMU)
4817DISAS_INSN(pflush)
4818{
4819    TCGv opmode;
4820
4821    if (IS_USER(s)) {
4822        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4823        return;
4824    }
4825
4826    opmode = tcg_const_i32((insn >> 3) & 3);
4827    gen_helper_pflush(cpu_env, AREG(insn, 0), opmode);
4828    tcg_temp_free(opmode);
4829}
4830
4831DISAS_INSN(ptest)
4832{
4833    TCGv is_read;
4834
4835    if (IS_USER(s)) {
4836        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4837        return;
4838    }
4839    is_read = tcg_const_i32((insn >> 5) & 1);
4840    gen_helper_ptest(cpu_env, AREG(insn, 0), is_read);
4841    tcg_temp_free(is_read);
4842}
4843#endif
4844
4845DISAS_INSN(wddata)
4846{
4847    gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4848}
4849
4850DISAS_INSN(wdebug)
4851{
4852    if (IS_USER(s)) {
4853        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
4854        return;
4855    }
4856    /* TODO: Implement wdebug.  */
4857    cpu_abort(env_cpu(env), "WDEBUG not implemented");
4858}
4859#endif
4860
4861DISAS_INSN(trap)
4862{
4863    gen_exception(s, s->pc, EXCP_TRAP0 + (insn & 0xf));
4864}
4865
4866static void do_trapcc(DisasContext *s, DisasCompare *c)
4867{
4868    if (c->tcond != TCG_COND_NEVER) {
4869        TCGLabel *over = NULL;
4870
4871        update_cc_op(s);
4872
4873        if (c->tcond != TCG_COND_ALWAYS) {
4874            /* Jump over if !c. */
4875            over = gen_new_label();
4876            tcg_gen_brcond_i32(tcg_invert_cond(c->tcond), c->v1, c->v2, over);
4877        }
4878
4879        tcg_gen_movi_i32(QREG_PC, s->pc);
4880        gen_raise_exception_format2(s, EXCP_TRAPCC, s->base.pc_next);
4881
4882        if (over != NULL) {
4883            gen_set_label(over);
4884            s->base.is_jmp = DISAS_NEXT;
4885        }
4886    }
4887    free_cond(c);
4888}
4889
4890DISAS_INSN(trapcc)
4891{
4892    DisasCompare c;
4893
4894    /* Consume and discard the immediate operand. */
4895    switch (extract32(insn, 0, 3)) {
4896    case 2: /* trapcc.w */
4897        (void)read_im16(env, s);
4898        break;
4899    case 3: /* trapcc.l */
4900        (void)read_im32(env, s);
4901        break;
4902    case 4: /* trapcc (no operand) */
4903        break;
4904    default:
4905        /* trapcc registered with only valid opmodes */
4906        g_assert_not_reached();
4907    }
4908
4909    gen_cc_cond(&c, s, extract32(insn, 8, 4));
4910    do_trapcc(s, &c);
4911}
4912
4913DISAS_INSN(trapv)
4914{
4915    DisasCompare c;
4916
4917    gen_cc_cond(&c, s, 9); /* V set */
4918    do_trapcc(s, &c);
4919}
4920
4921static void gen_load_fcr(DisasContext *s, TCGv res, int reg)
4922{
4923    switch (reg) {
4924    case M68K_FPIAR:
4925        tcg_gen_movi_i32(res, 0);
4926        break;
4927    case M68K_FPSR:
4928        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpsr));
4929        break;
4930    case M68K_FPCR:
4931        tcg_gen_ld_i32(res, cpu_env, offsetof(CPUM68KState, fpcr));
4932        break;
4933    }
4934}
4935
4936static void gen_store_fcr(DisasContext *s, TCGv val, int reg)
4937{
4938    switch (reg) {
4939    case M68K_FPIAR:
4940        break;
4941    case M68K_FPSR:
4942        tcg_gen_st_i32(val, cpu_env, offsetof(CPUM68KState, fpsr));
4943        break;
4944    case M68K_FPCR:
4945        gen_helper_set_fpcr(cpu_env, val);
4946        break;
4947    }
4948}
4949
4950static void gen_qemu_store_fcr(DisasContext *s, TCGv addr, int reg)
4951{
4952    int index = IS_USER(s);
4953    TCGv tmp;
4954
4955    tmp = tcg_temp_new();
4956    gen_load_fcr(s, tmp, reg);
4957    tcg_gen_qemu_st32(tmp, addr, index);
4958    tcg_temp_free(tmp);
4959}
4960
4961static void gen_qemu_load_fcr(DisasContext *s, TCGv addr, int reg)
4962{
4963    int index = IS_USER(s);
4964    TCGv tmp;
4965
4966    tmp = tcg_temp_new();
4967    tcg_gen_qemu_ld32u(tmp, addr, index);
4968    gen_store_fcr(s, tmp, reg);
4969    tcg_temp_free(tmp);
4970}
4971
4972
4973static void gen_op_fmove_fcr(CPUM68KState *env, DisasContext *s,
4974                             uint32_t insn, uint32_t ext)
4975{
4976    int mask = (ext >> 10) & 7;
4977    int is_write = (ext >> 13) & 1;
4978    int mode = extract32(insn, 3, 3);
4979    int i;
4980    TCGv addr, tmp;
4981
4982    switch (mode) {
4983    case 0: /* Dn */
4984        if (mask != M68K_FPIAR && mask != M68K_FPSR && mask != M68K_FPCR) {
4985            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4986            return;
4987        }
4988        if (is_write) {
4989            gen_load_fcr(s, DREG(insn, 0), mask);
4990        } else {
4991            gen_store_fcr(s, DREG(insn, 0), mask);
4992        }
4993        return;
4994    case 1: /* An, only with FPIAR */
4995        if (mask != M68K_FPIAR) {
4996            gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
4997            return;
4998        }
4999        if (is_write) {
5000            gen_load_fcr(s, AREG(insn, 0), mask);
5001        } else {
5002            gen_store_fcr(s, AREG(insn, 0), mask);
5003        }
5004        return;
5005    case 7: /* Immediate */
5006        if (REG(insn, 0) == 4) {
5007            if (is_write ||
5008                (mask != M68K_FPIAR && mask != M68K_FPSR &&
5009                 mask != M68K_FPCR)) {
5010                gen_exception(s, s->base.pc_next, EXCP_ILLEGAL);
5011                return;
5012            }
5013            tmp = tcg_const_i32(read_im32(env, s));
5014            gen_store_fcr(s, tmp, mask);
5015            tcg_temp_free(tmp);
5016            return;
5017        }
5018        break;
5019    default:
5020        break;
5021    }
5022
5023    tmp = gen_lea(env, s, insn, OS_LONG);
5024    if (IS_NULL_QREG(tmp)) {
5025        gen_addr_fault(s);
5026        return;
5027    }
5028
5029    addr = tcg_temp_new();
5030    tcg_gen_mov_i32(addr, tmp);
5031
5032    /*
5033     * mask:
5034     *
5035     * 0b100 Floating-Point Control Register
5036     * 0b010 Floating-Point Status Register
5037     * 0b001 Floating-Point Instruction Address Register
5038     *
5039     */
5040
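        /*
         * When writing to a -(An) destination (mode 4), the selected
         * registers are stored at descending addresses in the order
         * FPCR, FPSR, FPIAR, and An is written back.  All other cases
         * transfer at ascending addresses in the order FPIAR, FPSR,
         * FPCR, writing back An only for the (An)+ form (mode 3).
         */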
5041    if (is_write && mode == 4) {
5042        for (i = 2; i >= 0; i--, mask >>= 1) {
5043            if (mask & 1) {
5044                gen_qemu_store_fcr(s, addr, 1 << i);
5045                if (mask != 1) {
5046                    tcg_gen_subi_i32(addr, addr, opsize_bytes(OS_LONG));
5047                }
5048            }
5049        }
5050        tcg_gen_mov_i32(AREG(insn, 0), addr);
5051    } else {
5052        for (i = 0; i < 3; i++, mask >>= 1) {
5053            if (mask & 1) {
5054                if (is_write) {
5055                    gen_qemu_store_fcr(s, addr, 1 << i);
5056                } else {
5057                    gen_qemu_load_fcr(s, addr, 1 << i);
5058                }
5059                if (mask != 1 || mode == 3) {
5060                    tcg_gen_addi_i32(addr, addr, opsize_bytes(OS_LONG));
5061                }
5062            }
5063        }
5064        if (mode == 3) {
5065            tcg_gen_mov_i32(AREG(insn, 0), addr);
5066        }
5067    }
5068    tcg_temp_free_i32(addr);
5069}
5070
5071static void gen_op_fmovem(CPUM68KState *env, DisasContext *s,
5072                          uint32_t insn, uint32_t ext)
5073{
5074    int opsize;
5075    TCGv addr, tmp;
5076    int mode = (ext >> 11) & 0x3;
5077    int is_load = ((ext & 0x2000) == 0);
5078
5079    if (m68k_feature(s->env, M68K_FEATURE_FPU)) {
5080        opsize = OS_EXTENDED;
5081    } else {
5082        opsize = OS_DOUBLE;  /* FIXME */
5083    }
5084
5085    addr = gen_lea(env, s, insn, opsize);
5086    if (IS_NULL_QREG(addr)) {
5087        gen_addr_fault(s);
5088        return;
5089    }
5090
5091    tmp = tcg_temp_new();
5092    if (mode & 0x1) {
5093        /* Dynamic register list */
5094        tcg_gen_ext8u_i32(tmp, DREG(ext, 4));
5095    } else {
5096        /* Static register list */
5097        tcg_gen_movi_i32(tmp, ext & 0xff);
5098    }
5099
5100    if (!is_load && (mode & 2) == 0) {
5101        /*
5102         * Predecrement addressing mode: only available for storing
5103         * registers to memory.
5104         */
5105        if (opsize == OS_EXTENDED) {
5106            gen_helper_fmovemx_st_predec(tmp, cpu_env, addr, tmp);
5107        } else {
5108            gen_helper_fmovemd_st_predec(tmp, cpu_env, addr, tmp);
5109        }
5110    } else {
5111        /* postincrement addressing mode */
5112        if (opsize == OS_EXTENDED) {
5113            if (is_load) {
5114                gen_helper_fmovemx_ld_postinc(tmp, cpu_env, addr, tmp);
5115            } else {
5116                gen_helper_fmovemx_st_postinc(tmp, cpu_env, addr, tmp);
5117            }
5118        } else {
5119            if (is_load) {
5120                gen_helper_fmovemd_ld_postinc(tmp, cpu_env, addr, tmp);
5121            } else {
5122                gen_helper_fmovemd_st_postinc(tmp, cpu_env, addr, tmp);
5123            }
5124        }
5125    }
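        /*
         * The helpers return the final address; write it back to An for
         * the (An)+ and -(An) forms.
         */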
5126    if ((insn & 070) == 030 || (insn & 070) == 040) {
5127        tcg_gen_mov_i32(AREG(insn, 0), tmp);
5128    }
5129    tcg_temp_free(tmp);
5130}
5131
5132/*
5133 * ??? FP exceptions are not implemented.  On real hardware most of them
5134 * are deferred until immediately before the next FP instruction executes.
5135 */
5136DISAS_INSN(fpu)
5137{
5138    uint16_t ext;
5139    int opmode;
5140    int opsize;
5141    TCGv_ptr cpu_src, cpu_dest;
5142
5143    ext = read_im16(env, s);
5144    opmode = ext & 0x7f;
5145    switch ((ext >> 13) & 7) {
5146    case 0:
5147        break;
5148    case 1:
5149        goto undef;
5150    case 2:
5151        if (insn == 0xf200 && (ext & 0xfc00) == 0x5c00) {
5152            /* fmovecr */
5153            TCGv rom_offset = tcg_const_i32(opmode);
5154            cpu_dest = gen_fp_ptr(REG(ext, 7));
5155            gen_helper_fconst(cpu_env, cpu_dest, rom_offset);
5156            tcg_temp_free_ptr(cpu_dest);
5157            tcg_temp_free(rom_offset);
5158            return;
5159        }
5160        break;
5161    case 3: /* fmove out */
5162        cpu_src = gen_fp_ptr(REG(ext, 7));
5163        opsize = ext_opsize(ext, 10);
5164        if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5165                      EA_STORE, IS_USER(s)) == -1) {
5166            gen_addr_fault(s);
5167        }
5168        gen_helper_ftst(cpu_env, cpu_src);
5169        tcg_temp_free_ptr(cpu_src);
5170        return;
5171    case 4: /* fmove to control register.  */
5172    case 5: /* fmove from control register.  */
5173        gen_op_fmove_fcr(env, s, insn, ext);
5174        return;
5175    case 6: /* fmovem */
5176    case 7:
5177        if ((ext & 0x1000) == 0 && !m68k_feature(s->env, M68K_FEATURE_FPU)) {
5178            goto undef;
5179        }
5180        gen_op_fmovem(env, s, insn, ext);
5181        return;
5182    }
5183    if (ext & (1 << 14)) {
5184        /* Source effective address.  */
5185        opsize = ext_opsize(ext, 10);
5186        cpu_src = gen_fp_result_ptr();
5187        if (gen_ea_fp(env, s, insn, opsize, cpu_src,
5188                      EA_LOADS, IS_USER(s)) == -1) {
5189            gen_addr_fault(s);
5190            return;
5191        }
5192    } else {
5193        /* Source register.  */
5194        opsize = OS_EXTENDED;
5195        cpu_src = gen_fp_ptr(REG(ext, 10));
5196    }
5197    cpu_dest = gen_fp_ptr(REG(ext, 7));
5198    switch (opmode) {
5199    case 0: /* fmove */
5200        gen_fp_move(cpu_dest, cpu_src);
5201        break;
5202    case 0x40: /* fsmove */
5203        gen_helper_fsround(cpu_env, cpu_dest, cpu_src);
5204        break;
5205    case 0x44: /* fdmove */
5206        gen_helper_fdround(cpu_env, cpu_dest, cpu_src);
5207        break;
5208    case 1: /* fint */
5209        gen_helper_firound(cpu_env, cpu_dest, cpu_src);
5210        break;
5211    case 2: /* fsinh */
5212        gen_helper_fsinh(cpu_env, cpu_dest, cpu_src);
5213        break;
5214    case 3: /* fintrz */
5215        gen_helper_fitrunc(cpu_env, cpu_dest, cpu_src);
5216        break;
5217    case 4: /* fsqrt */
5218        gen_helper_fsqrt(cpu_env, cpu_dest, cpu_src);
5219        break;
5220    case 0x41: /* fssqrt */
5221        gen_helper_fssqrt(cpu_env, cpu_dest, cpu_src);
5222        break;
5223    case 0x45: /* fdsqrt */
5224        gen_helper_fdsqrt(cpu_env, cpu_dest, cpu_src);
5225        break;
5226    case 0x06: /* flognp1 */
5227        gen_helper_flognp1(cpu_env, cpu_dest, cpu_src);
5228        break;
5229    case 0x08: /* fetoxm1 */
5230        gen_helper_fetoxm1(cpu_env, cpu_dest, cpu_src);
5231        break;
5232    case 0x09: /* ftanh */
5233        gen_helper_ftanh(cpu_env, cpu_dest, cpu_src);
5234        break;
5235    case 0x0a: /* fatan */
5236        gen_helper_fatan(cpu_env, cpu_dest, cpu_src);
5237        break;
5238    case 0x0c: /* fasin */
5239        gen_helper_fasin(cpu_env, cpu_dest, cpu_src);
5240        break;
5241    case 0x0d: /* fatanh */
5242        gen_helper_fatanh(cpu_env, cpu_dest, cpu_src);
5243        break;
5244    case 0x0e: /* fsin */
5245        gen_helper_fsin(cpu_env, cpu_dest, cpu_src);
5246        break;
5247    case 0x0f: /* ftan */
5248        gen_helper_ftan(cpu_env, cpu_dest, cpu_src);
5249        break;
5250    case 0x10: /* fetox */
5251        gen_helper_fetox(cpu_env, cpu_dest, cpu_src);
5252        break;
5253    case 0x11: /* ftwotox */
5254        gen_helper_ftwotox(cpu_env, cpu_dest, cpu_src);
5255        break;
5256    case 0x12: /* ftentox */
5257        gen_helper_ftentox(cpu_env, cpu_dest, cpu_src);
5258        break;
5259    case 0x14: /* flogn */
5260        gen_helper_flogn(cpu_env, cpu_dest, cpu_src);
5261        break;
5262    case 0x15: /* flog10 */
5263        gen_helper_flog10(cpu_env, cpu_dest, cpu_src);
5264        break;
5265    case 0x16: /* flog2 */
5266        gen_helper_flog2(cpu_env, cpu_dest, cpu_src);
5267        break;
5268    case 0x18: /* fabs */
5269        gen_helper_fabs(cpu_env, cpu_dest, cpu_src);
5270        break;
5271    case 0x58: /* fsabs */
5272        gen_helper_fsabs(cpu_env, cpu_dest, cpu_src);
5273        break;
5274    case 0x5c: /* fdabs */
5275        gen_helper_fdabs(cpu_env, cpu_dest, cpu_src);
5276        break;
5277    case 0x19: /* fcosh */
5278        gen_helper_fcosh(cpu_env, cpu_dest, cpu_src);
5279        break;
5280    case 0x1a: /* fneg */
5281        gen_helper_fneg(cpu_env, cpu_dest, cpu_src);
5282        break;
5283    case 0x5a: /* fsneg */
5284        gen_helper_fsneg(cpu_env, cpu_dest, cpu_src);
5285        break;
5286    case 0x5e: /* fdneg */
5287        gen_helper_fdneg(cpu_env, cpu_dest, cpu_src);
5288        break;
5289    case 0x1c: /* facos */
5290        gen_helper_facos(cpu_env, cpu_dest, cpu_src);
5291        break;
5292    case 0x1d: /* fcos */
5293        gen_helper_fcos(cpu_env, cpu_dest, cpu_src);
5294        break;
5295    case 0x1e: /* fgetexp */
5296        gen_helper_fgetexp(cpu_env, cpu_dest, cpu_src);
5297        break;
5298    case 0x1f: /* fgetman */
5299        gen_helper_fgetman(cpu_env, cpu_dest, cpu_src);
5300        break;
5301    case 0x20: /* fdiv */
5302        gen_helper_fdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5303        break;
5304    case 0x60: /* fsdiv */
5305        gen_helper_fsdiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5306        break;
5307    case 0x64: /* fddiv */
5308        gen_helper_fddiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5309        break;
5310    case 0x21: /* fmod */
5311        gen_helper_fmod(cpu_env, cpu_dest, cpu_src, cpu_dest);
5312        break;
5313    case 0x22: /* fadd */
5314        gen_helper_fadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5315        break;
5316    case 0x62: /* fsadd */
5317        gen_helper_fsadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5318        break;
5319    case 0x66: /* fdadd */
5320        gen_helper_fdadd(cpu_env, cpu_dest, cpu_src, cpu_dest);
5321        break;
5322    case 0x23: /* fmul */
5323        gen_helper_fmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5324        break;
5325    case 0x63: /* fsmul */
5326        gen_helper_fsmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5327        break;
5328    case 0x67: /* fdmul */
5329        gen_helper_fdmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5330        break;
5331    case 0x24: /* fsgldiv */
5332        gen_helper_fsgldiv(cpu_env, cpu_dest, cpu_src, cpu_dest);
5333        break;
5334    case 0x25: /* frem */
5335        gen_helper_frem(cpu_env, cpu_dest, cpu_src, cpu_dest);
5336        break;
5337    case 0x26: /* fscale */
5338        gen_helper_fscale(cpu_env, cpu_dest, cpu_src, cpu_dest);
5339        break;
5340    case 0x27: /* fsglmul */
5341        gen_helper_fsglmul(cpu_env, cpu_dest, cpu_src, cpu_dest);
5342        break;
5343    case 0x28: /* fsub */
5344        gen_helper_fsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5345        break;
5346    case 0x68: /* fssub */
5347        gen_helper_fssub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5348        break;
5349    case 0x6c: /* fdsub */
5350        gen_helper_fdsub(cpu_env, cpu_dest, cpu_src, cpu_dest);
5351        break;
5352    case 0x30: case 0x31: case 0x32:
5353    case 0x33: case 0x34: case 0x35:
5354    case 0x36: case 0x37: {
5355            TCGv_ptr cpu_dest2 = gen_fp_ptr(REG(ext, 0));
5356            gen_helper_fsincos(cpu_env, cpu_dest, cpu_dest2, cpu_src);
5357            tcg_temp_free_ptr(cpu_dest2);
5358        }
5359        break;
5360    case 0x38: /* fcmp */
5361        gen_helper_fcmp(cpu_env, cpu_src, cpu_dest);
5362        return;
5363    case 0x3a: /* ftst */
5364        gen_helper_ftst(cpu_env, cpu_src);
5365        return;
5366    default:
5367        goto undef;
5368    }
5369    tcg_temp_free_ptr(cpu_src);
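        /*
         * All remaining (arithmetic) opmodes update the FP condition
         * codes from the result.
         */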
5370    gen_helper_ftst(cpu_env, cpu_dest);
5371    tcg_temp_free_ptr(cpu_dest);
5372    return;
5373undef:
5374    /* FIXME: Is this right for offset addressing modes?  */
5375    s->pc -= 2;
5376    disas_undef_fpu(env, s, insn);
5377}
5378
5379static void gen_fcc_cond(DisasCompare *c, DisasContext *s, int cond)
5380{
5381    TCGv fpsr;
5382
5383    c->g1 = 1;
5384    c->v2 = tcg_const_i32(0);
5385    c->g2 = 0;
5386    /* TODO: Raise BSUN exception.  */
5387    fpsr = tcg_temp_new();
5388    gen_load_fcr(s, fpsr, M68K_FPSR);
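        /*
         * Each predicate is evaluated from the FPSR condition code bits
         * N (negative), Z (zero) and A (set for NaN/unordered results).
         * Predicates 16..31 are the signaling forms of 0..15 and are
         * handled identically here (see the BSUN TODO above).
         */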
5389    switch (cond) {
5390    case 0:  /* False */
5391    case 16: /* Signaling False */
5392        c->v1 = c->v2;
5393        c->tcond = TCG_COND_NEVER;
5394        break;
5395    case 1:  /* EQual Z */
5396    case 17: /* Signaling EQual Z */
5397        c->v1 = tcg_temp_new();
5398        c->g1 = 0;
5399        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5400        c->tcond = TCG_COND_NE;
5401        break;
5402    case 2:  /* Ordered Greater Than !(A || Z || N) */
5403    case 18: /* Greater Than !(A || Z || N) */
5404        c->v1 = tcg_temp_new();
5405        c->g1 = 0;
5406        tcg_gen_andi_i32(c->v1, fpsr,
5407                         FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5408        c->tcond = TCG_COND_EQ;
5409        break;
5410    case 3:  /* Ordered Greater than or Equal Z || !(A || N) */
5411    case 19: /* Greater than or Equal Z || !(A || N) */
5412        c->v1 = tcg_temp_new();
5413        c->g1 = 0;
5414        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5415        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5416        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_Z | FPSR_CC_N);
5417        tcg_gen_or_i32(c->v1, c->v1, fpsr);
5418        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5419        c->tcond = TCG_COND_NE;
5420        break;
5421    case 4:  /* Ordered Less Than !(!N || A || Z) */
5422    case 20: /* Less Than !(!N || A || Z) */
5423        c->v1 = tcg_temp_new();
5424        c->g1 = 0;
5425        tcg_gen_xori_i32(c->v1, fpsr, FPSR_CC_N);
5426        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_N | FPSR_CC_A | FPSR_CC_Z);
5427        c->tcond = TCG_COND_EQ;
5428        break;
5429    case 5:  /* Ordered Less than or Equal Z || (N && !A) */
5430    case 21: /* Less than or Equal Z || (N && !A) */
5431        c->v1 = tcg_temp_new();
5432        c->g1 = 0;
5433        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5434        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_A));
5435        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5436        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_Z | FPSR_CC_N);
5437        c->tcond = TCG_COND_NE;
5438        break;
5439    case 6:  /* Ordered Greater or Less than !(A || Z) */
5440    case 22: /* Greater or Less than !(A || Z) */
5441        c->v1 = tcg_temp_new();
5442        c->g1 = 0;
5443        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5444        c->tcond = TCG_COND_EQ;
5445        break;
5446    case 7:  /* Ordered !A */
5447    case 23: /* Greater, Less or Equal !A */
5448        c->v1 = tcg_temp_new();
5449        c->g1 = 0;
5450        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5451        c->tcond = TCG_COND_EQ;
5452        break;
5453    case 8:  /* Unordered A */
5454    case 24: /* Not Greater, Less or Equal A */
5455        c->v1 = tcg_temp_new();
5456        c->g1 = 0;
5457        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A);
5458        c->tcond = TCG_COND_NE;
5459        break;
5460    case 9:  /* Unordered or Equal A || Z */
5461    case 25: /* Not Greater or Less than A || Z */
5462        c->v1 = tcg_temp_new();
5463        c->g1 = 0;
5464        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z);
5465        c->tcond = TCG_COND_NE;
5466        break;
5467    case 10: /* Unordered or Greater Than A || !(N || Z) */
5468    case 26: /* Not Less or Equal A || !(N || Z) */
5469        c->v1 = tcg_temp_new();
5470        c->g1 = 0;
5471        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5472        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5473        tcg_gen_andi_i32(fpsr, fpsr, FPSR_CC_A | FPSR_CC_N);
5474        tcg_gen_or_i32(c->v1, c->v1, fpsr);
5475        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5476        c->tcond = TCG_COND_NE;
5477        break;
5478    case 11: /* Unordered or Greater or Equal A || Z || !N */
5479    case 27: /* Not Less Than A || Z || !N */
5480        c->v1 = tcg_temp_new();
5481        c->g1 = 0;
5482        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5483        tcg_gen_xori_i32(c->v1, c->v1, FPSR_CC_N);
5484        c->tcond = TCG_COND_NE;
5485        break;
5486    case 12: /* Unordered or Less Than A || (N && !Z) */
5487    case 28: /* Not Greater than or Equal A || (N && !Z) */
5488        c->v1 = tcg_temp_new();
5489        c->g1 = 0;
5490        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5491        tcg_gen_shli_i32(c->v1, c->v1, ctz32(FPSR_CC_N) - ctz32(FPSR_CC_Z));
5492        tcg_gen_andc_i32(c->v1, fpsr, c->v1);
5493        tcg_gen_andi_i32(c->v1, c->v1, FPSR_CC_A | FPSR_CC_N);
5494        c->tcond = TCG_COND_NE;
5495        break;
5496    case 13: /* Unordered or Less or Equal A || Z || N */
5497    case 29: /* Not Greater Than A || Z || N */
5498        c->v1 = tcg_temp_new();
5499        c->g1 = 0;
5500        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_A | FPSR_CC_Z | FPSR_CC_N);
5501        c->tcond = TCG_COND_NE;
5502        break;
5503    case 14: /* Not Equal !Z */
5504    case 30: /* Signaling Not Equal !Z */
5505        c->v1 = tcg_temp_new();
5506        c->g1 = 0;
5507        tcg_gen_andi_i32(c->v1, fpsr, FPSR_CC_Z);
5508        c->tcond = TCG_COND_EQ;
5509        break;
5510    case 15: /* True */
5511    case 31: /* Signaling True */
5512        c->v1 = c->v2;
5513        c->tcond = TCG_COND_ALWAYS;
5514        break;
5515    }
5516    tcg_temp_free(fpsr);
5517}
5518
5519static void gen_fjmpcc(DisasContext *s, int cond, TCGLabel *l1)
5520{
5521    DisasCompare c;
5522
5523    gen_fcc_cond(&c, s, cond);
5524    update_cc_op(s);
5525    tcg_gen_brcond_i32(c.tcond, c.v1, c.v2, l1);
5526    free_cond(&c);
5527}
5528
5529DISAS_INSN(fbcc)
5530{
5531    uint32_t offset;
5532    uint32_t base;
5533    TCGLabel *l1;
5534
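        /*
         * The displacement is 16 bits, or 32 bits (two extension words)
         * when bit 6 of the opcode is set; it is relative to the address
         * of the first extension word.
         */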
5535    base = s->pc;
5536    offset = (int16_t)read_im16(env, s);
5537    if (insn & (1 << 6)) {
5538        offset = (offset << 16) | read_im16(env, s);
5539    }
5540
5541    l1 = gen_new_label();
5542    update_cc_op(s);
5543    gen_fjmpcc(s, insn & 0x3f, l1);
5544    gen_jmp_tb(s, 0, s->pc, s->base.pc_next);
5545    gen_set_label(l1);
5546    gen_jmp_tb(s, 1, base + offset, s->base.pc_next);
5547}
5548
5549DISAS_INSN(fscc)
5550{
5551    DisasCompare c;
5552    int cond;
5553    TCGv tmp;
5554    uint16_t ext;
5555
5556    ext = read_im16(env, s);
5557    cond = ext & 0x3f;
5558    gen_fcc_cond(&c, s, cond);
5559
5560    tmp = tcg_temp_new();
5561    tcg_gen_setcond_i32(c.tcond, tmp, c.v1, c.v2);
5562    free_cond(&c);
5563
5564    tcg_gen_neg_i32(tmp, tmp);
5565    DEST_EA(env, insn, OS_BYTE, tmp, NULL);
5566    tcg_temp_free(tmp);
5567}
5568
5569DISAS_INSN(ftrapcc)
5570{
5571    DisasCompare c;
5572    uint16_t ext;
5573    int cond;
5574
5575    ext = read_im16(env, s);
5576    cond = ext & 0x3f;
5577
5578    /* Consume and discard the immediate operand. */
5579    switch (extract32(insn, 0, 3)) {
5580    case 2: /* ftrapcc.w */
5581        (void)read_im16(env, s);
5582        break;
5583    case 3: /* ftrapcc.l */
5584        (void)read_im32(env, s);
5585        break;
5586    case 4: /* ftrapcc (no operand) */
5587        break;
5588    default:
5589        /* ftrapcc registered with only valid opmodes */
5590        g_assert_not_reached();
5591    }
5592
5593    gen_fcc_cond(&c, s, cond);
5594    do_trapcc(s, &c);
5595}
5596
5597#if defined(CONFIG_SOFTMMU)
5598DISAS_INSN(frestore)
5599{
5600    TCGv addr;
5601
5602    if (IS_USER(s)) {
5603        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5604        return;
5605    }
5606    if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5607        SRC_EA(env, addr, OS_LONG, 0, NULL);
5608        /* FIXME: check the state frame */
5609    } else {
5610        disas_undef(env, s, insn);
5611    }
5612}
5613
5614DISAS_INSN(fsave)
5615{
5616    if (IS_USER(s)) {
5617        gen_exception(s, s->base.pc_next, EXCP_PRIVILEGE);
5618        return;
5619    }
5620
5621    if (m68k_feature(s->env, M68K_FEATURE_M68040)) {
5622        /* always write IDLE */
5623        TCGv idle = tcg_const_i32(0x41000000);
5624        DEST_EA(env, insn, OS_LONG, idle, NULL);
5625        tcg_temp_free(idle);
5626    } else {
5627        disas_undef(env, s, insn);
5628    }
5629}
5630#endif
5631
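    /*
     * Extract the upper or lower 16-bit half of a MAC operand.  How the
     * half-word is positioned and extended depends on the MACSR operating
     * mode: fractional (FI), signed (SU) or unsigned integer.
     */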
5632static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
5633{
5634    TCGv tmp = tcg_temp_new();
5635    if (s->env->macsr & MACSR_FI) {
5636        if (upper)
5637            tcg_gen_andi_i32(tmp, val, 0xffff0000);
5638        else
5639            tcg_gen_shli_i32(tmp, val, 16);
5640    } else if (s->env->macsr & MACSR_SU) {
5641        if (upper)
5642            tcg_gen_sari_i32(tmp, val, 16);
5643        else
5644            tcg_gen_ext16s_i32(tmp, val);
5645    } else {
5646        if (upper)
5647            tcg_gen_shri_i32(tmp, val, 16);
5648        else
5649            tcg_gen_ext16u_i32(tmp, val);
5650    }
5651    return tmp;
5652}
5653
5654static void gen_mac_clear_flags(void)
5655{
5656    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
5657                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
5658}
5659
5660DISAS_INSN(mac)
5661{
5662    TCGv rx;
5663    TCGv ry;
5664    uint16_t ext;
5665    int acc;
5666    TCGv tmp;
5667    TCGv addr;
5668    TCGv loadval;
5669    int dual;
5670    TCGv saved_flags;
5671
5672    if (!s->done_mac) {
5673        s->mactmp = tcg_temp_new_i64();
5674        s->done_mac = 1;
5675    }
5676
5677    ext = read_im16(env, s);
5678
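        /* insn bit 7 and ext bit 4 select one of the four accumulators. */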
5679    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
5680    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
5681    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
5682        disas_undef(env, s, insn);
5683        return;
5684    }
5685    if (insn & 0x30) {
5686        /* MAC with load.  */
5687        tmp = gen_lea(env, s, insn, OS_LONG);
5688        addr = tcg_temp_new();
5689        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
5690        /*
5691         * Load the value now to ensure correct exception behavior.
5692         * Perform writeback after reading the MAC inputs.
5693         */
5694        loadval = gen_load(s, OS_LONG, addr, 0, IS_USER(s));
5695
5696        acc ^= 1;
5697        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
5698        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
5699    } else {
5700        loadval = addr = NULL_QREG;
5701        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5702        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5703    }
5704
5705    gen_mac_clear_flags();
5706#if 0
5707    l1 = -1;
5708    /* Disabled because conditional branches clobber temporary vars.  */
5709    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
5710        /* Skip the multiply if we know we will ignore it.  */
5711        l1 = gen_new_label();
5712        tmp = tcg_temp_new();
5713        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
5714        gen_op_jmp_nz32(tmp, l1);
5715    }
5716#endif
5717
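    /*
     * Extension word bit 11 clear selects 16-bit multiplier operands;
     * bits 7 and 6 pick the upper or lower word of rx and ry respectively.
     */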
5718    if ((ext & 0x0800) == 0) {
5719        /* Word.  */
5720        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
5721        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
5722    }
5723    if (s->env->macsr & MACSR_FI) {
5724        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
5725    } else {
5726        if (s->env->macsr & MACSR_SU)
5727            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
5728        else
5729            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
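        /*
         * Apply the scale factor from ext bits 10:9:
         * 01 shifts left by one, 11 shifts right by one.
         */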
5730        switch ((ext >> 9) & 3) {
5731        case 1:
5732            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
5733            break;
5734        case 3:
5735            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
5736            break;
5737        }
5738    }
5739
5740    if (dual) {
5741        /* Save the overflow flag from the multiply.  */
5742        saved_flags = tcg_temp_new();
5743        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
5744    } else {
5745        saved_flags = NULL_QREG;
5746    }
5747
5748#if 0
5749    /* Disabled because conditional branches clobber temporary vars.  */
5750    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
5751        /* Skip the accumulate if the value is already saturated.  */
5752        l1 = gen_new_label();
5753        tmp = tcg_temp_new();
5754        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5755        gen_op_jmp_nz32(tmp, l1);
5756    }
5757#endif
5758
5759    if (insn & 0x100)
5760        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5761    else
5762        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5763
5764    if (s->env->macsr & MACSR_FI)
5765        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5766    else if (s->env->macsr & MACSR_SU)
5767        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5768    else
5769        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5770
5771#if 0
5772    /* Disabled because conditional branches clobber temporary vars.  */
5773    if (l1 != -1)
5774        gen_set_label(l1);
5775#endif
5776
5777    if (dual) {
5778        /* Dual accumulate variant.  */
5779        acc = (ext >> 2) & 3;
5780        /* Restore the overflow flag from the multiply.  */
5781        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
5782#if 0
5783        /* Disabled because conditional branches clobber temporary vars.  */
5784        if ((s->env->macsr & MACSR_OMC) != 0) {
5785            /* Skip the accumulate if the value is already saturated.  */
5786            l1 = gen_new_label();
5787            tmp = tcg_temp_new();
5788            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
5789            gen_op_jmp_nz32(tmp, l1);
5790        }
5791#endif
5792        if (ext & 2)
5793            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
5794        else
5795            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
5796        if (s->env->macsr & MACSR_FI)
5797            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
5798        else if (s->env->macsr & MACSR_SU)
5799            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
5800        else
5801            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
5802#if 0
5803        /* Disabled because conditional branches clobber temporary vars.  */
5804        if (l1 != -1)
5805            gen_set_label(l1);
5806#endif
5807    }
5808    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));
5809
5810    if (insn & 0x30) {
5811        TCGv rw;
5812        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
5813        tcg_gen_mov_i32(rw, loadval);
5814        /*
5815         * FIXME: Should address writeback happen with the masked or
5816         * unmasked value?
5817         */
5818        switch ((insn >> 3) & 7) {
5819        case 3: /* Post-increment.  */
5820            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
5821            break;
5822        case 4: /* Pre-decrement.  */
5823            tcg_gen_mov_i32(AREG(insn, 0), addr);
                break;
5824        }
5825        tcg_temp_free(loadval);
5826    }
5827}
5828
5829DISAS_INSN(from_mac)
5830{
5831    TCGv rx;
5832    TCGv_i64 acc;
5833    int accnum;
5834
5835    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5836    accnum = (insn >> 9) & 3;
5837    acc = MACREG(accnum);
5838    if (s->env->macsr & MACSR_FI) {
5839        gen_helper_get_macf(rx, cpu_env, acc);
5840    } else if ((s->env->macsr & MACSR_OMC) == 0) {
5841        tcg_gen_extrl_i64_i32(rx, acc);
5842    } else if (s->env->macsr & MACSR_SU) {
5843        gen_helper_get_macs(rx, acc);
5844    } else {
5845        gen_helper_get_macu(rx, acc);
5846    }
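    /*
     * The movclr form (insn bit 6 set) also clears the accumulator
     * and its PAV flag after the read.
     */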
5847    if (insn & 0x40) {
5848        tcg_gen_movi_i64(acc, 0);
5849        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5850    }
5851}
5852
5853DISAS_INSN(move_mac)
5854{
5855    /* FIXME: This can be done without a helper.  */
5856    int src;
5857    TCGv dest;
5858    src = insn & 3;
5859    dest = tcg_const_i32((insn >> 9) & 3);
5860    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
5861    gen_mac_clear_flags();
5862    gen_helper_mac_set_flags(cpu_env, dest);
5863}
5864
5865DISAS_INSN(from_macsr)
5866{
5867    TCGv reg;
5868
5869    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5870    tcg_gen_mov_i32(reg, QREG_MACSR);
5871}
5872
5873DISAS_INSN(from_mask)
5874{
5875    TCGv reg;
5876    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5877    tcg_gen_mov_i32(reg, QREG_MAC_MASK);
5878}
5879
5880DISAS_INSN(from_mext)
5881{
5882    TCGv reg;
5883    TCGv acc;
5884    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
5885    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5886    if (s->env->macsr & MACSR_FI)
5887        gen_helper_get_mac_extf(reg, cpu_env, acc);
5888    else
5889        gen_helper_get_mac_exti(reg, cpu_env, acc);
5890}
5891
5892DISAS_INSN(macsr_to_ccr)
5893{
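    /*
     * The low four bits of MACSR hold the N, Z, V and C flags;
     * copy them into the CCR.
     */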
5894    TCGv tmp = tcg_temp_new();
5895    tcg_gen_andi_i32(tmp, QREG_MACSR, 0xf);
5896    gen_helper_set_sr(cpu_env, tmp);
5897    tcg_temp_free(tmp);
5898    set_cc_op(s, CC_OP_FLAGS);
5899}
5900
5901DISAS_INSN(to_mac)
5902{
5903    TCGv_i64 acc;
5904    TCGv val;
5905    int accnum;
5906    accnum = (insn >> 9) & 3;
5907    acc = MACREG(accnum);
5908    SRC_EA(env, val, OS_LONG, 0, NULL);
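    /* In fractional mode the value occupies bits 39:8 of the accumulator. */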
5909    if (s->env->macsr & MACSR_FI) {
5910        tcg_gen_ext_i32_i64(acc, val);
5911        tcg_gen_shli_i64(acc, acc, 8);
5912    } else if (s->env->macsr & MACSR_SU) {
5913        tcg_gen_ext_i32_i64(acc, val);
5914    } else {
5915        tcg_gen_extu_i32_i64(acc, val);
5916    }
5917    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
5918    gen_mac_clear_flags();
5919    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
5920}
5921
5922DISAS_INSN(to_macsr)
5923{
5924    TCGv val;
5925    SRC_EA(env, val, OS_LONG, 0, NULL);
5926    gen_helper_set_macsr(cpu_env, val);
5927    gen_exit_tb(s);
5928}
5929
5930DISAS_INSN(to_mask)
5931{
5932    TCGv val;
5933    SRC_EA(env, val, OS_LONG, 0, NULL);
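    /* The upper 16 bits of MASK are forced to ones. */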
5934    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
5935}
5936
5937DISAS_INSN(to_mext)
5938{
5939    TCGv val;
5940    TCGv acc;
5941    SRC_EA(env, val, OS_LONG, 0, NULL);
5942    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
5943    if (s->env->macsr & MACSR_FI)
5944        gen_helper_set_mac_extf(cpu_env, val, acc);
5945    else if (s->env->macsr & MACSR_SU)
5946        gen_helper_set_mac_exts(cpu_env, val, acc);
5947    else
5948        gen_helper_set_mac_extu(cpu_env, val, acc);
5949}
5950
5951static disas_proc opcode_table[65536];
5952
5953static void
5954register_opcode(disas_proc proc, uint16_t opcode, uint16_t mask)
5955{
5956  int i;
5957  int from;
5958  int to;
5959
5960  /* Sanity check.  All set bits must be included in the mask.  */
5961  if (opcode & ~mask) {
5962      fprintf(stderr,
5963              "qemu internal error: bogus opcode definition %04x/%04x\n",
5964              opcode, mask);
5965      abort();
5966  }
5967  /*
5968   * This could probably be cleverer.  For now just optimize the case where
5969   * the top bits are known.
5970   */
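  /*
   * For example, with mask f1c0 the top four opcode bits are fixed, so we
   * walk the 0x1000 table entries that share the opcode's top nibble and
   * fill in those that match the full mask.
   */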
5971  /* Find the first zero bit in the mask.  */
5972  i = 0x8000;
5973  while ((i & mask) != 0)
5974      i >>= 1;
5975  /* Iterate over all combinations of this and lower bits.  */
5976  if (i == 0)
5977      i = 1;
5978  else
5979      i <<= 1;
5980  from = opcode & ~(i - 1);
5981  to = from + i;
5982  for (i = from; i < to; i++) {
5983      if ((i & mask) == opcode)
5984          opcode_table[i] = proc;
5985  }
5986}
5987
5988/*
5989 * Register m68k opcode handlers.  Order is important.
5990 * Later insns override earlier ones.
5991 */
5992void register_m68k_insns(CPUM68KState *env)
5993{
5994    /*
5995     * Build the opcode table only once to avoid
5996     * multithreading issues.
5997     */
5998    if (opcode_table[0] != NULL) {
5999        return;
6000    }
6001
6002    /*
6003     * Use BASE() for instructions available on
6004     * both CF_ISA_A and M68000.
6005     */
6006#define BASE(name, opcode, mask) \
6007    register_opcode(disas_##name, 0x##opcode, 0x##mask)
6008#define INSN(name, opcode, mask, feature) do { \
6009    if (m68k_feature(env, M68K_FEATURE_##feature)) \
6010        BASE(name, opcode, mask); \
6011    } while(0)
6012    BASE(undef,     0000, 0000);
6013    INSN(arith_im,  0080, fff8, CF_ISA_A);
6014    INSN(arith_im,  0000, ff00, M68000);
6015    INSN(chk2,      00c0, f9c0, CHK2);
6016    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
6017    BASE(bitop_reg, 0100, f1c0);
6018    BASE(bitop_reg, 0140, f1c0);
6019    BASE(bitop_reg, 0180, f1c0);
6020    BASE(bitop_reg, 01c0, f1c0);
6021    INSN(movep,     0108, f138, MOVEP);
6022    INSN(arith_im,  0280, fff8, CF_ISA_A);
6023    INSN(arith_im,  0200, ff00, M68000);
6024    INSN(undef,     02c0, ffc0, M68000);
6025    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
6026    INSN(arith_im,  0480, fff8, CF_ISA_A);
6027    INSN(arith_im,  0400, ff00, M68000);
6028    INSN(undef,     04c0, ffc0, M68000);
6029    INSN(arith_im,  0600, ff00, M68000);
6030    INSN(undef,     06c0, ffc0, M68000);
6031    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
6032    INSN(arith_im,  0680, fff8, CF_ISA_A);
6033    INSN(arith_im,  0c00, ff38, CF_ISA_A);
6034    INSN(arith_im,  0c00, ff00, M68000);
6035    BASE(bitop_im,  0800, ffc0);
6036    BASE(bitop_im,  0840, ffc0);
6037    BASE(bitop_im,  0880, ffc0);
6038    BASE(bitop_im,  08c0, ffc0);
6039    INSN(arith_im,  0a80, fff8, CF_ISA_A);
6040    INSN(arith_im,  0a00, ff00, M68000);
6041#if defined(CONFIG_SOFTMMU)
6042    INSN(moves,     0e00, ff00, M68000);
6043#endif
6044    INSN(cas,       0ac0, ffc0, CAS);
6045    INSN(cas,       0cc0, ffc0, CAS);
6046    INSN(cas,       0ec0, ffc0, CAS);
6047    INSN(cas2w,     0cfc, ffff, CAS);
6048    INSN(cas2l,     0efc, ffff, CAS);
6049    BASE(move,      1000, f000);
6050    BASE(move,      2000, f000);
6051    BASE(move,      3000, f000);
6052    INSN(chk,       4000, f040, M68000);
6053    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
6054    INSN(negx,      4080, fff8, CF_ISA_A);
6055    INSN(negx,      4000, ff00, M68000);
6056    INSN(undef,     40c0, ffc0, M68000);
6057    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
6058    INSN(move_from_sr, 40c0, ffc0, M68000);
6059    BASE(lea,       41c0, f1c0);
6060    BASE(clr,       4200, ff00);
6061    BASE(undef,     42c0, ffc0);
6062    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
6063    INSN(move_from_ccr, 42c0, ffc0, M68000);
6064    INSN(neg,       4480, fff8, CF_ISA_A);
6065    INSN(neg,       4400, ff00, M68000);
6066    INSN(undef,     44c0, ffc0, M68000);
6067    BASE(move_to_ccr, 44c0, ffc0);
6068    INSN(not,       4680, fff8, CF_ISA_A);
6069    INSN(not,       4600, ff00, M68000);
6070#if defined(CONFIG_SOFTMMU)
6071    BASE(move_to_sr, 46c0, ffc0);
6072#endif
6073    INSN(nbcd,      4800, ffc0, M68000);
6074    INSN(linkl,     4808, fff8, M68000);
6075    BASE(pea,       4840, ffc0);
6076    BASE(swap,      4840, fff8);
6077    INSN(bkpt,      4848, fff8, BKPT);
6078    INSN(movem,     48d0, fbf8, CF_ISA_A);
6079    INSN(movem,     48e8, fbf8, CF_ISA_A);
6080    INSN(movem,     4880, fb80, M68000);
6081    BASE(ext,       4880, fff8);
6082    BASE(ext,       48c0, fff8);
6083    BASE(ext,       49c0, fff8);
6084    BASE(tst,       4a00, ff00);
6085    INSN(tas,       4ac0, ffc0, CF_ISA_B);
6086    INSN(tas,       4ac0, ffc0, M68000);
6087#if defined(CONFIG_SOFTMMU)
6088    INSN(halt,      4ac8, ffff, CF_ISA_A);
6089    INSN(halt,      4ac8, ffff, M68060);
6090#endif
6091    INSN(pulse,     4acc, ffff, CF_ISA_A);
6092    BASE(illegal,   4afc, ffff);
6093    INSN(mull,      4c00, ffc0, CF_ISA_A);
6094    INSN(mull,      4c00, ffc0, LONG_MULDIV);
6095    INSN(divl,      4c40, ffc0, CF_ISA_A);
6096    INSN(divl,      4c40, ffc0, LONG_MULDIV);
6097    INSN(sats,      4c80, fff8, CF_ISA_B);
6098    BASE(trap,      4e40, fff0);
6099    BASE(link,      4e50, fff8);
6100    BASE(unlk,      4e58, fff8);
6101#if defined(CONFIG_SOFTMMU)
6102    INSN(move_to_usp, 4e60, fff8, USP);
6103    INSN(move_from_usp, 4e68, fff8, USP);
6104    INSN(reset,     4e70, ffff, M68000);
6105    BASE(stop,      4e72, ffff);
6106    BASE(rte,       4e73, ffff);
6107    INSN(cf_movec,  4e7b, ffff, CF_ISA_A);
6108    INSN(m68k_movec, 4e7a, fffe, MOVEC);
6109#endif
6110    BASE(nop,       4e71, ffff);
6111    INSN(rtd,       4e74, ffff, RTD);
6112    BASE(rts,       4e75, ffff);
6113    INSN(trapv,     4e76, ffff, M68000);
6114    INSN(rtr,       4e77, ffff, M68000);
6115    BASE(jump,      4e80, ffc0);
6116    BASE(jump,      4ec0, ffc0);
6117    INSN(addsubq,   5000, f080, M68000);
6118    BASE(addsubq,   5080, f0c0);
6119    INSN(scc,       50c0, f0f8, CF_ISA_A); /* Scc.B Dx   */
6120    INSN(scc,       50c0, f0c0, M68000);   /* Scc.B <EA> */
6121    INSN(dbcc,      50c8, f0f8, M68000);
6122    INSN(trapcc,    50fa, f0fe, TRAPCC);   /* opmode 010, 011 */
6123    INSN(trapcc,    50fc, f0ff, TRAPCC);   /* opmode 100 */
6124    INSN(trapcc,    51fa, fffe, CF_ISA_A); /* TPF (trapf) opmode 010, 011 */
6125    INSN(trapcc,    51fc, ffff, CF_ISA_A); /* TPF (trapf) opmode 100 */
6126
6127    /* Branch instructions.  */
6128    BASE(branch,    6000, f000);
6129    /* Disable long branch instructions, then add back the ones we want.  */
6130    BASE(undef,     60ff, f0ff); /* All long branches.  */
6131    INSN(branch,    60ff, f0ff, CF_ISA_B);
6132    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
6133    INSN(branch,    60ff, ffff, BRAL);
6134    INSN(branch,    60ff, f0ff, BCCL);
6135
6136    BASE(moveq,     7000, f100);
6137    INSN(mvzs,      7100, f100, CF_ISA_B);
6138    BASE(or,        8000, f000);
6139    BASE(divw,      80c0, f0c0);
6140    INSN(sbcd_reg,  8100, f1f8, M68000);
6141    INSN(sbcd_mem,  8108, f1f8, M68000);
6142    BASE(addsub,    9000, f000);
6143    INSN(undef,     90c0, f0c0, CF_ISA_A);
6144    INSN(subx_reg,  9180, f1f8, CF_ISA_A);
6145    INSN(subx_reg,  9100, f138, M68000);
6146    INSN(subx_mem,  9108, f138, M68000);
6147    INSN(suba,      91c0, f1c0, CF_ISA_A);
6148    INSN(suba,      90c0, f0c0, M68000);
6149
6150    BASE(undef_mac, a000, f000);
6151    INSN(mac,       a000, f100, CF_EMAC);
6152    INSN(from_mac,  a180, f9b0, CF_EMAC);
6153    INSN(move_mac,  a110, f9fc, CF_EMAC);
6154    INSN(from_macsr, a980, f9f0, CF_EMAC);
6155    INSN(from_mask, ad80, fff0, CF_EMAC);
6156    INSN(from_mext, ab80, fbf0, CF_EMAC);
6157    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
6158    INSN(to_mac,    a100, f9c0, CF_EMAC);
6159    INSN(to_macsr,  a900, ffc0, CF_EMAC);
6160    INSN(to_mext,   ab00, fbc0, CF_EMAC);
6161    INSN(to_mask,   ad00, ffc0, CF_EMAC);
6162
6163    INSN(mov3q,     a140, f1c0, CF_ISA_B);
6164    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
6165    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
6166    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
6167    INSN(cmp,       b080, f1c0, CF_ISA_A);
6168    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
6169    INSN(cmp,       b000, f100, M68000);
6170    INSN(eor,       b100, f100, M68000);
6171    INSN(cmpm,      b108, f138, M68000);
6172    INSN(cmpa,      b0c0, f0c0, M68000);
6173    INSN(eor,       b180, f1c0, CF_ISA_A);
6174    BASE(and,       c000, f000);
6175    INSN(exg_dd,    c140, f1f8, M68000);
6176    INSN(exg_aa,    c148, f1f8, M68000);
6177    INSN(exg_da,    c188, f1f8, M68000);
6178    BASE(mulw,      c0c0, f0c0);
6179    INSN(abcd_reg,  c100, f1f8, M68000);
6180    INSN(abcd_mem,  c108, f1f8, M68000);
6181    BASE(addsub,    d000, f000);
6182    INSN(undef,     d0c0, f0c0, CF_ISA_A);
6183    INSN(addx_reg,  d180, f1f8, CF_ISA_A);
6184    INSN(addx_reg,  d100, f138, M68000);
6185    INSN(addx_mem,  d108, f138, M68000);
6186    INSN(adda,      d1c0, f1c0, CF_ISA_A);
6187    INSN(adda,      d0c0, f0c0, M68000);
6188    INSN(shift_im,  e080, f0f0, CF_ISA_A);
6189    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
6190    INSN(shift8_im, e000, f0f0, M68000);
6191    INSN(shift16_im, e040, f0f0, M68000);
6192    INSN(shift_im,  e080, f0f0, M68000);
6193    INSN(shift8_reg, e020, f0f0, M68000);
6194    INSN(shift16_reg, e060, f0f0, M68000);
6195    INSN(shift_reg, e0a0, f0f0, M68000);
6196    INSN(shift_mem, e0c0, fcc0, M68000);
6197    INSN(rotate_im, e090, f0f0, M68000);
6198    INSN(rotate8_im, e010, f0f0, M68000);
6199    INSN(rotate16_im, e050, f0f0, M68000);
6200    INSN(rotate_reg, e0b0, f0f0, M68000);
6201    INSN(rotate8_reg, e030, f0f0, M68000);
6202    INSN(rotate16_reg, e070, f0f0, M68000);
6203    INSN(rotate_mem, e4c0, fcc0, M68000);
6204    INSN(bfext_mem, e9c0, fdc0, BITFIELD);  /* bfextu & bfexts */
6205    INSN(bfext_reg, e9c0, fdf8, BITFIELD);
6206    INSN(bfins_mem, efc0, ffc0, BITFIELD);
6207    INSN(bfins_reg, efc0, fff8, BITFIELD);
6208    INSN(bfop_mem, eac0, ffc0, BITFIELD);   /* bfchg */
6209    INSN(bfop_reg, eac0, fff8, BITFIELD);   /* bfchg */
6210    INSN(bfop_mem, ecc0, ffc0, BITFIELD);   /* bfclr */
6211    INSN(bfop_reg, ecc0, fff8, BITFIELD);   /* bfclr */
6212    INSN(bfop_mem, edc0, ffc0, BITFIELD);   /* bfffo */
6213    INSN(bfop_reg, edc0, fff8, BITFIELD);   /* bfffo */
6214    INSN(bfop_mem, eec0, ffc0, BITFIELD);   /* bfset */
6215    INSN(bfop_reg, eec0, fff8, BITFIELD);   /* bfset */
6216    INSN(bfop_mem, e8c0, ffc0, BITFIELD);   /* bftst */
6217    INSN(bfop_reg, e8c0, fff8, BITFIELD);   /* bftst */
6218    BASE(undef_fpu, f000, f000);
6219    INSN(fpu,       f200, ffc0, CF_FPU);
6220    INSN(fbcc,      f280, ffc0, CF_FPU);
6221    INSN(fpu,       f200, ffc0, FPU);
6222    INSN(fscc,      f240, ffc0, FPU);
6223    INSN(ftrapcc,   f27a, fffe, FPU);       /* opmode 010, 011 */
6224    INSN(ftrapcc,   f27c, ffff, FPU);       /* opmode 100 */
6225    INSN(fbcc,      f280, ff80, FPU);
6226#if defined(CONFIG_SOFTMMU)
6227    INSN(frestore,  f340, ffc0, CF_FPU);
6228    INSN(fsave,     f300, ffc0, CF_FPU);
6229    INSN(frestore,  f340, ffc0, FPU);
6230    INSN(fsave,     f300, ffc0, FPU);
6231    INSN(intouch,   f340, ffc0, CF_ISA_A);
6232    INSN(cpushl,    f428, ff38, CF_ISA_A);
6233    INSN(cpush,     f420, ff20, M68040);
6234    INSN(cinv,      f400, ff20, M68040);
6235    INSN(pflush,    f500, ffe0, M68040);
6236    INSN(ptest,     f548, ffd8, M68040);
6237    INSN(wddata,    fb00, ff00, CF_ISA_A);
6238    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
6239#endif
6240    INSN(move16_mem, f600, ffe0, M68040);
6241    INSN(move16_reg, f620, fff8, M68040);
6242#undef INSN
6243}
6244
6245static void m68k_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
6246{
6247    DisasContext *dc = container_of(dcbase, DisasContext, base);
6248    CPUM68KState *env = cpu->env_ptr;
6249
6250    dc->env = env;
6251    dc->pc = dc->base.pc_first;
6252    /* This value will always be filled in properly before m68k_tr_tb_stop. */
6253    dc->pc_prev = 0xdeadbeef;
6254    dc->cc_op = CC_OP_DYNAMIC;
6255    dc->cc_op_synced = 1;
6256    dc->done_mac = 0;
6257    dc->writeback_mask = 0;
6258    init_release_array(dc);
6259
6260    dc->ss_active = (M68K_SR_TRACE(env->sr) == M68K_SR_TRACE_ANY_INS);
6261    /* If architectural single-step is active, limit the TB to one insn. */
6262    if (dc->ss_active) {
6263        dc->base.max_insns = 1;
6264    }
6265}
6266
6267static void m68k_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
6268{
6269}
6270
6271static void m68k_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
6272{
6273    DisasContext *dc = container_of(dcbase, DisasContext, base);
6274    tcg_gen_insn_start(dc->base.pc_next, dc->cc_op);
6275}
6276
6277static void m68k_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
6278{
6279    DisasContext *dc = container_of(dcbase, DisasContext, base);
6280    CPUM68KState *env = cpu->env_ptr;
6281    uint16_t insn = read_im16(env, dc);
6282
6283    opcode_table[insn](env, dc, insn);
6284    do_writebacks(dc);
6285    do_release(dc);
6286
6287    dc->pc_prev = dc->base.pc_next;
6288    dc->base.pc_next = dc->pc;
6289
6290    if (dc->base.is_jmp == DISAS_NEXT) {
6291        /*
6292         * Stop translation when the next insn might touch a new page.
6293         * This ensures that prefetch aborts at the right place.
6294         *
6295         * We cannot determine the size of the next insn without
6296         * completely decoding it.  However, the maximum insn size
6297         * is 32 bytes, so end if we do not have that much remaining.
6298         * This may produce several small TBs at the end of each page,
6299         * but they will all be linked with goto_tb.
6300         *
6301         * ??? ColdFire maximum is 4 bytes; MC68000's maximum is also
6302         * smaller than MC68020's.
6303         */
6304        target_ulong start_page_offset
6305            = dc->pc - (dc->base.pc_first & TARGET_PAGE_MASK);
6306
6307        if (start_page_offset >= TARGET_PAGE_SIZE - 32) {
6308            dc->base.is_jmp = DISAS_TOO_MANY;
6309        }
6310    }
6311}
6312
6313static void m68k_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
6314{
6315    DisasContext *dc = container_of(dcbase, DisasContext, base);
6316
6317    switch (dc->base.is_jmp) {
6318    case DISAS_NORETURN:
6319        break;
6320    case DISAS_TOO_MANY:
6321        update_cc_op(dc);
6322        gen_jmp_tb(dc, 0, dc->pc, dc->pc_prev);
6323        break;
6324    case DISAS_JUMP:
6325        /* We updated CC_OP and PC in gen_jmp/gen_jmp_im.  */
6326        if (dc->ss_active) {
6327            gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6328        } else {
6329            tcg_gen_lookup_and_goto_ptr();
6330        }
6331        break;
6332    case DISAS_EXIT:
6333        /*
6334         * We updated CC_OP and PC in gen_exit_tb, but also modified
6335         * other state that may require returning to the main loop.
6336         */
6337        if (dc->ss_active) {
6338            gen_raise_exception_format2(dc, EXCP_TRACE, dc->pc_prev);
6339        } else {
6340            tcg_gen_exit_tb(NULL, 0);
6341        }
6342        break;
6343    default:
6344        g_assert_not_reached();
6345    }
6346}
6347
6348static void m68k_tr_disas_log(const DisasContextBase *dcbase,
6349                              CPUState *cpu, FILE *logfile)
6350{
6351    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
6352    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
6353}
6354
6355static const TranslatorOps m68k_tr_ops = {
6356    .init_disas_context = m68k_tr_init_disas_context,
6357    .tb_start           = m68k_tr_tb_start,
6358    .insn_start         = m68k_tr_insn_start,
6359    .translate_insn     = m68k_tr_translate_insn,
6360    .tb_stop            = m68k_tr_tb_stop,
6361    .disas_log          = m68k_tr_disas_log,
6362};
6363
6364void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
6365{
6366    DisasContext dc;
6367    translator_loop(&m68k_tr_ops, &dc.base, cpu, tb, max_insns);
6368}
6369
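/* Convert an 80-bit FP register image to a host double for display. */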
6370static double floatx80_to_double(CPUM68KState *env, uint16_t high, uint64_t low)
6371{
6372    floatx80 a = { .high = high, .low = low };
6373    union {
6374        float64 f64;
6375        double d;
6376    } u;
6377
6378    u.f64 = floatx80_to_float64(a, &env->fp_status);
6379    return u.d;
6380}
6381
6382void m68k_cpu_dump_state(CPUState *cs, FILE *f, int flags)
6383{
6384    M68kCPU *cpu = M68K_CPU(cs);
6385    CPUM68KState *env = &cpu->env;
6386    int i;
6387    uint16_t sr;
6388    for (i = 0; i < 8; i++) {
6389        qemu_fprintf(f, "D%d = %08x   A%d = %08x   "
6390                     "F%d = %04x %016"PRIx64"  (%12g)\n",
6391                     i, env->dregs[i], i, env->aregs[i],
6392                     i, env->fregs[i].l.upper, env->fregs[i].l.lower,
6393                     floatx80_to_double(env, env->fregs[i].l.upper,
6394                                        env->fregs[i].l.lower));
6395    }
6396    qemu_fprintf(f, "PC = %08x   ", env->pc);
6397    sr = env->sr | cpu_m68k_get_ccr(env);
6398    qemu_fprintf(f, "SR = %04x T:%x I:%x %c%c %c%c%c%c%c\n",
6399                 sr, (sr & SR_T) >> SR_T_SHIFT, (sr & SR_I) >> SR_I_SHIFT,
6400                 (sr & SR_S) ? 'S' : 'U', (sr & SR_M) ? '%' : 'I',
6401                 (sr & CCF_X) ? 'X' : '-', (sr & CCF_N) ? 'N' : '-',
6402                 (sr & CCF_Z) ? 'Z' : '-', (sr & CCF_V) ? 'V' : '-',
6403                 (sr & CCF_C) ? 'C' : '-');
6404    qemu_fprintf(f, "FPSR = %08x %c%c%c%c ", env->fpsr,
6405                 (env->fpsr & FPSR_CC_A) ? 'A' : '-',
6406                 (env->fpsr & FPSR_CC_I) ? 'I' : '-',
6407                 (env->fpsr & FPSR_CC_Z) ? 'Z' : '-',
6408                 (env->fpsr & FPSR_CC_N) ? 'N' : '-');
6409    qemu_fprintf(f, "\n                                "
6410                 "FPCR =     %04x ", env->fpcr);
6411    switch (env->fpcr & FPCR_PREC_MASK) {
6412    case FPCR_PREC_X:
6413        qemu_fprintf(f, "X ");
6414        break;
6415    case FPCR_PREC_S:
6416        qemu_fprintf(f, "S ");
6417        break;
6418    case FPCR_PREC_D:
6419        qemu_fprintf(f, "D ");
6420        break;
6421    }
6422    switch (env->fpcr & FPCR_RND_MASK) {
6423    case FPCR_RND_N:
6424        qemu_fprintf(f, "RN ");
6425        break;
6426    case FPCR_RND_Z:
6427        qemu_fprintf(f, "RZ ");
6428        break;
6429    case FPCR_RND_M:
6430        qemu_fprintf(f, "RM ");
6431        break;
6432    case FPCR_RND_P:
6433        qemu_fprintf(f, "RP ");
6434        break;
6435    }
6436    qemu_fprintf(f, "\n");
6437#ifdef CONFIG_SOFTMMU
6438    qemu_fprintf(f, "%sA7(MSP) = %08x %sA7(USP) = %08x %sA7(ISP) = %08x\n",
6439                 env->current_sp == M68K_SSP ? "->" : "  ", env->sp[M68K_SSP],
6440                 env->current_sp == M68K_USP ? "->" : "  ", env->sp[M68K_USP],
6441                 env->current_sp == M68K_ISP ? "->" : "  ", env->sp[M68K_ISP]);
6442    qemu_fprintf(f, "VBR = 0x%08x\n", env->vbr);
6443    qemu_fprintf(f, "SFC = %x DFC %x\n", env->sfc, env->dfc);
6444    qemu_fprintf(f, "SSW %08x TCR %08x URP %08x SRP %08x\n",
6445                 env->mmu.ssw, env->mmu.tcr, env->mmu.urp, env->mmu.srp);
6446    qemu_fprintf(f, "DTTR0/1: %08x/%08x ITTR0/1: %08x/%08x\n",
6447                 env->mmu.ttr[M68K_DTTR0], env->mmu.ttr[M68K_DTTR1],
6448                 env->mmu.ttr[M68K_ITTR0], env->mmu.ttr[M68K_ITTR1]);
6449    qemu_fprintf(f, "MMUSR %08x, fault at %08x\n",
6450                 env->mmu.mmusr, env->mmu.ar);
6451#endif
6452}
6453
6454void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
6455                          target_ulong *data)
6456{
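    /* data[] holds the PC and CC_OP recorded by m68k_tr_insn_start(). */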
6457    int cc_op = data[1];
6458    env->pc = data[0];
6459    if (cc_op != CC_OP_DYNAMIC) {
6460        env->cc_op = cc_op;
6461    }
6462}
6463