qemu/tcg/tci.c
/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-ldst.h"
#include "qemu/compiler.h"
#include <ffi.h>


/*
 * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)(cond))
#endif

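/*
 * Pointer into the current translation block, published so that helpers
 * invoked via INDEX_op_call can recover a host "return address" (see the
 * call handling in tcg_qemu_tb_exec below).
 */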
__thread uintptr_t tci_tb_ptr;

static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    regs[low_index] = (uint32_t)value;
    regs[high_index] = value >> 32;
}

/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}

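/*
 * Worked example of the register-pair convention used by the two helpers
 * above on a 32-bit host (values arbitrary, for illustration only):
 * storing v = 0x1122334455667788 with tci_write_reg64(regs, hi, lo, v)
 * leaves regs[lo] == 0x55667788 and regs[hi] == 0x11223344, and
 * tci_uint64(0x11223344, 0x55667788) reassembles the original value.
 */
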
/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   b = immediate (bit position)
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   m = immediate (MemOpIdx)
 *   n = immediate (call return length)
 *   r = register
 *   s = signed ldst offset
 */

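/*
 * A sketch of how one 32-bit bytecode word is laid out, inferred from the
 * decoders below (illustrative, not normative): the opcode always occupies
 * bits [0,8), and register fields are 4 bits each starting at bit 8.  For
 * example, an "rrs" insn such as ld_i32 decodes as:
 *
 *   opc = extract32(insn, 0, 8);     bits  0..7
 *   r0  = extract32(insn, 8, 4);     bits  8..11
 *   r1  = extract32(insn, 12, 4);    bits 12..15
 *   ofs = sextract32(insn, 16, 16);  bits 16..31 (signed)
 */
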
static void tci_args_l(uint32_t insn, const void *tb_ptr, void **l0)
{
    int diff = sextract32(insn, 12, 20);
    *l0 = diff ? (void *)tb_ptr + diff : NULL;
}

static void tci_args_r(uint32_t insn, TCGReg *r0)
{
    *r0 = extract32(insn, 8, 4);
}

static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    *n0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rl(uint32_t insn, const void *tb_ptr,
                        TCGReg *r0, void **l1)
{
    *r0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rr(uint32_t insn, TCGReg *r0, TCGReg *r1)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
}

static void tci_args_ri(uint32_t insn, TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = extract32(insn, 8, 4);
    *i1 = sextract32(insn, 12, 20);
}

static void tci_args_rrm(uint32_t insn, TCGReg *r0,
                         TCGReg *r1, MemOpIdx *m2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *m2 = extract32(insn, 20, 12);
}

static void tci_args_rrr(uint32_t insn, TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
}

static void tci_args_rrs(uint32_t insn, TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = sextract32(insn, 16, 16);
}

static void tci_args_rrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                          uint8_t *i2, uint8_t *i3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = extract32(insn, 16, 6);
    *i3 = extract32(insn, 22, 6);
}

static void tci_args_rrrc(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *c3 = extract32(insn, 20, 4);
}

static void tci_args_rrrm(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, MemOpIdx *m3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *m3 = extract32(insn, 20, 12);
}

static void tci_args_rrrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *i3 = extract32(insn, 20, 6);
    *i4 = extract32(insn, 26, 6);
}

static void tci_args_rrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, TCGReg *r3, TCGReg *r4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
}

static void tci_args_rrrr(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
}

static void tci_args_rrrrrc(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *c5 = extract32(insn, 28, 4);
}

static void tci_args_rrrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *r5 = extract32(insn, 28, 4);
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

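/*
 * Guest load/store helpers.  The MemOpIdx operand packs the MemOp flags
 * together with the mmu_idx (see make_memop_idx() in the TCG headers);
 * get_memop() below recovers the MemOp part, which selects the access
 * size, signedness and byte order.
 */
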
static uint64_t tci_qemu_ld(CPUArchState *env, target_ulong taddr,
                            MemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi);
    uintptr_t ra = (uintptr_t)tb_ptr;

#ifdef CONFIG_SOFTMMU
    switch (mop & (MO_BSWAP | MO_SSIZE)) {
    case MO_UB:
        return helper_ret_ldub_mmu(env, taddr, oi, ra);
    case MO_SB:
        return helper_ret_ldsb_mmu(env, taddr, oi, ra);
    case MO_LEUW:
        return helper_le_lduw_mmu(env, taddr, oi, ra);
    case MO_LESW:
        return helper_le_ldsw_mmu(env, taddr, oi, ra);
    case MO_LEUL:
        return helper_le_ldul_mmu(env, taddr, oi, ra);
    case MO_LESL:
        return helper_le_ldsl_mmu(env, taddr, oi, ra);
    case MO_LEUQ:
        return helper_le_ldq_mmu(env, taddr, oi, ra);
    case MO_BEUW:
        return helper_be_lduw_mmu(env, taddr, oi, ra);
    case MO_BESW:
        return helper_be_ldsw_mmu(env, taddr, oi, ra);
    case MO_BEUL:
        return helper_be_ldul_mmu(env, taddr, oi, ra);
    case MO_BESL:
        return helper_be_ldsl_mmu(env, taddr, oi, ra);
    case MO_BEUQ:
        return helper_be_ldq_mmu(env, taddr, oi, ra);
    default:
        g_assert_not_reached();
    }
#else
    void *haddr = g2h(env_cpu(env), taddr);
    unsigned a_mask = (1u << get_alignment_bits(mop)) - 1;
    uint64_t ret;

    set_helper_retaddr(ra);
    if (taddr & a_mask) {
        helper_unaligned_ld(env, taddr);
    }
    switch (mop & (MO_BSWAP | MO_SSIZE)) {
    case MO_UB:
        ret = ldub_p(haddr);
        break;
    case MO_SB:
        ret = ldsb_p(haddr);
        break;
    case MO_LEUW:
        ret = lduw_le_p(haddr);
        break;
    case MO_LESW:
        ret = ldsw_le_p(haddr);
        break;
    case MO_LEUL:
        ret = (uint32_t)ldl_le_p(haddr);
        break;
    case MO_LESL:
        ret = (int32_t)ldl_le_p(haddr);
        break;
    case MO_LEUQ:
        ret = ldq_le_p(haddr);
        break;
    case MO_BEUW:
        ret = lduw_be_p(haddr);
        break;
    case MO_BESW:
        ret = ldsw_be_p(haddr);
        break;
    case MO_BEUL:
        ret = (uint32_t)ldl_be_p(haddr);
        break;
    case MO_BESL:
        ret = (int32_t)ldl_be_p(haddr);
        break;
    case MO_BEUQ:
        ret = ldq_be_p(haddr);
        break;
    default:
        g_assert_not_reached();
    }
    clear_helper_retaddr();
    return ret;
#endif
}

static void tci_qemu_st(CPUArchState *env, target_ulong taddr, uint64_t val,
                        MemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi);
    uintptr_t ra = (uintptr_t)tb_ptr;

#ifdef CONFIG_SOFTMMU
    switch (mop & (MO_BSWAP | MO_SIZE)) {
    case MO_UB:
        helper_ret_stb_mmu(env, taddr, val, oi, ra);
        break;
    case MO_LEUW:
        helper_le_stw_mmu(env, taddr, val, oi, ra);
        break;
    case MO_LEUL:
        helper_le_stl_mmu(env, taddr, val, oi, ra);
        break;
    case MO_LEUQ:
        helper_le_stq_mmu(env, taddr, val, oi, ra);
        break;
    case MO_BEUW:
        helper_be_stw_mmu(env, taddr, val, oi, ra);
        break;
    case MO_BEUL:
        helper_be_stl_mmu(env, taddr, val, oi, ra);
        break;
    case MO_BEUQ:
        helper_be_stq_mmu(env, taddr, val, oi, ra);
        break;
    default:
        g_assert_not_reached();
    }
#else
    void *haddr = g2h(env_cpu(env), taddr);
    unsigned a_mask = (1u << get_alignment_bits(mop)) - 1;

    set_helper_retaddr(ra);
    if (taddr & a_mask) {
        helper_unaligned_st(env, taddr);
    }
    switch (mop & (MO_BSWAP | MO_SIZE)) {
    case MO_UB:
        stb_p(haddr, val);
        break;
    case MO_LEUW:
        stw_le_p(haddr, val);
        break;
    case MO_LEUL:
        stl_le_p(haddr, val);
        break;
    case MO_LEUQ:
        stq_le_p(haddr, val);
        break;
    case MO_BEUW:
        stw_be_p(haddr, val);
        break;
    case MO_BEUL:
        stl_be_p(haddr, val);
        break;
    case MO_BEUQ:
        stq_be_p(haddr, val);
        break;
    default:
        g_assert_not_reached();
    }
    clear_helper_retaddr();
#endif
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

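/*
 * Example expansion on a 64-bit host: "CASE_32_64(add)" becomes
 *     case INDEX_op_add_i64:
 *     case INDEX_op_add_i32:
 * so one interpreter arm below serves both operand widths.
 */
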
/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint32_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
                   / sizeof(uint64_t)];
    void *call_slots[TCG_STATIC_CALL_ARGS_SIZE / sizeof(uint64_t)];

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
    /* Other call_slots entries initialized at first use (see below). */
    call_slots[0] = NULL;
    tci_assert(tb_ptr);

    for (;;) {
        uint32_t insn;
        TCGOpcode opc;
        TCGReg r0, r1, r2, r3, r4, r5;
        tcg_target_ulong t1;
        TCGCond condition;
        target_ulong taddr;
        uint8_t pos, len;
        uint32_t tmp32;
        uint64_t tmp64;
        uint64_t T1, T2;
        MemOpIdx oi;
        int32_t ofs;
        void *ptr;

        insn = *tb_ptr++;
        opc = extract32(insn, 0, 8);

        switch (opc) {
        case INDEX_op_call:
            /*
             * Set up the ffi_avalue array once, delayed until now
             * because many TBs do not make any calls. In tcg_gen_callN,
             * we arranged for every real argument to be "left-aligned"
             * in each 64-bit slot.
             */
            if (unlikely(call_slots[0] == NULL)) {
                for (int i = 0; i < ARRAY_SIZE(call_slots); ++i) {
                    call_slots[i] = &stack[i];
                }
            }

            tci_args_nl(insn, tb_ptr, &len, &ptr);

            /* Helper functions may need to access the "return address" */
            tci_tb_ptr = (uintptr_t)tb_ptr;

            {
                void **pptr = ptr;
                ffi_call(pptr[1], pptr[0], stack, call_slots);
            }

            /* Any result winds up "left-aligned" in the stack[0] slot. */
            switch (len) {
            case 0: /* void */
                break;
            case 1: /* uint32_t */
                /*
                 * Note that libffi has an odd special case in that it will
                 * always widen an integral result to ffi_arg.
                 */
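                /*
                 * For example (an illustration, not a requirement of the
                 * code here): a helper returning uint8_t has its result
                 * widened to a full ffi_arg, so when ffi_arg is 4 bytes
                 * the whole 32-bit slot read below is already correct.
                 */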
                if (sizeof(ffi_arg) == 4) {
                    regs[TCG_REG_R0] = *(uint32_t *)stack;
                    break;
                }
                /* fall through */
            case 2: /* uint64_t */
                if (TCG_TARGET_REG_BITS == 32) {
                    tci_write_reg64(regs, TCG_REG_R1, TCG_REG_R0, stack[0]);
                } else {
                    regs[TCG_REG_R0] = stack[0];
                }
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_br:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_movcond_i32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare32(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_movcond_i64:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare64(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi:
            tci_args_ri(insn, &r0, &t1);
            regs[r0] = t1;
            break;
        case INDEX_op_tci_movl:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            regs[r0] = *(tcg_target_ulong *)ptr;
            break;

            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;
#if TCG_TARGET_HAS_andc_i32 || TCG_TARGET_HAS_andc_i64
        CASE_32_64(andc)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & ~regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_orc_i32 || TCG_TARGET_HAS_orc_i64
        CASE_32_64(orc)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | ~regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_eqv_i32 || TCG_TARGET_HAS_eqv_i64
        CASE_32_64(eqv)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] ^ regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_nand_i32 || TCG_TARGET_HAS_nand_i64
        CASE_32_64(nand)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] & regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_nor_i32 || TCG_TARGET_HAS_nor_i64
        CASE_32_64(nor)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] | regs[r2]);
            break;
#endif

            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;
#if TCG_TARGET_HAS_clz_i32
        case INDEX_op_clz_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? clz32(tmp32) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctz_i32
        case INDEX_op_ctz_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? ctz32(tmp32) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctpop_i32
        case INDEX_op_ctpop_i32:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ctpop32(regs[r1]);
            break;
#endif

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_extract_i32
        case INDEX_op_extract_i32:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = extract32(regs[r1], pos, len);
            break;
#endif
#if TCG_TARGET_HAS_sextract_i32
        case INDEX_op_sextract_i32:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = sextract32(regs[r1], pos, len);
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            if ((uint32_t)regs[r0]) {
                tb_ptr = ptr;
            }
            break;
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_add2_i32
        case INDEX_op_add2_i32:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 + T2);
            break;
#endif
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_sub2_i32
        case INDEX_op_sub2_i32:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 - T2);
            break;
#endif
#if TCG_TARGET_HAS_mulu2_i32
        case INDEX_op_mulu2_i32:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
            break;
#endif
#if TCG_TARGET_HAS_muls2_i32
        case INDEX_op_muls2_i32:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64 || \
    TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(ext16s)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;
#if TCG_TARGET_HAS_clz_i64
        case INDEX_op_clz_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? clz64(regs[r1]) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctz_i64
        case INDEX_op_ctz_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? ctz64(regs[r1]) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctpop_i64
        case INDEX_op_ctpop_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ctpop64(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_mulu2_i64
        case INDEX_op_mulu2_i64:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            mulu64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
            break;
#endif
#if TCG_TARGET_HAS_muls2_i64
        case INDEX_op_muls2_i64:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            muls64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
            break;
#endif
#if TCG_TARGET_HAS_add2_i64
        case INDEX_op_add2_i64:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = regs[r2] + regs[r4];
            T2 = regs[r3] + regs[r5] + (T1 < regs[r2]);
            regs[r0] = T1;
            regs[r1] = T2;
            break;
#endif
#if TCG_TARGET_HAS_sub2_i64
        case INDEX_op_sub2_i64:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = regs[r2] - regs[r4];
            T2 = regs[r3] - regs[r5] - (regs[r2] < regs[r4]);
            regs[r0] = T1;
            regs[r1] = T2;
            break;
#endif

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_extract_i64
        case INDEX_op_extract_i64:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = extract64(regs[r1], pos, len);
            break;
#endif
#if TCG_TARGET_HAS_sextract_i64
        case INDEX_op_sextract_i64:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = sextract64(regs[r1], pos, len);
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            if (regs[r0]) {
                tb_ptr = ptr;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            return (uintptr_t)ptr;

        case INDEX_op_goto_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = *(void **)ptr;
            break;

        case INDEX_op_goto_ptr:
            tci_args_r(insn, &r0);
            ptr = (void *)regs[r0];
            if (!ptr) {
                return 0;
            }
            tb_ptr = ptr;
            break;

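            /*
             * The qemu_ld/st opcodes below use progressively wider
             * operand forms depending on how many host registers the
             * guest address and a 64-bit value need: a single register
             * when they fit, a register pair otherwise, and in the
             * widest form the MemOpIdx is passed in a register too
             * (oi = regs[r4] below).
             */
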
        case INDEX_op_qemu_ld_i32:
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            tmp32 = tci_qemu_ld(env, taddr, oi, tb_ptr);
            regs[r0] = tmp32;
            break;

        case INDEX_op_qemu_ld_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                taddr = regs[r2];
            } else {
                tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
                taddr = tci_uint64(regs[r3], regs[r2]);
                oi = regs[r4];
            }
            tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg64(regs, r1, r0, tmp64);
            } else {
                regs[r0] = tmp64;
            }
            break;

        case INDEX_op_qemu_st_i32:
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            tmp32 = regs[r0];
            tci_qemu_st(env, taddr, tmp32, oi, tb_ptr);
            break;

        case INDEX_op_qemu_st_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
                tmp64 = regs[r0];
            } else {
                if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                    tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                    taddr = regs[r2];
                } else {
                    tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
                    taddr = tci_uint64(regs[r3], regs[r2]);
                    oi = regs[r4];
                }
                tmp64 = tci_uint64(regs[r1], regs[r0]);
            }
            tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
            break;

        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
    }
}

/*
 * Disassembler that matches the interpreter
 */

static const char *str_r(TCGReg r)
{
    static const char regs[TCG_TARGET_NB_REGS][4] = {
        "r0", "r1", "r2",  "r3",  "r4",  "r5",  "r6",  "r7",
        "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
    };

    QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
    QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);

    assert((unsigned)r < TCG_TARGET_NB_REGS);
    return regs[r];
}

static const char *str_c(TCGCond c)
{
    static const char cond[16][8] = {
        [TCG_COND_NEVER] = "never",
        [TCG_COND_ALWAYS] = "always",
        [TCG_COND_EQ] = "eq",
        [TCG_COND_NE] = "ne",
        [TCG_COND_LT] = "lt",
        [TCG_COND_GE] = "ge",
        [TCG_COND_LE] = "le",
        [TCG_COND_GT] = "gt",
        [TCG_COND_LTU] = "ltu",
        [TCG_COND_GEU] = "geu",
        [TCG_COND_LEU] = "leu",
        [TCG_COND_GTU] = "gtu",
    };

    assert((unsigned)c < ARRAY_SIZE(cond));
    assert(cond[c][0] != 0);
    return cond[c];
}

/* Disassemble TCI bytecode. */
int print_insn_tci(bfd_vma addr, disassemble_info *info)
{
    const uint32_t *tb_ptr = (const void *)(uintptr_t)addr;
    const TCGOpDef *def;
    const char *op_name;
    uint32_t insn;
    TCGOpcode op;
    TCGReg r0, r1, r2, r3, r4, r5;
    tcg_target_ulong i1;
    int32_t s2;
    TCGCond c;
    MemOpIdx oi;
    uint8_t pos, len;
    void *ptr;

    /* TCI is always the host, so we don't need to load indirect. */
    insn = *tb_ptr++;

    info->fprintf_func(info->stream, "%08x  ", insn);

    op = extract32(insn, 0, 8);
    def = &tcg_op_defs[op];
    op_name = def->name;

    switch (op) {
    case INDEX_op_br:
    case INDEX_op_exit_tb:
    case INDEX_op_goto_tb:
        tci_args_l(insn, tb_ptr, &ptr);
        info->fprintf_func(info->stream, "%-12s  %p", op_name, ptr);
        break;

    case INDEX_op_goto_ptr:
        tci_args_r(insn, &r0);
        info->fprintf_func(info->stream, "%-12s  %s", op_name, str_r(r0));
        break;

    case INDEX_op_call:
        tci_args_nl(insn, tb_ptr, &len, &ptr);
        info->fprintf_func(info->stream, "%-12s  %d, %p", op_name, len, ptr);
        break;

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, 0, ne, %p",
                           op_name, str_r(r0), ptr);
        break;

    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        tci_args_rrrc(insn, &r0, &r1, &r2, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
        break;

    case INDEX_op_tci_movi:
        tci_args_ri(insn, &r0, &i1);
        info->fprintf_func(info->stream, "%-12s  %s, 0x%" TCG_PRIlx,
                           op_name, str_r(r0), i1);
        break;

    case INDEX_op_tci_movl:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, %p",
                           op_name, str_r(r0), ptr);
        break;

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i32:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i32:
    case INDEX_op_st_i64:
        tci_args_rrs(insn, &r0, &r1, &s2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %d",
                           op_name, str_r(r0), str_r(r1), s2);
        break;

    case INDEX_op_mov_i32:
    case INDEX_op_mov_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        tci_args_rr(insn, &r0, &r1);
        info->fprintf_func(info->stream, "%-12s  %s, %s",
                           op_name, str_r(r0), str_r(r1));
        break;

    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        tci_args_rrr(insn, &r0, &r1, &r2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2));
        break;

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
        break;

    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
        tci_args_rrbb(insn, &r0, &r1, &pos, &len);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), pos, len);
        break;

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_c(c));
        break;

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1),
                           str_r(r2), str_r(r3));
        break;

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_r(r5));
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        len = DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
        goto do_qemu_ldst;
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        len = 1;
    do_qemu_ldst:
        len += DIV_ROUND_UP(TARGET_LONG_BITS, TCG_TARGET_REG_BITS);
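        /*
         * Worked example (hypothetical configuration): a 64-bit guest
         * on a 32-bit host gives qemu_ld_i64 two value words plus two
         * address words, so len == 4 and the five-register form below
         * is used, with the MemOpIdx in the final register.
         */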
        switch (len) {
        case 2:
            tci_args_rrm(insn, &r0, &r1, &oi);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %x",
                               op_name, str_r(r0), str_r(r1), oi);
            break;
        case 3:
            tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %x",
                               op_name, str_r(r0), str_r(r1), str_r(r2), oi);
            break;
        case 4:
            tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s",
                               op_name, str_r(r0), str_r(r1),
                               str_r(r2), str_r(r3), str_r(r4));
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case 0:
        /* tcg_out_nop_fill uses zeros */
        if (insn == 0) {
            info->fprintf_func(info->stream, "align");
            break;
        }
        /* fall through */

    default:
        info->fprintf_func(info->stream, "illegal opcode %d", op);
        break;
    }

    return sizeof(insn);
}