qemu/tcg/tci/tcg-target.inc.c
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "tcg-be-null.h"

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))
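/* For example, BITS(7, 4) evaluates to 0xf0, i.e. bits 7 down to 4 set. */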

/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
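
/* R64 names the register pair that holds a 64 bit value on 32 bit hosts;
   likewise L and S double up when a guest address needs two host registers,
   i.e. when TARGET_LONG_BITS > TCG_TARGET_REG_BITS. */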

/* Operand constraints, one entry per opcode.  Each string constrains one
   output or input operand: "r" any register, "ri" register or immediate,
   "0"/"1" must alias the corresponding output operand, "L"/"S" the
   qemu_ld/qemu_st constraints.  E.g. { INDEX_op_add_i32, { R, RI, RI } }
   is a register output with two register-or-immediate inputs. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
    { INDEX_op_ext_i32_i64, { R, R } },
    { INDEX_op_extu_i32_i64, { R, R } },
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld_i32, { R, L } },
    { INDEX_op_qemu_ld_i64, { R64, L } },

    { INDEX_op_qemu_st_i32, { R, S } },
    { INDEX_op_qemu_st_i64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
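
/* Each of the MAX_OPC_PARAM_IARGS input arguments needs a register below;
   on 32 bit hosts a 64 bit argument occupies a register pair, so twice as
   many registers are listed. */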

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};

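/* Call return values; 64 bit results use the R0:R1 pair on 32 bit hosts. */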
static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
}

/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        /* The interpreter imposes no special register requirements for
           qemu_ld/st, so all three constraints accept any register. */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode, plus a placeholder byte for the instruction length which
   each emitter patches once the instruction is complete. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);             /* length placeholder */
}

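/* A complete TCI instruction therefore looks like:
 *   byte 0:  TCGOpcode
 *   byte 1:  total instruction length in bytes (patched via
 *            old_code_ptr[1] once the instruction is complete)
 *   byte 2+: operands; a register is a single byte, a constant is a
 *            TCG_CONST marker byte followed by the 32/64 bit value.
 * E.g. an add_i32 with three register operands occupies five bytes.
 */
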
/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
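
/* A forward reference leaves a native-size hole and records a relocation;
   patch_reloc() above fills it in once the label's value is known. */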

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        /* Use the shorter 32 bit encoding when the value zero-extends. */
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_ri(s, 1, (uintptr_t)arg);   /* callee address as a constant */
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            tcg_debug_assert(args[0] < ARRAY_SIZE(s->tb_jmp_insn_offset));
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        tcg_debug_assert(args[0] < ARRAY_SIZE(s->tb_jmp_reset_offset));
        s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
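    /* The qemu_ld/st operand list varies in length: value register(s),
       address register(s) (doubled when they do not fit in one host
       register), then a final native-size word (the memory operand index). */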
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi.  */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    /* Constants cannot be stored directly; returning false makes the
       caller move the value into a register first. */
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough.  Any value is
       acceptable, since constants are emitted at full native size. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments.  */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* Intentionally empty: the generated bytecode is run by the
       interpreter (tci.c) rather than executed natively, so no host
       prologue or epilogue is needed. */
}