qemu/tcg/tcg.h
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu-common.h"

/* Target word size (must be identical to pointer size). */
#if UINTPTR_MAX == UINT32_MAX
# define TCG_TARGET_REG_BITS 32
#elif UINTPTR_MAX == UINT64_MAX
# define TCG_TARGET_REG_BITS 64
#else
# error Unknown pointer size for tcg target
#endif

#if TCG_TARGET_REG_BITS == 32
typedef int32_t tcg_target_long;
typedef uint32_t tcg_target_ulong;
#define TCG_PRIlx PRIx32
#define TCG_PRIld PRId32
#elif TCG_TARGET_REG_BITS == 64
typedef int64_t tcg_target_long;
typedef uint64_t tcg_target_ulong;
#define TCG_PRIlx PRIx64
#define TCG_PRIld PRId64
#else
#error unsupported
#endif
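
/*
 * Usage sketch: TCG_PRIlx/TCG_PRIld select the printf conversion matching
 * tcg_target_ulong/tcg_target_long, so debug output stays independent of
 * the host word size.  A minimal, hypothetical example:
 *
 *     tcg_target_ulong val = 0xdeadbeef;
 *     fprintf(stderr, "value = 0x%" TCG_PRIlx "\n", val);
 */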

#include "tcg-target.h"
#include "tcg-runtime.h"

#if TCG_TARGET_NB_REGS <= 32
typedef uint32_t TCGRegSet;
#elif TCG_TARGET_NB_REGS <= 64
typedef uint64_t TCGRegSet;
#else
#error unsupported
#endif

#if TCG_TARGET_REG_BITS == 32
/* Turn some undef macros into false macros.  */
#define TCG_TARGET_HAS_div_i64          0
#define TCG_TARGET_HAS_div2_i64         0
#define TCG_TARGET_HAS_rot_i64          0
#define TCG_TARGET_HAS_ext8s_i64        0
#define TCG_TARGET_HAS_ext16s_i64       0
#define TCG_TARGET_HAS_ext32s_i64       0
#define TCG_TARGET_HAS_ext8u_i64        0
#define TCG_TARGET_HAS_ext16u_i64       0
#define TCG_TARGET_HAS_ext32u_i64       0
#define TCG_TARGET_HAS_bswap16_i64      0
#define TCG_TARGET_HAS_bswap32_i64      0
#define TCG_TARGET_HAS_bswap64_i64      0
#define TCG_TARGET_HAS_neg_i64          0
#define TCG_TARGET_HAS_not_i64          0
#define TCG_TARGET_HAS_andc_i64         0
#define TCG_TARGET_HAS_orc_i64          0
#define TCG_TARGET_HAS_eqv_i64          0
#define TCG_TARGET_HAS_nand_i64         0
#define TCG_TARGET_HAS_nor_i64          0
#define TCG_TARGET_HAS_deposit_i64      0
#define TCG_TARGET_HAS_movcond_i64      0
#define TCG_TARGET_HAS_add2_i64         0
#define TCG_TARGET_HAS_sub2_i64         0
#define TCG_TARGET_HAS_mulu2_i64        0
#define TCG_TARGET_HAS_muls2_i64        0
/* Turn some undef macros into true macros.  */
#define TCG_TARGET_HAS_add2_i32         1
#define TCG_TARGET_HAS_sub2_i32         1
#define TCG_TARGET_HAS_mulu2_i32        1
#endif

#ifndef TCG_TARGET_deposit_i32_valid
#define TCG_TARGET_deposit_i32_valid(ofs, len) 1
#endif
#ifndef TCG_TARGET_deposit_i64_valid
#define TCG_TARGET_deposit_i64_valid(ofs, len) 1
#endif

/* Only one of DIV or DIV2 should be defined.  */
#if defined(TCG_TARGET_HAS_div_i32)
#define TCG_TARGET_HAS_div2_i32         0
#elif defined(TCG_TARGET_HAS_div2_i32)
#define TCG_TARGET_HAS_div_i32          0
#endif
#if defined(TCG_TARGET_HAS_div_i64)
#define TCG_TARGET_HAS_div2_i64         0
#elif defined(TCG_TARGET_HAS_div2_i64)
#define TCG_TARGET_HAS_div_i64          0
#endif

typedef enum TCGOpcode {
#define DEF(name, oargs, iargs, cargs, flags) INDEX_op_ ## name,
#include "tcg-opc.h"
#undef DEF
    NB_OPS,
} TCGOpcode;
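
/*
 * Sketch of how the enum is built: every DEF(...) line in tcg-opc.h expands
 * to one enumerator here.  For instance, an entry of the form
 *
 *     DEF(add_i32, 1, 2, 0, 0)
 *
 * produces INDEX_op_add_i32.  (The argument counts and flags shown are only
 * illustrative; the authoritative values live in tcg-opc.h.)
 */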

#define tcg_regset_clear(d) (d) = 0
#define tcg_regset_set(d, s) (d) = (s)
#define tcg_regset_set32(d, reg, val32) (d) |= (val32) << (reg)
#define tcg_regset_set_reg(d, r) (d) |= 1L << (r)
#define tcg_regset_reset_reg(d, r) (d) &= ~(1L << (r))
#define tcg_regset_test_reg(d, r) (((d) >> (r)) & 1)
#define tcg_regset_or(d, a, b) (d) = (a) | (b)
#define tcg_regset_and(d, a, b) (d) = (a) & (b)
#define tcg_regset_andnot(d, a, b) (d) = (a) & ~(b)
#define tcg_regset_not(d, a) (d) = ~(a)
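
/*
 * Usage sketch: TCGRegSet is a plain bitmask, so the macros above expand to
 * ordinary bit operations.  A minimal, hypothetical example:
 *
 *     TCGRegSet allocatable;
 *     tcg_regset_clear(allocatable);
 *     tcg_regset_set32(allocatable, 0, 0xffff);
 *     tcg_regset_reset_reg(allocatable, 3);
 *
 * After this sequence, registers 0..15 except register 3 are in the set,
 * and tcg_regset_test_reg(allocatable, 3) evaluates to 0.
 */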

typedef struct TCGRelocation {
    struct TCGRelocation *next;
    int type;
    uint8_t *ptr;
    tcg_target_long addend;
} TCGRelocation;

typedef struct TCGLabel {
    int has_value;
    union {
        tcg_target_ulong value;
        TCGRelocation *first_reloc;
    } u;
} TCGLabel;

typedef struct TCGPool {
    struct TCGPool *next;
    int size;
    uint8_t data[0] __attribute__ ((aligned));
} TCGPool;

#define TCG_POOL_CHUNK_SIZE 32768

#define TCG_MAX_LABELS 512

#define TCG_MAX_TEMPS 512

/* when the size of the arguments of a called function is smaller than
   this value, they are statically allocated in the TB stack frame */
#define TCG_STATIC_CALL_ARGS_SIZE 128

typedef enum TCGType {
    TCG_TYPE_I32,
    TCG_TYPE_I64,
    TCG_TYPE_COUNT, /* number of different types */

    /* An alias for the size of the host register.  */
#if TCG_TARGET_REG_BITS == 32
    TCG_TYPE_REG = TCG_TYPE_I32,
#else
    TCG_TYPE_REG = TCG_TYPE_I64,
#endif

    /* An alias for the size of the native pointer.  We don't currently
       support any hosts with 64-bit registers and 32-bit pointers.  */
    TCG_TYPE_PTR = TCG_TYPE_REG,

    /* An alias for the size of the target "long", aka register.  */
#if TARGET_LONG_BITS == 64
    TCG_TYPE_TL = TCG_TYPE_I64,
#else
    TCG_TYPE_TL = TCG_TYPE_I32,
#endif
} TCGType;

typedef tcg_target_ulong TCGArg;

/* Define a type and accessor macros for variables.  Using a struct is
   nice because it gives some level of type safety.  Ideally the compiler
   would be able to see through all this.  However, in practice this is not
   the case, especially on targets with braindamaged ABIs (e.g. i386).
   We use plain int by default to avoid this runtime overhead.
   Users of tcg_gen_* don't need to know about any of this, and should
   treat TCGv as an opaque type.
   In addition we do typechecking for different types of variables.  TCGv_i32
   and TCGv_i64 are 32/64-bit variables respectively.  TCGv and TCGv_ptr
   are aliases for target_ulong and host pointer sized values respectively.
 */

#if defined(CONFIG_QEMU_LDST_OPTIMIZATION) && defined(CONFIG_SOFTMMU)
/* Macros/structures for qemu_ld/st IR code optimization:
   TCG_MAX_QEMU_LDST is defined to be the same as OPC_BUF_SIZE in exec-all.h. */
#define TCG_MAX_QEMU_LDST       640

typedef struct TCGLabelQemuLdst {
    int is_ld:1;            /* qemu_ld: 1, qemu_st: 0 */
    int opc:4;
    int addrlo_reg;         /* reg index for low word of guest virtual addr */
    int addrhi_reg;         /* reg index for high word of guest virtual addr */
    int datalo_reg;         /* reg index for low word to be loaded or stored */
    int datahi_reg;         /* reg index for high word to be loaded or stored */
    int mem_index;          /* soft MMU memory index */
    uint8_t *raddr;         /* gen code addr of the next IR of qemu_ld/st IR */
    uint8_t *label_ptr[2];  /* label pointers to be updated */
} TCGLabelQemuLdst;
#endif

#ifdef CONFIG_DEBUG_TCG
#define DEBUG_TCGV 1
#endif

#ifdef DEBUG_TCGV

typedef struct
{
    int i32;
} TCGv_i32;

typedef struct
{
    int i64;
} TCGv_i64;

typedef struct {
    int iptr;
} TCGv_ptr;

#define MAKE_TCGV_I32(i) __extension__                  \
    ({ TCGv_i32 make_tcgv_tmp = {i}; make_tcgv_tmp;})
#define MAKE_TCGV_I64(i) __extension__                  \
    ({ TCGv_i64 make_tcgv_tmp = {i}; make_tcgv_tmp;})
#define MAKE_TCGV_PTR(i) __extension__                  \
    ({ TCGv_ptr make_tcgv_tmp = {i}; make_tcgv_tmp; })
#define GET_TCGV_I32(t) ((t).i32)
#define GET_TCGV_I64(t) ((t).i64)
#define GET_TCGV_PTR(t) ((t).iptr)
#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) MAKE_TCGV_I32(GET_TCGV_I64(t))
#define TCGV_HIGH(t) MAKE_TCGV_I32(GET_TCGV_I64(t) + 1)
#endif

#else /* !DEBUG_TCGV */

typedef int TCGv_i32;
typedef int TCGv_i64;
#if TCG_TARGET_REG_BITS == 32
#define TCGv_ptr TCGv_i32
#else
#define TCGv_ptr TCGv_i64
#endif
#define MAKE_TCGV_I32(x) (x)
#define MAKE_TCGV_I64(x) (x)
#define MAKE_TCGV_PTR(x) (x)
#define GET_TCGV_I32(t) (t)
#define GET_TCGV_I64(t) (t)
#define GET_TCGV_PTR(t) (t)

#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) (t)
#define TCGV_HIGH(t) ((t) + 1)
#endif

#endif /* DEBUG_TCGV */

#define TCGV_EQUAL_I32(a, b) (GET_TCGV_I32(a) == GET_TCGV_I32(b))
#define TCGV_EQUAL_I64(a, b) (GET_TCGV_I64(a) == GET_TCGV_I64(b))

/* Dummy definition to avoid compiler warnings.  */
#define TCGV_UNUSED_I32(x) x = MAKE_TCGV_I32(-1)
#define TCGV_UNUSED_I64(x) x = MAKE_TCGV_I64(-1)

#define TCGV_IS_UNUSED_I32(x) (GET_TCGV_I32(x) == -1)
#define TCGV_IS_UNUSED_I64(x) (GET_TCGV_I64(x) == -1)

/* call flags */
/* Helper does not read globals (either directly or through an exception). It
   implies TCG_CALL_NO_WRITE_GLOBALS. */
#define TCG_CALL_NO_READ_GLOBALS    0x0010
/* Helper does not write globals */
#define TCG_CALL_NO_WRITE_GLOBALS   0x0020
/* Helper can be safely suppressed if the return value is not used. */
#define TCG_CALL_NO_SIDE_EFFECTS    0x0040

/* convenience version of most used call flags */
#define TCG_CALL_NO_RWG         TCG_CALL_NO_READ_GLOBALS
#define TCG_CALL_NO_WG          TCG_CALL_NO_WRITE_GLOBALS
#define TCG_CALL_NO_SE          TCG_CALL_NO_SIDE_EFFECTS
#define TCG_CALL_NO_RWG_SE      (TCG_CALL_NO_RWG | TCG_CALL_NO_SE)
#define TCG_CALL_NO_WG_SE       (TCG_CALL_NO_WG | TCG_CALL_NO_SE)
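
/*
 * Usage sketch: these flags are OR-ed together and passed as the "flags"
 * argument of tcg_gen_callN() (declared later in this header).  For example,
 * a pure helper whose result may safely be discarded and which never reads
 * or writes globals would be flagged TCG_CALL_NO_RWG_SE, i.e.
 * TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS.
 */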

/* used to align parameters */
#define TCG_CALL_DUMMY_TCGV     MAKE_TCGV_I32(-1)
#define TCG_CALL_DUMMY_ARG      ((TCGArg)(-1))

/* Conditions.  Note that these are laid out for easy manipulation by
   the functions below:
     bit 0 is used for inverting;
     bit 1 is signed;
     bit 2 is unsigned;
     bit 3 is used with bit 0 for swapping signed/unsigned.  */
typedef enum {
    /* non-signed */
    TCG_COND_NEVER  = 0 | 0 | 0 | 0,
    TCG_COND_ALWAYS = 0 | 0 | 0 | 1,
    TCG_COND_EQ     = 8 | 0 | 0 | 0,
    TCG_COND_NE     = 8 | 0 | 0 | 1,
    /* signed */
    TCG_COND_LT     = 0 | 0 | 2 | 0,
    TCG_COND_GE     = 0 | 0 | 2 | 1,
    TCG_COND_LE     = 8 | 0 | 2 | 0,
    TCG_COND_GT     = 8 | 0 | 2 | 1,
    /* unsigned */
    TCG_COND_LTU    = 0 | 4 | 0 | 0,
    TCG_COND_GEU    = 0 | 4 | 0 | 1,
    TCG_COND_LEU    = 8 | 4 | 0 | 0,
    TCG_COND_GTU    = 8 | 4 | 0 | 1,
} TCGCond;

/* Invert the sense of the comparison.  */
static inline TCGCond tcg_invert_cond(TCGCond c)
{
    return (TCGCond)(c ^ 1);
}

/* Swap the operands in a comparison.  */
static inline TCGCond tcg_swap_cond(TCGCond c)
{
    return c & 6 ? (TCGCond)(c ^ 9) : c;
}

/* Create an "unsigned" version of a "signed" comparison.  */
static inline TCGCond tcg_unsigned_cond(TCGCond c)
{
    return c & 2 ? (TCGCond)(c ^ 6) : c;
}

/* Must a comparison be considered unsigned?  */
static inline bool is_unsigned_cond(TCGCond c)
{
    return (c & 4) != 0;
}

/* Create a "high" version of a double-word comparison.
   This removes equality from an LTE or GTE comparison.  */
static inline TCGCond tcg_high_cond(TCGCond c)
{
    switch (c) {
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
        return (TCGCond)(c ^ 8);
    default:
        return c;
    }
}
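
/*
 * Worked examples of the condition helpers above, following the bit layout
 * documented before TCGCond:
 *
 *     tcg_invert_cond(TCG_COND_LT)   == TCG_COND_GE
 *     tcg_swap_cond(TCG_COND_LT)     == TCG_COND_GT     (a < b  <=>  b > a)
 *     tcg_unsigned_cond(TCG_COND_LT) == TCG_COND_LTU
 *     tcg_high_cond(TCG_COND_LE)     == TCG_COND_LT     (equality dropped)
 */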

#define TEMP_VAL_DEAD  0
#define TEMP_VAL_REG   1
#define TEMP_VAL_MEM   2
#define TEMP_VAL_CONST 3

/* XXX: optimize memory layout */
typedef struct TCGTemp {
    TCGType base_type;
    TCGType type;
    int val_type;              /* one of the TEMP_VAL_* values above */
    int reg;
    tcg_target_long val;
    int mem_reg;
    tcg_target_long mem_offset;
    unsigned int fixed_reg:1;
    unsigned int mem_coherent:1;
    unsigned int mem_allocated:1;
    unsigned int temp_local:1; /* If true, the temp is saved across
                                  basic blocks. Otherwise, it is not
                                  preserved across basic blocks. */
    unsigned int temp_allocated:1; /* never used for code gen */
    /* index of next free temp of same base type, -1 if end */
    int next_free_temp;
    const char *name;
} TCGTemp;

typedef struct TCGHelperInfo {
    tcg_target_ulong func;
    const char *name;
} TCGHelperInfo;

typedef struct TCGContext TCGContext;

struct TCGContext {
    uint8_t *pool_cur, *pool_end;
    TCGPool *pool_first, *pool_current, *pool_first_large;
    TCGLabel *labels;
    int nb_labels;
    int nb_globals;
    int nb_temps;
    /* index of free temps, -1 if none */
    int first_free_temp[TCG_TYPE_COUNT * 2];

    /* goto_tb support */
    uint8_t *code_buf;
    uintptr_t *tb_next;
    uint16_t *tb_next_offset;
    uint16_t *tb_jmp_offset; /* != NULL if USE_DIRECT_JUMP */

    /* liveness analysis */
    uint16_t *op_dead_args; /* for each operation, each bit tells if the
                               corresponding argument is dead */
    uint8_t *op_sync_args;  /* for each operation, each bit tells if the
                               corresponding output argument needs to be
                               synced back to memory. */

    /* tells which temporary a given register is in. It does not take
       fixed registers into account. */
    int reg_to_temp[TCG_TARGET_NB_REGS];
    TCGRegSet reserved_regs;
    tcg_target_long current_frame_offset;
    tcg_target_long frame_start;
    tcg_target_long frame_end;
    int frame_reg;

    uint8_t *code_ptr;
    TCGTemp temps[TCG_MAX_TEMPS]; /* globals first, temps after */

    TCGHelperInfo *helpers;
    int nb_helpers;
    int allocated_helpers;
    int helpers_sorted;

#ifdef CONFIG_PROFILER
    /* profiling info */
    int64_t tb_count1;
    int64_t tb_count;
    int64_t op_count; /* total insn count */
    int op_count_max; /* max insn per TB */
    int64_t temp_count;
    int temp_count_max;
    int64_t del_op_count;
    int64_t code_in_len;
    int64_t code_out_len;
    int64_t interm_time;
    int64_t code_time;
    int64_t la_time;
    int64_t opt_time;
    int64_t restore_count;
    int64_t restore_time;
#endif

#ifdef CONFIG_DEBUG_TCG
    int temps_in_use;
    int goto_tb_issue_mask;
#endif

    uint16_t gen_opc_buf[OPC_BUF_SIZE];
    TCGArg gen_opparam_buf[OPPARAM_BUF_SIZE];

    uint16_t *gen_opc_ptr;
    TCGArg *gen_opparam_ptr;
    target_ulong gen_opc_pc[OPC_BUF_SIZE];
    uint16_t gen_opc_icount[OPC_BUF_SIZE];
    uint8_t gen_opc_instr_start[OPC_BUF_SIZE];

    /* Code generation */
    int code_gen_max_blocks;
    uint8_t *code_gen_prologue;
    uint8_t *code_gen_buffer;
    size_t code_gen_buffer_size;
    /* threshold to flush the translated code buffer */
    size_t code_gen_buffer_max_size;
    uint8_t *code_gen_ptr;

    TBContext tb_ctx;

#if defined(CONFIG_QEMU_LDST_OPTIMIZATION) && defined(CONFIG_SOFTMMU)
    /* label info for the qemu_ld/st IRs; the labels are used to generate
       the TLB-miss handling code at the end of the TB */
    TCGLabelQemuLdst *qemu_ldst_labels;
    int nb_qemu_ldst_labels;
#endif
};

extern TCGContext tcg_ctx;

/* pool based memory allocation */

void *tcg_malloc_internal(TCGContext *s, int size);
void tcg_pool_reset(TCGContext *s);
void tcg_pool_delete(TCGContext *s);

static inline void *tcg_malloc(int size)
{
    TCGContext *s = &tcg_ctx;
    uint8_t *ptr, *ptr_end;
    size = (size + sizeof(long) - 1) & ~(sizeof(long) - 1);
    ptr = s->pool_cur;
    ptr_end = ptr + size;
    if (unlikely(ptr_end > s->pool_end)) {
        return tcg_malloc_internal(&tcg_ctx, size);
    } else {
        s->pool_cur = ptr_end;
        return ptr;
    }
}
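
/*
 * Usage sketch: tcg_malloc() is a bump allocator over the current TCGPool
 * chunk; the size is rounded up to sizeof(long) and the slow path
 * (tcg_malloc_internal) is only taken when the chunk is exhausted.  The pool
 * is recycled via tcg_pool_reset(), so allocations must not be assumed to
 * outlive the current translation.  A minimal, hypothetical example:
 *
 *     TCGRelocation *r = tcg_malloc(sizeof(TCGRelocation));
 */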

void tcg_context_init(TCGContext *s);
void tcg_prologue_init(TCGContext *s);
void tcg_func_start(TCGContext *s);

int tcg_gen_code(TCGContext *s, uint8_t *gen_code_buf);
int tcg_gen_code_search_pc(TCGContext *s, uint8_t *gen_code_buf, long offset);

void tcg_set_frame(TCGContext *s, int reg,
                   tcg_target_long start, tcg_target_long size);

TCGv_i32 tcg_global_reg_new_i32(int reg, const char *name);
TCGv_i32 tcg_global_mem_new_i32(int reg, tcg_target_long offset,
                                const char *name);
TCGv_i32 tcg_temp_new_internal_i32(int temp_local);
static inline TCGv_i32 tcg_temp_new_i32(void)
{
    return tcg_temp_new_internal_i32(0);
}
static inline TCGv_i32 tcg_temp_local_new_i32(void)
{
    return tcg_temp_new_internal_i32(1);
}
void tcg_temp_free_i32(TCGv_i32 arg);
char *tcg_get_arg_str_i32(TCGContext *s, char *buf, int buf_size, TCGv_i32 arg);

TCGv_i64 tcg_global_reg_new_i64(int reg, const char *name);
TCGv_i64 tcg_global_mem_new_i64(int reg, tcg_target_long offset,
                                const char *name);
TCGv_i64 tcg_temp_new_internal_i64(int temp_local);
static inline TCGv_i64 tcg_temp_new_i64(void)
{
    return tcg_temp_new_internal_i64(0);
}
static inline TCGv_i64 tcg_temp_local_new_i64(void)
{
    return tcg_temp_new_internal_i64(1);
}
void tcg_temp_free_i64(TCGv_i64 arg);
char *tcg_get_arg_str_i64(TCGContext *s, char *buf, int buf_size, TCGv_i64 arg);
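
/*
 * Usage sketch for temporaries in a target frontend (the tcg_gen_* ops come
 * from tcg-op.h; the names here are only illustrative):
 *
 *     TCGv_i32 tmp = tcg_temp_new_i32();
 *     tcg_gen_movi_i32(tmp, 0x1234);
 *     ... use tmp as an operand of further tcg_gen_* ops ...
 *     tcg_temp_free_i32(tmp);
 *
 * tcg_temp_local_new_i32() is the variant to use when the value must survive
 * a branch within the TB.
 */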

#if defined(CONFIG_DEBUG_TCG)
/* If you call tcg_clear_temp_count() at the start of a section of
 * code which is not supposed to leak any TCG temporaries, then
 * calling tcg_check_temp_count() at the end of the section will
 * return 1 if the section did in fact leak a temporary.
 */
void tcg_clear_temp_count(void);
int tcg_check_temp_count(void);
#else
#define tcg_clear_temp_count() do { } while (0)
#define tcg_check_temp_count() 0
#endif
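
/*
 * Usage sketch, assuming a translator loop built with CONFIG_DEBUG_TCG:
 *
 *     tcg_clear_temp_count();
 *     ... translate one guest instruction ...
 *     if (tcg_check_temp_count()) {
 *         fprintf(stderr, "TCG temporary leak detected\n");
 *     }
 */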

void tcg_dump_info(FILE *f, fprintf_function cpu_fprintf);

#define TCG_CT_ALIAS  0x80
#define TCG_CT_IALIAS 0x40
#define TCG_CT_REG    0x01
#define TCG_CT_CONST  0x02 /* any constant of register size */

typedef struct TCGArgConstraint {
    uint16_t ct;
    uint8_t alias_index;
    union {
        TCGRegSet regs;
    } u;
} TCGArgConstraint;

#define TCG_MAX_OP_ARGS 16

/* Bits for TCGOpDef->flags, 8 bits available.  */
enum {
    /* Instruction defines the end of a basic block.  */
    TCG_OPF_BB_END       = 0x01,
    /* Instruction clobbers call registers and potentially updates globals.  */
    TCG_OPF_CALL_CLOBBER = 0x02,
    /* Instruction has side effects: it cannot be removed if its outputs
       are not used, and might trigger exceptions.  */
    TCG_OPF_SIDE_EFFECTS = 0x04,
    /* Instruction operands are 64-bits (otherwise 32-bits).  */
    TCG_OPF_64BIT        = 0x08,
    /* Instruction is optional and not implemented by the host.  */
    TCG_OPF_NOT_PRESENT  = 0x10,
};

typedef struct TCGOpDef {
    const char *name;
    uint8_t nb_oargs, nb_iargs, nb_cargs, nb_args;
    uint8_t flags;
    TCGArgConstraint *args_ct;
    int *sorted_args;
#if defined(CONFIG_DEBUG_TCG)
    int used;
#endif
} TCGOpDef;

extern TCGOpDef tcg_op_defs[];
extern const size_t tcg_op_defs_max;
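
/*
 * Usage sketch: tcg_op_defs[] is indexed by TCGOpcode, so per-op properties
 * can be looked up directly.  A minimal, hypothetical check:
 *
 *     const TCGOpDef *def = &tcg_op_defs[INDEX_op_brcond_i32];
 *     if (def->flags & TCG_OPF_BB_END) {
 *         ... the op ends the current basic block ...
 *     }
 */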

typedef struct TCGTargetOpDef {
    TCGOpcode op;
    const char *args_ct_str[TCG_MAX_OP_ARGS];
} TCGTargetOpDef;

#define tcg_abort() \
do {\
    fprintf(stderr, "%s:%d: tcg fatal error\n", __FILE__, __LINE__);\
    abort();\
} while (0)

#ifdef CONFIG_DEBUG_TCG
# define tcg_debug_assert(X) do { assert(X); } while (0)
#elif QEMU_GNUC_PREREQ(4, 5)
# define tcg_debug_assert(X) \
    do { if (!(X)) { __builtin_unreachable(); } } while (0)
#else
# define tcg_debug_assert(X) do { (void)(X); } while (0)
#endif

void tcg_add_target_add_op_defs(const TCGTargetOpDef *tdefs);

#if TCG_TARGET_REG_BITS == 32
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I32(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I32(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i32((tcg_target_long)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i32((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i32((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i32())
#define tcg_temp_free_ptr(T) tcg_temp_free_i32(TCGV_PTR_TO_NAT(T))
#else
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I64(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I64(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i64((tcg_target_long)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i64((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i64((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i64())
#define tcg_temp_free_ptr(T) tcg_temp_free_i64(TCGV_PTR_TO_NAT(T))
#endif
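
/*
 * Usage sketch: the *_ptr helpers above let frontends handle
 * host-pointer-sized values without caring whether the host is 32-bit or
 * 64-bit.  A minimal, hypothetical example:
 *
 *     TCGv_ptr p = tcg_temp_new_ptr();
 *     ... generate ops that compute a host address into p ...
 *     tcg_temp_free_ptr(p);
 */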

void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
                   int sizemask, TCGArg ret, int nargs, TCGArg *args);

void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                        int c, int right, int arith);

TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr, TCGArg *args,
                     TCGOpDef *tcg_op_def);

/* only used for debugging purposes */
void tcg_register_helper(void *func, const char *name);
const char *tcg_helper_get_name(TCGContext *s, void *func);
void tcg_dump_ops(TCGContext *s);

void dump_ops(const uint16_t *opc_buf, const TCGArg *opparam_buf);
TCGv_i32 tcg_const_i32(int32_t val);
TCGv_i64 tcg_const_i64(int64_t val);
TCGv_i32 tcg_const_local_i32(int32_t val);
TCGv_i64 tcg_const_local_i64(int64_t val);

/**
 * tcg_qemu_tb_exec:
 * @env: CPUArchState * for the CPU
 * @tb_ptr: address of generated code for the TB to execute
 *
 * Start executing code from a given translation block.
 * Where translation blocks have been linked, execution
 * may proceed from the given TB into successive ones.
 * Control eventually returns only when some action is needed
 * from the top-level loop: either control must pass to a TB
 * which has not yet been directly linked, or an asynchronous
 * event such as an interrupt needs handling.
 *
 * The return value is a pointer to the next TB to execute
 * (if known; otherwise zero). This pointer is assumed to be
 * 4-aligned, and the bottom two bits are used to return further
 * information:
 *  0, 1: the link between this TB and the next is via the specified
 *        TB index (0 or 1). That is, we left the TB via (the equivalent
 *        of) "goto_tb <index>". The main loop uses this to determine
 *        how to link the TB just executed to the next.
 *  2:    we are using instruction counting code generation, and we
 *        did not start executing this TB because the instruction counter
 *        would hit zero midway through it. In this case the next-TB pointer
 *        returned is the TB we were about to execute, and the caller must
 *        arrange to execute the remaining count of instructions.
 *  3:    we stopped because the CPU's exit_request flag was set
 *        (usually meaning that there is an interrupt that needs to be
 *        handled). The next-TB pointer returned is the TB we were
 *        about to execute when we noticed the pending exit request.
 *
 * If the bottom two bits indicate an exit-via-index then the CPU
 * state is correctly synchronised and ready for execution of the next
 * TB (and in particular the guest PC is the address to execute next).
 * Otherwise, we gave up on execution of this TB before it started, and
 * the caller must fix up the CPU state by calling cpu_pc_from_tb()
 * with the next-TB pointer we return.
 *
 * Note that TCG targets may use a different definition of tcg_qemu_tb_exec
 * to this default (which just calls the prologue code emitted by
 * tcg_target_qemu_prologue()).
 */
#define TB_EXIT_MASK 3
#define TB_EXIT_IDX0 0
#define TB_EXIT_IDX1 1
#define TB_EXIT_ICOUNT_EXPIRED 2
#define TB_EXIT_REQUESTED 3

#if !defined(tcg_qemu_tb_exec)
# define tcg_qemu_tb_exec(env, tb_ptr) \
    ((tcg_target_ulong (*)(void *, void *))tcg_ctx.code_gen_prologue)(env, \
                                                                      tb_ptr)
#endif
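
/*
 * Sketch of how a caller might decode the return value, following the
 * tcg_qemu_tb_exec() documentation above (variable names are illustrative):
 *
 *     tcg_target_ulong ret = tcg_qemu_tb_exec(env, tb_ptr);
 *     void *next_tb = (void *)(ret & ~(tcg_target_ulong)TB_EXIT_MASK);
 *
 *     switch (ret & TB_EXIT_MASK) {
 *     case TB_EXIT_IDX0:
 *     case TB_EXIT_IDX1:
 *         ... link the TB we just left to next_tb via the given slot ...
 *         break;
 *     case TB_EXIT_ICOUNT_EXPIRED:
 *     case TB_EXIT_REQUESTED:
 *         ... next_tb was not executed; fix up the CPU state first ...
 *         break;
 *     }
 */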

void tcg_register_jit(void *buf, size_t buf_size);

#if defined(CONFIG_QEMU_LDST_OPTIMIZATION) && defined(CONFIG_SOFTMMU)
/* Generate TB finalization at the end of the block */
void tcg_out_tb_finalize(TCGContext *s);
#endif
