1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include "qemu/osdep.h"
21
22
23
/*
 * tci_assert() is a real assertion only in debug-TCG builds; in release
 * builds the condition is still evaluated (preserving any side effects
 * and avoiding unused-variable warnings) but its result is discarded.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)(cond))
#endif
29
#include <string.h>

#include "qemu-common.h"
#include "tcg/tcg.h"
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
35
/*
 * The call opcode marshals exactly MAX_OPC_PARAM_IARGS input argument
 * words into the helper; the helper_function cast below is sized to
 * match and must be updated in lockstep with that constant.
 */
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
/* On 32-bit hosts each 64-bit helper argument occupies two words. */
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

/*
 * Bytecode position of the most recent helper call, stored per thread.
 * Set just before each call dispatch below; presumably consumed by
 * GETPC()-style unwinding in helpers — confirm against the callers.
 */
__thread uintptr_t tci_tb_ptr;
53
/* Return the current value of interpreter register @index (bounds-checked
   in debug builds). */
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}
59
/*
 * Store @value into interpreter register @index.  The env register and
 * the call-stack register are fixed for the lifetime of a TB execution,
 * so writing them indicates corrupt bytecode (asserted in debug builds).
 */
static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}
68
/*
 * Store a 64-bit @value into the register pair (@high_index, @low_index):
 * low 32 bits first, then the high 32 bits.  Used where a 64-bit result
 * is split across two 32-bit host registers.
 */
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
75
76
/*
 * Combine two 32-bit halves into a 64-bit value: @high supplies
 * bits 63..32 and @low supplies bits 31..0.
 */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t hi64 = high;

    return (hi64 << 32) | low;
}
81
82
/* Fetch one byte from the bytecode stream and advance the read pointer. */
static uint8_t tci_read_b(const uint8_t **tb_ptr)
{
    const uint8_t *p = *tb_ptr;
    uint8_t value = *p;

    *tb_ptr = p + 1;
    return value;
}
87
88
/* Fetch a register number (one byte) from the bytecode stream,
   range-checked in debug builds. */
static TCGReg tci_read_r(const uint8_t **tb_ptr)
{
    uint8_t regno = tci_read_b(tb_ptr);
    tci_assert(regno < TCG_TARGET_NB_REGS);
    return regno;
}
95
96
97static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
98{
99 tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
100 *tb_ptr += sizeof(value);
101 return value;
102}
103
104
/*
 * Fetch an unsigned 32-bit immediate from the bytecode stream and
 * advance the read pointer.  The stream only guarantees byte alignment,
 * so use memcpy rather than a direct pointer dereference: the latter is
 * undefined behavior (unaligned access, strict aliasing) on some hosts.
 */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
111
112
/*
 * Fetch a signed 32-bit immediate from the bytecode stream and advance
 * the read pointer.  The stream only guarantees byte alignment, so use
 * memcpy rather than a direct pointer dereference: the latter is
 * undefined behavior (unaligned access, strict aliasing) on some hosts.
 */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
119
/* Fetch a branch-target / label operand; encoded as a full
   target-width immediate. */
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    return tci_read_i(tb_ptr);
}
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
/*
 * Debug check that a decoder consumed exactly the bytes declared by the
 * instruction: @start points just past the two-byte [opcode, length]
 * header, so the header begins at start - 2 and its second byte is the
 * total instruction length.
 */
static void check_size(const uint8_t *start, const uint8_t **tb_ptr)
{
    const uint8_t *old_code_ptr = start - 2;
    uint8_t op_size = old_code_ptr[1];
    tci_assert(*tb_ptr == old_code_ptr + op_size);
}
146
/* Decode operands for format "l": one label/pointer operand. */
static void tci_args_l(const uint8_t **tb_ptr, void **l0)
{
    const uint8_t *start = *tb_ptr;

    *l0 = (void *)tci_read_label(tb_ptr);

    check_size(start, tb_ptr);
}
155
/* Decode operands for format "rr": two registers. */
static void tci_args_rr(const uint8_t **tb_ptr,
                        TCGReg *r0, TCGReg *r1)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);

    check_size(start, tb_ptr);
}
166
/* Decode operands for format "ri": register + 32-bit immediate
   (zero-extended to target width). */
static void tci_args_ri(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i32(tb_ptr);

    check_size(start, tb_ptr);
}
177
178#if TCG_TARGET_REG_BITS == 64
/* Decode operands for format "rI": register + full target-width
   immediate (64-bit hosts only). */
static void tci_args_rI(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i(tb_ptr);

    check_size(start, tb_ptr);
}
189#endif
190
/* Decode operands for format "rrm": two registers + memory-op index. */
static void tci_args_rrm(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, TCGMemOpIdx *m2)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *m2 = tci_read_i32(tb_ptr);

    check_size(start, tb_ptr);
}
202
/* Decode operands for format "rrr": three registers. */
static void tci_args_rrr(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);

    check_size(start, tb_ptr);
}
214
/* Decode operands for format "rrs": two registers + signed 32-bit offset. */
static void tci_args_rrs(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *i2 = tci_read_s32(tb_ptr);

    check_size(start, tb_ptr);
}
226
/* Decode operands for format "rrcl": two registers + condition + label. */
static void tci_args_rrcl(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGCond *c2, void **l3)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *c2 = tci_read_b(tb_ptr);
    *l3 = (void *)tci_read_label(tb_ptr);

    check_size(start, tb_ptr);
}
239
/* Decode operands for format "rrrc": three registers + condition. */
static void tci_args_rrrc(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *c3 = tci_read_b(tb_ptr);

    check_size(start, tb_ptr);
}
252
/* Decode operands for format "rrrm": three registers + memory-op index. */
static void tci_args_rrrm(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGMemOpIdx *m3)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *m3 = tci_read_i32(tb_ptr);

    check_size(start, tb_ptr);
}
265
/* Decode operands for format "rrrbb": three registers + two byte
   immediates (deposit position and length). */
static void tci_args_rrrbb(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *i3 = tci_read_b(tb_ptr);
    *i4 = tci_read_b(tb_ptr);

    check_size(start, tb_ptr);
}
279
/* Decode operands for format "rrrrm": four registers + memory-op index. */
static void tci_args_rrrrm(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, TCGReg *r3, TCGMemOpIdx *m4)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *m4 = tci_read_i32(tb_ptr);

    check_size(start, tb_ptr);
}
293
294#if TCG_TARGET_REG_BITS == 32
/* Decode operands for format "rrrr": four registers (32-bit hosts only). */
static void tci_args_rrrr(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);

    check_size(start, tb_ptr);
}
307
/* Decode operands for format "rrrrcl": four registers + condition + label
   (32-bit hosts only). */
static void tci_args_rrrrcl(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGCond *c4, void **l5)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *c4 = tci_read_b(tb_ptr);
    *l5 = (void *)tci_read_label(tb_ptr);

    check_size(start, tb_ptr);
}
322
/* Decode operands for format "rrrrrc": five registers + condition
   (32-bit hosts only). */
static void tci_args_rrrrrc(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *r4 = tci_read_r(tb_ptr);
    *c5 = tci_read_b(tb_ptr);

    check_size(start, tb_ptr);
}
337
/* Decode operands for format "rrrrrr": six registers (32-bit hosts only). */
static void tci_args_rrrrrr(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    const uint8_t *start = *tb_ptr;

    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *r4 = tci_read_r(tb_ptr);
    *r5 = tci_read_r(tb_ptr);

    check_size(start, tb_ptr);
}
352#endif
353
354static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
355{
356 bool result = false;
357 int32_t i0 = u0;
358 int32_t i1 = u1;
359 switch (condition) {
360 case TCG_COND_EQ:
361 result = (u0 == u1);
362 break;
363 case TCG_COND_NE:
364 result = (u0 != u1);
365 break;
366 case TCG_COND_LT:
367 result = (i0 < i1);
368 break;
369 case TCG_COND_GE:
370 result = (i0 >= i1);
371 break;
372 case TCG_COND_LE:
373 result = (i0 <= i1);
374 break;
375 case TCG_COND_GT:
376 result = (i0 > i1);
377 break;
378 case TCG_COND_LTU:
379 result = (u0 < u1);
380 break;
381 case TCG_COND_GEU:
382 result = (u0 >= u1);
383 break;
384 case TCG_COND_LEU:
385 result = (u0 <= u1);
386 break;
387 case TCG_COND_GTU:
388 result = (u0 > u1);
389 break;
390 default:
391 g_assert_not_reached();
392 }
393 return result;
394}
395
396static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
397{
398 bool result = false;
399 int64_t i0 = u0;
400 int64_t i1 = u1;
401 switch (condition) {
402 case TCG_COND_EQ:
403 result = (u0 == u1);
404 break;
405 case TCG_COND_NE:
406 result = (u0 != u1);
407 break;
408 case TCG_COND_LT:
409 result = (i0 < i1);
410 break;
411 case TCG_COND_GE:
412 result = (i0 >= i1);
413 break;
414 case TCG_COND_LE:
415 result = (i0 <= i1);
416 break;
417 case TCG_COND_GT:
418 result = (i0 > i1);
419 break;
420 case TCG_COND_LTU:
421 result = (u0 < u1);
422 break;
423 case TCG_COND_GEU:
424 result = (u0 >= u1);
425 break;
426 case TCG_COND_LEU:
427 result = (u0 <= u1);
428 break;
429 case TCG_COND_GTU:
430 result = (u0 > u1);
431 break;
432 default:
433 g_assert_not_reached();
434 }
435 return result;
436}
437
/*
 * Shorthand for guest memory accesses.  These macros are only valid
 * inside the qemu_ld/st cases of the interpreter loop below, where
 * env, taddr, oi and tb_ptr are in scope; the current bytecode pointer
 * is passed as the retaddr argument of the cpu_*_mmuidx_ra accessors.
 */
#define qemu_ld_ub \
    cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leuw \
    cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leul \
    cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leq \
    cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beuw \
    cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beul \
    cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beq \
    cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_b(X) \
    cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lew(X) \
    cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lel(X) \
    cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_leq(X) \
    cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bew(X) \
    cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bel(X) \
    cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_beq(X) \
    cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)

/*
 * CASE_32_64(x) expands to the _i32 case label and, on 64-bit hosts,
 * also the _i64 case label; CASE_64(x) expands to the _i64 case label
 * on 64-bit hosts only and to nothing otherwise.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
478
479
480
481
482
483
484
/*
 * Interpreter entry point: execute the TCI bytecode at @v_tb_ptr on
 * behalf of CPU state @env, returning the value handed to the exit_tb
 * opcode (the pointer to the next TB, possibly with status bits mixed in).
 *
 * Each bytecode instruction is a one-byte TCGOpcode, a one-byte total
 * instruction length, then operand bytes; the tci_args_* helpers decode
 * the operands and (in debug builds) verify the declared length.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);

    /* env and the call-stack pointer live in fixed registers that are
       never written by generated code (asserted in tci_write_reg). */
    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
        TCGReg r0, r1, r2, r3;
        tcg_target_ulong t1;
        TCGCond condition;
        target_ulong taddr;
        uint8_t pos, len;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        TCGReg r4, r5;
        uint64_t T1, T2;
#endif
        TCGMemOpIdx oi;
        int32_t ofs;
        void *ptr;

        /* Skip the one-byte opcode and the one-byte instruction length. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            tci_args_l(&tb_ptr, &ptr);
            /* Record the bytecode position before entering the helper. */
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
            /*
             * Pass all MAX_OPC_PARAM_IARGS argument words (each 64-bit
             * argument takes two words on 32-bit hosts); the helper
             * ignores any it does not need.  A 64-bit result comes back
             * in the R0/R1 register pair.
             */
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5),
                                           tci_read_reg(regs, TCG_REG_R6),
                                           tci_read_reg(regs, TCG_REG_R7),
                                           tci_read_reg(regs, TCG_REG_R8),
                                           tci_read_reg(regs, TCG_REG_R9),
                                           tci_read_reg(regs, TCG_REG_R10),
                                           tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)ptr)(tci_read_reg(regs, TCG_REG_R0),
                                           tci_read_reg(regs, TCG_REG_R1),
                                           tci_read_reg(regs, TCG_REG_R2),
                                           tci_read_reg(regs, TCG_REG_R3),
                                           tci_read_reg(regs, TCG_REG_R4),
                                           tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            tci_args_l(&tb_ptr, &ptr);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            /* Compare two 64-bit values held in register pairs. */
            tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi_i32:
            tci_args_ri(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            /* Mask the shift count to avoid UB for counts >= 32. */
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare32(regs[r0], regs[r1], condition)) {
                tb_ptr = ptr;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            /* 64-bit add on register pairs: (r1:r0) = (r3:r2) + (r5:r4). */
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 + T2);
            break;
        case INDEX_op_sub2_i32:
            tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 - T2);
            break;
        case INDEX_op_brcond2_i32:
            tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &condition, &ptr);
            T1 = tci_uint64(regs[r1], regs[r0]);
            T2 = tci_uint64(regs[r3], regs[r2]);
            if (tci_compare64(T1, T2, condition)) {
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            tci_args_rrrr(&tb_ptr, &r0, &r1, &r2, &r3);
            tci_write_reg64(regs, r1, r0, (uint64_t)regs[r2] * regs[r3]);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
        CASE_32_64(ext16s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_tci_movi_i64:
            tci_args_rI(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            /* Mask the shift count to avoid UB for counts >= 64. */
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare64(regs[r0], regs[r1], condition)) {
                tb_ptr = ptr;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            tci_args_l(&tb_ptr, &ptr);
            return (uintptr_t)ptr;

        case INDEX_op_goto_tb:
            /* Indirect jump: the label holds the address of the
               (patchable) pointer to the next TB's code. */
            tci_args_l(&tb_ptr, &ptr);
            tb_ptr = *(void **)ptr;
            break;

        case INDEX_op_qemu_ld_i32:
            /* The guest address needs one host register, or two when
               the guest address width exceeds the host register width. */
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            regs[r0] = tmp32;
            break;

        case INDEX_op_qemu_ld_i64:
            /* The 64-bit value and/or the guest address may each need a
               register pair on 32-bit hosts. */
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
            } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                taddr = regs[r2];
            } else {
                tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
                taddr = tci_uint64(regs[r3], regs[r2]);
            }
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg64(regs, r1, r0, tmp64);
            } else {
                regs[r0] = tmp64;
            }
            break;

        case INDEX_op_qemu_st_i32:
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            tmp32 = regs[r0];
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp32);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp32);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp32);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp32);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp32);
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_qemu_st_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
                taddr = regs[r1];
                tmp64 = regs[r0];
            } else {
                if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                    tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
                    taddr = regs[r2];
                } else {
                    tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
                    taddr = tci_uint64(regs[r3], regs[r2]);
                }
                tmp64 = tci_uint64(regs[r1], regs[r0]);
            }
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_mb:
            /* A full host barrier is correct for any requested
               barrier type. */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
    }
}
1064
1065
1066
1067
1068
/*
 * Return a printable name for interpreter register @r.  The build-time
 * assertions pin the register numbering this table depends on: env is
 * R14 and the call stack pointer is R15.
 */
static const char *str_r(TCGReg r)
{
    static const char regs[TCG_TARGET_NB_REGS][4] = {
        "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
        "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
    };

    QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
    QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);

    assert((unsigned)r < TCG_TARGET_NB_REGS);
    return regs[r];
}
1082
/*
 * Return a printable name for TCG condition @c.  Entries not listed in
 * the table are zero-initialized; the second assert rejects any such
 * invalid condition value.
 */
static const char *str_c(TCGCond c)
{
    static const char cond[16][8] = {
        [TCG_COND_NEVER] = "never",
        [TCG_COND_ALWAYS] = "always",
        [TCG_COND_EQ] = "eq",
        [TCG_COND_NE] = "ne",
        [TCG_COND_LT] = "lt",
        [TCG_COND_GE] = "ge",
        [TCG_COND_LE] = "le",
        [TCG_COND_GT] = "gt",
        [TCG_COND_LTU] = "ltu",
        [TCG_COND_GEU] = "geu",
        [TCG_COND_LEU] = "leu",
        [TCG_COND_GTU] = "gtu",
    };

    assert((unsigned)c < ARRAY_SIZE(cond));
    assert(cond[c][0] != 0);
    return cond[c];
}
1104
1105
/*
 * Disassemble one TCI instruction at @addr for the disassembler
 * framework: read the two-byte [opcode, length] header, then the
 * remaining operand bytes, decode them with the same tci_args_*
 * helpers the interpreter uses, and print "name operands".
 * Returns the number of bytes consumed, or -1 on a read error.
 */
int print_insn_tci(bfd_vma addr, disassemble_info *info)
{
    uint8_t buf[256];
    int length, status;
    const TCGOpDef *def;
    const char *op_name;
    TCGOpcode op;
    TCGReg r0, r1, r2, r3;
#if TCG_TARGET_REG_BITS == 32
    TCGReg r4, r5;
#endif
    tcg_target_ulong i1;
    int32_t s2;
    TCGCond c;
    TCGMemOpIdx oi;
    uint8_t pos, len;
    void *ptr;
    const uint8_t *tb_ptr;

    /* Fetch the two-byte instruction header first. */
    status = info->read_memory_func(addr, buf, 2, info);
    if (status != 0) {
        info->memory_error_func(status, addr, info);
        return -1;
    }
    op = buf[0];
    length = buf[1];

    if (length < 2) {
        /* Malformed stream: report it and claim a minimal length so the
           caller can make progress. */
        info->fprintf_func(info->stream, "invalid length %d", length);
        return 1;
    }

    /* Fetch the operand bytes declared by the length field. */
    status = info->read_memory_func(addr + 2, buf + 2, length - 2, info);
    if (status != 0) {
        info->memory_error_func(status, addr + 2, info);
        return -1;
    }

    def = &tcg_op_defs[op];
    op_name = def->name;
    tb_ptr = buf + 2;

    switch (op) {
    case INDEX_op_br:
    case INDEX_op_call:
    case INDEX_op_exit_tb:
    case INDEX_op_goto_tb:
        tci_args_l(&tb_ptr, &ptr);
        info->fprintf_func(info->stream, "%-12s  %p", op_name, ptr);
        break;

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        tci_args_rrcl(&tb_ptr, &r0, &r1, &c, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %p",
                           op_name, str_r(r0), str_r(r1), str_c(c), ptr);
        break;

    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
        break;

    case INDEX_op_tci_movi_i32:
        tci_args_ri(&tb_ptr, &r0, &i1);
        info->fprintf_func(info->stream, "%-12s  %s, 0x%" TCG_PRIlx,
                           op_name, str_r(r0), i1);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_tci_movi_i64:
        tci_args_rI(&tb_ptr, &r0, &i1);
        info->fprintf_func(info->stream, "%-12s  %s, 0x%" TCG_PRIlx,
                           op_name, str_r(r0), i1);
        break;
#endif

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i32:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i32:
    case INDEX_op_st_i64:
        tci_args_rrs(&tb_ptr, &r0, &r1, &s2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %d",
                           op_name, str_r(r0), str_r(r1), s2);
        break;

    case INDEX_op_mov_i32:
    case INDEX_op_mov_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
        tci_args_rr(&tb_ptr, &r0, &r1);
        info->fprintf_func(info->stream, "%-12s  %s, %s",
                           op_name, str_r(r0), str_r(r1));
        break;

    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
        tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2));
        break;

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tci_args_rrrbb(&tb_ptr, &r0, &r1, &r2, &pos, &len);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_c(c));
        break;

    case INDEX_op_brcond2_i32:
        tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &c, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %p",
                           op_name, str_r(r0), str_r(r1),
                           str_r(r2), str_r(r3), str_c(c), ptr);
        break;

    case INDEX_op_mulu2_i32:
        tci_args_rrrr(&tb_ptr, &r0, &r1, &r2, &r3);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1),
                           str_r(r2), str_r(r3));
        break;

    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tci_args_rrrrrr(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &r5);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_r(r5));
        break;
#endif

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        /* len counts operand words: one or two for the 64-bit value... */
        len = DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
        goto do_qemu_ldst;
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        len = 1;
    do_qemu_ldst:
        /* ...plus one or two words for the guest address. */
        len += DIV_ROUND_UP(TARGET_LONG_BITS, TCG_TARGET_REG_BITS);
        switch (len) {
        case 2:
            tci_args_rrm(&tb_ptr, &r0, &r1, &oi);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %x",
                               op_name, str_r(r0), str_r(r1), oi);
            break;
        case 3:
            tci_args_rrrm(&tb_ptr, &r0, &r1, &r2, &oi);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %x",
                               op_name, str_r(r0), str_r(r1), str_r(r2), oi);
            break;
        case 4:
            tci_args_rrrrm(&tb_ptr, &r0, &r1, &r2, &r3, &oi);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %x",
                               op_name, str_r(r0), str_r(r1),
                               str_r(r2), str_r(r3), oi);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    default:
        info->fprintf_func(info->stream, "illegal opcode %d", op);
        break;
    }

    return length;
}
1347