1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25#include "qemu/osdep.h"
26#include "qemu-common.h"
27#include "cpu.h"
28#include "exec/exec-all.h"
29#include "tcg.h"
30#include "tcg-op.h"
31#include "trace-tcg.h"
32#include "trace/mem.h"
33
34
35
36
37#if TCG_TARGET_REG_BITS == 64
38extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
39extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
40#define TCGV_LOW TCGV_LOW_link_error
41#define TCGV_HIGH TCGV_HIGH_link_error
42#endif
43
44
45
46
47
/*
 * Append one op to the context's op stream.
 *
 * Ops live in ctx->gen_op_buf as a doubly linked list threaded through
 * array indices (.prev/.next hold slot numbers, not pointers).  Slot 0
 * acts as the list sentinel; its .prev is kept pointing at the most
 * recently emitted op, i.e. the current list tail.
 *
 * @opc:  opcode to emit
 * @args: index of the op's first argument in ctx->gen_opparam_buf
 */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;   /* slot the new op occupies */
    int ni = oi + 1;                 /* slot after it (list "next") */
    int pi = oi - 1;                 /* slot before it (list "prev") */

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_op_buf[0].prev = oi;    /* sentinel remembers the new tail */
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
65
66void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
67{
68 int pi = ctx->gen_next_parm_idx;
69
70 tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
71 ctx->gen_next_parm_idx = pi + 1;
72 ctx->gen_opparam_buf[pi] = a1;
73
74 tcg_emit_op(ctx, opc, pi);
75}
76
77void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
78{
79 int pi = ctx->gen_next_parm_idx;
80
81 tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
82 ctx->gen_next_parm_idx = pi + 2;
83 ctx->gen_opparam_buf[pi + 0] = a1;
84 ctx->gen_opparam_buf[pi + 1] = a2;
85
86 tcg_emit_op(ctx, opc, pi);
87}
88
89void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
90 TCGArg a2, TCGArg a3)
91{
92 int pi = ctx->gen_next_parm_idx;
93
94 tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
95 ctx->gen_next_parm_idx = pi + 3;
96 ctx->gen_opparam_buf[pi + 0] = a1;
97 ctx->gen_opparam_buf[pi + 1] = a2;
98 ctx->gen_opparam_buf[pi + 2] = a3;
99
100 tcg_emit_op(ctx, opc, pi);
101}
102
103void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
104 TCGArg a2, TCGArg a3, TCGArg a4)
105{
106 int pi = ctx->gen_next_parm_idx;
107
108 tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
109 ctx->gen_next_parm_idx = pi + 4;
110 ctx->gen_opparam_buf[pi + 0] = a1;
111 ctx->gen_opparam_buf[pi + 1] = a2;
112 ctx->gen_opparam_buf[pi + 2] = a3;
113 ctx->gen_opparam_buf[pi + 3] = a4;
114
115 tcg_emit_op(ctx, opc, pi);
116}
117
118void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
119 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
120{
121 int pi = ctx->gen_next_parm_idx;
122
123 tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
124 ctx->gen_next_parm_idx = pi + 5;
125 ctx->gen_opparam_buf[pi + 0] = a1;
126 ctx->gen_opparam_buf[pi + 1] = a2;
127 ctx->gen_opparam_buf[pi + 2] = a3;
128 ctx->gen_opparam_buf[pi + 3] = a4;
129 ctx->gen_opparam_buf[pi + 4] = a5;
130
131 tcg_emit_op(ctx, opc, pi);
132}
133
134void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
135 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
136{
137 int pi = ctx->gen_next_parm_idx;
138
139 tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
140 ctx->gen_next_parm_idx = pi + 6;
141 ctx->gen_opparam_buf[pi + 0] = a1;
142 ctx->gen_opparam_buf[pi + 1] = a2;
143 ctx->gen_opparam_buf[pi + 2] = a3;
144 ctx->gen_opparam_buf[pi + 3] = a4;
145 ctx->gen_opparam_buf[pi + 4] = a5;
146 ctx->gen_opparam_buf[pi + 5] = a6;
147
148 tcg_emit_op(ctx, opc, pi);
149}
150
151void tcg_gen_mb(TCGBar mb_type)
152{
153 if (parallel_cpus) {
154 tcg_gen_op1(&tcg_ctx, INDEX_op_mb, mb_type);
155 }
156}
157
158
159
160void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
161{
162
163 if (arg2 == 0) {
164 tcg_gen_mov_i32(ret, arg1);
165 } else {
166 TCGv_i32 t0 = tcg_const_i32(arg2);
167 tcg_gen_add_i32(ret, arg1, t0);
168 tcg_temp_free_i32(t0);
169 }
170}
171
172void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
173{
174 if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
175
176 tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
177 } else {
178 TCGv_i32 t0 = tcg_const_i32(arg1);
179 tcg_gen_sub_i32(ret, t0, arg2);
180 tcg_temp_free_i32(t0);
181 }
182}
183
184void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
185{
186
187 if (arg2 == 0) {
188 tcg_gen_mov_i32(ret, arg1);
189 } else {
190 TCGv_i32 t0 = tcg_const_i32(arg2);
191 tcg_gen_sub_i32(ret, arg1, t0);
192 tcg_temp_free_i32(t0);
193 }
194}
195
196void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
197{
198 TCGv_i32 t0;
199
200 switch (arg2) {
201 case 0:
202 tcg_gen_movi_i32(ret, 0);
203 return;
204 case 0xffffffffu:
205 tcg_gen_mov_i32(ret, arg1);
206 return;
207 case 0xffu:
208
209 if (TCG_TARGET_HAS_ext8u_i32) {
210 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
211 return;
212 }
213 break;
214 case 0xffffu:
215 if (TCG_TARGET_HAS_ext16u_i32) {
216 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
217 return;
218 }
219 break;
220 }
221 t0 = tcg_const_i32(arg2);
222 tcg_gen_and_i32(ret, arg1, t0);
223 tcg_temp_free_i32(t0);
224}
225
226void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
227{
228
229 if (arg2 == -1) {
230 tcg_gen_movi_i32(ret, -1);
231 } else if (arg2 == 0) {
232 tcg_gen_mov_i32(ret, arg1);
233 } else {
234 TCGv_i32 t0 = tcg_const_i32(arg2);
235 tcg_gen_or_i32(ret, arg1, t0);
236 tcg_temp_free_i32(t0);
237 }
238}
239
240void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
241{
242
243 if (arg2 == 0) {
244 tcg_gen_mov_i32(ret, arg1);
245 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
246
247 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
248 } else {
249 TCGv_i32 t0 = tcg_const_i32(arg2);
250 tcg_gen_xor_i32(ret, arg1, t0);
251 tcg_temp_free_i32(t0);
252 }
253}
254
255void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
256{
257 tcg_debug_assert(arg2 < 32);
258 if (arg2 == 0) {
259 tcg_gen_mov_i32(ret, arg1);
260 } else {
261 TCGv_i32 t0 = tcg_const_i32(arg2);
262 tcg_gen_shl_i32(ret, arg1, t0);
263 tcg_temp_free_i32(t0);
264 }
265}
266
267void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
268{
269 tcg_debug_assert(arg2 < 32);
270 if (arg2 == 0) {
271 tcg_gen_mov_i32(ret, arg1);
272 } else {
273 TCGv_i32 t0 = tcg_const_i32(arg2);
274 tcg_gen_shr_i32(ret, arg1, t0);
275 tcg_temp_free_i32(t0);
276 }
277}
278
279void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
280{
281 tcg_debug_assert(arg2 < 32);
282 if (arg2 == 0) {
283 tcg_gen_mov_i32(ret, arg1);
284 } else {
285 TCGv_i32 t0 = tcg_const_i32(arg2);
286 tcg_gen_sar_i32(ret, arg1, t0);
287 tcg_temp_free_i32(t0);
288 }
289}
290
291void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
292{
293 if (cond == TCG_COND_ALWAYS) {
294 tcg_gen_br(l);
295 } else if (cond != TCG_COND_NEVER) {
296 tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
297 }
298}
299
300void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
301{
302 if (cond == TCG_COND_ALWAYS) {
303 tcg_gen_br(l);
304 } else if (cond != TCG_COND_NEVER) {
305 TCGv_i32 t0 = tcg_const_i32(arg2);
306 tcg_gen_brcond_i32(cond, arg1, t0, l);
307 tcg_temp_free_i32(t0);
308 }
309}
310
311void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
312 TCGv_i32 arg1, TCGv_i32 arg2)
313{
314 if (cond == TCG_COND_ALWAYS) {
315 tcg_gen_movi_i32(ret, 1);
316 } else if (cond == TCG_COND_NEVER) {
317 tcg_gen_movi_i32(ret, 0);
318 } else {
319 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
320 }
321}
322
323void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
324 TCGv_i32 arg1, int32_t arg2)
325{
326 TCGv_i32 t0 = tcg_const_i32(arg2);
327 tcg_gen_setcond_i32(cond, ret, arg1, t0);
328 tcg_temp_free_i32(t0);
329}
330
331void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
332{
333 TCGv_i32 t0 = tcg_const_i32(arg2);
334 tcg_gen_mul_i32(ret, arg1, t0);
335 tcg_temp_free_i32(t0);
336}
337
/*
 * Signed division, ret = arg1 / arg2.
 * Prefer a native div op; otherwise use the double-word div2 op whose
 * dividend is the pair (high, low) = (sign extension of arg1, arg1);
 * otherwise fall back to an out-of-line helper.
 */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Sign-extend the dividend into the high word.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* div2 outputs: quotient -> ret, remainder -> t0 (discarded).  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
351
/*
 * Signed remainder, ret = arg1 % arg2.
 * Prefer a native rem op; else derive it from division
 * (rem = arg1 - (arg1 / arg2) * arg2); else use div2 or a helper.
 */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Sign-extend the dividend into the high word.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* div2 outputs: quotient -> t0 (discarded), remainder -> ret.  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
371
/*
 * Unsigned division, ret = arg1 / arg2.
 * As tcg_gen_div_i32 but the double-word dividend's high half is zero.
 */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Zero-extend the dividend into the high word.  */
        tcg_gen_movi_i32(t0, 0);
        /* divu2 outputs: quotient -> ret, remainder -> t0 (discarded).  */
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
385
/*
 * Unsigned remainder, ret = arg1 % arg2.
 * Prefer a native remu op; else derive it from unsigned division;
 * else use divu2 or the out-of-line helper.
 */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* rem = arg1 - (arg1 / arg2) * arg2  */
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Zero-extend the dividend into the high word.  */
        tcg_gen_movi_i32(t0, 0);
        /* divu2 outputs: quotient -> t0 (discarded), remainder -> ret.  */
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
405
406void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
407{
408 if (TCG_TARGET_HAS_andc_i32) {
409 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
410 } else {
411 TCGv_i32 t0 = tcg_temp_new_i32();
412 tcg_gen_not_i32(t0, arg2);
413 tcg_gen_and_i32(ret, arg1, t0);
414 tcg_temp_free_i32(t0);
415 }
416}
417
418void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
419{
420 if (TCG_TARGET_HAS_eqv_i32) {
421 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
422 } else {
423 tcg_gen_xor_i32(ret, arg1, arg2);
424 tcg_gen_not_i32(ret, ret);
425 }
426}
427
428void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
429{
430 if (TCG_TARGET_HAS_nand_i32) {
431 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
432 } else {
433 tcg_gen_and_i32(ret, arg1, arg2);
434 tcg_gen_not_i32(ret, ret);
435 }
436}
437
438void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
439{
440 if (TCG_TARGET_HAS_nor_i32) {
441 tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
442 } else {
443 tcg_gen_or_i32(ret, arg1, arg2);
444 tcg_gen_not_i32(ret, ret);
445 }
446}
447
448void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
449{
450 if (TCG_TARGET_HAS_orc_i32) {
451 tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
452 } else {
453 TCGv_i32 t0 = tcg_temp_new_i32();
454 tcg_gen_not_i32(t0, arg2);
455 tcg_gen_or_i32(ret, arg1, t0);
456 tcg_temp_free_i32(t0);
457 }
458}
459
460void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
461{
462 if (TCG_TARGET_HAS_rot_i32) {
463 tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
464 } else {
465 TCGv_i32 t0, t1;
466
467 t0 = tcg_temp_new_i32();
468 t1 = tcg_temp_new_i32();
469 tcg_gen_shl_i32(t0, arg1, arg2);
470 tcg_gen_subfi_i32(t1, 32, arg2);
471 tcg_gen_shr_i32(t1, arg1, t1);
472 tcg_gen_or_i32(ret, t0, t1);
473 tcg_temp_free_i32(t0);
474 tcg_temp_free_i32(t1);
475 }
476}
477
478void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
479{
480 tcg_debug_assert(arg2 < 32);
481
482 if (arg2 == 0) {
483 tcg_gen_mov_i32(ret, arg1);
484 } else if (TCG_TARGET_HAS_rot_i32) {
485 TCGv_i32 t0 = tcg_const_i32(arg2);
486 tcg_gen_rotl_i32(ret, arg1, t0);
487 tcg_temp_free_i32(t0);
488 } else {
489 TCGv_i32 t0, t1;
490 t0 = tcg_temp_new_i32();
491 t1 = tcg_temp_new_i32();
492 tcg_gen_shli_i32(t0, arg1, arg2);
493 tcg_gen_shri_i32(t1, arg1, 32 - arg2);
494 tcg_gen_or_i32(ret, t0, t1);
495 tcg_temp_free_i32(t0);
496 tcg_temp_free_i32(t1);
497 }
498}
499
500void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
501{
502 if (TCG_TARGET_HAS_rot_i32) {
503 tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
504 } else {
505 TCGv_i32 t0, t1;
506
507 t0 = tcg_temp_new_i32();
508 t1 = tcg_temp_new_i32();
509 tcg_gen_shr_i32(t0, arg1, arg2);
510 tcg_gen_subfi_i32(t1, 32, arg2);
511 tcg_gen_shl_i32(t1, arg1, t1);
512 tcg_gen_or_i32(ret, t0, t1);
513 tcg_temp_free_i32(t0);
514 tcg_temp_free_i32(t1);
515 }
516}
517
518void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
519{
520 tcg_debug_assert(arg2 < 32);
521
522 if (arg2 == 0) {
523 tcg_gen_mov_i32(ret, arg1);
524 } else {
525 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
526 }
527}
528
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * leaving all other bits of ARG1 unchanged.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Depositing the entire word is simply a copy of arg2.  */
    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    /* Fallback: mask/shift arg2 into position, clear the field in arg1,
       then OR.  len < 32 here (len == 32 forces ofs == 0, handled above),
       so the shift below is well defined.  */
    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches bit 31: the shift itself discards the excess
           high bits, no mask needed.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
562
/*
 * ret = (c1 COND c2) ? v1 : v2.
 * Without a native movcond, expand branchlessly:
 *   t0 = -(c1 COND c2)  (all-ones or zero)
 *   ret = (v1 & t0) | (v2 & ~t0)
 */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        /* Turn 0/1 into an all-zeros/all-ones mask.  */
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
584
585void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
586 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
587{
588 if (TCG_TARGET_HAS_add2_i32) {
589 tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
590 } else {
591 TCGv_i64 t0 = tcg_temp_new_i64();
592 TCGv_i64 t1 = tcg_temp_new_i64();
593 tcg_gen_concat_i32_i64(t0, al, ah);
594 tcg_gen_concat_i32_i64(t1, bl, bh);
595 tcg_gen_add_i64(t0, t0, t1);
596 tcg_gen_extr_i64_i32(rl, rh, t0);
597 tcg_temp_free_i64(t0);
598 tcg_temp_free_i64(t1);
599 }
600}
601
602void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
603 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
604{
605 if (TCG_TARGET_HAS_sub2_i32) {
606 tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
607 } else {
608 TCGv_i64 t0 = tcg_temp_new_i64();
609 TCGv_i64 t1 = tcg_temp_new_i64();
610 tcg_gen_concat_i32_i64(t0, al, ah);
611 tcg_gen_concat_i32_i64(t1, bl, bh);
612 tcg_gen_sub_i64(t0, t0, t1);
613 tcg_gen_extr_i64_i32(rl, rh, t0);
614 tcg_temp_free_i64(t0);
615 tcg_temp_free_i64(t1);
616 }
617}
618
/*
 * Unsigned widening multiply: rh:rl = arg1 * arg2 (full 64-bit product).
 * Prefer a native mulu2; else combine mul (low half) with muluh (high
 * half); else widen to 64 bits and split the product.
 */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        /* Low product goes through a temp so writing rl cannot clobber
           an input of the high multiply (rl may alias arg1/arg2).  */
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
640
/*
 * Signed widening multiply: rh:rl = arg1 * arg2 (full 64-bit product).
 */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        /* Low product via a temp so rl aliasing an input is safe.  */
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Correct the unsigned product for signed inputs: subtract
           arg2 from the high half if arg1 < 0, and arg1 if arg2 < 0
           (t2/t3 are the operands masked by the other's sign).  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend, multiply, split.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
680
/*
 * Mixed widening multiply: rh:rl = (signed)arg1 * (unsigned)arg2.
 */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Correct the unsigned product for a signed arg1: subtract
           arg2 from the high half when arg1 < 0.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: sign-extend arg1, zero-extend arg2, multiply.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
707
708void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
709{
710 if (TCG_TARGET_HAS_ext8s_i32) {
711 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
712 } else {
713 tcg_gen_shli_i32(ret, arg, 24);
714 tcg_gen_sari_i32(ret, ret, 24);
715 }
716}
717
718void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
719{
720 if (TCG_TARGET_HAS_ext16s_i32) {
721 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
722 } else {
723 tcg_gen_shli_i32(ret, arg, 16);
724 tcg_gen_sari_i32(ret, ret, 16);
725 }
726}
727
728void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
729{
730 if (TCG_TARGET_HAS_ext8u_i32) {
731 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
732 } else {
733 tcg_gen_andi_i32(ret, arg, 0xffu);
734 }
735}
736
737void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
738{
739 if (TCG_TARGET_HAS_ext16u_i32) {
740 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
741 } else {
742 tcg_gen_andi_i32(ret, arg, 0xffffu);
743 }
744}
745
746
/*
 * Byte-swap a 16-bit quantity: ret = ((arg & 0xff) << 8) | (arg >> 8).
 * NOTE(review): the fallback's "arg >> 8" keeps bits 16..31 of arg in
 * the result — it appears to assume the high 16 bits of arg are already
 * zero; confirm against callers.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        /* t0 = low byte moved into the high byte position.  */
        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
761
/*
 * Byte-swap a 32-bit quantity: abcd -> dcba.
 * The fallback assembles the result byte by byte in t0.
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        /* Byte 0 -> byte 3.  */
        tcg_gen_shli_i32(t0, arg, 24);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        /* Byte 3 -> byte 0.  */
        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
787
788
789
790#if TCG_TARGET_REG_BITS == 32
791
792
/* 32-bit host: discard both 32-bit halves of a 64-bit value. */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
798
/* 32-bit host: copy a 64-bit value as two 32-bit halves. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
804
/* 32-bit host: load a 64-bit constant into two 32-bit halves. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);          /* low 32 bits */
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);   /* high 32 bits */
}
810
/* 32-bit host: load an unsigned byte, zero the high half. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
816
/* 32-bit host: load a signed byte, sign-extend into the high half. */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    /* Replicate the low half's sign bit across the high half.  */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
822
/* 32-bit host: load an unsigned halfword, zero the high half. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
828
/* 32-bit host: load a signed halfword, sign-extend into the high half. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    /* Replicate the low half's sign bit across the high half.  */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
834
/* 32-bit host: load an unsigned word, zero the high half. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
840
/* 32-bit host: load a signed word, sign-extend into the high half. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    /* Replicate the low half's sign bit across the high half.  */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
846
/*
 * 32-bit host: load a 64-bit value as two 32-bit loads.
 * Which half lives at the lower address depends on host endianness.
 */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    /* Big-endian host: high half first in memory.  */
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    /* Little-endian host: low half first in memory.  */
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
859
/*
 * 32-bit host: store a 64-bit value as two 32-bit stores, ordered
 * according to host endianness (mirror of tcg_gen_ld_i64).
 */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
870
/* 32-bit host: AND the two halves independently. */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
876
/* 32-bit host: OR the two halves independently. */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
882
/* 32-bit host: XOR the two halves independently. */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
888
/* 32-bit host: variable 64-bit left shift goes through a helper. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
893
/* 32-bit host: variable 64-bit logical right shift goes through a helper. */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
898
/* 32-bit host: variable 64-bit arithmetic right shift goes through a helper. */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
903
/*
 * 32-bit host: 64-bit multiply keeping only the low 64 bits:
 *   lo64(a * b) = mulu2(a.lo, b.lo) + ((a.lo*b.hi + a.hi*b.lo) << 32)
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    /* Build the result in a temporary in case ret aliases an input.  */
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    /* Full 64-bit product of the two low halves.  */
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* The cross products affect only the high half.  */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
924#endif
925
926void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
927{
928
929 if (arg2 == 0) {
930 tcg_gen_mov_i64(ret, arg1);
931 } else {
932 TCGv_i64 t0 = tcg_const_i64(arg2);
933 tcg_gen_add_i64(ret, arg1, t0);
934 tcg_temp_free_i64(t0);
935 }
936}
937
938void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
939{
940 if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
941
942 tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
943 } else {
944 TCGv_i64 t0 = tcg_const_i64(arg1);
945 tcg_gen_sub_i64(ret, t0, arg2);
946 tcg_temp_free_i64(t0);
947 }
948}
949
950void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
951{
952
953 if (arg2 == 0) {
954 tcg_gen_mov_i64(ret, arg1);
955 } else {
956 TCGv_i64 t0 = tcg_const_i64(arg2);
957 tcg_gen_sub_i64(ret, arg1, t0);
958 tcg_temp_free_i64(t0);
959 }
960}
961
/*
 * ret = arg1 & arg2 (immediate).
 * On 32-bit hosts the two halves are masked independently; otherwise
 * masks with dedicated zero-extension ops are recognized first.
 */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Recognize special masks.  */
    switch (arg2) {
    case 0:
        /* AND with zero yields zero.  */
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        /* AND with all-ones is the identity.  */
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Byte mask maps to an 8-bit zero-extend when available.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
1004
1005void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1006{
1007 if (TCG_TARGET_REG_BITS == 32) {
1008 tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1009 tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1010 return;
1011 }
1012
1013 if (arg2 == -1) {
1014 tcg_gen_movi_i64(ret, -1);
1015 } else if (arg2 == 0) {
1016 tcg_gen_mov_i64(ret, arg1);
1017 } else {
1018 TCGv_i64 t0 = tcg_const_i64(arg2);
1019 tcg_gen_or_i64(ret, arg1, t0);
1020 tcg_temp_free_i64(t0);
1021 }
1022}
1023
1024void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1025{
1026 if (TCG_TARGET_REG_BITS == 32) {
1027 tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1028 tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1029 return;
1030 }
1031
1032 if (arg2 == 0) {
1033 tcg_gen_mov_i64(ret, arg1);
1034 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
1035
1036 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
1037 } else {
1038 TCGv_i64 t0 = tcg_const_i64(arg2);
1039 tcg_gen_xor_i64(ret, arg1, t0);
1040 tcg_temp_free_i64(t0);
1041 }
1042}
1043
/*
 * Shift a 64-bit value by a constant on a 32-bit host, operating on the
 * two 32-bit halves.  RIGHT selects the shift direction; ARITH selects
 * arithmetic (sign-propagating) vs logical right shift.
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero: plain copy of both halves.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: one half is a shifted copy of the other;
           the vacated half is fill (sign bits or zero).  */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        /* 0 < c < 32: bits cross between the halves.  Compute into
           temporaries first so ret may alias arg1.  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            /* t0 = bits of the high half that enter the low half.  */
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            /* t0 = bits of the low half that enter the high half.  */
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);

            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c)
;
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1092
1093void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1094{
1095 tcg_debug_assert(arg2 < 64);
1096 if (TCG_TARGET_REG_BITS == 32) {
1097 tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
1098 } else if (arg2 == 0) {
1099 tcg_gen_mov_i64(ret, arg1);
1100 } else {
1101 TCGv_i64 t0 = tcg_const_i64(arg2);
1102 tcg_gen_shl_i64(ret, arg1, t0);
1103 tcg_temp_free_i64(t0);
1104 }
1105}
1106
1107void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1108{
1109 tcg_debug_assert(arg2 < 64);
1110 if (TCG_TARGET_REG_BITS == 32) {
1111 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
1112 } else if (arg2 == 0) {
1113 tcg_gen_mov_i64(ret, arg1);
1114 } else {
1115 TCGv_i64 t0 = tcg_const_i64(arg2);
1116 tcg_gen_shr_i64(ret, arg1, t0);
1117 tcg_temp_free_i64(t0);
1118 }
1119}
1120
1121void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1122{
1123 tcg_debug_assert(arg2 < 64);
1124 if (TCG_TARGET_REG_BITS == 32) {
1125 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
1126 } else if (arg2 == 0) {
1127 tcg_gen_mov_i64(ret, arg1);
1128 } else {
1129 TCGv_i64 t0 = tcg_const_i64(arg2);
1130 tcg_gen_sar_i64(ret, arg1, t0);
1131 tcg_temp_free_i64(t0);
1132 }
1133}
1134
/*
 * Branch to l if (arg1 cond arg2).  On 32-bit hosts the comparison is
 * done with the brcond2 op over the two 32-bit halves of each operand.
 */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1150
1151void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
1152{
1153 if (cond == TCG_COND_ALWAYS) {
1154 tcg_gen_br(l);
1155 } else if (cond != TCG_COND_NEVER) {
1156 TCGv_i64 t0 = tcg_const_i64(arg2);
1157 tcg_gen_brcond_i64(cond, arg1, t0, l);
1158 tcg_temp_free_i64(t0);
1159 }
1160}
1161
/*
 * ret = (arg1 cond arg2) ? 1 : 0.  On 32-bit hosts the setcond2 op
 * compares the halves and yields the result in the low half; the high
 * half of ret is explicitly zeroed.
 */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1180
1181void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1182 TCGv_i64 arg1, int64_t arg2)
1183{
1184 TCGv_i64 t0 = tcg_const_i64(arg2);
1185 tcg_gen_setcond_i64(cond, ret, arg1, t0);
1186 tcg_temp_free_i64(t0);
1187}
1188
1189void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1190{
1191 TCGv_i64 t0 = tcg_const_i64(arg2);
1192 tcg_gen_mul_i64(ret, arg1, t0);
1193 tcg_temp_free_i64(t0);
1194}
1195
/*
 * Signed 64-bit division, ret = arg1 / arg2.
 * Mirrors tcg_gen_div_i32: native div, then double-word div2 with a
 * sign-extended high word, then the out-of-line helper.
 */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* Sign-extend the dividend into the high word.  */
        tcg_gen_sari_i64(t0, arg1, 63);
        /* div2 outputs: quotient -> ret, remainder -> t0 (discarded).  */
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1209
1210void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1211{
1212 if (TCG_TARGET_HAS_rem_i64) {
1213 tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
1214 } else if (TCG_TARGET_HAS_div_i64) {
1215 TCGv_i64 t0 = tcg_temp_new_i64();
1216 tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
1217 tcg_gen_mul_i64(t0, t0, arg2);
1218 tcg_gen_sub_i64(ret, arg1, t0);
1219 tcg_temp_free_i64(t0);
1220 } else if (TCG_TARGET_HAS_div2_i64) {
1221 TCGv_i64 t0 = tcg_temp_new_i64();
1222 tcg_gen_sari_i64(t0, arg1, 63);
1223 tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
1224 tcg_temp_free_i64(t0);
1225 } else {
1226 gen_helper_rem_i64(ret, arg1, arg2);
1227 }
1228}
1229
1230void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1231{
1232 if (TCG_TARGET_HAS_div_i64) {
1233 tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
1234 } else if (TCG_TARGET_HAS_div2_i64) {
1235 TCGv_i64 t0 = tcg_temp_new_i64();
1236 tcg_gen_movi_i64(t0, 0);
1237 tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
1238 tcg_temp_free_i64(t0);
1239 } else {
1240 gen_helper_divu_i64(ret, arg1, arg2);
1241 }
1242}
1243
1244void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1245{
1246 if (TCG_TARGET_HAS_rem_i64) {
1247 tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
1248 } else if (TCG_TARGET_HAS_div_i64) {
1249 TCGv_i64 t0 = tcg_temp_new_i64();
1250 tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
1251 tcg_gen_mul_i64(t0, t0, arg2);
1252 tcg_gen_sub_i64(ret, arg1, t0);
1253 tcg_temp_free_i64(t0);
1254 } else if (TCG_TARGET_HAS_div2_i64) {
1255 TCGv_i64 t0 = tcg_temp_new_i64();
1256 tcg_gen_movi_i64(t0, 0);
1257 tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
1258 tcg_temp_free_i64(t0);
1259 } else {
1260 gen_helper_remu_i64(ret, arg1, arg2);
1261 }
1262}
1263
1264void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1265{
1266 if (TCG_TARGET_REG_BITS == 32) {
1267 tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1268 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1269 } else if (TCG_TARGET_HAS_ext8s_i64) {
1270 tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
1271 } else {
1272 tcg_gen_shli_i64(ret, arg, 56);
1273 tcg_gen_sari_i64(ret, ret, 56);
1274 }
1275}
1276
1277void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1278{
1279 if (TCG_TARGET_REG_BITS == 32) {
1280 tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1281 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1282 } else if (TCG_TARGET_HAS_ext16s_i64) {
1283 tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
1284 } else {
1285 tcg_gen_shli_i64(ret, arg, 48);
1286 tcg_gen_sari_i64(ret, ret, 48);
1287 }
1288}
1289
1290void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1291{
1292 if (TCG_TARGET_REG_BITS == 32) {
1293 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1294 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1295 } else if (TCG_TARGET_HAS_ext32s_i64) {
1296 tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
1297 } else {
1298 tcg_gen_shli_i64(ret, arg, 32);
1299 tcg_gen_sari_i64(ret, ret, 32);
1300 }
1301}
1302
1303void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1304{
1305 if (TCG_TARGET_REG_BITS == 32) {
1306 tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1307 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1308 } else if (TCG_TARGET_HAS_ext8u_i64) {
1309 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
1310 } else {
1311 tcg_gen_andi_i64(ret, arg, 0xffu);
1312 }
1313}
1314
1315void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1316{
1317 if (TCG_TARGET_REG_BITS == 32) {
1318 tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1319 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1320 } else if (TCG_TARGET_HAS_ext16u_i64) {
1321 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
1322 } else {
1323 tcg_gen_andi_i64(ret, arg, 0xffffu);
1324 }
1325}
1326
1327void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1328{
1329 if (TCG_TARGET_REG_BITS == 32) {
1330 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1331 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1332 } else if (TCG_TARGET_HAS_ext32u_i64) {
1333 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
1334 } else {
1335 tcg_gen_andi_i64(ret, arg, 0xffffffffu);
1336 }
1337}
1338
1339
/* Byte-swap the low 16 bits of ARG into RET.
 * NOTE(review): in the generic fallback, arg >> 8 is ORed in unmasked,
 * so the result above bit 15 is only clean when the upper bits of ARG
 * are already zero -- presumably callers pass a zero-extended 16-bit
 * value; confirm against callers.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low half; the high half becomes zero.  */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        /* Move the low byte up to bits 8..15 ...  */
        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        /* ... and bits 8..15 down to the low byte, then combine.  */
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1357
1358
/* Byte-swap the low 32 bits of ARG into RET.
 * NOTE(review): in the generic fallback the final arg >> 24 is ORed in
 * unmasked, so bits above 31 of ARG would leak into the result --
 * presumably callers pass a zero-extended 32-bit value; confirm.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low half; the high half becomes zero.  */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 3 (truncated to 32 bits).  */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1388
/* Byte-swap all 64 bits of ARG into RET.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half and exchange the halves.  Temporaries
           allow ret and arg to alias.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Generic fallback: move each of the 8 bytes into place,
           accumulating into t0.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 7.  */
        tcg_gen_shli_i64(t0, arg, 56);

        /* Byte 1 -> byte 6.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 5.  */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 4.  */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 4 -> byte 3.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 5 -> byte 2.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 6 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 7 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1440
1441void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1442{
1443 if (TCG_TARGET_REG_BITS == 32) {
1444 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1445 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1446 } else if (TCG_TARGET_HAS_not_i64) {
1447 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1448 } else {
1449 tcg_gen_xori_i64(ret, arg, -1);
1450 }
1451}
1452
1453void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1454{
1455 if (TCG_TARGET_REG_BITS == 32) {
1456 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1457 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1458 } else if (TCG_TARGET_HAS_andc_i64) {
1459 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1460 } else {
1461 TCGv_i64 t0 = tcg_temp_new_i64();
1462 tcg_gen_not_i64(t0, arg2);
1463 tcg_gen_and_i64(ret, arg1, t0);
1464 tcg_temp_free_i64(t0);
1465 }
1466}
1467
1468void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1469{
1470 if (TCG_TARGET_REG_BITS == 32) {
1471 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1472 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1473 } else if (TCG_TARGET_HAS_eqv_i64) {
1474 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1475 } else {
1476 tcg_gen_xor_i64(ret, arg1, arg2);
1477 tcg_gen_not_i64(ret, ret);
1478 }
1479}
1480
1481void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1482{
1483 if (TCG_TARGET_REG_BITS == 32) {
1484 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1485 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1486 } else if (TCG_TARGET_HAS_nand_i64) {
1487 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1488 } else {
1489 tcg_gen_and_i64(ret, arg1, arg2);
1490 tcg_gen_not_i64(ret, ret);
1491 }
1492}
1493
1494void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1495{
1496 if (TCG_TARGET_REG_BITS == 32) {
1497 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1498 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1499 } else if (TCG_TARGET_HAS_nor_i64) {
1500 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1501 } else {
1502 tcg_gen_or_i64(ret, arg1, arg2);
1503 tcg_gen_not_i64(ret, ret);
1504 }
1505}
1506
1507void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1508{
1509 if (TCG_TARGET_REG_BITS == 32) {
1510 tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1511 tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1512 } else if (TCG_TARGET_HAS_orc_i64) {
1513 tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1514 } else {
1515 TCGv_i64 t0 = tcg_temp_new_i64();
1516 tcg_gen_not_i64(t0, arg2);
1517 tcg_gen_or_i64(ret, arg1, t0);
1518 tcg_temp_free_i64(t0);
1519 }
1520}
1521
1522void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1523{
1524 if (TCG_TARGET_HAS_rot_i64) {
1525 tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1526 } else {
1527 TCGv_i64 t0, t1;
1528 t0 = tcg_temp_new_i64();
1529 t1 = tcg_temp_new_i64();
1530 tcg_gen_shl_i64(t0, arg1, arg2);
1531 tcg_gen_subfi_i64(t1, 64, arg2);
1532 tcg_gen_shr_i64(t1, arg1, t1);
1533 tcg_gen_or_i64(ret, t0, t1);
1534 tcg_temp_free_i64(t0);
1535 tcg_temp_free_i64(t1);
1536 }
1537}
1538
1539void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1540{
1541 tcg_debug_assert(arg2 < 64);
1542
1543 if (arg2 == 0) {
1544 tcg_gen_mov_i64(ret, arg1);
1545 } else if (TCG_TARGET_HAS_rot_i64) {
1546 TCGv_i64 t0 = tcg_const_i64(arg2);
1547 tcg_gen_rotl_i64(ret, arg1, t0);
1548 tcg_temp_free_i64(t0);
1549 } else {
1550 TCGv_i64 t0, t1;
1551 t0 = tcg_temp_new_i64();
1552 t1 = tcg_temp_new_i64();
1553 tcg_gen_shli_i64(t0, arg1, arg2);
1554 tcg_gen_shri_i64(t1, arg1, 64 - arg2);
1555 tcg_gen_or_i64(ret, t0, t1);
1556 tcg_temp_free_i64(t0);
1557 tcg_temp_free_i64(t1);
1558 }
1559}
1560
1561void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1562{
1563 if (TCG_TARGET_HAS_rot_i64) {
1564 tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
1565 } else {
1566 TCGv_i64 t0, t1;
1567 t0 = tcg_temp_new_i64();
1568 t1 = tcg_temp_new_i64();
1569 tcg_gen_shr_i64(t0, arg1, arg2);
1570 tcg_gen_subfi_i64(t1, 64, arg2);
1571 tcg_gen_shl_i64(t1, arg1, t1);
1572 tcg_gen_or_i64(ret, t0, t1);
1573 tcg_temp_free_i64(t0);
1574 tcg_temp_free_i64(t1);
1575 }
1576}
1577
1578void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1579{
1580 tcg_debug_assert(arg2 < 64);
1581
1582 if (arg2 == 0) {
1583 tcg_gen_mov_i64(ret, arg1);
1584 } else {
1585 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1586 }
1587}
1588
/* Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS, writing
 * the result to RET.  All other bits of ARG1 are preserved.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* A full-width deposit is just a move of arg2.  */
    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Field lies entirely in the high half.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        /* Field lies entirely in the low half.  */
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
        /* Field straddles the halves: fall through to the generic
           shift-and-mask expansion below.  */
    }

    /* len < 64 here (len == 64 forces ofs == 0, handled above), so the
       1ull << len shift is well defined.  */
    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        /* Mask arg2 down to len bits and shift it into position.  */
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches bit 63: the shift itself discards excess bits.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    /* Clear the destination hole in arg1 and merge the field in.  */
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1637
/* Conditional move: RET = (C1 COND C2) ? V1 : V2.  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Evaluate the 64-bit condition once into the 32-bit flag t0,
           then select each half of the result.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select on t0 != 0, half by half.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Branchless select: turn the 0/1 flag into a 0/-1 mask,
               then ret = (v1 & mask) | (v2 & ~mask), half by half.  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Branchless select via setcond and a 0/-1 mask.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1685
1686void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
1687 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
1688{
1689 if (TCG_TARGET_HAS_add2_i64) {
1690 tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
1691 } else {
1692 TCGv_i64 t0 = tcg_temp_new_i64();
1693 TCGv_i64 t1 = tcg_temp_new_i64();
1694 tcg_gen_add_i64(t0, al, bl);
1695 tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
1696 tcg_gen_add_i64(rh, ah, bh);
1697 tcg_gen_add_i64(rh, rh, t1);
1698 tcg_gen_mov_i64(rl, t0);
1699 tcg_temp_free_i64(t0);
1700 tcg_temp_free_i64(t1);
1701 }
1702}
1703
1704void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
1705 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
1706{
1707 if (TCG_TARGET_HAS_sub2_i64) {
1708 tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
1709 } else {
1710 TCGv_i64 t0 = tcg_temp_new_i64();
1711 TCGv_i64 t1 = tcg_temp_new_i64();
1712 tcg_gen_sub_i64(t0, al, bl);
1713 tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
1714 tcg_gen_sub_i64(rh, ah, bh);
1715 tcg_gen_sub_i64(rh, rh, t1);
1716 tcg_gen_mov_i64(rl, t0);
1717 tcg_temp_free_i64(t0);
1718 tcg_temp_free_i64(t1);
1719 }
1720}
1721
1722void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
1723{
1724 if (TCG_TARGET_HAS_mulu2_i64) {
1725 tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
1726 } else if (TCG_TARGET_HAS_muluh_i64) {
1727 TCGv_i64 t = tcg_temp_new_i64();
1728 tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
1729 tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
1730 tcg_gen_mov_i64(rl, t);
1731 tcg_temp_free_i64(t);
1732 } else {
1733 TCGv_i64 t0 = tcg_temp_new_i64();
1734 tcg_gen_mul_i64(t0, arg1, arg2);
1735 gen_helper_muluh_i64(rh, arg1, arg2);
1736 tcg_gen_mov_i64(rl, t0);
1737 tcg_temp_free_i64(t0);
1738 }
1739}
1740
/* Signed 64x64 -> 128 multiply: (RH:RL) = ARG1 * ARG2.  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Low half via mul, high half via mulsh; stage the low half so
           rl may alias the inputs.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed product from the unsigned one:
           signed_high = unsigned_high - (arg1 < 0 ? arg2 : 0)
                                       - (arg2 < 0 ? arg1 : 0).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);

        /* t2/t3 are all-ones when arg1/arg2 are negative.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* Last resort: high half computed by a helper call.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1777
/* Signed-by-unsigned 64x64 -> 128 multiply: (RH:RL) = ARG1 (signed)
 * times ARG2 (unsigned).  Derived from the unsigned product:
 * signed_high = unsigned_high - (arg1 < 0 ? arg2 : 0).
 */
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);

    /* t2 is all-ones when arg1 is negative; subtract arg2 once then.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
1793
1794
1795
/* Extract the low 32 bits of ARG into the 32-bit RET.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Reinterpret the 64-bit temp index as a 32-bit temp and move;
           relies on i32/i64 temps sharing the same index space on
           64-bit hosts.  */
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}
1807
/* Extract the high 32 bits of ARG into the 32-bit RET.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Shift the high half down, then reinterpret the 64-bit temp
           index as a 32-bit temp for the final move.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1822
1823void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1824{
1825 if (TCG_TARGET_REG_BITS == 32) {
1826 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1827 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1828 } else {
1829 tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
1830 GET_TCGV_I64(ret), GET_TCGV_I32(arg));
1831 }
1832}
1833
1834void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1835{
1836 if (TCG_TARGET_REG_BITS == 32) {
1837 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1838 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1839 } else {
1840 tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
1841 GET_TCGV_I64(ret), GET_TCGV_I32(arg));
1842 }
1843}
1844
/* Build the 64-bit DEST from two 32-bit halves: LOW in bits 0..31,
 * HIGH in bits 32..63.
 */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves map directly onto the double-word pair.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* Zero-extend both halves; extending high into a temp first lets
       dest alias either input.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);

    /* Merge the high half in, preferring a native deposit op.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1870
1871void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
1872{
1873 if (TCG_TARGET_REG_BITS == 32) {
1874 tcg_gen_mov_i32(lo, TCGV_LOW(arg));
1875 tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
1876 } else {
1877 tcg_gen_extrl_i64_i32(lo, arg);
1878 tcg_gen_extrh_i64_i32(hi, arg);
1879 }
1880}
1881
1882void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
1883{
1884 tcg_gen_ext32u_i64(lo, arg);
1885 tcg_gen_shri_i64(hi, arg, 32);
1886}
1887
1888
1889
/* Emit a direct jump to translation-block exit slot IDX (0 or 1).  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* Only two chaining slots exist per TB.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Each slot may be emitted at most once per TB; track via a mask.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1901
/* Canonicalize a memory-operation descriptor:
 * strip meaningless flags (byte swap on single bytes, sign on 32-bit
 * values that fill a 32-bit destination, sign on any store) and reject
 * 64-bit accesses into 32-bit destinations.
 */
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    /* Validate the alignment bits encoded in op; result unused.  */
    (void)get_alignment_bits(op);

    switch (op & MO_SIZE) {
    case MO_8:
        /* A single byte has no byte order.  */
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            /* A 32-bit value fills a 32-bit destination exactly.  */
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            /* 64-bit access with a 32-bit destination is invalid.  */
            tcg_abort();
        }
        break;
    }
    if (st) {
        /* Stores never sign-extend.  */
        op &= ~MO_SIGN;
    }
    return op;
}
1929
/* Emit a guest load/store op with a 32-bit data register, packing the
 * (memop, mmu index) pair into a single TCGMemOpIdx operand.  The
 * operand layout depends on guest address width and host word size.
 */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address split across two 32-bit host regs.  */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        /* Mixed widths: emit raw args (i32 data, i64 address).  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1944
/* Emit a guest load/store op with a 64-bit data register; the 64-bit
 * value and/or address may each be split across two 32-bit host regs.
 */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit value split; 32-bit address fits one register.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        /* Mixed widths: emit raw args (i64 data, i32 address).  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both value and address split into 32-bit halves.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
1964
1965void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1966{
1967 memop = tcg_canonicalize_memop(memop, 0, 0);
1968 trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
1969 addr, trace_mem_get_info(memop, 0));
1970 gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
1971}
1972
1973void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1974{
1975 memop = tcg_canonicalize_memop(memop, 0, 1);
1976 trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
1977 addr, trace_mem_get_info(memop, 1));
1978 gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
1979}
1980
/* Emit a guest memory load into a 64-bit register, with tracing.  */
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        /* Sub-64-bit load on a 32-bit host: load into the low half,
           then extend into the high half per the sign flag.  */
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}
1998
1999void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
2000{
2001 if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2002 tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
2003 return;
2004 }
2005
2006 memop = tcg_canonicalize_memop(memop, 1, 1);
2007 trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
2008 addr, trace_mem_get_info(memop, 1));
2009 gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
2010}
2011
2012static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, TCGMemOp opc)
2013{
2014 switch (opc & MO_SSIZE) {
2015 case MO_SB:
2016 tcg_gen_ext8s_i32(ret, val);
2017 break;
2018 case MO_UB:
2019 tcg_gen_ext8u_i32(ret, val);
2020 break;
2021 case MO_SW:
2022 tcg_gen_ext16s_i32(ret, val);
2023 break;
2024 case MO_UW:
2025 tcg_gen_ext16u_i32(ret, val);
2026 break;
2027 default:
2028 tcg_gen_mov_i32(ret, val);
2029 break;
2030 }
2031}
2032
2033static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, TCGMemOp opc)
2034{
2035 switch (opc & MO_SSIZE) {
2036 case MO_SB:
2037 tcg_gen_ext8s_i64(ret, val);
2038 break;
2039 case MO_UB:
2040 tcg_gen_ext8u_i64(ret, val);
2041 break;
2042 case MO_SW:
2043 tcg_gen_ext16s_i64(ret, val);
2044 break;
2045 case MO_UW:
2046 tcg_gen_ext16u_i64(ret, val);
2047 break;
2048 case MO_SL:
2049 tcg_gen_ext32s_i64(ret, val);
2050 break;
2051 case MO_UL:
2052 tcg_gen_ext32u_i64(ret, val);
2053 break;
2054 default:
2055 tcg_gen_mov_i64(ret, val);
2056 break;
2057 }
2058}
2059
/* Function-pointer types for the out-of-line atomic helpers.  Under
 * softmmu each helper takes a trailing TCGMemOpIdx (as TCGv_i32);
 * user-mode helpers omit it.  "cx" = compare-and-exchange (old value,
 * new value), "op" = read-modify-write with a single operand.
 */
#ifdef CONFIG_SOFTMMU
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
#else
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
#endif

/* Expand table entries for 64-bit helpers only when the host supports
 * 64-bit atomics; otherwise those table slots stay NULL.
 */
#ifdef CONFIG_ATOMIC64
# define WITH_ATOMIC64(X) X,
#else
# define WITH_ATOMIC64(X)
#endif
2081
/* Atomic compare-and-exchange helpers, indexed by (MO_SIZE | MO_BSWAP).
 * Unsupported combinations remain NULL (checked by callers).
 */
static void * const table_cmpxchg[16] = {
    [MO_8] = gen_helper_atomic_cmpxchgb,
    [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
    [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
    [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
    [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
};
2091
/* Emit a 32-bit atomic compare-and-exchange at guest address ADDR:
 * if *addr == CMPV then *addr = NEWV; RETV receives the old value,
 * extended per MEMOP.  Non-parallel mode uses a plain (non-atomic)
 * load/compare/store expansion.
 */
void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
                                TCGv_i32 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);

    if (!parallel_cpus) {
        /* Single-threaded: expand inline without atomicity.  */
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();

        /* Truncate the comparison value to the access size.  */
        tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
        /* Select the value to store: newv on match, old value else.  */
        tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i32(t2, addr, idx, memop);
        tcg_temp_free_i32(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, t1, memop);
        } else {
            tcg_gen_mov_i32(retv, t1);
        }
        tcg_temp_free_i32(t1);
    } else {
        /* Parallel mode: call the out-of-line atomic helper.  */
        gen_atomic_cx_i32 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            /* Softmmu helpers take the packed (memop, idx) operand.  */
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
#endif

        /* Helpers return zero-extended values; sign-extend here.  */
        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, retv, memop);
        }
    }
}
2135
/* Emit a 64-bit atomic compare-and-exchange at guest address ADDR:
 * if *addr == CMPV then *addr = NEWV; RETV receives the old value,
 * extended per MEMOP.  Sub-64-bit accesses are routed through the
 * 32-bit implementation.
 */
void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!parallel_cpus) {
        /* Single-threaded: expand inline without atomicity.  */
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        /* Truncate the comparison value to the access size.  */
        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        /* Select the value to store: newv on match, old value else.  */
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        /* Parallel mode with host 64-bit atomics: call the helper.  */
        gen_atomic_cx_i64 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
            gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
#endif
#else
        /* No host 64-bit atomics: bail out to serialized execution.  */
        gen_helper_exit_atomic(tcg_ctx.tcg_env);
#endif
    } else {
        /* Sub-64-bit access: perform the operation at 32 bits and
           widen the result back.  */
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}
2196
2197static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
2198 TCGArg idx, TCGMemOp memop, bool new_val,
2199 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
2200{
2201 TCGv_i32 t1 = tcg_temp_new_i32();
2202 TCGv_i32 t2 = tcg_temp_new_i32();
2203
2204 memop = tcg_canonicalize_memop(memop, 0, 0);
2205
2206 tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
2207 gen(t2, t1, val);
2208 tcg_gen_qemu_st_i32(t2, addr, idx, memop);
2209
2210 tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
2211 tcg_temp_free_i32(t1);
2212 tcg_temp_free_i32(t2);
2213}
2214
/* Atomic 32-bit read-modify-write via an out-of-line helper chosen from
 * TABLE by (MO_SIZE | MO_BSWAP).  RET receives the helper's result,
 * sign-extended per MEMOP if requested.
 */
static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        /* Softmmu helpers take the packed (memop, idx) operand.  */
        TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
        gen(ret, tcg_ctx.tcg_env, addr, val, oi);
        tcg_temp_free_i32(oi);
    }
#else
    gen(ret, tcg_ctx.tcg_env, addr, val);
#endif

    /* Helpers return zero-extended values; sign-extend here.  */
    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}
2239
2240static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
2241 TCGArg idx, TCGMemOp memop, bool new_val,
2242 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
2243{
2244 TCGv_i64 t1 = tcg_temp_new_i64();
2245 TCGv_i64 t2 = tcg_temp_new_i64();
2246
2247 memop = tcg_canonicalize_memop(memop, 1, 0);
2248
2249 tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
2250 gen(t2, t1, val);
2251 tcg_gen_qemu_st_i64(t2, addr, idx, memop);
2252
2253 tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
2254 tcg_temp_free_i64(t1);
2255 tcg_temp_free_i64(t2);
2256}
2257
/* Atomic 64-bit read-modify-write via an out-of-line helper from TABLE.
 * Sub-64-bit sizes are performed at 32 bits and widened; true 64-bit
 * ops require host 64-bit atomics, else execution is serialized.
 */
static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            /* Softmmu helpers take the packed (memop, idx) operand.  */
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(ret, tcg_ctx.tcg_env, addr, val, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(ret, tcg_ctx.tcg_env, addr, val);
#endif
#else
        /* No host 64-bit atomics: bail out to serialized execution.  */
        gen_helper_exit_atomic(tcg_ctx.tcg_env);
#endif
    } else {
        /* Narrow access: perform at 32 bits, then widen the result.  */
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}
2298
/*
 * Define, for atomic operation NAME:
 *   - table_NAME: helper-function table indexed by (MO_SIZE | MO_BSWAP),
 *     with 64-bit entries present only under CONFIG_ATOMIC64;
 *   - tcg_gen_atomic_NAME_i32 / _i64: front ends that emit the atomic
 *     helper call when parallel_cpus is set, or a non-atomic
 *     load/OP/store sequence (using tcg_gen_OP_i32/_i64) otherwise.
 * NEW selects whether the returned value is the updated (1) or the
 * original (0) memory value.
 */
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}
2329
/* fetch-and-OP variants: return the ORIGINAL memory value (NEW = 0).  */
GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)

/* OP-and-fetch variants: return the UPDATED memory value (NEW = 1).  */
GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)
2339
/* Adapter matching the three-operand callback signature used by
   do_nonatomic_op_i32: ignore the old memory value A and simply
   produce B, i.e. the semantics of an exchange.  */
static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}
2344
/* Adapter matching the three-operand callback signature used by
   do_nonatomic_op_i64: ignore the old memory value A and simply
   produce B, i.e. the semantics of an exchange.  */
static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}
2349
/* xchg: store the new value, return the old one (NEW = 0 with mov2).  */
GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER
2353