1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21#include "qemu/osdep.h"
22
23#include "cpu.h"
24#include "disas/disas.h"
25#include "exec/helper-proto.h"
26#include "exec/exec-all.h"
27#include "tcg-op.h"
28#include "exec/cpu_ldst.h"
29
30#include "exec/helper-gen.h"
31
32#include "trace-tcg.h"
33#include "exec/log.h"
34#include "asi.h"
35
36
#define DEBUG_DISAS

/* Sentinel values stored in DisasContext pc/npc instead of a real address. */
#define DYNAMIC_PC 1 /* pc/npc only known at run time */
#define JUMP_PC 2 /* npc is one of the two values in dc->jump_pc[] */
41
42
43
/* TCG global variables mapping guest CPU state (see the init code). */
static TCGv_env cpu_env;
static TCGv_ptr cpu_regwptr;
/* Condition-code sources/destination plus the lazy cc_op selector. */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
/* SPARC64-only state: 64-bit condition codes, FPRS, and misc registers. */
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif

/* Floating point registers, stored as 64-bit pairs of 32-bit registers. */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66
67#include "exec/gen-icount.h"
68
/* Per-translation-block state threaded through the instruction decoders. */
typedef struct DisasContext {
    target_ulong pc;      /* address of the insn being translated, or
                             DYNAMIC_PC when only known at run time */
    target_ulong npc;     /* next pc; may be DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* the two npc candidates when npc == JUMP_PC */
    int is_br;            /* set when translation must stop (branch/trap) */
    int mem_idx;          /* MMU index for memory accesses */
    int fpu_enabled;
    int address_mask_32bit; /* truncate addresses to 32 bits (PSTATE.AM) */
    int singlestep;
    uint32_t cc_op;       /* lazily-evaluated condition-code operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    /* Scratch temporaries allocated per insn, freed at end of insn. */
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
#ifdef TARGET_SPARC64
    int fprs_dirty;       /* FPRS dirty bits already set in this TB */
    int asi;
#endif
} DisasContext;
90
/* A comparison reduced to a TCG condition over two operands. */
typedef struct {
    TCGCond cond;
    bool is_bool;   /* c1 already holds a 0/1 boolean result */
    bool g1, g2;    /* c1/c2 alias TCG globals and must not be freed */
    TCGv c1, c2;
} DisasCompare;
97
98
/* Extract bits FROM..TO of X, numbered big-endian style from bit 31. */
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

/* Same extraction with little-endian (LSB = 0) bit numbering. */
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extended variants of the field extractors. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/*
 * Map a double/quad FP register number to its index; on SPARC64 bit 0
 * of the encoding selects the upper bank of 32 registers.
 */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
119
/*
 * Sign-extend the low LEN bits of X to a full int.
 *
 * The left shift is done on an unsigned value: left-shifting a negative
 * signed int is undefined behaviour (C11 6.5.7p4).  The arithmetic right
 * shift of a negative int is implementation-defined but is arithmetic on
 * every compiler QEMU supports.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((uint32_t)x << len) >> len;
}
125
126#define IS_IMM (insn & (1<<13))
127
128static inline TCGv_i32 get_temp_i32(DisasContext *dc)
129{
130 TCGv_i32 t;
131 assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
132 dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
133 return t;
134}
135
136static inline TCGv get_temp_tl(DisasContext *dc)
137{
138 TCGv t;
139 assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
140 dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
141 return t;
142}
143
/*
 * Mark the half of the FP register file containing RD as dirty in FPRS
 * (bit 1 for regs < 32, bit 2 for the upper bank).  The per-TB cache in
 * dc->fprs_dirty avoids emitting the OR more than once per TB.
 */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;

    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
156
157
/*
 * Read single-precision FP register SRC.  Singles are packed two per
 * 64-bit cpu_fpr element: odd-numbered regs in the low 32 bits, even
 * ones in the high 32 bits.
 */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    /* On 32-bit hosts each half of the i64 is its own TCG value. */
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* Low half: reinterpret the i64 global directly as an i32. */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* High half: must be shifted down into a fresh temp. */
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}
181
/*
 * Write V to single-precision FP register DST (packed layout as in
 * gen_load_fpr_F) and mark the containing bank dirty in FPRS.
 */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* Reinterpret V as an i64 and deposit it into the proper half. */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
197
/* Scratch destination for a single-precision result (stored separately). */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
202
203static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
204{
205 src = DFPREG(src);
206 return cpu_fpr[src / 2];
207}
208
209static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
210{
211 dst = DFPREG(dst);
212 tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
213 gen_update_fprs_dirty(dc, dst);
214}
215
/* Destination for a double result: write straight into the register. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
220
221static void gen_op_load_fpr_QT0(unsigned int src)
222{
223 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
224 offsetof(CPU_QuadU, ll.upper));
225 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
226 offsetof(CPU_QuadU, ll.lower));
227}
228
229static void gen_op_load_fpr_QT1(unsigned int src)
230{
231 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
232 offsetof(CPU_QuadU, ll.upper));
233 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
234 offsetof(CPU_QuadU, ll.lower));
235}
236
237static void gen_op_store_QT0_fpr(unsigned int dst)
238{
239 tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
240 offsetof(CPU_QuadU, ll.upper));
241 tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
242 offsetof(CPU_QuadU, ll.lower));
243}
244
/* Write the two 64-bit halves V1/V2 to quad register DST; mark dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
254
255#ifdef TARGET_SPARC64
256static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
257{
258 src = QFPREG(src);
259 return cpu_fpr[src / 2];
260}
261
262static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
263{
264 src = QFPREG(src);
265 return cpu_fpr[src / 2 + 1];
266}
267
268static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
269{
270 rd = QFPREG(rd);
271 rs = QFPREG(rs);
272
273 tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
274 tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
275 gen_update_fprs_dirty(dc, rd);
276}
277#endif
278
279
/*
 * Privilege tests.  User-mode emulation is never privileged; system
 * emulation derives the level from the current MMU index.
 */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

/*
 * Should addresses be truncated to 32 bits?  Always true for 32-bit
 * ABI processes; otherwise controlled by PSTATE.AM (address_mask_32bit).
 */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
300
301static inline void gen_address_mask(DisasContext *dc, TCGv addr)
302{
303#ifdef TARGET_SPARC64
304 if (AM_CHECK(dc))
305 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
306#endif
307}
308
309static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
310{
311 if (reg > 0) {
312 assert(reg < 32);
313 return cpu_regs[reg];
314 } else {
315 TCGv t = get_temp_tl(dc);
316 tcg_gen_movi_tl(t, 0);
317 return t;
318 }
319}
320
321static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
322{
323 if (reg > 0) {
324 assert(reg < 32);
325 tcg_gen_mov_tl(cpu_regs[reg], v);
326 }
327}
328
329static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
330{
331 if (reg > 0) {
332 assert(reg < 32);
333 return cpu_regs[reg];
334 } else {
335 return get_temp_tl(dc);
336 }
337}
338
/*
 * May a direct TB-to-TB jump be used for a branch to PC/NPC?
 * Disallowed when single-stepping, and in system mode when either
 * target lies outside the page containing the current TB.
 */
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
353
/*
 * End the TB jumping to PC/NPC, chaining directly to the next TB when
 * use_goto_tb allows it, otherwise via a plain exit.
 */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
370
371
/* Extract the N (negative) flag of PSR value SRC into REG as 0/1. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
378
/* Extract the Z (zero) flag of PSR value SRC into REG as 0/1. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
385
/* Extract the V (overflow) flag of PSR value SRC into REG as 0/1. */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
392
/* Extract the C (carry) flag of PSR value SRC into REG as 0/1. */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
399
/* dst = src1 + src2, leaving the operands/result in the cc_* globals
   for later lazy condition-code evaluation. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
407
/* Carry out of the most recent 32-bit add: carry = (dst < src1) unsigned. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* On 64-bit targets, narrow the saved operands to 32 bits first. */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
433
/* Borrow out of the most recent 32-bit sub: carry = (src1 < src2) unsigned. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* On 64-bit targets, narrow the saved operands to 32 bits first. */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
459
/*
 * ADDX/ADDXcc: dst = src1 + src2 + carry, where the carry is derived
 * from the lazily-tracked condition-code state in dc->cc_op.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain add.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /*
             * On a 32-bit target the carry can be re-computed and the
             * addition performed in one step with add2; the high word
             * of the double-width result is discarded.
             */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* Unknown cc state: have the helper compute the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
530
/* dst = src1 - src2, leaving the operands/result in the cc_* globals
   for later lazy condition-code evaluation. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
538
/*
 * SUBX/SUBXcc: dst = src1 - src2 - carry, where the carry is derived
 * from the lazily-tracked condition-code state in dc->cc_op.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain sub.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /*
             * On a 32-bit target the borrow can be re-computed and the
             * subtraction performed in one step with sub2; the high
             * word of the double-width result is discarded.
             */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* Unknown cc state: have the helper compute the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
609
/*
 * MULScc: one step of the SPARC multiply-step instruction.
 * Performs dst = ((N^V) << 31 | src1 >> 1) + (Y & 1 ? src2 : 0) and
 * shifts bit 0 of src1 into the top of %y.
 */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    /* b2 = src1 & 1;
       env->y = (b2 << 31) | (env->y >> 1); */
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(t0, cpu_y, 1);
    tcg_gen_andi_tl(t0, t0, 0x7fffffff);
    tcg_gen_or_tl(t0, t0, r_temp);
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);

    /* b1 = N ^ V; */
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    /* T0 = (b1 << 31) | (T0 >> 1);
     * src1 = T0;
     */
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
655
/*
 * 32x32 -> 64 multiply, signed when SIGN_EXT is set.  The low 32 bits
 * land in DST (full 64 bits on a 64-bit target) and the high 32 bits
 * in %y.
 */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
683
/* UMUL: unsigned 32x32 -> 64 multiply (high half goes to %y). */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    gen_op_multiply(dst, src1, src2, 0);
}
689
/* SMUL: signed 32x32 -> 64 multiply (high half goes to %y). */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    gen_op_multiply(dst, src1, src2, 1);
}
695
696
/* Branch always: dst = 1. */
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
701
702
/* Equal: dst = Z. */
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
707
708
/* Less or equal: dst = Z | (N ^ V). */
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
719
720
/* Less: dst = N ^ V. */
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
729
730
/* Less or equal unsigned: dst = C | Z. */
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
739
740
/* Carry set (less unsigned): dst = C. */
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
745
746
/* Overflow set: dst = V. */
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
751
752
/* Branch never: dst = 0. */
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
757
758
/* Negative: dst = N. */
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
763
764
/* Not equal: dst = !Z. */
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
770
771
/* Greater: dst = !(Z | (N ^ V)). */
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
777
778
/* Greater or equal: dst = !(N ^ V). */
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
784
785
/* Greater unsigned: dst = !(C | Z). */
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
791
792
/* Carry clear (greater or equal unsigned): dst = !C. */
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
798
799
/* Positive: dst = !N. */
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
805
806
/* Overflow clear: dst = !V. */
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
812
813
814
815
816
817
818
819
/* Extract bit 0 of the selected FCC field of FSR value SRC into REG. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
826
/* Extract bit 1 of the selected FCC field of FSR value SRC into REG. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
833
834
/* FCC value 1, 2 or 3 (not equal): dst = FCC0 | FCC1. */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
844
845
/* FCC value 1 or 2 (less or greater): dst = FCC0 ^ FCC1. */
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
855
856
/* FCC value 1 or 3 (unordered or less): dst = FCC0. */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
862
863
/* FCC value 1 (less): dst = FCC0 & !FCC1. */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
873
874
/* FCC value 2 or 3 (unordered or greater): dst = FCC1. */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
880
881
/* FCC value 2 (greater): dst = !FCC0 & FCC1. */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}
891
892
/* FCC value 3 (unordered): dst = FCC0 & FCC1. */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
902
903
/* FCC value 0 (equal): dst = !(FCC0 | FCC1). */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
914
915
/* FCC value 0 or 3 (unordered or equal): dst = !(FCC0 ^ FCC1). */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
926
927
/* FCC value 0 or 2 (greater or equal): dst = !FCC0. */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
934
935
/* Not FCC value 1 (unordered, greater or equal): dst = !(FCC0 & !FCC1). */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
946
947
/* FCC value 0 or 1 (less or equal): dst = !FCC1. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
954
955
/* Not FCC value 2 (unordered, less or equal): dst = !(!FCC0 & FCC1). */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
966
967
/* Not FCC value 3 (ordered): dst = !(FCC0 & FCC1). */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
978
/* Two-way TB exit: go to PC1 when R_COND is non-zero, else to PC2. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
991
/*
 * Conditional branch with the annul bit set: taken -> npc/pc1;
 * not taken -> the delay slot is annulled, so skip past it.
 * Ends the TB.
 */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}
1006
/*
 * Conditional branch without the annul bit: the delay slot always
 * executes, so record the two possible npc values (JUMP_PC) or, when
 * npc is already dynamic, select the new npc with a movcond.
 */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;       /* branch taken */
        dc->jump_pc[1] = npc + 4;   /* branch not taken */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        /* npc = (cond != 0) ? pc1 : npc + 4 */
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1031
/* Resolve a pending JUMP_PC: npc = cond ? jump_pc[0] : jump_pc[1]. */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1044
1045
1046
1047static inline void flush_cond(DisasContext *dc)
1048{
1049 if (dc->npc == JUMP_PC) {
1050 gen_generic_branch(dc);
1051 dc->npc = DYNAMIC_PC;
1052 }
1053}
1054
/* Make cpu_npc hold the correct next-pc value for this point. */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1064
1065static inline void update_psr(DisasContext *dc)
1066{
1067 if (dc->cc_op != CC_OP_FLAGS) {
1068 dc->cc_op = CC_OP_FLAGS;
1069 gen_helper_compute_psr(cpu_env);
1070 }
1071}
1072
/* Synchronize cpu_pc and cpu_npc with the translator's view. */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1078
/* Raise exception WHICH at the current pc/npc and end the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->is_br = 1;
}
1089
1090static void gen_check_align(TCGv addr, int mask)
1091{
1092 TCGv_i32 r_mask = tcg_const_i32(mask);
1093 gen_helper_check_align(cpu_env, addr, r_mask);
1094 tcg_temp_free_i32(r_mask);
1095}
1096
/* Advance pc to npc (used before indirect jumps), resolving any
   pending JUMP_PC state first. */
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1110
/* Sequential flow: pc = npc; npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1116
/* Free the comparison operands unless they alias TCG globals (g1/g2). */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1126
/*
 * Reduce integer condition COND (against icc or, with XCC set, xcc)
 * to a DisasCompare.  Where the lazy cc state permits, the compare is
 * expressed directly over the saved operands; otherwise the PSR is
 * computed and the condition evaluated bit-by-bit.
 */
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    /* TCG condition for each branch cond after a SUBcc; -1 entries
       (neg/pos, vs/vc) cannot be expressed as a compare of the
       operands and take the do_compare_dst_0 / do_dynamic paths. */
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* vs  */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* vc  */
    };

    /* TCG condition for each branch cond after a logic op, compared
       against zero (C and V are known to be clear). */
    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            /* N after a sub is the sign of the result: compare vs 0. */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7:  /* vs */
        case 15: /* vc */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
1300
/*
 * Reduce float condition COND on %fcc<CC> to a DisasCompare.  Always
 * produces a 0/1 boolean in c1 compared against 0.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Offset of the chosen fcc field relative to fcc0 in the FSR
       (fcc1..fcc3 live at FSR bits 32, 34 and 36; fcc0 at bit 10). */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1380
/* Evaluate integer condition COND into R_DST as a 0/1 value. */
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
1396
/* Evaluate float condition COND on %fcc<CC> into R_DST as a 0/1 value. */
static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
1411
1412#ifdef TARGET_SPARC64
1413
/* TCG condition for each register-compare branch cond (BPr); the -1
   entries are reserved encodings. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1424
/* Build a DisasCompare for a register-vs-zero condition; the condition
   is inverted so callers can branch over the "taken" code. */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;     /* r_src is a global, do not free */
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1434
/* Materialize register-condition COND applied to R_SRC as a boolean
 * value in R_DST.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The comparison is never pre-computed as a bool for reg conds.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1445#endif
1446
/* Translate a Bicc/BPcc integer branch.  OFFSET is the displacement in
 * bytes, CC selects icc vs xcc, and the condition and annul bit come
 * from INSN.  Updates dc->pc/npc for the static cases and emits
 * conditional-branch code otherwise.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* In 32-bit address-mask mode branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot is skipped as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional: evaluate the condition into cpu_cond */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1486
/* Translate an FBfcc/FBPfcc floating-point branch.  Same structure as
 * do_branch, but the condition is evaluated from the fcc field CC of
 * %fsr via gen_fcond.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* In 32-bit address-mask mode branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot is skipped as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional: evaluate the fp condition into cpu_cond */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1526
1527#ifdef TARGET_SPARC64
/* Translate a BPr branch (v9 branch on register contents).  R_REG is
 * the register to test against zero; the rcond field of INSN selects
 * the test.  There are no static (always/never) cases here.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* In 32-bit address-mask mode branch targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1545
/* FCMPs: compare single-precision R_RS1/R_RS2 and update the fcc field
 * selected by FCCNO (fcc0..fcc3) in %fsr via the per-field helper.  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1563
/* FCMPd: compare double-precision R_RS1/R_RS2 and update fcc FCCNO.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1581
/* FCMPq: compare the quad-precision operands previously loaded into the
 * QT0/QT1 staging slots and update fcc FCCNO.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1599
/* FCMPEs: like gen_op_fcmps but signals (traps) on unordered
 * operands.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1617
/* FCMPEd: like gen_op_fcmpd but signals (traps) on unordered
 * operands.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1635
/* FCMPEq: like gen_op_fcmpq (operands in QT0/QT1) but signals on
 * unordered operands.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1653
1654#else
1655
/* Pre-v9 has only one fcc field, so FCCNO is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1660
/* Pre-v9 FCMPd; FCCNO is ignored (single fcc field).  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1665
/* Pre-v9 FCMPq; operands in QT0/QT1, FCCNO ignored.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1670
/* Pre-v9 FCMPEs (signals on unordered); FCCNO ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1675
/* Pre-v9 FCMPEd (signals on unordered); FCCNO ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1680
/* Pre-v9 FCMPEq (signals on unordered); operands in QT0/QT1.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1685#endif
1686
/* Raise an fp_exception trap with the FTT field of %fsr set to
 * FSR_FLAGS (the old FTT bits are cleared first).  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1693
/* If the FPU is disabled, raise an fp_disabled trap and return nonzero
 * so the caller can abandon translation of the insn.  In user mode the
 * FPU is always enabled, so this is compiled out.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1704
/* Clear the current IEEE exception (CEXC) and trap-type (FTT) fields
 * of %fsr before executing an fp operation.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1709
/* frd = GEN(frs): unary single-precision op with IEEE exception
 * check after the helper.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1723
/* frd = GEN(frs): unary single-precision op that cannot raise IEEE
 * exceptions ("ne" = no exception check, no env).  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
1736
/* frd = GEN(frs1, frs2): binary single-precision op with IEEE
 * exception check.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1751
1752#ifdef TARGET_SPARC64
/* frd = GEN(frs1, frs2): binary single-precision op without IEEE
 * exception check (VIS-style ops).  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1766#endif
1767
/* frd = GEN(frs): unary double-precision op with IEEE exception
 * check.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1781
1782#ifdef TARGET_SPARC64
/* frd = GEN(frs): unary double-precision op without IEEE exception
 * check.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1795#endif
1796
/* frd = GEN(frs1, frs2): binary double-precision op with IEEE
 * exception check.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1811
1812#ifdef TARGET_SPARC64
/* frd = GEN(frs1, frs2): binary double-precision op without IEEE
 * exception check (VIS-style ops).  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1826
/* frd = GEN(%gsr, frs1, frs2): VIS op that also consumes the Graphics
 * Status Register (e.g. faligndata/bshuffle).  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1840
/* frd = GEN(frd, frs1, frs2): VIS op that reads the old destination
 * value as a third source (e.g. pdist accumulation).  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1855#endif
1856
/* qrd = GEN(qrs): unary quad-precision op.  Quad operands are staged
 * through the QT0/QT1 env slots rather than passed directly.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1868
1869#ifdef TARGET_SPARC64
/* qrd = GEN(qrs): unary quad-precision op without IEEE exception
 * check; operands staged through QT0/QT1.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1880#endif
1881
/* qrd = GEN(qrs1, qrs2): binary quad-precision op with IEEE exception
 * check; operands staged through QT0/QT1.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1894
/* frd(double) = GEN(frs1, frs2) with single-precision sources and IEEE
 * exception check (e.g. fsmuld).  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1910
/* qrd = GEN(frs1, frs2) with double-precision sources; quad result is
 * delivered through QT0 (e.g. fdmulq).  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1925
1926#ifdef TARGET_SPARC64
/* frd(double) = GEN(frs(single)) conversion with IEEE exception
 * check.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1942
/* frd(double) = GEN(frs(single)) conversion without the IEEE exception
 * check; the helper still takes cpu_env.  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1956
/* frd(single) = GEN(frs(double)) conversion with IEEE exception
 * check.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1971
/* frd(single) = GEN(qrs) conversion; quad source staged through QT1,
 * with IEEE exception check.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1985
/* frd(double) = GEN(qrs) conversion; quad source staged through QT1,
 * with IEEE exception check.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1999
/* qrd = GEN(frs(single)) conversion without IEEE exception check;
 * quad result delivered through QT0.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2012
/* qrd = GEN(frs(double)) conversion without IEEE exception check;
 * quad result delivered through QT0.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2025
/* SWAP: atomically exchange SRC with the memory word at ADDR, placing
 * the old memory value in DST.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2032
/* LDSTUB: atomically load the byte at ADDR into DST and store 0xff
 * back (the classic SPARC test-and-set primitive).  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
    tcg_temp_free(m1);
}
2040
2041
2042#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI, deciding how the access is generated.  */
typedef enum {
    GET_ASI_HELPER,     /* fall back to the generic ld/st_asi helper */
    GET_ASI_EXCP,       /* an exception was raised; emit nothing */
    GET_ASI_DIRECT,     /* plain qemu ld/st with a resolved mmu index */
    GET_ASI_DTWINX,     /* 128-bit twin load/store (ldda/stda) */
    GET_ASI_BLOCK,      /* 64-byte block load/store to fp registers */
    GET_ASI_SHORT,      /* 8/16-bit short fp load/store */
    GET_ASI_BCOPY,      /* sparc32 block-copy ASI */
    GET_ASI_BFILL,      /* sparc32 block-fill ASI */
} ASIType;

/* Fully decoded ASI for one memory access.  */
typedef struct {
    ASIType type;
    int asi;            /* raw 8-bit ASI number */
    int mem_idx;        /* mmu index to use for direct accesses */
    TCGMemOp memop;     /* access size/endianness after ASI adjustment */
} DisasASI;
2060
/* Decode the ASI of memory-access insn INSN.  Classifies the ASI into
 * an ASIType, resolves the MMU index for direct accesses, and (v9)
 * flips MEMOP's endianness for the little-endian ASIs.  May raise an
 * exception, in which case type is GET_ASI_EXCP.  */
static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all ASIs are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* LEON additionally accepts ASI_USERDATA from user mode
                * when CASA is implemented -- NOTE(review): inferred from
                * the CPU_FEATURE_CASA test below; confirm against the
                * LEON/GRLIB documentation.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:      /* user data space */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA:    /* supervisor data space */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:      /* MMU-bypass (physical) accesses */
        case ASI_LEON_BYPASS:
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY:       /* block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL:       /* block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        /* Immediate ASI: use the value latched from the insn at decode */
        asi = dc->asi;
    }

    /* With v9, all ASIs below 0x80 are privileged.  */
    /* NOTE(review): hypervisor-only ASIs are not distinguished from
       supervisor ones here -- presumably a hypervisor check would also
       belong in this test; confirm against the UA2005/UA2011 specs.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: pick the MMU index implied by the ASI.  */
        switch (asi) {
        case ASI_REAL:              /* bypass / physical accesses */
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:                 /* nucleus context */
        case ASI_NL:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            mem_idx = MMU_NUCLEUS_IDX;
            break;
        case ASI_AIUP:              /* as-if-user, primary context */
        case ASI_AIUPL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:              /* as-if-user, secondary context */
        case ASI_AIUSL:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:                 /* secondary context */
        case ASI_SL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:                 /* primary context: keep dc->mem_idx */
        case ASI_PL:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: pick the access type.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian ASIs all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2264
/* Integer load alternate: load MEMOP-sized data at ADDR via the ASI of
 * INSN into DST.  Non-direct ASIs go through the slow ld_asi helper.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX:    /* reserved for ldda */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            /* The helper may raise an exception: sync pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; truncate.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2302
/* Integer store alternate: store SRC (MEMOP-sized) to ADDR via the ASI
 * of INSN.  Handles the sparc32 block-copy ASI inline; everything else
 * that is not direct goes through the slow st_asi helper.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX:    /* reserved for stda */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  Both
         * addresses are forced down to 32-byte alignment below.
         * NOTE(review): the observed Linux user of this ASI operates on
         * 32-byte-aligned cachelines, which matches the masking --
         * confirm exact alignment semantics against SuperSPARC docs.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host's endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.
               End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2375
/* SWAPA: atomic exchange through an alternate space.  Only direct ASIs
 * are supported; others fault.  */
static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
                         TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_TEUL);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
        break;
    default:
        /* NOTE(review): presumably this should be a DAE/invalid-asi
           style data access exception -- confirm.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2393
/* CASA: 32-bit compare-and-swap of %rd with memory at ADDR, comparing
 * against CMPV; old memory value is written back to %rd.  */
static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
                        int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUL);
    TCGv oldv;

    switch (da.type) {
    case GET_ASI_EXCP:
        return;
    case GET_ASI_DIRECT:
        oldv = tcg_temp_new();
        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
                                  da.mem_idx, da.memop);
        gen_store_gpr(dc, rd, oldv);
        tcg_temp_free(oldv);
        break;
    default:
        /* NOTE(review): presumably this should be a DAE/invalid-asi
           style data access exception -- confirm.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2416
/* LDSTUBA: test-and-set byte through an alternate space.  Non-direct
 * ASIs are emulated non-atomically via the ld/st helpers (valid only
 * outside parallel execution).  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* NOTE(review): in theory an invalid ASI should fault here, but
           real firmware is known to use ldstuba on control ASIs, so fall
           back to the generic helpers -- confirm against hardware.  */
        if (parallel_cpus) {
            /* Cannot emulate the ld+st pair atomically; punt.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB; the helper may have touched control state.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2456#endif
2457
2458#ifdef TARGET_SPARC64
/* Floating-point load alternate (ldfa/lddfa/ldqfa): load SIZE bytes
 * (4, 8 or 16) at ADDR into fp register RD via the ASI of INSN.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            /* lddfa requires only 4-byte alignment.  */
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* ldqfa: two 8-byte loads, 4-byte alignment each.  Load the
               first half into a temp so a fault on the second half does
               not clobber rd.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks the required 64-byte
               alignment; the rest use the plain memop.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* NOTE(review): the remaining ASIs valid for fp loads are
               presumably the no-fault ASIs; the generic integer ld_asi
               helper is reused for them -- confirm against the v9/UA2011
               ASI tables.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the direct case, buffer the first half.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2572
/* Floating-point store alternate (stfa/stdfa/stqfa): store SIZE bytes
 * (4, 8 or 16) from fp register RD to ADDR via the ASI of INSN.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            /* stdfa requires only 4-byte alignment.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* NOTE(review): architecturally only 4-byte alignment is
               required, but requiring 16-byte alignment on the first
               store avoids having to probe the second page before any
               bytes are written, keeping the two-store sequence
               restartable -- confirm this matches the trap-handler
               contract in the v9 manual.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks the required 64-byte
               alignment; the rest use the plain memop.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* NOTE(review): no helper fallback is implemented for fp stores
           through other ASIs (e.g. partial-store), so they are rejected
           -- confirm which ASIs real hardware accepts here.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2655
/* v9 LDDA: 128-bit twin load, or legacy 64-bit load into the even/odd
 * register pair RD/RD+1.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;

    case GET_ASI_DTWINX:
        /* Twin load: two 64-bit words, 16-byte alignment on the first */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* NOTE(review): in theory all ASIs valid for ldda are handled
           above and this could fault, but real hardware (and guests
           such as FreeBSD, e.g. wrt IC tag ASIs) rely on the helper
           fallback -- confirm before tightening.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above: same 32-bit writeback swap for LE ASIs.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2721
/* v9 STDA: 128-bit twin store, or legacy 64-bit store of the even/odd
 * register pair RD/RD+1 (HI is the even register's value).  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DTWINX:
        /* Twin store: two 64-bit words, 16-byte alignment on the first */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result
               is byte swapped.  We will perform one 64-bit LE store, so
               now we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* NOTE(review): in theory all ASIs valid for stda are handled
           above, but keep the helper fallback for the rest -- confirm
           which additional ASIs hardware accepts.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above: same 32-bit swap for LE ASIs.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2781
2782static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2783 int insn, int rd)
2784{
2785 DisasASI da = get_asi(dc, insn, MO_TEQ);
2786 TCGv oldv;
2787
2788 switch (da.type) {
2789 case GET_ASI_EXCP:
2790 return;
2791 case GET_ASI_DIRECT:
2792 oldv = tcg_temp_new();
2793 tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2794 da.mem_idx, da.memop);
2795 gen_store_gpr(dc, rd, oldv);
2796 tcg_temp_free(oldv);
2797 break;
2798 default:
2799
2800 gen_exception(dc, TT_DATA_ACCESS);
2801 break;
2802 }
2803}
2804
2805#elif !defined(CONFIG_USER_ONLY)
/* Load a doubleword through an alternate address space into the
   even/odd GPR pair rd / rd|1 -- 32-bit (pre-V9) LDDA variant.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* NOTE(review): the destination temps are deliberately fetched
       before get_asi() resolves the ASI (which may itself raise an
       exception); preserve this ordering -- original rationale comment
       was stripped, confirm against upstream QEMU.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already raised; just release the temp.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Slow path through the ld_asi helper, which may fault.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* Split: low 32 bits -> odd register, high 32 bits -> even.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2843
/* Store the GPR pair (rd, rd+1) as a doubleword through an alternate
   address space (STDA) -- 32-bit (pre-V9) variant.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Combine the register pair into a single 64-bit value.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Block fill: replicate the 64-bit value over a 32-byte,
           8-byte-aligned region.
           NOTE(review): the address is only aligned down to 8 bytes
           here, while a cacheline-style fill would suggest 32-byte
           alignment; original comment was stripped -- confirm intent
           against upstream QEMU.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        /* Unusual ASI: fall back to the slow-path helper, which may
           fault -- synchronize PC/NPC first.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2896#endif
2897
2898static TCGv get_src1(DisasContext *dc, unsigned int insn)
2899{
2900 unsigned int rs1 = GET_FIELD(insn, 13, 17);
2901 return gen_load_gpr(dc, rs1);
2902}
2903
2904static TCGv get_src2(DisasContext *dc, unsigned int insn)
2905{
2906 if (IS_IMM) {
2907 target_long simm = GET_FIELDs(insn, 19, 31);
2908 TCGv t = get_temp_tl(dc);
2909 tcg_gen_movi_tl(t, simm);
2910 return t;
2911 } else {
2912 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2913 return gen_load_gpr(dc, rs2);
2914 }
2915}
2916
2917#ifdef TARGET_SPARC64
/* Conditional move of a single-precision FP register:
   frd = cmp-holds ? frs : frd.  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* The comparison operands are full-width values; reduce the
       condition to a 32-bit 0/non-0 value so we can use movcond_i32
       on the 32-bit FP registers.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds a boolean result of the comparison.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0) ? s1 : s2 -- keep the old rd value unless the
       condition holds.  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2946
2947static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2948{
2949 TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2950 tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2951 gen_load_fpr_D(dc, rs),
2952 gen_load_fpr_D(dc, rd));
2953 gen_store_fpr_D(dc, rd, dst);
2954}
2955
2956static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2957{
2958 int qd = QFPREG(rd);
2959 int qs = QFPREG(rs);
2960
2961 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2962 cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2963 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2964 cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2965
2966 gen_update_fprs_dirty(dc, qd);
2967}
2968
2969#ifndef CONFIG_USER_ONLY
/* Compute a host pointer to the trap_state entry for the current trap
   level: r_tsptr = &env->ts[env->tl & MAXTL_MASK].  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* Load env->tl.  */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* Clamp the index into [0, MAXTL_MASK] so a corrupt tl cannot
       index past the ts[] array.  */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* Scale the index to a byte offset and start from &env->ts[0].  */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* r_tsptr += (ptr)offset -- widen the i32 offset to pointer size.  */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2994#endif
2995
/* Implement the VIS EDGE{8,16,32}[L][cc] instructions: compute the
   byte/halfword/word edge mask for a partial store spanning s1..s2,
   optionally setting the integer condition codes as for subcc.  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* EDGE..cc also performs subcc(s1, s2).  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* The left and right edge masks are looked up in two constant
       tables, TABL and TABR, packed into 64-bit immediates.  The table
       index is the low address bits of the input:
         index = (input & imask) << shift
         mask  = (table >> index) & omask
       imask/shift/omask depend on the element width.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        /* Callers only pass 8/16/32.  */
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    /* Table lookups: dst = left mask for s1, lo2 = right mask for s2.  */
    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the aligned addresses; under the 32-bit address mask
       only the low 32 bits participate.
       NOTE(review): these andi ops write back into s1/s2, which the
       callers obtain from gen_load_gpr and may therefore alias the
       global GPR TCGvs -- confirm this in-place clobber of the source
       registers is intended (check against upstream QEMU).  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want
         dst = (s1 == s2 ? left-mask : left-mask & right-mask).
       dst already holds the left mask, so this reduces to
         dst &= (s1 == s2 ? -1 : lo2)
       which we compute branch-free as
         lo2 |= -(s1 == s2); dst &= lo2.  */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3098
3099static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3100{
3101 TCGv tmp = tcg_temp_new();
3102
3103 tcg_gen_add_tl(tmp, s1, s2);
3104 tcg_gen_andi_tl(dst, tmp, -8);
3105 if (left) {
3106 tcg_gen_neg_tl(tmp, tmp);
3107 }
3108 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3109
3110 tcg_temp_free(tmp);
3111}
3112
/* VIS FALIGNDATA: concatenate s1:s2 and extract the 64-bit window
   starting at byte offset GSR.align.  */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* Left part: s1 shifted up by 8 * GSR.align bits.  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* Right part: s2 shifted down by 64 - 8*align bits.  When align
       is 0 that would be a shift by 64, which is not defined in TCG;
       split it into a shift by (shift ^ 63) followed by a shift by 1
       so the count stays in [0, 63].  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
3137#endif
3138
3139#define CHECK_IU_FEATURE(dc, FEATURE) \
3140 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
3141 goto illegal_insn;
3142#define CHECK_FPU_FEATURE(dc, FEATURE) \
3143 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
3144 goto nfpu_insn;
3145
3146
3147static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3148{
3149 unsigned int opc, rs1, rs2, rd;
3150 TCGv cpu_src1, cpu_src2;
3151 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3152 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3153 target_long simm;
3154
3155 opc = GET_FIELD(insn, 0, 1);
3156 rd = GET_FIELD(insn, 2, 6);
3157
3158 switch (opc) {
3159 case 0:
3160 {
3161 unsigned int xop = GET_FIELD(insn, 7, 9);
3162 int32_t target;
3163 switch (xop) {
3164#ifdef TARGET_SPARC64
3165 case 0x1:
3166 {
3167 int cc;
3168
3169 target = GET_FIELD_SP(insn, 0, 18);
3170 target = sign_extend(target, 19);
3171 target <<= 2;
3172 cc = GET_FIELD_SP(insn, 20, 21);
3173 if (cc == 0)
3174 do_branch(dc, target, insn, 0);
3175 else if (cc == 2)
3176 do_branch(dc, target, insn, 1);
3177 else
3178 goto illegal_insn;
3179 goto jmp_insn;
3180 }
3181 case 0x3:
3182 {
3183 target = GET_FIELD_SP(insn, 0, 13) |
3184 (GET_FIELD_SP(insn, 20, 21) << 14);
3185 target = sign_extend(target, 16);
3186 target <<= 2;
3187 cpu_src1 = get_src1(dc, insn);
3188 do_branch_reg(dc, target, insn, cpu_src1);
3189 goto jmp_insn;
3190 }
3191 case 0x5:
3192 {
3193 int cc = GET_FIELD_SP(insn, 20, 21);
3194 if (gen_trap_ifnofpu(dc)) {
3195 goto jmp_insn;
3196 }
3197 target = GET_FIELD_SP(insn, 0, 18);
3198 target = sign_extend(target, 19);
3199 target <<= 2;
3200 do_fbranch(dc, target, insn, cc);
3201 goto jmp_insn;
3202 }
3203#else
3204 case 0x7:
3205 {
3206 goto ncp_insn;
3207 }
3208#endif
3209 case 0x2:
3210 {
3211 target = GET_FIELD(insn, 10, 31);
3212 target = sign_extend(target, 22);
3213 target <<= 2;
3214 do_branch(dc, target, insn, 0);
3215 goto jmp_insn;
3216 }
3217 case 0x6:
3218 {
3219 if (gen_trap_ifnofpu(dc)) {
3220 goto jmp_insn;
3221 }
3222 target = GET_FIELD(insn, 10, 31);
3223 target = sign_extend(target, 22);
3224 target <<= 2;
3225 do_fbranch(dc, target, insn, 0);
3226 goto jmp_insn;
3227 }
3228 case 0x4:
3229
3230 if (rd) {
3231 uint32_t value = GET_FIELD(insn, 10, 31);
3232 TCGv t = gen_dest_gpr(dc, rd);
3233 tcg_gen_movi_tl(t, value << 10);
3234 gen_store_gpr(dc, rd, t);
3235 }
3236 break;
3237 case 0x0:
3238 default:
3239 goto illegal_insn;
3240 }
3241 break;
3242 }
3243 break;
3244 case 1:
3245 {
3246 target_long target = GET_FIELDs(insn, 2, 31) << 2;
3247 TCGv o7 = gen_dest_gpr(dc, 15);
3248
3249 tcg_gen_movi_tl(o7, dc->pc);
3250 gen_store_gpr(dc, 15, o7);
3251 target += dc->pc;
3252 gen_mov_pc_npc(dc);
3253#ifdef TARGET_SPARC64
3254 if (unlikely(AM_CHECK(dc))) {
3255 target &= 0xffffffffULL;
3256 }
3257#endif
3258 dc->npc = target;
3259 }
3260 goto jmp_insn;
3261 case 2:
3262 {
3263 unsigned int xop = GET_FIELD(insn, 7, 12);
3264 TCGv cpu_dst = get_temp_tl(dc);
3265 TCGv cpu_tmp0;
3266
3267 if (xop == 0x3a) {
3268 int cond = GET_FIELD(insn, 3, 6);
3269 TCGv_i32 trap;
3270 TCGLabel *l1 = NULL;
3271 int mask;
3272
3273 if (cond == 0) {
3274
3275 break;
3276 }
3277
3278 save_state(dc);
3279
3280 if (cond != 8) {
3281
3282 DisasCompare cmp;
3283#ifdef TARGET_SPARC64
3284
3285 int cc = GET_FIELD_SP(insn, 11, 12);
3286 if (cc == 0) {
3287 gen_compare(&cmp, 0, cond, dc);
3288 } else if (cc == 2) {
3289 gen_compare(&cmp, 1, cond, dc);
3290 } else {
3291 goto illegal_insn;
3292 }
3293#else
3294 gen_compare(&cmp, 0, cond, dc);
3295#endif
3296 l1 = gen_new_label();
3297 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3298 cmp.c1, cmp.c2, l1);
3299 free_compare(&cmp);
3300 }
3301
3302 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3303 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3304
3305
3306
3307
3308 trap = tcg_temp_new_i32();
3309
3310 rs1 = GET_FIELD_SP(insn, 14, 18);
3311 if (IS_IMM) {
3312 rs2 = GET_FIELD_SP(insn, 0, 6);
3313 if (rs1 == 0) {
3314 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3315
3316 mask = 0;
3317 } else {
3318 TCGv t1 = gen_load_gpr(dc, rs1);
3319 tcg_gen_trunc_tl_i32(trap, t1);
3320 tcg_gen_addi_i32(trap, trap, rs2);
3321 }
3322 } else {
3323 TCGv t1, t2;
3324 rs2 = GET_FIELD_SP(insn, 0, 4);
3325 t1 = gen_load_gpr(dc, rs1);
3326 t2 = gen_load_gpr(dc, rs2);
3327 tcg_gen_add_tl(t1, t1, t2);
3328 tcg_gen_trunc_tl_i32(trap, t1);
3329 }
3330 if (mask != 0) {
3331 tcg_gen_andi_i32(trap, trap, mask);
3332 tcg_gen_addi_i32(trap, trap, TT_TRAP);
3333 }
3334
3335 gen_helper_raise_exception(cpu_env, trap);
3336 tcg_temp_free_i32(trap);
3337
3338 if (cond == 8) {
3339
3340 dc->is_br = 1;
3341 goto jmp_insn;
3342 } else {
3343
3344 gen_set_label(l1);
3345 break;
3346 }
3347 } else if (xop == 0x28) {
3348 rs1 = GET_FIELD(insn, 13, 17);
3349 switch(rs1) {
3350 case 0:
3351#ifndef TARGET_SPARC64
3352 case 0x01 ... 0x0e:
3353
3354
3355 case 0x0f:
3356
3357 case 0x10 ... 0x1f:
3358
3359
3360
3361 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3362 TCGv t = gen_dest_gpr(dc, rd);
3363
3364 tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3365 gen_store_gpr(dc, rd, t);
3366 break;
3367 }
3368#endif
3369 gen_store_gpr(dc, rd, cpu_y);
3370 break;
3371#ifdef TARGET_SPARC64
3372 case 0x2:
3373 update_psr(dc);
3374 gen_helper_rdccr(cpu_dst, cpu_env);
3375 gen_store_gpr(dc, rd, cpu_dst);
3376 break;
3377 case 0x3:
3378 tcg_gen_movi_tl(cpu_dst, dc->asi);
3379 gen_store_gpr(dc, rd, cpu_dst);
3380 break;
3381 case 0x4:
3382 {
3383 TCGv_ptr r_tickptr;
3384 TCGv_i32 r_const;
3385
3386 r_tickptr = tcg_temp_new_ptr();
3387 r_const = tcg_const_i32(dc->mem_idx);
3388 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3389 offsetof(CPUSPARCState, tick));
3390 gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3391 r_const);
3392 tcg_temp_free_ptr(r_tickptr);
3393 tcg_temp_free_i32(r_const);
3394 gen_store_gpr(dc, rd, cpu_dst);
3395 }
3396 break;
3397 case 0x5:
3398 {
3399 TCGv t = gen_dest_gpr(dc, rd);
3400 if (unlikely(AM_CHECK(dc))) {
3401 tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3402 } else {
3403 tcg_gen_movi_tl(t, dc->pc);
3404 }
3405 gen_store_gpr(dc, rd, t);
3406 }
3407 break;
3408 case 0x6:
3409 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3410 gen_store_gpr(dc, rd, cpu_dst);
3411 break;
3412 case 0xf:
3413 break;
3414 case 0x13:
3415 if (gen_trap_ifnofpu(dc)) {
3416 goto jmp_insn;
3417 }
3418 gen_store_gpr(dc, rd, cpu_gsr);
3419 break;
3420 case 0x16:
3421 tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3422 offsetof(CPUSPARCState, softint));
3423 gen_store_gpr(dc, rd, cpu_dst);
3424 break;
3425 case 0x17:
3426 gen_store_gpr(dc, rd, cpu_tick_cmpr);
3427 break;
3428 case 0x18:
3429 {
3430 TCGv_ptr r_tickptr;
3431 TCGv_i32 r_const;
3432
3433 r_tickptr = tcg_temp_new_ptr();
3434 r_const = tcg_const_i32(dc->mem_idx);
3435 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3436 offsetof(CPUSPARCState, stick));
3437 gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3438 r_const);
3439 tcg_temp_free_ptr(r_tickptr);
3440 tcg_temp_free_i32(r_const);
3441 gen_store_gpr(dc, rd, cpu_dst);
3442 }
3443 break;
3444 case 0x19:
3445 gen_store_gpr(dc, rd, cpu_stick_cmpr);
3446 break;
3447 case 0x10:
3448 case 0x11:
3449 case 0x12:
3450 case 0x14:
3451 case 0x15:
3452#endif
3453 default:
3454 goto illegal_insn;
3455 }
3456#if !defined(CONFIG_USER_ONLY)
3457 } else if (xop == 0x29) {
3458#ifndef TARGET_SPARC64
3459 if (!supervisor(dc)) {
3460 goto priv_insn;
3461 }
3462 update_psr(dc);
3463 gen_helper_rdpsr(cpu_dst, cpu_env);
3464#else
3465 CHECK_IU_FEATURE(dc, HYPV);
3466 if (!hypervisor(dc))
3467 goto priv_insn;
3468 rs1 = GET_FIELD(insn, 13, 17);
3469 switch (rs1) {
3470 case 0:
3471
3472 break;
3473 case 1:
3474
3475 break;
3476 case 3:
3477 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3478 break;
3479 case 5:
3480 tcg_gen_mov_tl(cpu_dst, cpu_htba);
3481 break;
3482 case 6:
3483 tcg_gen_mov_tl(cpu_dst, cpu_hver);
3484 break;
3485 case 31:
3486 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3487 break;
3488 default:
3489 goto illegal_insn;
3490 }
3491#endif
3492 gen_store_gpr(dc, rd, cpu_dst);
3493 break;
3494 } else if (xop == 0x2a) {
3495 if (!supervisor(dc)) {
3496 goto priv_insn;
3497 }
3498 cpu_tmp0 = get_temp_tl(dc);
3499#ifdef TARGET_SPARC64
3500 rs1 = GET_FIELD(insn, 13, 17);
3501 switch (rs1) {
3502 case 0:
3503 {
3504 TCGv_ptr r_tsptr;
3505
3506 r_tsptr = tcg_temp_new_ptr();
3507 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3508 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3509 offsetof(trap_state, tpc));
3510 tcg_temp_free_ptr(r_tsptr);
3511 }
3512 break;
3513 case 1:
3514 {
3515 TCGv_ptr r_tsptr;
3516
3517 r_tsptr = tcg_temp_new_ptr();
3518 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3519 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3520 offsetof(trap_state, tnpc));
3521 tcg_temp_free_ptr(r_tsptr);
3522 }
3523 break;
3524 case 2:
3525 {
3526 TCGv_ptr r_tsptr;
3527
3528 r_tsptr = tcg_temp_new_ptr();
3529 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3530 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3531 offsetof(trap_state, tstate));
3532 tcg_temp_free_ptr(r_tsptr);
3533 }
3534 break;
3535 case 3:
3536 {
3537 TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3538
3539 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3540 tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3541 offsetof(trap_state, tt));
3542 tcg_temp_free_ptr(r_tsptr);
3543 }
3544 break;
3545 case 4:
3546 {
3547 TCGv_ptr r_tickptr;
3548 TCGv_i32 r_const;
3549
3550 r_tickptr = tcg_temp_new_ptr();
3551 r_const = tcg_const_i32(dc->mem_idx);
3552 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3553 offsetof(CPUSPARCState, tick));
3554 gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3555 r_tickptr, r_const);
3556 tcg_temp_free_ptr(r_tickptr);
3557 tcg_temp_free_i32(r_const);
3558 }
3559 break;
3560 case 5:
3561 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3562 break;
3563 case 6:
3564 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3565 offsetof(CPUSPARCState, pstate));
3566 break;
3567 case 7:
3568 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3569 offsetof(CPUSPARCState, tl));
3570 break;
3571 case 8:
3572 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3573 offsetof(CPUSPARCState, psrpil));
3574 break;
3575 case 9:
3576 gen_helper_rdcwp(cpu_tmp0, cpu_env);
3577 break;
3578 case 10:
3579 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3580 offsetof(CPUSPARCState, cansave));
3581 break;
3582 case 11:
3583 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3584 offsetof(CPUSPARCState, canrestore));
3585 break;
3586 case 12:
3587 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3588 offsetof(CPUSPARCState, cleanwin));
3589 break;
3590 case 13:
3591 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3592 offsetof(CPUSPARCState, otherwin));
3593 break;
3594 case 14:
3595 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3596 offsetof(CPUSPARCState, wstate));
3597 break;
3598 case 16:
3599 CHECK_IU_FEATURE(dc, GL);
3600 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3601 offsetof(CPUSPARCState, gl));
3602 break;
3603 case 26:
3604 CHECK_IU_FEATURE(dc, HYPV);
3605 if (!hypervisor(dc))
3606 goto priv_insn;
3607 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3608 break;
3609 case 31:
3610 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3611 break;
3612 case 15:
3613 default:
3614 goto illegal_insn;
3615 }
3616#else
3617 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3618#endif
3619 gen_store_gpr(dc, rd, cpu_tmp0);
3620 break;
3621 } else if (xop == 0x2b) {
3622#ifdef TARGET_SPARC64
3623 gen_helper_flushw(cpu_env);
3624#else
3625 if (!supervisor(dc))
3626 goto priv_insn;
3627 gen_store_gpr(dc, rd, cpu_tbr);
3628#endif
3629 break;
3630#endif
3631 } else if (xop == 0x34) {
3632 if (gen_trap_ifnofpu(dc)) {
3633 goto jmp_insn;
3634 }
3635 gen_op_clear_ieee_excp_and_FTT();
3636 rs1 = GET_FIELD(insn, 13, 17);
3637 rs2 = GET_FIELD(insn, 27, 31);
3638 xop = GET_FIELD(insn, 18, 26);
3639
3640 switch (xop) {
3641 case 0x1:
3642 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3643 gen_store_fpr_F(dc, rd, cpu_src1_32);
3644 break;
3645 case 0x5:
3646 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3647 break;
3648 case 0x9:
3649 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3650 break;
3651 case 0x29:
3652 CHECK_FPU_FEATURE(dc, FSQRT);
3653 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3654 break;
3655 case 0x2a:
3656 CHECK_FPU_FEATURE(dc, FSQRT);
3657 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3658 break;
3659 case 0x2b:
3660 CHECK_FPU_FEATURE(dc, FLOAT128);
3661 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3662 break;
3663 case 0x41:
3664 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3665 break;
3666 case 0x42:
3667 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3668 break;
3669 case 0x43:
3670 CHECK_FPU_FEATURE(dc, FLOAT128);
3671 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3672 break;
3673 case 0x45:
3674 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3675 break;
3676 case 0x46:
3677 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3678 break;
3679 case 0x47:
3680 CHECK_FPU_FEATURE(dc, FLOAT128);
3681 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3682 break;
3683 case 0x49:
3684 CHECK_FPU_FEATURE(dc, FMUL);
3685 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3686 break;
3687 case 0x4a:
3688 CHECK_FPU_FEATURE(dc, FMUL);
3689 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3690 break;
3691 case 0x4b:
3692 CHECK_FPU_FEATURE(dc, FLOAT128);
3693 CHECK_FPU_FEATURE(dc, FMUL);
3694 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3695 break;
3696 case 0x4d:
3697 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3698 break;
3699 case 0x4e:
3700 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3701 break;
3702 case 0x4f:
3703 CHECK_FPU_FEATURE(dc, FLOAT128);
3704 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3705 break;
3706 case 0x69:
3707 CHECK_FPU_FEATURE(dc, FSMULD);
3708 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3709 break;
3710 case 0x6e:
3711 CHECK_FPU_FEATURE(dc, FLOAT128);
3712 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3713 break;
3714 case 0xc4:
3715 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3716 break;
3717 case 0xc6:
3718 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3719 break;
3720 case 0xc7:
3721 CHECK_FPU_FEATURE(dc, FLOAT128);
3722 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3723 break;
3724 case 0xc8:
3725 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3726 break;
3727 case 0xc9:
3728 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3729 break;
3730 case 0xcb:
3731 CHECK_FPU_FEATURE(dc, FLOAT128);
3732 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3733 break;
3734 case 0xcc:
3735 CHECK_FPU_FEATURE(dc, FLOAT128);
3736 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3737 break;
3738 case 0xcd:
3739 CHECK_FPU_FEATURE(dc, FLOAT128);
3740 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3741 break;
3742 case 0xce:
3743 CHECK_FPU_FEATURE(dc, FLOAT128);
3744 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3745 break;
3746 case 0xd1:
3747 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3748 break;
3749 case 0xd2:
3750 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3751 break;
3752 case 0xd3:
3753 CHECK_FPU_FEATURE(dc, FLOAT128);
3754 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3755 break;
3756#ifdef TARGET_SPARC64
3757 case 0x2:
3758 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3759 gen_store_fpr_D(dc, rd, cpu_src1_64);
3760 break;
3761 case 0x3:
3762 CHECK_FPU_FEATURE(dc, FLOAT128);
3763 gen_move_Q(dc, rd, rs2);
3764 break;
3765 case 0x6:
3766 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3767 break;
3768 case 0x7:
3769 CHECK_FPU_FEATURE(dc, FLOAT128);
3770 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3771 break;
3772 case 0xa:
3773 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3774 break;
3775 case 0xb:
3776 CHECK_FPU_FEATURE(dc, FLOAT128);
3777 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3778 break;
3779 case 0x81:
3780 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3781 break;
3782 case 0x82:
3783 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3784 break;
3785 case 0x83:
3786 CHECK_FPU_FEATURE(dc, FLOAT128);
3787 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3788 break;
3789 case 0x84:
3790 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3791 break;
3792 case 0x88:
3793 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3794 break;
3795 case 0x8c:
3796 CHECK_FPU_FEATURE(dc, FLOAT128);
3797 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3798 break;
3799#endif
3800 default:
3801 goto illegal_insn;
3802 }
3803 } else if (xop == 0x35) {
3804#ifdef TARGET_SPARC64
3805 int cond;
3806#endif
3807 if (gen_trap_ifnofpu(dc)) {
3808 goto jmp_insn;
3809 }
3810 gen_op_clear_ieee_excp_and_FTT();
3811 rs1 = GET_FIELD(insn, 13, 17);
3812 rs2 = GET_FIELD(insn, 27, 31);
3813 xop = GET_FIELD(insn, 18, 26);
3814
3815#ifdef TARGET_SPARC64
3816#define FMOVR(sz) \
3817 do { \
3818 DisasCompare cmp; \
3819 cond = GET_FIELD_SP(insn, 10, 12); \
3820 cpu_src1 = get_src1(dc, insn); \
3821 gen_compare_reg(&cmp, cond, cpu_src1); \
3822 gen_fmov##sz(dc, &cmp, rd, rs2); \
3823 free_compare(&cmp); \
3824 } while (0)
3825
3826 if ((xop & 0x11f) == 0x005) {
3827 FMOVR(s);
3828 break;
3829 } else if ((xop & 0x11f) == 0x006) {
3830 FMOVR(d);
3831 break;
3832 } else if ((xop & 0x11f) == 0x007) {
3833 CHECK_FPU_FEATURE(dc, FLOAT128);
3834 FMOVR(q);
3835 break;
3836 }
3837#undef FMOVR
3838#endif
3839 switch (xop) {
3840#ifdef TARGET_SPARC64
3841#define FMOVCC(fcc, sz) \
3842 do { \
3843 DisasCompare cmp; \
3844 cond = GET_FIELD_SP(insn, 14, 17); \
3845 gen_fcompare(&cmp, fcc, cond); \
3846 gen_fmov##sz(dc, &cmp, rd, rs2); \
3847 free_compare(&cmp); \
3848 } while (0)
3849
3850 case 0x001:
3851 FMOVCC(0, s);
3852 break;
3853 case 0x002:
3854 FMOVCC(0, d);
3855 break;
3856 case 0x003:
3857 CHECK_FPU_FEATURE(dc, FLOAT128);
3858 FMOVCC(0, q);
3859 break;
3860 case 0x041:
3861 FMOVCC(1, s);
3862 break;
3863 case 0x042:
3864 FMOVCC(1, d);
3865 break;
3866 case 0x043:
3867 CHECK_FPU_FEATURE(dc, FLOAT128);
3868 FMOVCC(1, q);
3869 break;
3870 case 0x081:
3871 FMOVCC(2, s);
3872 break;
3873 case 0x082:
3874 FMOVCC(2, d);
3875 break;
3876 case 0x083:
3877 CHECK_FPU_FEATURE(dc, FLOAT128);
3878 FMOVCC(2, q);
3879 break;
3880 case 0x0c1:
3881 FMOVCC(3, s);
3882 break;
3883 case 0x0c2:
3884 FMOVCC(3, d);
3885 break;
3886 case 0x0c3:
3887 CHECK_FPU_FEATURE(dc, FLOAT128);
3888 FMOVCC(3, q);
3889 break;
3890#undef FMOVCC
3891#define FMOVCC(xcc, sz) \
3892 do { \
3893 DisasCompare cmp; \
3894 cond = GET_FIELD_SP(insn, 14, 17); \
3895 gen_compare(&cmp, xcc, cond, dc); \
3896 gen_fmov##sz(dc, &cmp, rd, rs2); \
3897 free_compare(&cmp); \
3898 } while (0)
3899
3900 case 0x101:
3901 FMOVCC(0, s);
3902 break;
3903 case 0x102:
3904 FMOVCC(0, d);
3905 break;
3906 case 0x103:
3907 CHECK_FPU_FEATURE(dc, FLOAT128);
3908 FMOVCC(0, q);
3909 break;
3910 case 0x181:
3911 FMOVCC(1, s);
3912 break;
3913 case 0x182:
3914 FMOVCC(1, d);
3915 break;
3916 case 0x183:
3917 CHECK_FPU_FEATURE(dc, FLOAT128);
3918 FMOVCC(1, q);
3919 break;
3920#undef FMOVCC
3921#endif
3922 case 0x51:
3923 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3924 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3925 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3926 break;
3927 case 0x52:
3928 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3929 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3930 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3931 break;
3932 case 0x53:
3933 CHECK_FPU_FEATURE(dc, FLOAT128);
3934 gen_op_load_fpr_QT0(QFPREG(rs1));
3935 gen_op_load_fpr_QT1(QFPREG(rs2));
3936 gen_op_fcmpq(rd & 3);
3937 break;
3938 case 0x55:
3939 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3940 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3941 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3942 break;
3943 case 0x56:
3944 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3945 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3946 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3947 break;
3948 case 0x57:
3949 CHECK_FPU_FEATURE(dc, FLOAT128);
3950 gen_op_load_fpr_QT0(QFPREG(rs1));
3951 gen_op_load_fpr_QT1(QFPREG(rs2));
3952 gen_op_fcmpeq(rd & 3);
3953 break;
3954 default:
3955 goto illegal_insn;
3956 }
3957 } else if (xop == 0x2) {
3958 TCGv dst = gen_dest_gpr(dc, rd);
3959 rs1 = GET_FIELD(insn, 13, 17);
3960 if (rs1 == 0) {
3961
3962 if (IS_IMM) {
3963 simm = GET_FIELDs(insn, 19, 31);
3964 tcg_gen_movi_tl(dst, simm);
3965 gen_store_gpr(dc, rd, dst);
3966 } else {
3967 rs2 = GET_FIELD(insn, 27, 31);
3968 if (rs2 == 0) {
3969 tcg_gen_movi_tl(dst, 0);
3970 gen_store_gpr(dc, rd, dst);
3971 } else {
3972 cpu_src2 = gen_load_gpr(dc, rs2);
3973 gen_store_gpr(dc, rd, cpu_src2);
3974 }
3975 }
3976 } else {
3977 cpu_src1 = get_src1(dc, insn);
3978 if (IS_IMM) {
3979 simm = GET_FIELDs(insn, 19, 31);
3980 tcg_gen_ori_tl(dst, cpu_src1, simm);
3981 gen_store_gpr(dc, rd, dst);
3982 } else {
3983 rs2 = GET_FIELD(insn, 27, 31);
3984 if (rs2 == 0) {
3985
3986 gen_store_gpr(dc, rd, cpu_src1);
3987 } else {
3988 cpu_src2 = gen_load_gpr(dc, rs2);
3989 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3990 gen_store_gpr(dc, rd, dst);
3991 }
3992 }
3993 }
3994#ifdef TARGET_SPARC64
3995 } else if (xop == 0x25) {
3996 cpu_src1 = get_src1(dc, insn);
3997 if (IS_IMM) {
3998 simm = GET_FIELDs(insn, 20, 31);
3999 if (insn & (1 << 12)) {
4000 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
4001 } else {
4002 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
4003 }
4004 } else {
4005 rs2 = GET_FIELD(insn, 27, 31);
4006 cpu_src2 = gen_load_gpr(dc, rs2);
4007 cpu_tmp0 = get_temp_tl(dc);
4008 if (insn & (1 << 12)) {
4009 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4010 } else {
4011 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4012 }
4013 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
4014 }
4015 gen_store_gpr(dc, rd, cpu_dst);
4016 } else if (xop == 0x26) {
4017 cpu_src1 = get_src1(dc, insn);
4018 if (IS_IMM) {
4019 simm = GET_FIELDs(insn, 20, 31);
4020 if (insn & (1 << 12)) {
4021 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
4022 } else {
4023 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4024 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
4025 }
4026 } else {
4027 rs2 = GET_FIELD(insn, 27, 31);
4028 cpu_src2 = gen_load_gpr(dc, rs2);
4029 cpu_tmp0 = get_temp_tl(dc);
4030 if (insn & (1 << 12)) {
4031 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4032 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
4033 } else {
4034 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4035 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4036 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
4037 }
4038 }
4039 gen_store_gpr(dc, rd, cpu_dst);
4040 } else if (xop == 0x27) {
4041 cpu_src1 = get_src1(dc, insn);
4042 if (IS_IMM) {
4043 simm = GET_FIELDs(insn, 20, 31);
4044 if (insn & (1 << 12)) {
4045 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
4046 } else {
4047 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4048 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
4049 }
4050 } else {
4051 rs2 = GET_FIELD(insn, 27, 31);
4052 cpu_src2 = gen_load_gpr(dc, rs2);
4053 cpu_tmp0 = get_temp_tl(dc);
4054 if (insn & (1 << 12)) {
4055 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4056 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
4057 } else {
4058 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4059 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4060 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
4061 }
4062 }
4063 gen_store_gpr(dc, rd, cpu_dst);
4064#endif
        } else if (xop < 0x36) {
            if (xop < 0x20) {
                /* Integer ALU ops; bit 4 of xop selects the flag-setting
                   ("cc") variant of each operation.  */
                cpu_src1 = get_src1(dc, insn);
                cpu_src2 = get_src2(dc, insn);
                switch (xop & ~0x10) {
                case 0x0: /* add */
                    if (xop & 0x10) {
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                    } else {
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
                case 0x1: /* and */
                    tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x2: /* or */
                    tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x3: /* xor */
                    tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x4: /* sub */
                    if (xop & 0x10) {
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                        dc->cc_op = CC_OP_SUB;
                    } else {
                        tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
                case 0x5: /* andn */
                    tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x6: /* orn */
                    tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x7: /* xnor */
                    tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x8: /* addx, V9 addc */
                    gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    break;
#endif
                case 0xa: /* umul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xb: /* smul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xc: /* subx, V9 subc */
                    gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    if (xop & 0x10) {
                        /* NOTE(review): only dc->cc_op is updated here, not
                           the cpu_cc_op global as in the cases above —
                           presumably the _cc helper maintains env state
                           itself; verify against the helper.  */
                        gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                           cpu_src2);
                        dc->cc_op = CC_OP_DIV;
                    } else {
                        gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                        cpu_src2);
                    }
                    break;
                case 0xf: /* sdiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    if (xop & 0x10) {
                        gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                           cpu_src2);
                        dc->cc_op = CC_OP_DIV;
                    } else {
                        gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                        cpu_src2);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else {
                cpu_src1 = get_src1(dc, insn);
                cpu_src2 = get_src2(dc, insn);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                    dc->cc_op = CC_OP_TADD;
                    break;
                case 0x21: /* tsubcc */
                    gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                    dc->cc_op = CC_OP_TSUB;
                    break;
                case 0x22: /* taddcctv: tagged add, trap on overflow */
                    gen_helper_taddcctv(cpu_dst, cpu_env,
                                        cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    dc->cc_op = CC_OP_TADDTV;
                    break;
                case 0x23: /* tsubcctv: tagged sub, trap on overflow */
                    gen_helper_tsubcctv(cpu_dst, cpu_env,
                                        cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    dc->cc_op = CC_OP_TSUBTV;
                    break;
                case 0x24: /* mulscc: multiply step, reads live flags */
                    update_psr(dc);
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                    dc->cc_op = CC_OP_ADD;
                    break;
#ifndef TARGET_SPARC64
                case 0x25:  /* sll */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        cpu_tmp0 = get_temp_tl(dc);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x26:  /* srl */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        cpu_tmp0 = get_temp_tl(dc);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x27:  /* sra */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        cpu_tmp0 = get_temp_tl(dc);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
#endif
                    case 0x30: /* wry, V9 wrasr */
                        {
                            cpu_tmp0 = get_temp_tl(dc);
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                if ((rd == 0x13) && (dc->def->features &
                                                     CPU_FEATURE_POWERDOWN)) {
                                    /* LEON3 power-down */
                                    save_state(dc);
                                    gen_helper_power_down(cpu_env);
                                }
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_tmp0);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, asi));
                                /* End TB to notice changed ASI.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0x6: /* V9 wrfprs; end TB so the new
                                         FPRS is observed */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                dc->fprs_dirty = 0;
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; /* XXX: not implemented */
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: /* saved */
                                gen_helper_saved(cpu_env);
                                break;
                            case 1: /* restored */
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                /* XXX: not implemented */
                            default:
                                goto illegal_insn;
                            }
#else
                            /* wrpsr may change CWP/flags: end the TB */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: /* tpc */
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: /* tnpc */
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: /* tstate */
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: /* tt */
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: /* tick */
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: /* tba */
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: /* pstate */
                                save_state(dc);
                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: /* tl */
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: /* pil */
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: /* cwp */
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: /* cansave */
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: /* canrestore */
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: /* cleanwin */
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: /* otherwin */
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: /* wstate */
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: /* UA2005 gl */
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, gl));
                                break;
                            case 26: /* UA2005 strand status */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* wrwim: mask to the implemented windows */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: /* hpstate */
                                /* XXX gen_op_wrhpstate(); */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: /* htstate */
                                /* XXX gen_op_wrhtstate(); */
                                break;
                            case 3: /* hintp */
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: /* htba */
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: /* hstick_cmpr */
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: /* hver readonly */
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, -1);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, -1);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    /* XXX: not implemented */
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                save_state(dc);
                cpu_src1 = get_src1(dc, insn);
                cpu_tmp0 = get_temp_tl(dc);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
                    } else {
                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
                    }
                }
                gen_helper_restore(cpu_env);
                gen_mov_pc_npc(dc);
                gen_check_align(cpu_tmp0, 3);
                tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(dc, insn);
                cpu_tmp0 = get_temp_tl(dc);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
                    } else {
                        tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
                    }
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv t = gen_dest_gpr(dc, rd);
                        tcg_gen_movi_tl(t, dc->pc);
                        gen_store_gpr(dc, rd, t);

                        gen_mov_pc_npc(dc);
                        gen_check_align(cpu_tmp0, 3);
                        gen_address_mask(dc, cpu_tmp0);
                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc);
                        gen_check_align(cpu_tmp0, 3);
                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    /* nop */
                    break;
                case 0x3c:      /* save */
                    gen_helper_save(cpu_env);
                    gen_store_gpr(dc, rd, cpu_tmp0);
                    break;
                case 0x3d:      /* restore */
                    gen_helper_restore(cpu_env);
                    gen_store_gpr(dc, rd, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0: /* done */
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done(cpu_env);
                            goto jmp_insn;
                        case 1: /* retry */
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry(cpu_env);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            /* ->address_mask */
            TCGv cpu_addr = get_temp_tl(dc);

            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
            if (xop == 0x3c || xop == 0x3e) {
                /* V9 casa/casxa : no offset */
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                if (simm != 0) {
                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
                }
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                TCGv cpu_val = gen_dest_gpr(dc, rd);

                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;

                        gen_address_mask(dc, cpu_addr);
                        t64 = tcg_temp_new_i64();
                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
                        /* low word goes to the odd register rd+1 */
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                        gen_store_gpr(dc, rd + 1, cpu_val);
                        /* high word goes to rd (stored by fallthrough) */
                        tcg_gen_shri_i64(t64, t64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_temp_free_i64(t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub */
                    gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0f:
                    /* swap, swap register with memory. Also atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    cpu_src1 = gen_load_gpr(dc, rd);
                    gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
                             dc->mem_idx, MO_TEUL);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
                    gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
                    break;
                case 0x13:      /* ldda, load double word alternate */
                    if (rd & 1) {
                        goto illegal_insn;
                    }
                    gen_ldda_asi(dc, cpu_addr, insn, rd);
5354 goto skip_move;
5355 case 0x19:
5356 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5357 break;
5358 case 0x1a:
5359 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5360 break;
5361 case 0x1d:
5362 gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5363 break;
5364 case 0x1f:
5365
5366 CHECK_IU_FEATURE(dc, SWAP);
5367 cpu_src1 = gen_load_gpr(dc, rd);
5368 gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5369 break;
5370
5371#ifndef TARGET_SPARC64
5372 case 0x30:
5373 case 0x31:
5374 case 0x33:
5375 goto ncp_insn;
5376#endif
5377#endif
5378#ifdef TARGET_SPARC64
5379 case 0x08:
5380 gen_address_mask(dc, cpu_addr);
5381 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5382 break;
5383 case 0x0b:
5384 gen_address_mask(dc, cpu_addr);
5385 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5386 break;
5387 case 0x18:
5388 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5389 break;
5390 case 0x1b:
5391 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5392 break;
5393 case 0x2d:
5394 goto skip_move;
5395 case 0x30:
5396 if (gen_trap_ifnofpu(dc)) {
5397 goto jmp_insn;
5398 }
5399 gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5400 gen_update_fprs_dirty(dc, rd);
5401 goto skip_move;
5402 case 0x33:
5403 if (gen_trap_ifnofpu(dc)) {
5404 goto jmp_insn;
5405 }
5406 gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5407 gen_update_fprs_dirty(dc, DFPREG(rd));
5408 goto skip_move;
5409 case 0x3d:
5410 goto skip_move;
5411 case 0x32:
5412 CHECK_FPU_FEATURE(dc, FLOAT128);
5413 if (gen_trap_ifnofpu(dc)) {
5414 goto jmp_insn;
5415 }
5416 gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5417 gen_update_fprs_dirty(dc, QFPREG(rd));
5418 goto skip_move;
5419#endif
5420 default:
5421 goto illegal_insn;
5422 }
5423 gen_store_gpr(dc, rd, cpu_val);
5424#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5425 skip_move: ;
5426#endif
5427 } else if (xop >= 0x20 && xop < 0x24) {
5428 if (gen_trap_ifnofpu(dc)) {
5429 goto jmp_insn;
5430 }
5431 switch (xop) {
5432 case 0x20:
5433 gen_address_mask(dc, cpu_addr);
5434 cpu_dst_32 = gen_dest_fpr_F(dc);
5435 tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5436 dc->mem_idx, MO_TEUL);
5437 gen_store_fpr_F(dc, rd, cpu_dst_32);
5438 break;
5439 case 0x21:
5440#ifdef TARGET_SPARC64
5441 gen_address_mask(dc, cpu_addr);
5442 if (rd == 1) {
5443 TCGv_i64 t64 = tcg_temp_new_i64();
5444 tcg_gen_qemu_ld_i64(t64, cpu_addr,
5445 dc->mem_idx, MO_TEQ);
5446 gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5447 tcg_temp_free_i64(t64);
5448 break;
5449 }
5450#endif
5451 cpu_dst_32 = get_temp_i32(dc);
5452 tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5453 dc->mem_idx, MO_TEUL);
5454 gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5455 break;
5456 case 0x22:
5457 CHECK_FPU_FEATURE(dc, FLOAT128);
5458 gen_address_mask(dc, cpu_addr);
5459 cpu_src1_64 = tcg_temp_new_i64();
5460 tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5461 MO_TEQ | MO_ALIGN_4);
5462 tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5463 cpu_src2_64 = tcg_temp_new_i64();
5464 tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5465 MO_TEQ | MO_ALIGN_4);
5466 gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5467 tcg_temp_free_i64(cpu_src1_64);
5468 tcg_temp_free_i64(cpu_src2_64);
5469 break;
5470 case 0x23:
5471 gen_address_mask(dc, cpu_addr);
5472 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5473 tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5474 MO_TEQ | MO_ALIGN_4);
5475 gen_store_fpr_D(dc, rd, cpu_dst_64);
5476 break;
5477 default:
5478 goto illegal_insn;
5479 }
5480 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5481 xop == 0xe || xop == 0x1e) {
5482 TCGv cpu_val = gen_load_gpr(dc, rd);
5483
5484 switch (xop) {
5485 case 0x4:
5486 gen_address_mask(dc, cpu_addr);
5487 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5488 break;
5489 case 0x5:
5490 gen_address_mask(dc, cpu_addr);
5491 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5492 break;
5493 case 0x6:
5494 gen_address_mask(dc, cpu_addr);
5495 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5496 break;
5497 case 0x7:
5498 if (rd & 1)
5499 goto illegal_insn;
5500 else {
5501 TCGv_i64 t64;
5502 TCGv lo;
5503
5504 gen_address_mask(dc, cpu_addr);
5505 lo = gen_load_gpr(dc, rd + 1);
5506 t64 = tcg_temp_new_i64();
5507 tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5508 tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5509 tcg_temp_free_i64(t64);
5510 }
5511 break;
5512#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5513 case 0x14:
5514 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5515 break;
5516 case 0x15:
5517 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5518 break;
5519 case 0x16:
5520 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5521 break;
5522 case 0x17:
5523 if (rd & 1) {
5524 goto illegal_insn;
5525 }
5526 gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5527 break;
5528#endif
5529#ifdef TARGET_SPARC64
5530 case 0x0e:
5531 gen_address_mask(dc, cpu_addr);
5532 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5533 break;
5534 case 0x1e:
5535 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5536 break;
5537#endif
5538 default:
5539 goto illegal_insn;
5540 }
5541 } else if (xop > 0x23 && xop < 0x28) {
5542 if (gen_trap_ifnofpu(dc)) {
5543 goto jmp_insn;
5544 }
5545 switch (xop) {
5546 case 0x24:
5547 gen_address_mask(dc, cpu_addr);
5548 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5549 tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5550 dc->mem_idx, MO_TEUL);
5551 break;
5552 case 0x25:
5553 {
5554#ifdef TARGET_SPARC64
5555 gen_address_mask(dc, cpu_addr);
5556 if (rd == 1) {
5557 tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5558 break;
5559 }
5560#endif
5561 tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5562 }
5563 break;
5564 case 0x26:
5565#ifdef TARGET_SPARC64
5566
5567 CHECK_FPU_FEATURE(dc, FLOAT128);
5568 gen_address_mask(dc, cpu_addr);
5569
5570
5571
5572
5573
5574 cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5575 tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5576 dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5577 tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5578 cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5579 tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5580 dc->mem_idx, MO_TEQ);
5581 break;
5582#else
5583
5584#if defined(CONFIG_USER_ONLY)
5585 goto illegal_insn;
5586#else
5587 if (!supervisor(dc))
5588 goto priv_insn;
5589 if (gen_trap_ifnofpu(dc)) {
5590 goto jmp_insn;
5591 }
5592 goto nfq_insn;
5593#endif
5594#endif
5595 case 0x27:
5596 gen_address_mask(dc, cpu_addr);
5597 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5598 tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5599 MO_TEQ | MO_ALIGN_4);
5600 break;
5601 default:
5602 goto illegal_insn;
5603 }
5604 } else if (xop > 0x33 && xop < 0x3f) {
5605 switch (xop) {
5606#ifdef TARGET_SPARC64
5607 case 0x34:
5608 if (gen_trap_ifnofpu(dc)) {
5609 goto jmp_insn;
5610 }
5611 gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5612 break;
5613 case 0x36:
5614 {
5615 CHECK_FPU_FEATURE(dc, FLOAT128);
5616 if (gen_trap_ifnofpu(dc)) {
5617 goto jmp_insn;
5618 }
5619 gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5620 }
5621 break;
5622 case 0x37:
5623 if (gen_trap_ifnofpu(dc)) {
5624 goto jmp_insn;
5625 }
5626 gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5627 break;
5628 case 0x3e:
5629 rs2 = GET_FIELD(insn, 27, 31);
5630 cpu_src2 = gen_load_gpr(dc, rs2);
5631 gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5632 break;
5633#else
5634 case 0x34:
5635 case 0x35:
5636 case 0x36:
5637 case 0x37:
5638 goto ncp_insn;
5639#endif
5640#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5641 case 0x3c:
5642#ifndef TARGET_SPARC64
5643 CHECK_IU_FEATURE(dc, CASA);
5644#endif
5645 rs2 = GET_FIELD(insn, 27, 31);
5646 cpu_src2 = gen_load_gpr(dc, rs2);
5647 gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5648 break;
5649#endif
5650 default:
5651 goto illegal_insn;
5652 }
5653 } else {
5654 goto illegal_insn;
5655 }
5656 }
5657 break;
5658 }
5659
5660 if (dc->npc == DYNAMIC_PC) {
5661 dc->pc = DYNAMIC_PC;
5662 gen_op_next_insn();
5663 } else if (dc->npc == JUMP_PC) {
5664
5665 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5666 dc->is_br = 1;
5667 } else {
5668 dc->pc = dc->npc;
5669 dc->npc = dc->npc + 4;
5670 }
5671 jmp_insn:
5672 goto egress;
5673 illegal_insn:
5674 gen_exception(dc, TT_ILL_INSN);
5675 goto egress;
5676 unimp_flush:
5677 gen_exception(dc, TT_UNIMP_FLUSH);
5678 goto egress;
5679#if !defined(CONFIG_USER_ONLY)
5680 priv_insn:
5681 gen_exception(dc, TT_PRIV_INSN);
5682 goto egress;
5683#endif
5684 nfpu_insn:
5685 gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5686 goto egress;
5687#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5688 nfq_insn:
5689 gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5690 goto egress;
5691#endif
5692#ifndef TARGET_SPARC64
5693 ncp_insn:
5694 gen_exception(dc, TT_NCP_INSN);
5695 goto egress;
5696#endif
5697 egress:
5698 if (dc->n_t32 != 0) {
5699 int i;
5700 for (i = dc->n_t32 - 1; i >= 0; --i) {
5701 tcg_temp_free_i32(dc->t32[i]);
5702 }
5703 dc->n_t32 = 0;
5704 }
5705 if (dc->n_ttl != 0) {
5706 int i;
5707 for (i = dc->n_ttl - 1; i >= 0; --i) {
5708 tcg_temp_free(dc->ttl[i]);
5709 }
5710 dc->n_ttl = 0;
5711 }
5712}
5713
/* Translate a block of guest SPARC code starting at tb->pc into TCG ops.
 * Translation stops at control-transfer instructions, page boundaries,
 * breakpoints, single-step requests, or when the insn/op budget runs out.
 * On return, tb->size and tb->icount describe the translated region. */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    SPARCCPU *cpu = sparc_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start, last_pc;
    DisasContext dc1, *dc = &dc1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* For SPARC, cs_base carries the npc of the first instruction
       (which may be the DYNAMIC_PC/JUMP_PC pseudo-values). */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (cs->singlestep_enabled || singlestep);
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    dc->asi = (tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#endif

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    do {
        /* Record (pc, npc) for restore_state_to_opc().  When npc is the
           JUMP_PC pseudo-value, store the taken target tagged with
           JUMP_PC in its low bits so the condition can be resolved at
           restore time; the not-taken path is implicitly pc + 4. */
        if (dc->npc & JUMP_PC) {
            assert(dc->jump_pc[1] == dc->pc + 4);
            tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
        } else {
            tcg_gen_insn_start(dc->pc, dc->npc);
        }
        num_insns++;
        last_pc = dc->pc;

        if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
            /* Make sure CPU state is up to date before raising the
               debug exception, unless we are still at the TB start. */
            if (dc->pc != pc_start) {
                save_state(dc);
            }
            gen_helper_debug(cpu_env);
            tcg_gen_exit_tb(0);
            dc->is_br = 1;
            goto exit_gen_loop;
        }

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        insn = cpu_ldl_code(env, dc->pc);

        disas_sparc_insn(dc, insn);

        if (dc->is_br)
            break;
        /* Stop if the instruction did not fall through sequentially. */
        if (dc->pc != (last_pc + 4))
            break;

        /* Stop when crossing onto a new page, so a TB never spans a
           page boundary. */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        /* One instruction per TB while single-stepping. */
        if (dc->singlestep) {
            break;
        }
    } while (!tcg_op_buf_full() &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* Both pc and npc are statically known: chain to next TB. */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_tb_end(tb, num_insns);

    tb->size = last_pc + 4 - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}
5831
5832void gen_intermediate_code_init(CPUSPARCState *env)
5833{
5834 static int inited;
5835 static const char gregnames[32][4] = {
5836 "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5837 "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5838 "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5839 "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5840 };
5841 static const char fregnames[32][4] = {
5842 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5843 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5844 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5845 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5846 };
5847
5848 static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5849#ifdef TARGET_SPARC64
5850 { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5851 { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5852#else
5853 { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5854#endif
5855 { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5856 { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5857 };
5858
5859 static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5860#ifdef TARGET_SPARC64
5861 { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5862 { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5863 { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5864 { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5865 "hstick_cmpr" },
5866 { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5867 { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5868 { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5869 { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5870 { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5871#endif
5872 { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5873 { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5874 { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5875 { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5876 { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5877 { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5878 { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5879 { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5880#ifndef CONFIG_USER_ONLY
5881 { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5882#endif
5883 };
5884
5885 unsigned int i;
5886
5887
5888 if (inited) {
5889 return;
5890 }
5891 inited = 1;
5892
5893 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5894 tcg_ctx.tcg_env = cpu_env;
5895
5896 cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5897 offsetof(CPUSPARCState, regwptr),
5898 "regwptr");
5899
5900 for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5901 *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5902 }
5903
5904 for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5905 *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5906 }
5907
5908 TCGV_UNUSED(cpu_regs[0]);
5909 for (i = 1; i < 8; ++i) {
5910 cpu_regs[i] = tcg_global_mem_new(cpu_env,
5911 offsetof(CPUSPARCState, gregs[i]),
5912 gregnames[i]);
5913 }
5914
5915 for (i = 8; i < 32; ++i) {
5916 cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5917 (i - 8) * sizeof(target_ulong),
5918 gregnames[i]);
5919 }
5920
5921 for (i = 0; i < TARGET_DPREGS; i++) {
5922 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5923 offsetof(CPUSPARCState, fpr[i]),
5924 fregnames[i]);
5925 }
5926}
5927
5928void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5929 target_ulong *data)
5930{
5931 target_ulong pc = data[0];
5932 target_ulong npc = data[1];
5933
5934 env->pc = pc;
5935 if (npc == DYNAMIC_PC) {
5936
5937 } else if (npc & JUMP_PC) {
5938
5939 if (env->cond) {
5940 env->npc = npc & ~3;
5941 } else {
5942 env->npc = pc + 4;
5943 }
5944 } else {
5945 env->npc = npc;
5946 }
5947}
5948