1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21#include "qemu/osdep.h"
22
23#include "cpu.h"
24#include "disas/disas.h"
25#include "exec/helper-proto.h"
26#include "exec/exec-all.h"
27#include "tcg/tcg-op.h"
28#include "exec/cpu_ldst.h"
29
30#include "exec/helper-gen.h"
31
32#include "exec/translator.h"
33#include "exec/log.h"
34#include "asi.h"
35
36
37#define DEBUG_DISAS
38
39#define DYNAMIC_PC 1
40#define JUMP_PC 2
41
42
43#define DISAS_EXIT DISAS_TARGET_0
44
45
/* TCG globals mirroring fields of CPUSPARCState for the translator. */
static TCGv_ptr cpu_regwptr;
/* Saved operands/result for the lazy condition-code scheme. */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
/* Holds the most recently evaluated branch condition value. */
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif

/* Floating point registers, kept as TARGET_DPREGS 64-bit halves. */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67
68#include "exec/gen-icount.h"
69
/* Per-translation-block decoder state. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;           /* current insn PC, or DYNAMIC_PC/JUMP_PC */
    target_ulong npc;          /* next-PC, or DYNAMIC_PC/JUMP_PC sentinel */
    target_ulong jump_pc[2];   /* both possible npc values when npc == JUMP_PC */
    int mem_idx;
    bool fpu_enabled;
    bool address_mask_32bit;
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;            /* current lazy condition-code state */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;            /* FPRS dirty bits already set in this TB */
    int asi;
#endif
} DisasContext;
92
/* A comparison described as cond(c1, c2); when is_bool is set, c1 is
   already a 0/1 value and c2 is the constant zero. */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
98
99
/* Extract bits FROM..TO of a 32-bit insn word, using the big-endian
   bit numbering of the SPARC manuals (bit 0 is the MSB). */
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

/* Same extraction with conventional (LSB = 0) bit numbering. */
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the field extractors. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map a double/quad FP register number to its linear index: the MSB of
   the register number is encoded in bit 0 of the instruction field. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Trap number masks: 8 bits for UA2005 hypervisor traps, 7 for V8. */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
120
/*
 * Sign-extend the low LEN bits of X (1 <= LEN <= 32).
 *
 * The original "(x << len) >> len" relies on undefined behavior when a
 * negative/overflowing value is left-shifted, and on the implementation-
 * defined arithmetic right shift of negative ints.  This form is fully
 * defined: mask to LEN bits, then flip and subtract the sign bit.
 */
static int sign_extend(int x, int len)
{
    unsigned int ux = (unsigned int)x & (0xffffffffu >> (32 - len));
    unsigned int sign = 1u << (len - 1);

    return (int)((ux ^ sign) - sign);
}
126
/* True if the insn's i bit (bit 13) selects the immediate operand form. */
#define IS_IMM (insn & (1<<13))
128
/* Mark the half of the FP register file containing RD as dirty in FPRS
   (sparc64 only; a no-op elsewhere).  The update is emitted at most
   once per half per TB, tracked via dc->fprs_dirty. */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;   /* lower vs upper half of the file */

    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
141
142
/*
 * Load single-precision register %f<SRC> into a fresh i32 temp.
 * Single registers are packed in 64-bit pairs; the even-numbered
 * member of a pair lives in the high half of cpu_fpr[src / 2].
 */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}
153
/* Store V into single-precision register %f<DST> by depositing it into
   the matching half of its 64-bit pair, then mark FPRS dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    /* Even registers occupy bits 63:32, odd registers bits 31:0. */
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
163
/* Return a fresh temp to be used as a single-precision destination. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
168
/* Return the 64-bit global that holds double-precision register SRC. */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

/* Store V into double-precision register DST and mark FPRS dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}

/* Return the global backing DST so ops may write it directly.  The
   caller is responsible for the FPRS dirty update on store. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
186
/* Copy FP register quad SRC into the qt0 scratch slot of the CPU state
   (quad helpers operate on qt0/qt1 rather than on TCG values). */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy FP register quad SRC into the qt1 scratch slot. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy the qt0 scratch slot back into FP register quad DST. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
210
/* Store the pair (V1, V2) into quad register DST and mark FPRS dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
220
#ifdef TARGET_SPARC64
/* Return the global holding the upper 64 bits of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the global holding the lower 64 bits of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad register RS to quad register RD and mark FPRS dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif
244
245
/* Privilege tests: user-mode emulation is never privileged; on sparc64
   hypervisor mode implies supervisor. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* Whether addresses must be masked to 32 bits (sparc64 PSTATE.AM;
   always true for a 32-bit ABI guest). */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
267
/* Truncate ADDR to 32 bits in place when address masking is active. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
275
276static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
277{
278 if (reg > 0) {
279 assert(reg < 32);
280 return cpu_regs[reg];
281 } else {
282 TCGv t = tcg_temp_new();
283 tcg_gen_movi_tl(t, 0);
284 return t;
285 }
286}
287
288static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
289{
290 if (reg > 0) {
291 assert(reg < 32);
292 tcg_gen_mov_tl(cpu_regs[reg], v);
293 }
294}
295
296static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
297{
298 if (reg > 0) {
299 assert(reg < 32);
300 return cpu_regs[reg];
301 } else {
302 return tcg_temp_new();
303 }
304}
305
306static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
307{
308 return translator_use_goto_tb(&s->base, pc) &&
309 translator_use_goto_tb(&s->base, npc);
310}
311
/*
 * Emit code to continue execution at PC/NPC.  When direct chaining is
 * possible, emit a goto_tb (which must precede the PC updates) and a
 * chained exit; otherwise just set the PC globals and return to the
 * main loop.
 */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: return to the main loop */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(NULL, 0);
    }
}
328
329
/* Extract the N (negative) bit of a PSR-format value into REG. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

/* Extract the Z (zero) bit of a PSR-format value into REG. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

/* Extract the V (overflow) bit of a PSR-format value into REG. */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

/* Extract the C (carry) bit of a PSR-format value into REG. */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
353
/* dst = src1 + src2, saving operands and result so the condition codes
   can be computed lazily later (CC_OP_ADD style). */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
361
/*
 * Compute the 32-bit carry out of the previously recorded addition:
 * carry iff (uint32_t)result < (uint32_t)operand.  Valid only while
 * cpu_cc_dst/cpu_cc_src still hold an add's result/operand.
 */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* On a 64-bit host, narrow the saved values to 32 bits first. */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
382
/*
 * Compute the 32-bit borrow of the previously recorded subtraction:
 * borrow iff (uint32_t)src1 < (uint32_t)src2.  Valid only while
 * cpu_cc_src/cpu_cc_src2 still hold a sub's operands.
 */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* On a 64-bit host, narrow the saved operands to 32 bits first. */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
403
/*
 * ADDX/ADDXcc: dst = src1 + src2 + carry, obtaining the carry as
 * cheaply as the current lazy CC state allows.  When UPDATE_CC is set,
 * operands/result are recorded and the state becomes CC_OP_ADDX.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by
               replaying the addition and adding SRC1/SRC2 on top with
               a double-word add.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* Unknown CC state: compute the carry via the helper.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to target width if necessary. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
468
/* dst = src1 - src2, saving operands and result so the condition codes
   can be computed lazily later (CC_OP_SUB style). */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
476
/*
 * SUBX/SUBXcc: dst = src1 - src2 - borrow, obtaining the borrow as
 * cheaply as the current lazy CC state allows.  When UPDATE_CC is set,
 * operands/result are recorded and the state becomes CC_OP_SUBX.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Borrow is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware borrow generation by
               replaying the subtraction and subtracting SRC1/SRC2 with
               a double-word subtract.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* Unknown CC state: compute the carry via the helper.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit borrow to target width if necessary. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
541
/*
 * MULScc: one step of the V8 multiply-step instruction.
 * Adds src2 (or zero, when Y bit 0 is clear) to src1 shifted right by
 * one with (N ^ V) shifted in as the new MSB, while rotating bit 0 of
 * src1 into the top of %y.  Operands/result are saved for lazy CCs.
 */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    /* y = (b2 << 31) | (y >> 1): shift bit 0 of the multiplicand into
       the top of %y. */
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    /* b1 = N ^ V (of the previous partial result) */
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    /* src1 = (b1 << 31) | (src1 >> 1) */
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
580
/* 32 x 32 -> 64 multiply (signed when SIGN_EXT): the low 32 bits of
   the product go to DST and the high 32 bits to %y. */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    /* 64-bit target registers: extend operands, multiply in 64 bits,
       then split the result by hand. */
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
605
/* UMUL: unsigned 32x32->64 multiply. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32->64 multiply. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    gen_op_multiply(dst, src1, src2, 1);
}
617
618
/* Integer condition evaluators: set DST to the 0/1 truth value of one
   Bicc condition, given a PSR-format flags word in SRC. */

/* ba: always */
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

/* be: Z */
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

/* ble: Z | (N ^ V) */
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}

/* bl: N ^ V */
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}

/* bleu: C | Z */
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}

/* bcs: C */
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

/* bvs: V */
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

/* bn: never */
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

/* bneg: N */
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

/* bne: !Z */
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* bg: !(Z | (N ^ V)) */
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* bge: !(N ^ V) */
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* bgu: !(C | Z) */
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* bcc: !C */
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* bpos: !N */
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* bvc: !V */
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
731
732
733
734
735
736
737
738
/* Extract bit 0 of FCC field n (shifted by FCC_OFFSET to select
   fcc0..fcc3) from an FSR-format value into REG. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract bit 1 of the selected FCC field into REG. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
752
753
/* FP condition evaluators: set DST to the truth value of one FBfcc
   condition from the 2-bit FCC field (0 = equal, 1 = less, 2 = greater,
   3 = unordered, encoded in FCC bits 0/1). */

/* fbne: !0 */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}

/* fblg: 1 or 2 */
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}

/* fbul: 1 or 3 */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

/* fbl: 1 */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}

/* fbug: 2 or 3 */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

/* fbg: 2 */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}

/* fbu: 3 */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}

/* fbe: 0 */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* fbue: 0 or 3 */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* fbge: 0 or 2 */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* fbuge: !1 */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* fble: 0 or 1 */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* fbule: !2 */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/* fbo: !3 (ordered) */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
887
/* Emit a two-way TB exit: continue at PC1 when R_COND is non-zero,
   otherwise at PC2 (each followed by its own delay slot address). */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
900
/*
 * Conditional branch with the annul bit set: when cpu_cond is true,
 * execute the delay slot at npc then jump to PC1; when false, the
 * delay slot is annulled and execution resumes at npc + 4.
 */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
915
/*
 * Conditional branch without annul: the delay slot always executes.
 * With a static npc the decision is deferred by entering the JUMP_PC
 * state (both candidate targets saved in jump_pc[]); with a dynamic
 * npc the new npc is selected at runtime with a movcond.
 */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;       /* taken */
        dc->jump_pc[1] = npc + 4;   /* not taken */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_constant_tl(pc1);
        z = tcg_constant_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);

        dc->pc = DYNAMIC_PC;
    }
}
938
/* Resolve a deferred JUMP_PC: select the runtime npc from jump_pc[]
   according to the saved condition value. */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
947
948
949
950static inline void flush_cond(DisasContext *dc)
951{
952 if (dc->npc == JUMP_PC) {
953 gen_generic_branch(dc);
954 dc->npc = DYNAMIC_PC;
955 }
956}
957
/* Make cpu_npc valid: resolve a deferred branch, or store a static
   npc; a DYNAMIC_PC npc is already up to date in the global. */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
967
/* Materialize the lazy condition codes into the PSR if necessary. */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
975
/* Flush pc/npc to the CPU state (e.g. before a possible exception). */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
981
/* Raise trap WHICH at the current pc and end the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
988
/* Emit a runtime alignment check of ADDR against MASK via helper. */
static void gen_check_align(TCGv addr, int mask)
{
    gen_helper_check_align(cpu_env, addr, tcg_constant_i32(mask));
}
993
994static inline void gen_mov_pc_npc(DisasContext *dc)
995{
996 if (dc->npc == JUMP_PC) {
997 gen_generic_branch(dc);
998 tcg_gen_mov_tl(cpu_pc, cpu_npc);
999 dc->pc = DYNAMIC_PC;
1000 } else if (dc->npc == DYNAMIC_PC) {
1001 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1002 dc->pc = DYNAMIC_PC;
1003 } else {
1004 dc->pc = dc->npc;
1005 }
1006}
1007
/* Emit pc = npc; npc += 4 (sequential advance at runtime). */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1013
1014static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1015 DisasContext *dc)
1016{
1017 static int subcc_cond[16] = {
1018 TCG_COND_NEVER,
1019 TCG_COND_EQ,
1020 TCG_COND_LE,
1021 TCG_COND_LT,
1022 TCG_COND_LEU,
1023 TCG_COND_LTU,
1024 -1,
1025 -1,
1026 TCG_COND_ALWAYS,
1027 TCG_COND_NE,
1028 TCG_COND_GT,
1029 TCG_COND_GE,
1030 TCG_COND_GTU,
1031 TCG_COND_GEU,
1032 -1,
1033 -1,
1034 };
1035
1036 static int logic_cond[16] = {
1037 TCG_COND_NEVER,
1038 TCG_COND_EQ,
1039 TCG_COND_LE,
1040 TCG_COND_LT,
1041 TCG_COND_EQ,
1042 TCG_COND_NEVER,
1043 TCG_COND_LT,
1044 TCG_COND_NEVER,
1045 TCG_COND_ALWAYS,
1046 TCG_COND_NE,
1047 TCG_COND_GT,
1048 TCG_COND_GE,
1049 TCG_COND_NE,
1050 TCG_COND_ALWAYS,
1051 TCG_COND_GE,
1052 TCG_COND_ALWAYS,
1053 };
1054
1055 TCGv_i32 r_src;
1056 TCGv r_dst;
1057
1058#ifdef TARGET_SPARC64
1059 if (xcc) {
1060 r_src = cpu_xcc;
1061 } else {
1062 r_src = cpu_psr;
1063 }
1064#else
1065 r_src = cpu_psr;
1066#endif
1067
1068 switch (dc->cc_op) {
1069 case CC_OP_LOGIC:
1070 cmp->cond = logic_cond[cond];
1071 do_compare_dst_0:
1072 cmp->is_bool = false;
1073 cmp->c2 = tcg_constant_tl(0);
1074#ifdef TARGET_SPARC64
1075 if (!xcc) {
1076 cmp->c1 = tcg_temp_new();
1077 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1078 break;
1079 }
1080#endif
1081 cmp->c1 = cpu_cc_dst;
1082 break;
1083
1084 case CC_OP_SUB:
1085 switch (cond) {
1086 case 6:
1087 case 14:
1088 cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1089 goto do_compare_dst_0;
1090
1091 case 7:
1092 case 15:
1093 goto do_dynamic;
1094
1095 default:
1096 cmp->cond = subcc_cond[cond];
1097 cmp->is_bool = false;
1098#ifdef TARGET_SPARC64
1099 if (!xcc) {
1100
1101
1102 cmp->c1 = tcg_temp_new();
1103 cmp->c2 = tcg_temp_new();
1104 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1105 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1106 break;
1107 }
1108#endif
1109 cmp->c1 = cpu_cc_src;
1110 cmp->c2 = cpu_cc_src2;
1111 break;
1112 }
1113 break;
1114
1115 default:
1116 do_dynamic:
1117 gen_helper_compute_psr(cpu_env);
1118 dc->cc_op = CC_OP_FLAGS;
1119
1120
1121 case CC_OP_FLAGS:
1122
1123 cmp->cond = TCG_COND_NE;
1124 cmp->is_bool = true;
1125 cmp->c1 = r_dst = tcg_temp_new();
1126 cmp->c2 = tcg_constant_tl(0);
1127
1128 switch (cond) {
1129 case 0x0:
1130 gen_op_eval_bn(r_dst);
1131 break;
1132 case 0x1:
1133 gen_op_eval_be(r_dst, r_src);
1134 break;
1135 case 0x2:
1136 gen_op_eval_ble(r_dst, r_src);
1137 break;
1138 case 0x3:
1139 gen_op_eval_bl(r_dst, r_src);
1140 break;
1141 case 0x4:
1142 gen_op_eval_bleu(r_dst, r_src);
1143 break;
1144 case 0x5:
1145 gen_op_eval_bcs(r_dst, r_src);
1146 break;
1147 case 0x6:
1148 gen_op_eval_bneg(r_dst, r_src);
1149 break;
1150 case 0x7:
1151 gen_op_eval_bvs(r_dst, r_src);
1152 break;
1153 case 0x8:
1154 gen_op_eval_ba(r_dst);
1155 break;
1156 case 0x9:
1157 gen_op_eval_bne(r_dst, r_src);
1158 break;
1159 case 0xa:
1160 gen_op_eval_bg(r_dst, r_src);
1161 break;
1162 case 0xb:
1163 gen_op_eval_bge(r_dst, r_src);
1164 break;
1165 case 0xc:
1166 gen_op_eval_bgu(r_dst, r_src);
1167 break;
1168 case 0xd:
1169 gen_op_eval_bcc(r_dst, r_src);
1170 break;
1171 case 0xe:
1172 gen_op_eval_bpos(r_dst, r_src);
1173 break;
1174 case 0xf:
1175 gen_op_eval_bvc(r_dst, r_src);
1176 break;
1177 }
1178 break;
1179 }
1180}
1181
/*
 * Describe in *CMP a boolean test for FP condition COND on condition
 * code field fcc<CC>.  The condition bits are always evaluated into a
 * fresh temp compared against zero.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* Result is always a materialized boolean. */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Offset of the selected FCC field relative to FCC0: fcc1..fcc3
       live at FSR bits 32/34/36 while fcc0 sits at bit 10. */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1260
1261static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1262 DisasContext *dc)
1263{
1264 DisasCompare cmp;
1265 gen_compare(&cmp, cc, cond, dc);
1266
1267
1268 if (cmp.is_bool) {
1269 tcg_gen_mov_tl(r_dst, cmp.c1);
1270 } else {
1271 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1272 }
1273}
1274
1275static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1276{
1277 DisasCompare cmp;
1278 gen_fcompare(&cmp, cc, cond);
1279
1280
1281 if (cmp.is_bool) {
1282 tcg_gen_mov_tl(r_dst, cmp.c1);
1283 } else {
1284 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1285 }
1286}
1287
1288#ifdef TARGET_SPARC64
1289
/* TCG conditions for the BPr/MOVr register-condition encodings;
   -1 marks the reserved encodings. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* Describe a register-vs-zero test for encoding COND.  The condition
   is inverted so that *CMP is true when the branch is NOT taken. */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_constant_tl(0);
}

/* Evaluate register condition COND on R_SRC into R_DST as 0/1. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The result is always a known comparison, never pre-boolean. */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
}
1317#endif
1318
/*
 * Translate a Bicc instruction.  Conditions 0 (never) and 8 (always)
 * are handled statically, honouring the annul bit A which skips the
 * delay slot; all other conditions evaluate into cpu_cond and emit a
 * conditional branch.
 */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* Branch targets wrap to 32 bits under address masking. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot is skipped */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: jump directly to the target */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1358
/*
 * Translate an FBfcc instruction.  Same structure as do_branch(), but
 * the condition is evaluated from the FSR condition-code field CC.
 */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* Branch targets wrap to 32 bits under address masking. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot is skipped */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: jump directly to the target */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1398
1399#ifdef TARGET_SPARC64
/* Translate a BPr (branch on register contents) instruction: the
   condition compares R_REG against zero; no never/always shortcuts
   exist in this encoding. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* Branch targets wrap to 32 bits under address masking. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1417
/* FP compare dispatchers: invoke the helper for the selected fcc
   field, which writes the result into cpu_fsr.  The "e" variants
   additionally signal on unordered operands (per the helper names). */

/* FCMPs on fcc<FCCNO>. */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPd on fcc<FCCNO>. */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPq on fcc<FCCNO>; operands come from the qt0/qt1 scratch slots. */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}

/* FCMPEs on fcc<FCCNO>. */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPEd on fcc<FCCNO>. */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1507
1508static inline void gen_op_fcmpeq(int fccno)
1509{
1510 switch (fccno) {
1511 case 0:
1512 gen_helper_fcmpeq(cpu_fsr, cpu_env);
1513 break;
1514 case 1:
1515 gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
1516 break;
1517 case 2:
1518 gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
1519 break;
1520 case 3:
1521 gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
1522 break;
1523 }
1524}
1525
1526#else
1527
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    /* Pre-v9 has a single %fcc field, so fccno is ignored.  */
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1532
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    /* Pre-v9 has a single %fcc field, so fccno is ignored.  */
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1537
static inline void gen_op_fcmpq(int fccno)
{
    /* Pre-v9: single %fcc; quad operands were staged in QT0/QT1.  */
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1542
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    /* Pre-v9 has a single %fcc field, so fccno is ignored.  */
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1547
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    /* Pre-v9 has a single %fcc field, so fccno is ignored.  */
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1552
static inline void gen_op_fcmpeq(int fccno)
{
    /* Pre-v9: single %fcc; quad operands were staged in QT0/QT1.  */
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1557#endif
1558
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Set the FSR trap-type field to fsr_flags and raise an FP trap.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1565
static int gen_trap_ifnofpu(DisasContext *dc)
{
    /*
     * Raise an fp-disabled trap if the FPU is not enabled.  Returns
     * nonzero when the trap was emitted, so the caller can abandon
     * translation of the current insn.  In user mode the FPU is
     * always available.
     */
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1576
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    /* Clear the FSR trap-type and current-exception fields.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1581
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    /* One-source single-precision FP op; checks IEEE exceptions after.  */
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1595
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    /* One-source single-precision op that raises no exceptions ("ne").  */
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
1608
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    /* Two-source single-precision FP op; checks IEEE exceptions after.  */
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1623
1624#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    /* Two-source single-precision op that raises no exceptions ("ne").  */
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1638#endif
1639
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    /* One-source double-precision FP op; checks IEEE exceptions after.  */
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1653
1654#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    /* One-source double-precision op that raises no exceptions ("ne").  */
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1667#endif
1668
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    /* Two-source double-precision FP op; checks IEEE exceptions after.  */
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1683
1684#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    /* Two-source double-precision op that raises no exceptions ("ne").  */
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1698
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    /* Two-source double op that also consumes %gsr (VIS align/mask ops).  */
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1712
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    /* Three-source double op: rd is both accumulator input and output.  */
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1727#endif
1728
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    /*
     * One-source quad-precision FP op.  Quad values are passed through
     * the env QT0/QT1 staging slots rather than TCG temporaries.
     */
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1740
1741#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    /* One-source quad op (via QT0/QT1) that raises no exceptions.  */
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1752#endif
1753
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    /* Two-source quad op: rs1 staged in QT0, rs2 in QT1, result in QT0.  */
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1766
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    /* Two single-precision sources producing a double result (e.g. fsmuld). */
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1782
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    /* Two double sources producing a quad result (left in QT0 by helper). */
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1797
1798#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    /* Single source to double result; checks IEEE exceptions after.  */
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1813#endif
1814
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    /* Single source to double result; no IEEE exception check.  */
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1828
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    /* Double source to single result; checks IEEE exceptions after.  */
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1843
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    /* Quad source (via QT1) to single result; checks IEEE exceptions.  */
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1857
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    /* Quad source (via QT1) to double result; checks IEEE exceptions.  */
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1871
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    /* Single source to quad result (QT0); no IEEE exception check.  */
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1884
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    /* Double source to quad result (QT0); no IEEE exception check.  */
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1897
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    /* SWAP: atomically exchange src with the word at addr.  */
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
1904
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    /* LDSTUB: atomically load the byte at addr and store 0xff there.  */
    TCGv m1 = tcg_constant_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
}
1911
1912
1913#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1914typedef enum {
1915 GET_ASI_HELPER,
1916 GET_ASI_EXCP,
1917 GET_ASI_DIRECT,
1918 GET_ASI_DTWINX,
1919 GET_ASI_BLOCK,
1920 GET_ASI_SHORT,
1921 GET_ASI_BCOPY,
1922 GET_ASI_BFILL,
1923} ASIType;
1924
1925typedef struct {
1926 ASIType type;
1927 int asi;
1928 int mem_idx;
1929 MemOp memop;
1930} DisasASI;
1931
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    /*
     * Decode the ASI of a load/store-alternate instruction into a
     * (type, asi, mem_idx, memop) tuple consumed by the gen_*_asi
     * emitters.  Unhandled ASIs fall back to GET_ASI_HELPER, i.e. a
     * run-time helper call.
     */
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /*
                * ASI_USERDATA is also accepted in user mode when the CPU
                * implements CASA (e.g. LEON), since CASA encodes an ASI.
                */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA:
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:
        case ASI_LEON_BYPASS:
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY:
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL:
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /*
         * If the MMU is currently disabled (dc->mem_idx is the physical
         * index), keep using physical addresses regardless of the ASI.
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        /* Immediate form: use the ASI previously loaded into %asi.  */
        asi = dc->asi;
    }

    /*
     * With v9, all asis below 0x80 are privileged: in user mode they
     * raise a privileged-action trap.
     * NOTE(review): hypervisor-mode restrictions are not modelled here;
     * only the supervisor check is applied.
     */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: select the MMU index implied by the ASI.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:
        case ASI_NL:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:
        case ASI_SL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            /* Secondary-context variant of the current translation regime. */
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:
        case ASI_PL:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: classify the access kind.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            /* Short (8-bit) FP load/store: override the access size.  */
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            /* Short (16-bit) FP load/store: override the access size.  */
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }

        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2144
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    /* Emit an integer load-alternate (LDA and friends).  */
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception.  */
        break;
    case GET_ASI_DTWINX:
        /* Twin loads are LDDA-only; reject for plain loads.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop);

            /* The helper may fault, so synchronize pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; narrow for 32-bit. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2179
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    /* Emit an integer store-alternate (STA and friends).  */
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception.  */
        break;
    case GET_ASI_DTWINX:
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre-hypervisor CPUs do not have these stores.  */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* In OpenSPARC T1+ CPUs, a simple store is a STTW.  */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /*
         * ASI_M_BCOPY: copy 32 bytes from the (word-aligned) source
         * address in src to the (word-aligned) destination in addr,
         * one 32-bit word at a time.
         */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Unrolled word-by-word copy.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop & MO_SIZE);

            /* The helper may fault, so synchronize pc/npc first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen for 32-bit.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2255
static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
                         TCGv addr, int insn)
{
    /* Emit SWAPA: atomic exchange through an alternate address space.  */
    DisasASI da = get_asi(dc, insn, MO_TEUL);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
        break;
    default:
        /* No atomic path through the ld/st helpers; treat as an error.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2273
static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
                        int insn, int rd)
{
    /* Emit CASA: 32-bit compare-and-swap; old memory value lands in rd.  */
    DisasASI da = get_asi(dc, insn, MO_TEUL);
    TCGv oldv;

    switch (da.type) {
    case GET_ASI_EXCP:
        return;
    case GET_ASI_DIRECT:
        oldv = tcg_temp_new();
        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
                                  da.mem_idx, da.memop);
        gen_store_gpr(dc, rd, oldv);
        break;
    default:
        /* No atomic path through the ld/st helpers; treat as an error.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2295
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    /* Emit LDSTUBA: atomic load-store-0xff through an alternate ASI.  */
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /*
         * The ld/st helpers are not atomic, so this can only be done
         * safely when no other CPU is running in parallel.
         */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* The store may have altered page maps; end the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2331#endif
2332
2333#ifdef TARGET_SPARC64
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    /*
     * Emit an FP load-alternate of 4, 8 or 16 bytes into FP register rd.
     * Handles direct, block (64-byte) and short (FL8/FL16) variants.
     */
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /*
             * Load via a temp first so that a fault on the second half
             * leaves the destination register pair unmodified.
             */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required 64-byte alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);

            /* The helper may fault, so synchronize pc/npc first.  */
            save_state(dc);
            /*
             * The ld_asi helper returns at most 64 bits; split larger
             * accesses into two helper calls.
             */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2441
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    /*
     * Emit an FP store-alternate of 4, 8 or 16 bytes from FP register rd.
     * Handles direct, block (64-byte) and short (FL8/FL16) variants.
     */
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /*
             * Check 16-byte alignment on the first access so any fault
             * is raised before either half is written.
             */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required 64-byte alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* No generic FP store-alternate helper path; treat as illegal.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2523
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /*
     * Emit a v9 LDDA: load a 64-bit doubleword (or a 128-bit twin) into
     * the even/odd register pair rd/rd+1.
     */
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;

    case GET_ASI_DTWINX:
        /* Twin load: two 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /*
             * Split the 64-bit value into the register pair.  For a
             * little-endian access the halves come back swapped.
             */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /*
         * Fall back to the generic helper, which performs a single
         * 64-bit access and lets us split the halves here.
         */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);

            /* See comment in the GET_ASI_DIRECT case above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2585
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    /*
     * Emit a v9 STDA: store the even/odd register pair rd/rd+1 as a
     * 64-bit doubleword (or a 128-bit twin).
     */
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DTWINX:
        /* Twin store: two 64-bit words, 16-byte aligned.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /*
             * Combine the register pair into one 64-bit value; for a
             * little-endian access the halves must be swapped.
             */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        }
        break;

    default:
        /* Fall back to the generic helper with a single 64-bit store.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See comment in the GET_ASI_DIRECT case above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2641
static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
                         int insn, int rd)
{
    /* Emit CASXA: 64-bit compare-and-swap; old memory value lands in rd.  */
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv oldv;

    switch (da.type) {
    case GET_ASI_EXCP:
        return;
    case GET_ASI_DIRECT:
        oldv = tcg_temp_new();
        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
                                  da.mem_idx, da.memop);
        gen_store_gpr(dc, rd, oldv);
        break;
    default:
        /* No atomic path through the ld/st helpers; treat as an error.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2663
2664#elif !defined(CONFIG_USER_ONLY)
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /*
     * Pre-v9 LDDA: a single 64-bit big-endian load split into the
     * even/odd register pair.  rd | 1 is used for the low word so that
     * rd == 0 still targets %g1 rather than clobbering %g0's slot.
     */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may fault, so synchronize pc/npc first.  */
            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* High word goes to rd, low word to rd|1.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2698
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    /* Pre-v9 STDA: store the register pair rd/rd+1 as one 64-bit value.  */
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /*
         * ASI_M_BFILL: replicate the 64-bit store value across a
         * 32-byte, 8-byte-aligned block at addr.
         */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may fault, so synchronize pc/npc first.  */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2744#endif
2745
2746static TCGv get_src1(DisasContext *dc, unsigned int insn)
2747{
2748 unsigned int rs1 = GET_FIELD(insn, 13, 17);
2749 return gen_load_gpr(dc, rs1);
2750}
2751
2752static TCGv get_src2(DisasContext *dc, unsigned int insn)
2753{
2754 if (IS_IMM) {
2755 target_long simm = GET_FIELDs(insn, 19, 31);
2756 TCGv t = tcg_temp_new();
2757 tcg_gen_movi_tl(t, simm);
2758 return t;
2759 } else {
2760 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2761 return gen_load_gpr(dc, rs2);
2762 }
2763}
2764
2765#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    /*
     * Conditional single-precision FP move (FMOVScc).  The 64-bit
     * compare is lowered to a 32-bit nonzero flag so the movcond can
     * operate on the 32-bit FP register values directly.
     */
    TCGv_i32 c32, zero, dst, s1, s2;

    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 is already a 0/1 value; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_constant_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2791
2792static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2793{
2794 TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2795 tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2796 gen_load_fpr_D(dc, rs),
2797 gen_load_fpr_D(dc, rd));
2798 gen_store_fpr_D(dc, rd, dst);
2799}
2800
2801static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2802{
2803 int qd = QFPREG(rd);
2804 int qs = QFPREG(rs);
2805
2806 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2807 cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2808 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2809 cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2810
2811 gen_update_fprs_dirty(dc, qd);
2812}
2813
2814#ifndef CONFIG_USER_ONLY
/*
 * Set r_tsptr to point at the trap_state entry for the current trap
 * level: r_tsptr = &env->ts[env->tl & MAXTL_MASK].
 */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* Load the current trap level from env->tl.  */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* Clamp to the valid range; assumes MAXTL_MASK is 2^n - 1.  */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* Scale the trap level into a byte offset within the ts[] array.  */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* Widen the 32-bit offset to pointer width and add it in.  */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2836#endif
2837
/*
 * Generate code for the VIS EDGE* operations: compute an edge mask of
 * "width"-bit elements from the low address bits of s1 and s2.
 * "left" selects the left/right instruction variant (different lookup
 * tables); "cc" additionally sets the condition codes as for SUBcc.
 * NOTE(review): s1 and s2 are clobbered (masked in place) below.
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* EDGE*cc: condition codes are those of s1 - s2.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /*
     * All possible edge masks are precomputed in two lookup tables
     * packed into the constants tabl (for s1) and tabr (for s2).
     * An entry is "omask" wide and is selected by shifting the table
     * right by the low address bits (imask), scaled by "shift".
     */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        /* Caller passes only 8/16/32; anything else is a bug.  */
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    /* Index the packed tables; dst gets the s1 (left-edge) mask.  */
    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Align both addresses to 8 bytes (32-bit truncated in AM mode)
       so they can be compared as doubleword addresses.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /*
     * We want dst = (s1 == s2 ? lo1 : lo1 & lo2).
     * Since dst already holds lo1 & omask, this reduces to
     *     dst &= (s1 == s2 ? -1 : lo2)
     * which is computed branchlessly as
     *     lo2 |= -(s1 == s2);
     *     dst &= lo2;
     */
    tcg_gen_setcond_tl(TCG_COND_EQ, lo1, s1, s2);
    tcg_gen_neg_tl(lo1, lo1);
    tcg_gen_or_tl(lo2, lo2, lo1);
    tcg_gen_and_tl(dst, dst, lo2);
}
2933
2934static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2935{
2936 TCGv tmp = tcg_temp_new();
2937
2938 tcg_gen_add_tl(tmp, s1, s2);
2939 tcg_gen_andi_tl(dst, tmp, -8);
2940 if (left) {
2941 tcg_gen_neg_tl(tmp, tmp);
2942 }
2943 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2944}
2945
/*
 * FALIGNDATA: extract 8 bytes from the concatenation s1:s2, starting at
 * the byte offset held in GSR.align (the low 3 bits of %gsr).
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = (gsr & 7) * 8: the bit offset of the alignment point.  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /*
     * We want t2 = s2 >> (64 - shift), but a shift count of 64 is
     * undefined, so compute (s2 >> (shift ^ 63)) >> 1 instead: since
     * shift is a multiple of 8 in [0, 56], shift ^ 63 == 63 - shift,
     * and the result is 0 when shift == 0, as required.
     */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2966#endif
2967
2968#define CHECK_IU_FEATURE(dc, FEATURE) \
2969 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2970 goto illegal_insn;
2971#define CHECK_FPU_FEATURE(dc, FEATURE) \
2972 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2973 goto nfpu_insn;
2974
2975
2976static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2977{
2978 unsigned int opc, rs1, rs2, rd;
2979 TCGv cpu_src1, cpu_src2;
2980 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2981 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2982 target_long simm;
2983
2984 opc = GET_FIELD(insn, 0, 1);
2985 rd = GET_FIELD(insn, 2, 6);
2986
2987 switch (opc) {
2988 case 0:
2989 {
2990 unsigned int xop = GET_FIELD(insn, 7, 9);
2991 int32_t target;
2992 switch (xop) {
2993#ifdef TARGET_SPARC64
2994 case 0x1:
2995 {
2996 int cc;
2997
2998 target = GET_FIELD_SP(insn, 0, 18);
2999 target = sign_extend(target, 19);
3000 target <<= 2;
3001 cc = GET_FIELD_SP(insn, 20, 21);
3002 if (cc == 0)
3003 do_branch(dc, target, insn, 0);
3004 else if (cc == 2)
3005 do_branch(dc, target, insn, 1);
3006 else
3007 goto illegal_insn;
3008 goto jmp_insn;
3009 }
3010 case 0x3:
3011 {
3012 target = GET_FIELD_SP(insn, 0, 13) |
3013 (GET_FIELD_SP(insn, 20, 21) << 14);
3014 target = sign_extend(target, 16);
3015 target <<= 2;
3016 cpu_src1 = get_src1(dc, insn);
3017 do_branch_reg(dc, target, insn, cpu_src1);
3018 goto jmp_insn;
3019 }
3020 case 0x5:
3021 {
3022 int cc = GET_FIELD_SP(insn, 20, 21);
3023 if (gen_trap_ifnofpu(dc)) {
3024 goto jmp_insn;
3025 }
3026 target = GET_FIELD_SP(insn, 0, 18);
3027 target = sign_extend(target, 19);
3028 target <<= 2;
3029 do_fbranch(dc, target, insn, cc);
3030 goto jmp_insn;
3031 }
3032#else
3033 case 0x7:
3034 {
3035 goto ncp_insn;
3036 }
3037#endif
3038 case 0x2:
3039 {
3040 target = GET_FIELD(insn, 10, 31);
3041 target = sign_extend(target, 22);
3042 target <<= 2;
3043 do_branch(dc, target, insn, 0);
3044 goto jmp_insn;
3045 }
3046 case 0x6:
3047 {
3048 if (gen_trap_ifnofpu(dc)) {
3049 goto jmp_insn;
3050 }
3051 target = GET_FIELD(insn, 10, 31);
3052 target = sign_extend(target, 22);
3053 target <<= 2;
3054 do_fbranch(dc, target, insn, 0);
3055 goto jmp_insn;
3056 }
3057 case 0x4:
3058
3059 if (rd) {
3060 uint32_t value = GET_FIELD(insn, 10, 31);
3061 TCGv t = gen_dest_gpr(dc, rd);
3062 tcg_gen_movi_tl(t, value << 10);
3063 gen_store_gpr(dc, rd, t);
3064 }
3065 break;
3066 case 0x0:
3067 default:
3068 goto illegal_insn;
3069 }
3070 break;
3071 }
3072 break;
3073 case 1:
3074 {
3075 target_long target = GET_FIELDs(insn, 2, 31) << 2;
3076 TCGv o7 = gen_dest_gpr(dc, 15);
3077
3078 tcg_gen_movi_tl(o7, dc->pc);
3079 gen_store_gpr(dc, 15, o7);
3080 target += dc->pc;
3081 gen_mov_pc_npc(dc);
3082#ifdef TARGET_SPARC64
3083 if (unlikely(AM_CHECK(dc))) {
3084 target &= 0xffffffffULL;
3085 }
3086#endif
3087 dc->npc = target;
3088 }
3089 goto jmp_insn;
3090 case 2:
3091 {
3092 unsigned int xop = GET_FIELD(insn, 7, 12);
3093 TCGv cpu_dst = tcg_temp_new();
3094 TCGv cpu_tmp0;
3095
3096 if (xop == 0x3a) {
3097 int cond = GET_FIELD(insn, 3, 6);
3098 TCGv_i32 trap;
3099 TCGLabel *l1 = NULL;
3100 int mask;
3101
3102 if (cond == 0) {
3103
3104 break;
3105 }
3106
3107 save_state(dc);
3108
3109 if (cond != 8) {
3110
3111 DisasCompare cmp;
3112#ifdef TARGET_SPARC64
3113
3114 int cc = GET_FIELD_SP(insn, 11, 12);
3115 if (cc == 0) {
3116 gen_compare(&cmp, 0, cond, dc);
3117 } else if (cc == 2) {
3118 gen_compare(&cmp, 1, cond, dc);
3119 } else {
3120 goto illegal_insn;
3121 }
3122#else
3123 gen_compare(&cmp, 0, cond, dc);
3124#endif
3125 l1 = gen_new_label();
3126 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3127 cmp.c1, cmp.c2, l1);
3128 }
3129
3130 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3131 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3132
3133
3134
3135
3136 trap = tcg_temp_new_i32();
3137
3138 rs1 = GET_FIELD_SP(insn, 14, 18);
3139 if (IS_IMM) {
3140 rs2 = GET_FIELD_SP(insn, 0, 7);
3141 if (rs1 == 0) {
3142 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3143
3144 mask = 0;
3145 } else {
3146 TCGv t1 = gen_load_gpr(dc, rs1);
3147 tcg_gen_trunc_tl_i32(trap, t1);
3148 tcg_gen_addi_i32(trap, trap, rs2);
3149 }
3150 } else {
3151 TCGv t1, t2;
3152 rs2 = GET_FIELD_SP(insn, 0, 4);
3153 t1 = gen_load_gpr(dc, rs1);
3154 t2 = gen_load_gpr(dc, rs2);
3155 tcg_gen_add_tl(t1, t1, t2);
3156 tcg_gen_trunc_tl_i32(trap, t1);
3157 }
3158 if (mask != 0) {
3159 tcg_gen_andi_i32(trap, trap, mask);
3160 tcg_gen_addi_i32(trap, trap, TT_TRAP);
3161 }
3162
3163 gen_helper_raise_exception(cpu_env, trap);
3164
3165 if (cond == 8) {
3166
3167 dc->base.is_jmp = DISAS_NORETURN;
3168 goto jmp_insn;
3169 } else {
3170
3171 gen_set_label(l1);
3172 break;
3173 }
3174 } else if (xop == 0x28) {
3175 rs1 = GET_FIELD(insn, 13, 17);
3176 switch(rs1) {
3177 case 0:
3178#ifndef TARGET_SPARC64
3179 case 0x01 ... 0x0e:
3180
3181
3182 case 0x0f:
3183
3184 case 0x10 ... 0x1f:
3185
3186
3187
3188 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3189 TCGv t = gen_dest_gpr(dc, rd);
3190
3191 tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3192 gen_store_gpr(dc, rd, t);
3193 break;
3194 }
3195#endif
3196 gen_store_gpr(dc, rd, cpu_y);
3197 break;
3198#ifdef TARGET_SPARC64
3199 case 0x2:
3200 update_psr(dc);
3201 gen_helper_rdccr(cpu_dst, cpu_env);
3202 gen_store_gpr(dc, rd, cpu_dst);
3203 break;
3204 case 0x3:
3205 tcg_gen_movi_tl(cpu_dst, dc->asi);
3206 gen_store_gpr(dc, rd, cpu_dst);
3207 break;
3208 case 0x4:
3209 {
3210 TCGv_ptr r_tickptr;
3211 TCGv_i32 r_const;
3212
3213 r_tickptr = tcg_temp_new_ptr();
3214 r_const = tcg_constant_i32(dc->mem_idx);
3215 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3216 offsetof(CPUSPARCState, tick));
3217 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3218 gen_io_start();
3219 }
3220 gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3221 r_const);
3222 gen_store_gpr(dc, rd, cpu_dst);
3223 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3224
3225 dc->base.is_jmp = DISAS_EXIT;
3226 }
3227 }
3228 break;
3229 case 0x5:
3230 {
3231 TCGv t = gen_dest_gpr(dc, rd);
3232 if (unlikely(AM_CHECK(dc))) {
3233 tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3234 } else {
3235 tcg_gen_movi_tl(t, dc->pc);
3236 }
3237 gen_store_gpr(dc, rd, t);
3238 }
3239 break;
3240 case 0x6:
3241 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3242 gen_store_gpr(dc, rd, cpu_dst);
3243 break;
3244 case 0xf:
3245 break;
3246 case 0x13:
3247 if (gen_trap_ifnofpu(dc)) {
3248 goto jmp_insn;
3249 }
3250 gen_store_gpr(dc, rd, cpu_gsr);
3251 break;
3252 case 0x16:
3253 tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3254 offsetof(CPUSPARCState, softint));
3255 gen_store_gpr(dc, rd, cpu_dst);
3256 break;
3257 case 0x17:
3258 gen_store_gpr(dc, rd, cpu_tick_cmpr);
3259 break;
3260 case 0x18:
3261 {
3262 TCGv_ptr r_tickptr;
3263 TCGv_i32 r_const;
3264
3265 r_tickptr = tcg_temp_new_ptr();
3266 r_const = tcg_constant_i32(dc->mem_idx);
3267 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3268 offsetof(CPUSPARCState, stick));
3269 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3270 gen_io_start();
3271 }
3272 gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3273 r_const);
3274 gen_store_gpr(dc, rd, cpu_dst);
3275 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3276
3277 dc->base.is_jmp = DISAS_EXIT;
3278 }
3279 }
3280 break;
3281 case 0x19:
3282 gen_store_gpr(dc, rd, cpu_stick_cmpr);
3283 break;
3284 case 0x1a:
3285
3286
3287
3288 CHECK_IU_FEATURE(dc, HYPV);
3289 {
3290 TCGv t = gen_dest_gpr(dc, rd);
3291 tcg_gen_movi_tl(t, 1UL);
3292 gen_store_gpr(dc, rd, t);
3293 }
3294 break;
3295 case 0x10:
3296 case 0x11:
3297 case 0x12:
3298 case 0x14:
3299 case 0x15:
3300#endif
3301 default:
3302 goto illegal_insn;
3303 }
3304#if !defined(CONFIG_USER_ONLY)
3305 } else if (xop == 0x29) {
3306#ifndef TARGET_SPARC64
3307 if (!supervisor(dc)) {
3308 goto priv_insn;
3309 }
3310 update_psr(dc);
3311 gen_helper_rdpsr(cpu_dst, cpu_env);
3312#else
3313 CHECK_IU_FEATURE(dc, HYPV);
3314 if (!hypervisor(dc))
3315 goto priv_insn;
3316 rs1 = GET_FIELD(insn, 13, 17);
3317 switch (rs1) {
3318 case 0:
3319 tcg_gen_ld_i64(cpu_dst, cpu_env,
3320 offsetof(CPUSPARCState, hpstate));
3321 break;
3322 case 1:
3323
3324 break;
3325 case 3:
3326 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3327 break;
3328 case 5:
3329 tcg_gen_mov_tl(cpu_dst, cpu_htba);
3330 break;
3331 case 6:
3332 tcg_gen_mov_tl(cpu_dst, cpu_hver);
3333 break;
3334 case 31:
3335 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3336 break;
3337 default:
3338 goto illegal_insn;
3339 }
3340#endif
3341 gen_store_gpr(dc, rd, cpu_dst);
3342 break;
3343 } else if (xop == 0x2a) {
3344 if (!supervisor(dc)) {
3345 goto priv_insn;
3346 }
3347 cpu_tmp0 = tcg_temp_new();
3348#ifdef TARGET_SPARC64
3349 rs1 = GET_FIELD(insn, 13, 17);
3350 switch (rs1) {
3351 case 0:
3352 {
3353 TCGv_ptr r_tsptr;
3354
3355 r_tsptr = tcg_temp_new_ptr();
3356 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3357 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3358 offsetof(trap_state, tpc));
3359 }
3360 break;
3361 case 1:
3362 {
3363 TCGv_ptr r_tsptr;
3364
3365 r_tsptr = tcg_temp_new_ptr();
3366 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3367 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3368 offsetof(trap_state, tnpc));
3369 }
3370 break;
3371 case 2:
3372 {
3373 TCGv_ptr r_tsptr;
3374
3375 r_tsptr = tcg_temp_new_ptr();
3376 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3377 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3378 offsetof(trap_state, tstate));
3379 }
3380 break;
3381 case 3:
3382 {
3383 TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3384
3385 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3386 tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3387 offsetof(trap_state, tt));
3388 }
3389 break;
3390 case 4:
3391 {
3392 TCGv_ptr r_tickptr;
3393 TCGv_i32 r_const;
3394
3395 r_tickptr = tcg_temp_new_ptr();
3396 r_const = tcg_constant_i32(dc->mem_idx);
3397 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3398 offsetof(CPUSPARCState, tick));
3399 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3400 gen_io_start();
3401 }
3402 gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3403 r_tickptr, r_const);
3404 if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3405
3406 dc->base.is_jmp = DISAS_EXIT;
3407 }
3408 }
3409 break;
3410 case 5:
3411 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3412 break;
3413 case 6:
3414 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3415 offsetof(CPUSPARCState, pstate));
3416 break;
3417 case 7:
3418 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3419 offsetof(CPUSPARCState, tl));
3420 break;
3421 case 8:
3422 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3423 offsetof(CPUSPARCState, psrpil));
3424 break;
3425 case 9:
3426 gen_helper_rdcwp(cpu_tmp0, cpu_env);
3427 break;
3428 case 10:
3429 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3430 offsetof(CPUSPARCState, cansave));
3431 break;
3432 case 11:
3433 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3434 offsetof(CPUSPARCState, canrestore));
3435 break;
3436 case 12:
3437 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3438 offsetof(CPUSPARCState, cleanwin));
3439 break;
3440 case 13:
3441 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3442 offsetof(CPUSPARCState, otherwin));
3443 break;
3444 case 14:
3445 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3446 offsetof(CPUSPARCState, wstate));
3447 break;
3448 case 16:
3449 CHECK_IU_FEATURE(dc, GL);
3450 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3451 offsetof(CPUSPARCState, gl));
3452 break;
3453 case 26:
3454 CHECK_IU_FEATURE(dc, HYPV);
3455 if (!hypervisor(dc))
3456 goto priv_insn;
3457 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3458 break;
3459 case 31:
3460 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3461 break;
3462 case 15:
3463 default:
3464 goto illegal_insn;
3465 }
3466#else
3467 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3468#endif
3469 gen_store_gpr(dc, rd, cpu_tmp0);
3470 break;
3471#endif
3472#if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3473 } else if (xop == 0x2b) {
3474#ifdef TARGET_SPARC64
3475 gen_helper_flushw(cpu_env);
3476#else
3477 if (!supervisor(dc))
3478 goto priv_insn;
3479 gen_store_gpr(dc, rd, cpu_tbr);
3480#endif
3481 break;
3482#endif
3483 } else if (xop == 0x34) {
3484 if (gen_trap_ifnofpu(dc)) {
3485 goto jmp_insn;
3486 }
3487 gen_op_clear_ieee_excp_and_FTT();
3488 rs1 = GET_FIELD(insn, 13, 17);
3489 rs2 = GET_FIELD(insn, 27, 31);
3490 xop = GET_FIELD(insn, 18, 26);
3491
3492 switch (xop) {
3493 case 0x1:
3494 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3495 gen_store_fpr_F(dc, rd, cpu_src1_32);
3496 break;
3497 case 0x5:
3498 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3499 break;
3500 case 0x9:
3501 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3502 break;
3503 case 0x29:
3504 CHECK_FPU_FEATURE(dc, FSQRT);
3505 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3506 break;
3507 case 0x2a:
3508 CHECK_FPU_FEATURE(dc, FSQRT);
3509 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3510 break;
3511 case 0x2b:
3512 CHECK_FPU_FEATURE(dc, FLOAT128);
3513 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3514 break;
3515 case 0x41:
3516 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3517 break;
3518 case 0x42:
3519 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3520 break;
3521 case 0x43:
3522 CHECK_FPU_FEATURE(dc, FLOAT128);
3523 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3524 break;
3525 case 0x45:
3526 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3527 break;
3528 case 0x46:
3529 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3530 break;
3531 case 0x47:
3532 CHECK_FPU_FEATURE(dc, FLOAT128);
3533 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3534 break;
3535 case 0x49:
3536 CHECK_FPU_FEATURE(dc, FMUL);
3537 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3538 break;
3539 case 0x4a:
3540 CHECK_FPU_FEATURE(dc, FMUL);
3541 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3542 break;
3543 case 0x4b:
3544 CHECK_FPU_FEATURE(dc, FLOAT128);
3545 CHECK_FPU_FEATURE(dc, FMUL);
3546 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3547 break;
3548 case 0x4d:
3549 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3550 break;
3551 case 0x4e:
3552 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3553 break;
3554 case 0x4f:
3555 CHECK_FPU_FEATURE(dc, FLOAT128);
3556 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3557 break;
3558 case 0x69:
3559 CHECK_FPU_FEATURE(dc, FSMULD);
3560 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3561 break;
3562 case 0x6e:
3563 CHECK_FPU_FEATURE(dc, FLOAT128);
3564 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3565 break;
3566 case 0xc4:
3567 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3568 break;
3569 case 0xc6:
3570 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3571 break;
3572 case 0xc7:
3573 CHECK_FPU_FEATURE(dc, FLOAT128);
3574 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3575 break;
3576 case 0xc8:
3577 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3578 break;
3579 case 0xc9:
3580 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3581 break;
3582 case 0xcb:
3583 CHECK_FPU_FEATURE(dc, FLOAT128);
3584 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3585 break;
3586 case 0xcc:
3587 CHECK_FPU_FEATURE(dc, FLOAT128);
3588 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3589 break;
3590 case 0xcd:
3591 CHECK_FPU_FEATURE(dc, FLOAT128);
3592 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3593 break;
3594 case 0xce:
3595 CHECK_FPU_FEATURE(dc, FLOAT128);
3596 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3597 break;
3598 case 0xd1:
3599 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3600 break;
3601 case 0xd2:
3602 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3603 break;
3604 case 0xd3:
3605 CHECK_FPU_FEATURE(dc, FLOAT128);
3606 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3607 break;
3608#ifdef TARGET_SPARC64
3609 case 0x2:
3610 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3611 gen_store_fpr_D(dc, rd, cpu_src1_64);
3612 break;
3613 case 0x3:
3614 CHECK_FPU_FEATURE(dc, FLOAT128);
3615 gen_move_Q(dc, rd, rs2);
3616 break;
3617 case 0x6:
3618 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3619 break;
3620 case 0x7:
3621 CHECK_FPU_FEATURE(dc, FLOAT128);
3622 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3623 break;
3624 case 0xa:
3625 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3626 break;
3627 case 0xb:
3628 CHECK_FPU_FEATURE(dc, FLOAT128);
3629 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3630 break;
3631 case 0x81:
3632 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3633 break;
3634 case 0x82:
3635 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3636 break;
3637 case 0x83:
3638 CHECK_FPU_FEATURE(dc, FLOAT128);
3639 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3640 break;
3641 case 0x84:
3642 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3643 break;
3644 case 0x88:
3645 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3646 break;
3647 case 0x8c:
3648 CHECK_FPU_FEATURE(dc, FLOAT128);
3649 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3650 break;
3651#endif
3652 default:
3653 goto illegal_insn;
3654 }
3655 } else if (xop == 0x35) {
3656#ifdef TARGET_SPARC64
3657 int cond;
3658#endif
3659 if (gen_trap_ifnofpu(dc)) {
3660 goto jmp_insn;
3661 }
3662 gen_op_clear_ieee_excp_and_FTT();
3663 rs1 = GET_FIELD(insn, 13, 17);
3664 rs2 = GET_FIELD(insn, 27, 31);
3665 xop = GET_FIELD(insn, 18, 26);
3666
3667#ifdef TARGET_SPARC64
3668#define FMOVR(sz) \
3669 do { \
3670 DisasCompare cmp; \
3671 cond = GET_FIELD_SP(insn, 10, 12); \
3672 cpu_src1 = get_src1(dc, insn); \
3673 gen_compare_reg(&cmp, cond, cpu_src1); \
3674 gen_fmov##sz(dc, &cmp, rd, rs2); \
3675 } while (0)
3676
3677 if ((xop & 0x11f) == 0x005) {
3678 FMOVR(s);
3679 break;
3680 } else if ((xop & 0x11f) == 0x006) {
3681 FMOVR(d);
3682 break;
3683 } else if ((xop & 0x11f) == 0x007) {
3684 CHECK_FPU_FEATURE(dc, FLOAT128);
3685 FMOVR(q);
3686 break;
3687 }
3688#undef FMOVR
3689#endif
3690 switch (xop) {
3691#ifdef TARGET_SPARC64
3692#define FMOVCC(fcc, sz) \
3693 do { \
3694 DisasCompare cmp; \
3695 cond = GET_FIELD_SP(insn, 14, 17); \
3696 gen_fcompare(&cmp, fcc, cond); \
3697 gen_fmov##sz(dc, &cmp, rd, rs2); \
3698 } while (0)
3699
3700 case 0x001:
3701 FMOVCC(0, s);
3702 break;
3703 case 0x002:
3704 FMOVCC(0, d);
3705 break;
3706 case 0x003:
3707 CHECK_FPU_FEATURE(dc, FLOAT128);
3708 FMOVCC(0, q);
3709 break;
3710 case 0x041:
3711 FMOVCC(1, s);
3712 break;
3713 case 0x042:
3714 FMOVCC(1, d);
3715 break;
3716 case 0x043:
3717 CHECK_FPU_FEATURE(dc, FLOAT128);
3718 FMOVCC(1, q);
3719 break;
3720 case 0x081:
3721 FMOVCC(2, s);
3722 break;
3723 case 0x082:
3724 FMOVCC(2, d);
3725 break;
3726 case 0x083:
3727 CHECK_FPU_FEATURE(dc, FLOAT128);
3728 FMOVCC(2, q);
3729 break;
3730 case 0x0c1:
3731 FMOVCC(3, s);
3732 break;
3733 case 0x0c2:
3734 FMOVCC(3, d);
3735 break;
3736 case 0x0c3:
3737 CHECK_FPU_FEATURE(dc, FLOAT128);
3738 FMOVCC(3, q);
3739 break;
3740#undef FMOVCC
3741#define FMOVCC(xcc, sz) \
3742 do { \
3743 DisasCompare cmp; \
3744 cond = GET_FIELD_SP(insn, 14, 17); \
3745 gen_compare(&cmp, xcc, cond, dc); \
3746 gen_fmov##sz(dc, &cmp, rd, rs2); \
3747 } while (0)
3748
3749 case 0x101:
3750 FMOVCC(0, s);
3751 break;
3752 case 0x102:
3753 FMOVCC(0, d);
3754 break;
3755 case 0x103:
3756 CHECK_FPU_FEATURE(dc, FLOAT128);
3757 FMOVCC(0, q);
3758 break;
3759 case 0x181:
3760 FMOVCC(1, s);
3761 break;
3762 case 0x182:
3763 FMOVCC(1, d);
3764 break;
3765 case 0x183:
3766 CHECK_FPU_FEATURE(dc, FLOAT128);
3767 FMOVCC(1, q);
3768 break;
3769#undef FMOVCC
3770#endif
3771 case 0x51:
3772 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3773 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3774 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3775 break;
3776 case 0x52:
3777 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3778 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3779 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3780 break;
3781 case 0x53:
3782 CHECK_FPU_FEATURE(dc, FLOAT128);
3783 gen_op_load_fpr_QT0(QFPREG(rs1));
3784 gen_op_load_fpr_QT1(QFPREG(rs2));
3785 gen_op_fcmpq(rd & 3);
3786 break;
3787 case 0x55:
3788 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3789 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3790 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3791 break;
3792 case 0x56:
3793 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3794 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3795 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3796 break;
3797 case 0x57:
3798 CHECK_FPU_FEATURE(dc, FLOAT128);
3799 gen_op_load_fpr_QT0(QFPREG(rs1));
3800 gen_op_load_fpr_QT1(QFPREG(rs2));
3801 gen_op_fcmpeq(rd & 3);
3802 break;
3803 default:
3804 goto illegal_insn;
3805 }
3806 } else if (xop == 0x2) {
3807 TCGv dst = gen_dest_gpr(dc, rd);
3808 rs1 = GET_FIELD(insn, 13, 17);
3809 if (rs1 == 0) {
3810
3811 if (IS_IMM) {
3812 simm = GET_FIELDs(insn, 19, 31);
3813 tcg_gen_movi_tl(dst, simm);
3814 gen_store_gpr(dc, rd, dst);
3815 } else {
3816 rs2 = GET_FIELD(insn, 27, 31);
3817 if (rs2 == 0) {
3818 tcg_gen_movi_tl(dst, 0);
3819 gen_store_gpr(dc, rd, dst);
3820 } else {
3821 cpu_src2 = gen_load_gpr(dc, rs2);
3822 gen_store_gpr(dc, rd, cpu_src2);
3823 }
3824 }
3825 } else {
3826 cpu_src1 = get_src1(dc, insn);
3827 if (IS_IMM) {
3828 simm = GET_FIELDs(insn, 19, 31);
3829 tcg_gen_ori_tl(dst, cpu_src1, simm);
3830 gen_store_gpr(dc, rd, dst);
3831 } else {
3832 rs2 = GET_FIELD(insn, 27, 31);
3833 if (rs2 == 0) {
3834
3835 gen_store_gpr(dc, rd, cpu_src1);
3836 } else {
3837 cpu_src2 = gen_load_gpr(dc, rs2);
3838 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3839 gen_store_gpr(dc, rd, dst);
3840 }
3841 }
3842 }
3843#ifdef TARGET_SPARC64
3844 } else if (xop == 0x25) {
3845 cpu_src1 = get_src1(dc, insn);
3846 if (IS_IMM) {
3847 simm = GET_FIELDs(insn, 20, 31);
3848 if (insn & (1 << 12)) {
3849 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3850 } else {
3851 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3852 }
3853 } else {
3854 rs2 = GET_FIELD(insn, 27, 31);
3855 cpu_src2 = gen_load_gpr(dc, rs2);
3856 cpu_tmp0 = tcg_temp_new();
3857 if (insn & (1 << 12)) {
3858 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3859 } else {
3860 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3861 }
3862 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3863 }
3864 gen_store_gpr(dc, rd, cpu_dst);
3865 } else if (xop == 0x26) {
3866 cpu_src1 = get_src1(dc, insn);
3867 if (IS_IMM) {
3868 simm = GET_FIELDs(insn, 20, 31);
3869 if (insn & (1 << 12)) {
3870 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3871 } else {
3872 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3873 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3874 }
3875 } else {
3876 rs2 = GET_FIELD(insn, 27, 31);
3877 cpu_src2 = gen_load_gpr(dc, rs2);
3878 cpu_tmp0 = tcg_temp_new();
3879 if (insn & (1 << 12)) {
3880 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3881 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3882 } else {
3883 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3884 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3885 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3886 }
3887 }
3888 gen_store_gpr(dc, rd, cpu_dst);
3889 } else if (xop == 0x27) {
3890 cpu_src1 = get_src1(dc, insn);
3891 if (IS_IMM) {
3892 simm = GET_FIELDs(insn, 20, 31);
3893 if (insn & (1 << 12)) {
3894 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3895 } else {
3896 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3897 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3898 }
3899 } else {
3900 rs2 = GET_FIELD(insn, 27, 31);
3901 cpu_src2 = gen_load_gpr(dc, rs2);
3902 cpu_tmp0 = tcg_temp_new();
3903 if (insn & (1 << 12)) {
3904 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3905 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3906 } else {
3907 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3908 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3909 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3910 }
3911 }
3912 gen_store_gpr(dc, rd, cpu_dst);
3913#endif
3914 } else if (xop < 0x36) {
3915 if (xop < 0x20) {
3916 cpu_src1 = get_src1(dc, insn);
3917 cpu_src2 = get_src2(dc, insn);
3918 switch (xop & ~0x10) {
3919 case 0x0:
3920 if (xop & 0x10) {
3921 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3922 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3923 dc->cc_op = CC_OP_ADD;
3924 } else {
3925 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3926 }
3927 break;
3928 case 0x1:
3929 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3930 if (xop & 0x10) {
3931 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3932 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3933 dc->cc_op = CC_OP_LOGIC;
3934 }
3935 break;
3936 case 0x2:
3937 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3938 if (xop & 0x10) {
3939 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3940 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3941 dc->cc_op = CC_OP_LOGIC;
3942 }
3943 break;
3944 case 0x3:
3945 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3946 if (xop & 0x10) {
3947 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3948 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3949 dc->cc_op = CC_OP_LOGIC;
3950 }
3951 break;
3952 case 0x4:
3953 if (xop & 0x10) {
3954 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3955 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3956 dc->cc_op = CC_OP_SUB;
3957 } else {
3958 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3959 }
3960 break;
3961 case 0x5:
3962 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3963 if (xop & 0x10) {
3964 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3965 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3966 dc->cc_op = CC_OP_LOGIC;
3967 }
3968 break;
3969 case 0x6:
3970 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3971 if (xop & 0x10) {
3972 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3973 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3974 dc->cc_op = CC_OP_LOGIC;
3975 }
3976 break;
3977 case 0x7:
3978 tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3979 if (xop & 0x10) {
3980 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3981 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3982 dc->cc_op = CC_OP_LOGIC;
3983 }
3984 break;
3985 case 0x8:
3986 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3987 (xop & 0x10));
3988 break;
3989#ifdef TARGET_SPARC64
3990 case 0x9:
3991 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3992 break;
3993#endif
3994 case 0xa:
3995 CHECK_IU_FEATURE(dc, MUL);
3996 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3997 if (xop & 0x10) {
3998 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3999 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4000 dc->cc_op = CC_OP_LOGIC;
4001 }
4002 break;
4003 case 0xb:
4004 CHECK_IU_FEATURE(dc, MUL);
4005 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4006 if (xop & 0x10) {
4007 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4008 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4009 dc->cc_op = CC_OP_LOGIC;
4010 }
4011 break;
4012 case 0xc:
4013 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4014 (xop & 0x10));
4015 break;
4016#ifdef TARGET_SPARC64
4017 case 0xd:
4018 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4019 break;
4020#endif
4021 case 0xe:
4022 CHECK_IU_FEATURE(dc, DIV);
4023 if (xop & 0x10) {
4024 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4025 cpu_src2);
4026 dc->cc_op = CC_OP_DIV;
4027 } else {
4028 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4029 cpu_src2);
4030 }
4031 break;
4032 case 0xf:
4033 CHECK_IU_FEATURE(dc, DIV);
4034 if (xop & 0x10) {
4035 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4036 cpu_src2);
4037 dc->cc_op = CC_OP_DIV;
4038 } else {
4039 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4040 cpu_src2);
4041 }
4042 break;
4043 default:
4044 goto illegal_insn;
4045 }
4046 gen_store_gpr(dc, rd, cpu_dst);
4047 } else {
                cpu_src1 = get_src1(dc, insn);
                cpu_src2 = get_src2(dc, insn);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                    dc->cc_op = CC_OP_TADD;
                    break;
                case 0x21: /* tsubcc */
                    gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                    dc->cc_op = CC_OP_TSUB;
                    break;
                case 0x22: /* taddcctv: helper raises the tag-overflow trap */
                    gen_helper_taddcctv(cpu_dst, cpu_env,
                                        cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    dc->cc_op = CC_OP_TADDTV;
                    break;
                case 0x23: /* tsubcctv: helper raises the tag-overflow trap */
                    gen_helper_tsubcctv(cpu_dst, cpu_env,
                                        cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    dc->cc_op = CC_OP_TSUBTV;
                    break;
                case 0x24: /* mulscc: needs the live PSR (icc.N/V feed the
                              multiply step), hence update_psr() first */
                    update_psr(dc);
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                    dc->cc_op = CC_OP_ADD;
                    break;
#ifndef TARGET_SPARC64
                case 0x25: /* sll (pre-V9: shift count masked to 5 bits) */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        cpu_tmp0 = tcg_temp_new();
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x26: /* srl */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        cpu_tmp0 = tcg_temp_new();
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x27: /* sra */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        cpu_tmp0 = tcg_temp_new();
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
#endif
                case 0x30: /* wr, V9 wrasr: rd selects the target
                              ancillary state register; the source is
                              always rs1 ^ (rs2 or simm13) */
                    {
                        cpu_tmp0 = tcg_temp_new();
                        switch(rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the SPARCv8
                                               manual; treated as nop
                                               here */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8 manual;
                                               nop except for the LEON3
                                               power-down case below */
                            if ((rd == 0x13) && (dc->def->features &
                                                 CPU_FEATURE_POWERDOWN)) {
                                /* LEON3 power-down */
                                save_state(dc);
                                gen_helper_power_down(cpu_env);
                            }
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_env, cpu_tmp0);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
                            tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                            offsetof(CPUSPARCState, asi));
                            /* End TB to notice the changed ASI */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                            dc->fprs_dirty = 0;
                            /* End TB: FPRS change affects FPU enable
                               checks for subsequent insns */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
                            break;
                        case 0xf: /* V9 sir: nop if not supervisor */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc)) {
                                ; /* XXX: software-initiated reset not
                                     implemented */
                            }
#endif
                            break;
                        case 0x13: /* graphics status (GSR) */
                            if (gen_trap_ifnofpu(dc)) {
                                goto jmp_insn;
                            }
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_env, cpu_tmp0);
                            break;
                        case 0x15: /* softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_env, cpu_tmp0);
                            break;
                        case 0x16: /* softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_env, cpu_tmp0);
                            break;
                        case 0x17: /* tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, tick));
                                if (tb_cflags(dc->base.tb) &
                                       CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                /* End TB to handle timer interrupt */
                                dc->base.is_jmp = DISAS_EXIT;
                            }
                            break;
                        case 0x18: /* system tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, stick));
                                if (tb_cflags(dc->base.tb) &
                                       CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                /* End TB to handle timer interrupt */
                                dc->base.is_jmp = DISAS_EXIT;
                            }
                            break;
                        case 0x19: /* system tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, stick));
                                if (tb_cflags(dc->base.tb) &
                                       CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                /* End TB to handle timer interrupt */
                                dc->base.is_jmp = DISAS_EXIT;
                            }
                            break;

                        case 0x10: /* performance control */
                        case 0x11: /* performance instrumentation
                                      counter */
                        case 0x12: /* dispatch control */
                            /* not implemented: fall through to
                               illegal_insn */
#endif
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: /* saved */
                            gen_helper_saved(cpu_env);
                            break;
                        case 1: /* restored */
                            gen_helper_restored(cpu_env);
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            /* XXX: not implemented; fall through to
                               illegal_insn */
                        default:
                            goto illegal_insn;
                        }
#else
                        cpu_tmp0 = tcg_temp_new();
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        gen_helper_wrpsr(cpu_env, cpu_tmp0);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                        dc->cc_op = CC_OP_FLAGS;
                        /* End TB: PSR write can change privilege/window
                           state the translator depends on */
                        save_state(dc);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(NULL, 0);
                        dc->base.is_jmp = DISAS_NORETURN;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr: rd selects the privileged
                              register on V9; plain WIM write on V8 */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        cpu_tmp0 = tcg_temp_new();
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: /* tpc */
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tpc));
                            }
                            break;
                        case 1: /* tnpc */
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tnpc));
                            }
                            break;
                        case 2: /* tstate */
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state,
                                                       tstate));
                            }
                            break;
                        case 3: /* tt */
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                offsetof(trap_state, tt));
                            }
                            break;
                        case 4: /* tick */
                            {
                                TCGv_ptr r_tickptr;

                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, tick));
                                if (tb_cflags(dc->base.tb) &
                                       CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                /* End TB to handle timer interrupt */
                                dc->base.is_jmp = DISAS_EXIT;
                            }
                            break;
                        case 5: /* tba */
                            tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                            break;
                        case 6: /* pstate */
                            save_state(dc);
                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                gen_io_start();
                            }
                            gen_helper_wrpstate(cpu_env, cpu_tmp0);
                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                /* I/O ops in icount mode must end the TB */
                                dc->base.is_jmp = DISAS_EXIT;
                            }
                            dc->npc = DYNAMIC_PC;
                            break;
                        case 7: /* tl */
                            save_state(dc);
                            tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                            offsetof(CPUSPARCState, tl));
                            dc->npc = DYNAMIC_PC;
                            break;
                        case 8: /* pil */
                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                gen_io_start();
                            }
                            gen_helper_wrpil(cpu_env, cpu_tmp0);
                            if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                /* I/O ops in icount mode must end the TB */
                                dc->base.is_jmp = DISAS_EXIT;
                            }
                            break;
                        case 9: /* cwp */
                            gen_helper_wrcwp(cpu_env, cpu_tmp0);
                            break;
                        case 10: /* cansave */
                            tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                            offsetof(CPUSPARCState,
                                                     cansave));
                            break;
                        case 11: /* canrestore */
                            tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                            offsetof(CPUSPARCState,
                                                     canrestore));
                            break;
                        case 12: /* cleanwin */
                            tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                            offsetof(CPUSPARCState,
                                                     cleanwin));
                            break;
                        case 13: /* otherwin */
                            tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                            offsetof(CPUSPARCState,
                                                     otherwin));
                            break;
                        case 14: /* wstate */
                            tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                            offsetof(CPUSPARCState,
                                                     wstate));
                            break;
                        case 16: /* UA2005 gl */
                            CHECK_IU_FEATURE(dc, GL);
                            gen_helper_wrgl(cpu_env, cpu_tmp0);
                            break;
                        case 26: /* UA2005 strand status */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        /* V8 wrwim: mask off bits beyond the implemented
                           window count */
                        tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                        if (dc->def->nwindows != 32) {
                            tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                            (1 << dc->def->nwindows) - 1);
                        }
#endif
                    }
                    break;
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                        CHECK_IU_FEATURE(dc, HYPV);
                        if (!hypervisor(dc))
                            goto priv_insn;
                        cpu_tmp0 = tcg_temp_new();
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        switch (rd) {
                        case 0: /* hpstate */
                            tcg_gen_st_i64(cpu_tmp0, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    hpstate));
                            /* End TB: HPSTATE change affects translation
                               assumptions */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
                            break;
                        case 1: /* htstate */
                            /* XXX: htstate write not implemented (nop) */
                            break;
                        case 3: /* hintp */
                            tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                            break;
                        case 5: /* htba */
                            tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                            break;
                        case 31: /* hstick_cmpr */
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, hstick));
                                if (tb_cflags(dc->base.tb) &
                                       CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_hstick_cmpr);
                                /* End TB to handle timer interrupt */
                                dc->base.is_jmp = DISAS_EXIT;
                            }
                            break;
                        case 6: /* hver is read-only */
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        DisasCompare cmp;
                        TCGv dst;

                        if (insn & (1 << 18)) {
                            /* Integer condition codes: only cc=0 and
                               cc=2 are valid selectors here */
                            if (cc == 0) {
                                gen_compare(&cmp, 0, cond, dc);
                            } else if (cc == 2) {
                                gen_compare(&cmp, 1, cond, dc);
                            } else {
                                goto illegal_insn;
                            }
                        } else {
                            /* Floating-point condition codes */
                            gen_fcompare(&cmp, cc, cond);
                        }

                        /* The get_src2 above loaded the normal 13-bit
                           immediate field, not the 11-bit field we have
                           in movcc.  But it did handle the reg case.  */
                        if (IS_IMM) {
                            simm = GET_FIELD_SPs(insn, 0, 10);
                            tcg_gen_movi_tl(cpu_src2, simm);
                        }

                        /* Conditionally replace rd with src2; otherwise
                           keep rd's old value */
                        dst = gen_load_gpr(dc, rd);
                        tcg_gen_movcond_tl(cmp.cond, dst,
                                           cmp.c1, cmp.c2,
                                           cpu_src2, dst);
                        gen_store_gpr(dc, rd, dst);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x2f: /* V9 movr: move on contents of rs1 */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        DisasCompare cmp;
                        TCGv dst;

                        gen_compare_reg(&cmp, cond, cpu_src1);

                        /* The get_src2 above loaded the normal 13-bit
                           immediate field, not the 10-bit field we have
                           in movr.  But it did handle the reg case.  */
                        if (IS_IMM) {
                            simm = GET_FIELD_SPs(insn, 0, 9);
                            tcg_gen_movi_tl(cpu_src2, simm);
                        }

                        dst = gen_load_gpr(dc, rd);
                        tcg_gen_movcond_tl(cmp.cond, dst,
                                           cmp.c1, cmp.c2,
                                           cpu_src2, dst);
                        gen_store_gpr(dc, rd, dst);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
4587 }
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc)) {
                goto jmp_insn;
            }

            switch (opf) {
            case 0x000: /* VIS I edge8cc */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x001: /* VIS II edge8n */
                CHECK_FPU_FEATURE(dc, VIS2);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x002: /* VIS I edge8lcc */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x003: /* VIS II edge8ln */
                CHECK_FPU_FEATURE(dc, VIS2);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x004: /* VIS I edge16cc */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x005: /* VIS II edge16n */
                CHECK_FPU_FEATURE(dc, VIS2);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x006: /* VIS I edge16lcc */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x007: /* VIS II edge16ln */
                CHECK_FPU_FEATURE(dc, VIS2);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x008: /* VIS I edge32cc */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x009: /* VIS II edge32n */
                CHECK_FPU_FEATURE(dc, VIS2);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x00a: /* VIS I edge32lcc */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x00b: /* VIS II edge32ln */
                CHECK_FPU_FEATURE(dc, VIS2);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x010: /* VIS I array8 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16: array8 scaled by 2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32: array8 scaled by 4 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x018: /* VIS I alignaddr */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x01a: /* VIS I alignaddrl */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
            case 0x019: /* VIS II bmask: sum also deposited into GSR
                           upper half */
                CHECK_FPU_FEATURE(dc, VIS2);
                cpu_src1 = gen_load_gpr(dc, rs1);
                cpu_src2 = gen_load_gpr(dc, rs2);
                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                gen_store_gpr(dc, rd, cpu_dst);
                break;
4727 case 0x020:
4728 CHECK_FPU_FEATURE(dc, VIS1);
4729 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4730 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4731 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4732 gen_store_gpr(dc, rd, cpu_dst);
4733 break;
4734 case 0x022:
4735 CHECK_FPU_FEATURE(dc, VIS1);
4736 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4737 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4738 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4739 gen_store_gpr(dc, rd, cpu_dst);
4740 break;
4741 case 0x024:
4742 CHECK_FPU_FEATURE(dc, VIS1);
4743 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4744 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4745 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4746 gen_store_gpr(dc, rd, cpu_dst);
4747 break;
4748 case 0x026:
4749 CHECK_FPU_FEATURE(dc, VIS1);
4750 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4751 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4752 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4753 gen_store_gpr(dc, rd, cpu_dst);
4754 break;
4755 case 0x028:
4756 CHECK_FPU_FEATURE(dc, VIS1);
4757 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4758 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4759 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4760 gen_store_gpr(dc, rd, cpu_dst);
4761 break;
4762 case 0x02a:
4763 CHECK_FPU_FEATURE(dc, VIS1);
4764 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4765 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4766 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4767 gen_store_gpr(dc, rd, cpu_dst);
4768 break;
4769 case 0x02c:
4770 CHECK_FPU_FEATURE(dc, VIS1);
4771 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4772 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4773 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4774 gen_store_gpr(dc, rd, cpu_dst);
4775 break;
4776 case 0x02e:
4777 CHECK_FPU_FEATURE(dc, VIS1);
4778 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4779 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4780 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4781 gen_store_gpr(dc, rd, cpu_dst);
4782 break;
4783 case 0x031:
4784 CHECK_FPU_FEATURE(dc, VIS1);
4785 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4786 break;
4787 case 0x033:
4788 CHECK_FPU_FEATURE(dc, VIS1);
4789 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4790 break;
4791 case 0x035:
4792 CHECK_FPU_FEATURE(dc, VIS1);
4793 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4794 break;
4795 case 0x036:
4796 CHECK_FPU_FEATURE(dc, VIS1);
4797 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4798 break;
4799 case 0x037:
4800 CHECK_FPU_FEATURE(dc, VIS1);
4801 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4802 break;
4803 case 0x038:
4804 CHECK_FPU_FEATURE(dc, VIS1);
4805 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4806 break;
4807 case 0x039:
4808 CHECK_FPU_FEATURE(dc, VIS1);
4809 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4810 break;
4811 case 0x03a:
4812 CHECK_FPU_FEATURE(dc, VIS1);
4813 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4814 break;
4815 case 0x03b:
4816 CHECK_FPU_FEATURE(dc, VIS1);
4817 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4818 cpu_dst_32 = gen_dest_fpr_F(dc);
4819 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4820 gen_store_fpr_F(dc, rd, cpu_dst_32);
4821 break;
4822 case 0x03d:
4823 CHECK_FPU_FEATURE(dc, VIS1);
4824 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4825 cpu_dst_32 = gen_dest_fpr_F(dc);
4826 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4827 gen_store_fpr_F(dc, rd, cpu_dst_32);
4828 break;
4829 case 0x03e:
4830 CHECK_FPU_FEATURE(dc, VIS1);
4831 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4832 break;
4833 case 0x048:
4834 CHECK_FPU_FEATURE(dc, VIS1);
4835 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4836 break;
4837 case 0x04b:
4838 CHECK_FPU_FEATURE(dc, VIS1);
4839 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4840 break;
4841 case 0x04c:
4842 CHECK_FPU_FEATURE(dc, VIS2);
4843 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4844 break;
4845 case 0x04d:
4846 CHECK_FPU_FEATURE(dc, VIS1);
4847 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4848 break;
4849 case 0x050:
4850 CHECK_FPU_FEATURE(dc, VIS1);
4851 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4852 break;
4853 case 0x051:
4854 CHECK_FPU_FEATURE(dc, VIS1);
4855 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4856 break;
4857 case 0x052:
4858 CHECK_FPU_FEATURE(dc, VIS1);
4859 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4860 break;
4861 case 0x053:
4862 CHECK_FPU_FEATURE(dc, VIS1);
4863 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4864 break;
4865 case 0x054:
4866 CHECK_FPU_FEATURE(dc, VIS1);
4867 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4868 break;
4869 case 0x055:
4870 CHECK_FPU_FEATURE(dc, VIS1);
4871 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4872 break;
4873 case 0x056:
4874 CHECK_FPU_FEATURE(dc, VIS1);
4875 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4876 break;
4877 case 0x057:
4878 CHECK_FPU_FEATURE(dc, VIS1);
4879 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4880 break;
4881 case 0x060:
4882 CHECK_FPU_FEATURE(dc, VIS1);
4883 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4884 tcg_gen_movi_i64(cpu_dst_64, 0);
4885 gen_store_fpr_D(dc, rd, cpu_dst_64);
4886 break;
4887 case 0x061:
4888 CHECK_FPU_FEATURE(dc, VIS1);
4889 cpu_dst_32 = gen_dest_fpr_F(dc);
4890 tcg_gen_movi_i32(cpu_dst_32, 0);
4891 gen_store_fpr_F(dc, rd, cpu_dst_32);
4892 break;
4893 case 0x062:
4894 CHECK_FPU_FEATURE(dc, VIS1);
4895 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4896 break;
4897 case 0x063:
4898 CHECK_FPU_FEATURE(dc, VIS1);
4899 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4900 break;
4901 case 0x064:
4902 CHECK_FPU_FEATURE(dc, VIS1);
4903 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4904 break;
4905 case 0x065:
4906 CHECK_FPU_FEATURE(dc, VIS1);
4907 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4908 break;
4909 case 0x066:
4910 CHECK_FPU_FEATURE(dc, VIS1);
4911 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4912 break;
4913 case 0x067:
4914 CHECK_FPU_FEATURE(dc, VIS1);
4915 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4916 break;
4917 case 0x068:
4918 CHECK_FPU_FEATURE(dc, VIS1);
4919 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4920 break;
4921 case 0x069:
4922 CHECK_FPU_FEATURE(dc, VIS1);
4923 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4924 break;
4925 case 0x06a:
4926 CHECK_FPU_FEATURE(dc, VIS1);
4927 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4928 break;
4929 case 0x06b:
4930 CHECK_FPU_FEATURE(dc, VIS1);
4931 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4932 break;
4933 case 0x06c:
4934 CHECK_FPU_FEATURE(dc, VIS1);
4935 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4936 break;
4937 case 0x06d:
4938 CHECK_FPU_FEATURE(dc, VIS1);
4939 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4940 break;
4941 case 0x06e:
4942 CHECK_FPU_FEATURE(dc, VIS1);
4943 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4944 break;
4945 case 0x06f:
4946 CHECK_FPU_FEATURE(dc, VIS1);
4947 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4948 break;
4949 case 0x070:
4950 CHECK_FPU_FEATURE(dc, VIS1);
4951 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4952 break;
4953 case 0x071:
4954 CHECK_FPU_FEATURE(dc, VIS1);
4955 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4956 break;
4957 case 0x072:
4958 CHECK_FPU_FEATURE(dc, VIS1);
4959 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4960 break;
4961 case 0x073:
4962 CHECK_FPU_FEATURE(dc, VIS1);
4963 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4964 break;
4965 case 0x074:
4966 CHECK_FPU_FEATURE(dc, VIS1);
4967 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4968 gen_store_fpr_D(dc, rd, cpu_src1_64);
4969 break;
4970 case 0x075:
4971 CHECK_FPU_FEATURE(dc, VIS1);
4972 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4973 gen_store_fpr_F(dc, rd, cpu_src1_32);
4974 break;
4975 case 0x076:
4976 CHECK_FPU_FEATURE(dc, VIS1);
4977 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4978 break;
4979 case 0x077:
4980 CHECK_FPU_FEATURE(dc, VIS1);
4981 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4982 break;
4983 case 0x078:
4984 CHECK_FPU_FEATURE(dc, VIS1);
4985 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4986 gen_store_fpr_D(dc, rd, cpu_src1_64);
4987 break;
4988 case 0x079:
4989 CHECK_FPU_FEATURE(dc, VIS1);
4990 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4991 gen_store_fpr_F(dc, rd, cpu_src1_32);
4992 break;
4993 case 0x07a:
4994 CHECK_FPU_FEATURE(dc, VIS1);
4995 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4996 break;
4997 case 0x07b:
4998 CHECK_FPU_FEATURE(dc, VIS1);
4999 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5000 break;
5001 case 0x07c:
5002 CHECK_FPU_FEATURE(dc, VIS1);
5003 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5004 break;
5005 case 0x07d:
5006 CHECK_FPU_FEATURE(dc, VIS1);
5007 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5008 break;
5009 case 0x07e:
5010 CHECK_FPU_FEATURE(dc, VIS1);
5011 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5012 tcg_gen_movi_i64(cpu_dst_64, -1);
5013 gen_store_fpr_D(dc, rd, cpu_dst_64);
5014 break;
5015 case 0x07f:
5016 CHECK_FPU_FEATURE(dc, VIS1);
5017 cpu_dst_32 = gen_dest_fpr_F(dc);
5018 tcg_gen_movi_i32(cpu_dst_32, -1);
5019 gen_store_fpr_F(dc, rd, cpu_dst_32);
5020 break;
5021 case 0x080:
5022 case 0x081:
5023
5024 goto illegal_insn;
5025 default:
5026 goto illegal_insn;
5027 }
5028#else
5029 goto ncp_insn;
5030#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            save_state(dc);
            cpu_src1 = get_src1(dc, insn);
            cpu_tmp0 = tcg_temp_new();
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
                } else {
                    tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
                }
            }
            /* restore the caller's window, then jump to the computed
               target (alignment-checked) */
            gen_helper_restore(cpu_env);
            gen_mov_pc_npc(dc);
            gen_check_align(cpu_tmp0, 3);
            tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
5060#endif
        } else {
            /* remaining xop values: compute effective address
               rs1 + (simm13 | rs2) into cpu_tmp0 first */
            cpu_src1 = get_src1(dc, insn);
            cpu_tmp0 = tcg_temp_new();
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
                } else {
                    tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
                }
            }
            switch (xop) {
            case 0x38:      /* jmpl */
                {
                    /* rd receives the address of the jmpl itself */
                    TCGv t = gen_dest_gpr(dc, rd);
                    tcg_gen_movi_tl(t, dc->pc);
                    gen_store_gpr(dc, rd, t);

                    gen_mov_pc_npc(dc);
                    gen_check_align(cpu_tmp0, 3);
                    gen_address_mask(dc, cpu_tmp0);
                    tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39:      /* rett, V9 return */
                {
                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc);
                    gen_check_align(cpu_tmp0, 3);
                    tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett(cpu_env);
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                /* nop: nothing to flush in this implementation */
                break;
            case 0x3c:      /* save: new window, then rd = address */
                gen_helper_save(cpu_env);
                gen_store_gpr(dc, rd, cpu_tmp0);
                break;
            case 0x3d:      /* restore: previous window, then rd = address */
                gen_helper_restore(cpu_env);
                gen_store_gpr(dc, rd, cpu_tmp0);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e:      /* V9 done/retry */
                {
                    switch (rd) {
                    case 0: /* done */
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                            gen_io_start();
                        }
                        gen_helper_done(cpu_env);
                        goto jmp_insn;
                    case 1: /* retry */
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                            gen_io_start();
                        }
                        gen_helper_retry(cpu_env);
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
5149 }
5150 break;
5151 }
5152 break;
5153 case 3:
5154 {
5155 unsigned int xop = GET_FIELD(insn, 7, 12);
5156
5157
5158 TCGv cpu_addr = tcg_temp_new();
5159
5160 tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5161 if (xop == 0x3c || xop == 0x3e) {
5162
5163 } else if (IS_IMM) {
5164 simm = GET_FIELDs(insn, 19, 31);
5165 if (simm != 0) {
5166 tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5167 }
5168 } else {
5169 rs2 = GET_FIELD(insn, 27, 31);
5170 if (rs2 != 0) {
5171 tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5172 }
5173 }
5174 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5175 (xop > 0x17 && xop <= 0x1d ) ||
5176 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5177 TCGv cpu_val = gen_dest_gpr(dc, rd);
5178
5179 switch (xop) {
5180 case 0x0:
5181 gen_address_mask(dc, cpu_addr);
5182 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5183 break;
5184 case 0x1:
5185 gen_address_mask(dc, cpu_addr);
5186 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5187 break;
5188 case 0x2:
5189 gen_address_mask(dc, cpu_addr);
5190 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5191 break;
5192 case 0x3:
5193 if (rd & 1)
5194 goto illegal_insn;
5195 else {
5196 TCGv_i64 t64;
5197
5198 gen_address_mask(dc, cpu_addr);
5199 t64 = tcg_temp_new_i64();
5200 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5201 tcg_gen_trunc_i64_tl(cpu_val, t64);
5202 tcg_gen_ext32u_tl(cpu_val, cpu_val);
5203 gen_store_gpr(dc, rd + 1, cpu_val);
5204 tcg_gen_shri_i64(t64, t64, 32);
5205 tcg_gen_trunc_i64_tl(cpu_val, t64);
5206 tcg_gen_ext32u_tl(cpu_val, cpu_val);
5207 }
5208 break;
5209 case 0x9:
5210 gen_address_mask(dc, cpu_addr);
5211 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5212 break;
5213 case 0xa:
5214 gen_address_mask(dc, cpu_addr);
5215 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5216 break;
5217 case 0xd:
5218 gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5219 break;
5220 case 0x0f:
5221
5222 CHECK_IU_FEATURE(dc, SWAP);
5223 cpu_src1 = gen_load_gpr(dc, rd);
5224 gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5225 dc->mem_idx, MO_TEUL);
5226 break;
5227#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5228 case 0x10:
5229 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5230 break;
5231 case 0x11:
5232 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5233 break;
5234 case 0x12:
5235 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5236 break;
5237 case 0x13:
5238 if (rd & 1) {
5239 goto illegal_insn;
5240 }
5241 gen_ldda_asi(dc, cpu_addr, insn, rd);
5242 goto skip_move;
5243 case 0x19:
5244 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5245 break;
5246 case 0x1a:
5247 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5248 break;
5249 case 0x1d:
5250 gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5251 break;
5252 case 0x1f:
5253
5254 CHECK_IU_FEATURE(dc, SWAP);
5255 cpu_src1 = gen_load_gpr(dc, rd);
5256 gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5257 break;
5258
5259#ifndef TARGET_SPARC64
5260 case 0x30:
5261 case 0x31:
5262 case 0x33:
5263 goto ncp_insn;
5264#endif
5265#endif
5266#ifdef TARGET_SPARC64
5267 case 0x08:
5268 gen_address_mask(dc, cpu_addr);
5269 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5270 break;
5271 case 0x0b:
5272 gen_address_mask(dc, cpu_addr);
5273 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5274 break;
5275 case 0x18:
5276 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5277 break;
5278 case 0x1b:
5279 gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5280 break;
5281 case 0x2d:
5282 goto skip_move;
5283 case 0x30:
5284 if (gen_trap_ifnofpu(dc)) {
5285 goto jmp_insn;
5286 }
5287 gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5288 gen_update_fprs_dirty(dc, rd);
5289 goto skip_move;
5290 case 0x33:
5291 if (gen_trap_ifnofpu(dc)) {
5292 goto jmp_insn;
5293 }
5294 gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5295 gen_update_fprs_dirty(dc, DFPREG(rd));
5296 goto skip_move;
5297 case 0x3d:
5298 goto skip_move;
5299 case 0x32:
5300 CHECK_FPU_FEATURE(dc, FLOAT128);
5301 if (gen_trap_ifnofpu(dc)) {
5302 goto jmp_insn;
5303 }
5304 gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5305 gen_update_fprs_dirty(dc, QFPREG(rd));
5306 goto skip_move;
5307#endif
5308 default:
5309 goto illegal_insn;
5310 }
5311 gen_store_gpr(dc, rd, cpu_val);
5312#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5313 skip_move: ;
5314#endif
5315 } else if (xop >= 0x20 && xop < 0x24) {
5316 if (gen_trap_ifnofpu(dc)) {
5317 goto jmp_insn;
5318 }
5319 switch (xop) {
5320 case 0x20:
5321 gen_address_mask(dc, cpu_addr);
5322 cpu_dst_32 = gen_dest_fpr_F(dc);
5323 tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5324 dc->mem_idx, MO_TEUL);
5325 gen_store_fpr_F(dc, rd, cpu_dst_32);
5326 break;
5327 case 0x21:
5328#ifdef TARGET_SPARC64
5329 gen_address_mask(dc, cpu_addr);
5330 if (rd == 1) {
5331 TCGv_i64 t64 = tcg_temp_new_i64();
5332 tcg_gen_qemu_ld_i64(t64, cpu_addr,
5333 dc->mem_idx, MO_TEUQ);
5334 gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5335 break;
5336 }
5337#endif
5338 cpu_dst_32 = tcg_temp_new_i32();
5339 tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5340 dc->mem_idx, MO_TEUL);
5341 gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5342 break;
5343 case 0x22:
5344 CHECK_FPU_FEATURE(dc, FLOAT128);
5345 gen_address_mask(dc, cpu_addr);
5346 cpu_src1_64 = tcg_temp_new_i64();
5347 tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5348 MO_TEUQ | MO_ALIGN_4);
5349 tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5350 cpu_src2_64 = tcg_temp_new_i64();
5351 tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5352 MO_TEUQ | MO_ALIGN_4);
5353 gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5354 break;
5355 case 0x23:
5356 gen_address_mask(dc, cpu_addr);
5357 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5358 tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5359 MO_TEUQ | MO_ALIGN_4);
5360 gen_store_fpr_D(dc, rd, cpu_dst_64);
5361 break;
5362 default:
5363 goto illegal_insn;
5364 }
5365 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5366 xop == 0xe || xop == 0x1e) {
5367 TCGv cpu_val = gen_load_gpr(dc, rd);
5368
5369 switch (xop) {
5370 case 0x4:
5371 gen_address_mask(dc, cpu_addr);
5372 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5373 break;
5374 case 0x5:
5375 gen_address_mask(dc, cpu_addr);
5376 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5377 break;
5378 case 0x6:
5379 gen_address_mask(dc, cpu_addr);
5380 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5381 break;
5382 case 0x7:
5383 if (rd & 1)
5384 goto illegal_insn;
5385 else {
5386 TCGv_i64 t64;
5387 TCGv lo;
5388
5389 gen_address_mask(dc, cpu_addr);
5390 lo = gen_load_gpr(dc, rd + 1);
5391 t64 = tcg_temp_new_i64();
5392 tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5393 tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5394 }
5395 break;
5396#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5397 case 0x14:
5398 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5399 break;
5400 case 0x15:
5401 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5402 break;
5403 case 0x16:
5404 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5405 break;
5406 case 0x17:
5407 if (rd & 1) {
5408 goto illegal_insn;
5409 }
5410 gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5411 break;
5412#endif
5413#ifdef TARGET_SPARC64
5414 case 0x0e:
5415 gen_address_mask(dc, cpu_addr);
5416 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5417 break;
5418 case 0x1e:
5419 gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5420 break;
5421#endif
5422 default:
5423 goto illegal_insn;
5424 }
5425 } else if (xop > 0x23 && xop < 0x28) {
5426 if (gen_trap_ifnofpu(dc)) {
5427 goto jmp_insn;
5428 }
5429 switch (xop) {
5430 case 0x24:
5431 gen_address_mask(dc, cpu_addr);
5432 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5433 tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5434 dc->mem_idx, MO_TEUL);
5435 break;
5436 case 0x25:
5437 {
5438#ifdef TARGET_SPARC64
5439 gen_address_mask(dc, cpu_addr);
5440 if (rd == 1) {
5441 tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5442 break;
5443 }
5444#endif
5445 tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5446 }
5447 break;
5448 case 0x26:
5449#ifdef TARGET_SPARC64
5450
5451 CHECK_FPU_FEATURE(dc, FLOAT128);
5452 gen_address_mask(dc, cpu_addr);
5453
5454
5455
5456
5457
5458 cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5459 tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5460 dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5461 tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5462 cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5463 tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5464 dc->mem_idx, MO_TEUQ);
5465 break;
5466#else
5467
5468#if defined(CONFIG_USER_ONLY)
5469 goto illegal_insn;
5470#else
5471 if (!supervisor(dc))
5472 goto priv_insn;
5473 if (gen_trap_ifnofpu(dc)) {
5474 goto jmp_insn;
5475 }
5476 goto nfq_insn;
5477#endif
5478#endif
5479 case 0x27:
5480 gen_address_mask(dc, cpu_addr);
5481 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5482 tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5483 MO_TEUQ | MO_ALIGN_4);
5484 break;
5485 default:
5486 goto illegal_insn;
5487 }
5488 } else if (xop > 0x33 && xop < 0x3f) {
5489 switch (xop) {
5490#ifdef TARGET_SPARC64
5491 case 0x34:
5492 if (gen_trap_ifnofpu(dc)) {
5493 goto jmp_insn;
5494 }
5495 gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5496 break;
5497 case 0x36:
5498 {
5499 CHECK_FPU_FEATURE(dc, FLOAT128);
5500 if (gen_trap_ifnofpu(dc)) {
5501 goto jmp_insn;
5502 }
5503 gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5504 }
5505 break;
5506 case 0x37:
5507 if (gen_trap_ifnofpu(dc)) {
5508 goto jmp_insn;
5509 }
5510 gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5511 break;
5512 case 0x3e:
5513 rs2 = GET_FIELD(insn, 27, 31);
5514 cpu_src2 = gen_load_gpr(dc, rs2);
5515 gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5516 break;
5517#else
5518 case 0x34:
5519 case 0x35:
5520 case 0x36:
5521 case 0x37:
5522 goto ncp_insn;
5523#endif
5524#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5525 case 0x3c:
5526#ifndef TARGET_SPARC64
5527 CHECK_IU_FEATURE(dc, CASA);
5528#endif
5529 rs2 = GET_FIELD(insn, 27, 31);
5530 cpu_src2 = gen_load_gpr(dc, rs2);
5531 gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5532 break;
5533#endif
5534 default:
5535 goto illegal_insn;
5536 }
5537 } else {
5538 goto illegal_insn;
5539 }
5540 }
5541 break;
5542 }
5543
5544 if (dc->npc == DYNAMIC_PC) {
5545 dc->pc = DYNAMIC_PC;
5546 gen_op_next_insn();
5547 } else if (dc->npc == JUMP_PC) {
5548
5549 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5550 dc->base.is_jmp = DISAS_NORETURN;
5551 } else {
5552 dc->pc = dc->npc;
5553 dc->npc = dc->npc + 4;
5554 }
5555 jmp_insn:
5556 return;
5557 illegal_insn:
5558 gen_exception(dc, TT_ILL_INSN);
5559 return;
5560 unimp_flush:
5561 gen_exception(dc, TT_UNIMP_FLUSH);
5562 return;
5563#if !defined(CONFIG_USER_ONLY)
5564 priv_insn:
5565 gen_exception(dc, TT_PRIV_INSN);
5566 return;
5567#endif
5568 nfpu_insn:
5569 gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5570 return;
5571#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5572 nfq_insn:
5573 gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5574 return;
5575#endif
5576#ifndef TARGET_SPARC64
5577 ncp_insn:
5578 gen_exception(dc, TT_NCP_INSN);
5579 return;
5580#endif
5581}
5582
/*
 * Per-TB translator setup: seed the DisasContext from the TB's
 * pc/cs_base/flags before any instruction is translated.
 */
static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUSPARCState *env = cs->env_ptr;
    int bound;

    dc->pc = dc->base.pc_first;
    /* The npc of the first insn is carried in cs_base (delay slots). */
    dc->npc = (target_ulong)dc->base.tb->cs_base;
    /* Condition codes start unknown; computed lazily as insns demand. */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
    dc->def = &env->def;
    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif

    /*
     * Never let a TB cross a page boundary:
     * -(pc_first | TARGET_PAGE_MASK) / 4 is the count of 4-byte
     * instructions remaining on the current guest page.
     */
    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
    dc->base.max_insns = MIN(dc->base.max_insns, bound);
}
5613
/* No per-TB code needs to be emitted before the first instruction. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5617
/*
 * Record pc/npc for this insn so the state can be rebuilt on an
 * exception (see sparc_restore_state_to_opc).
 */
static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    if (dc->npc & JUMP_PC) {
        /*
         * A conditional delayed branch is pending: encode the taken
         * target tagged with JUMP_PC in the npc slot.  The not-taken
         * target must be the sequential pc + 4, which is what
         * sparc_restore_state_to_opc reconstructs from env->cond.
         */
        assert(dc->jump_pc[1] == dc->pc + 4);
        tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
    } else {
        tcg_gen_insn_start(dc->pc, dc->npc);
    }
}
5629
5630static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5631{
5632 DisasContext *dc = container_of(dcbase, DisasContext, base);
5633 CPUSPARCState *env = cs->env_ptr;
5634 unsigned int insn;
5635
5636 insn = translator_ldl(env, &dc->base, dc->pc);
5637 dc->base.pc_next += 4;
5638 disas_sparc_insn(dc, insn);
5639
5640 if (dc->base.is_jmp == DISAS_NORETURN) {
5641 return;
5642 }
5643 if (dc->pc != dc->base.pc_next) {
5644 dc->base.is_jmp = DISAS_TOO_MANY;
5645 }
5646}
5647
5648static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5649{
5650 DisasContext *dc = container_of(dcbase, DisasContext, base);
5651
5652 switch (dc->base.is_jmp) {
5653 case DISAS_NEXT:
5654 case DISAS_TOO_MANY:
5655 if (dc->pc != DYNAMIC_PC &&
5656 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5657
5658 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5659 } else {
5660 if (dc->pc != DYNAMIC_PC) {
5661 tcg_gen_movi_tl(cpu_pc, dc->pc);
5662 }
5663 save_npc(dc);
5664 tcg_gen_exit_tb(NULL, 0);
5665 }
5666 break;
5667
5668 case DISAS_NORETURN:
5669 break;
5670
5671 case DISAS_EXIT:
5672
5673 save_state(dc);
5674 tcg_gen_exit_tb(NULL, 0);
5675 break;
5676
5677 default:
5678 g_assert_not_reached();
5679 }
5680}
5681
5682static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5683 CPUState *cpu, FILE *logfile)
5684{
5685 fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5686 target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5687}
5688
/* Hook table consumed by the generic translator loop. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5697
/*
 * Translate a block of guest code starting at @pc into TCG ops,
 * driving the generic translator loop with the SPARC hooks above.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext dc = {};

    translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
}
5705
/*
 * One-time registration of all TCG globals backing the SPARC CPU
 * state: condition codes, pc/npc, the integer register file and the
 * FP register file.
 */
void sparc_tcg_init(void)
{
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* cpu_fpr[] holds i64 views of env->fpr[], named after the even
       single-precision register each entry starts at. */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals: (TCGv_i32 slot, env offset, debug name). */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-sized globals, same layout as r32[] above. */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 is handled specially elsewhere (reads as zero), so it gets
       no TCG global. */
    cpu_regs[0] = NULL;
    /* %g1..%g7 live at fixed offsets in env. */
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* %o/%l/%i registers are addressed relative to cpu_regwptr rather
       than at fixed env offsets (register-window indirection). */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
5791
5792void sparc_restore_state_to_opc(CPUState *cs,
5793 const TranslationBlock *tb,
5794 const uint64_t *data)
5795{
5796 SPARCCPU *cpu = SPARC_CPU(cs);
5797 CPUSPARCState *env = &cpu->env;
5798 target_ulong pc = data[0];
5799 target_ulong npc = data[1];
5800
5801 env->pc = pc;
5802 if (npc == DYNAMIC_PC) {
5803
5804 } else if (npc & JUMP_PC) {
5805
5806 if (env->cond) {
5807 env->npc = npc & ~3;
5808 } else {
5809 env->npc = pc + 4;
5810 }
5811 } else {
5812 env->npc = npc;
5813 }
5814}
5815