1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include "cpu.h"
21#include "disas/disas.h"
22#include "qemu/host-utils.h"
23#include "tcg-op.h"
24#include "exec/cpu_ldst.h"
25
26#include "exec/helper-proto.h"
27#include "exec/helper-gen.h"
28
29#include "trace-tcg.h"
30
31
32#undef ALPHA_DEBUG_DISAS
33#define CONFIG_SOFTFLOAT_INLINE
34
35#ifdef ALPHA_DEBUG_DISAS
36# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
37#else
38# define LOG_DISAS(...) do { } while (0)
39#endif
40
/* Per-TranslationBlock translation state for the Alpha front end.  */
typedef struct DisasContext DisasContext;
struct DisasContext {
    struct TranslationBlock *tb;
    uint64_t pc;                /* address of the next insn (already advanced
                                   past the insn being translated)  */
#ifndef CONFIG_USER_ONLY
    uint64_t palbr;             /* PALcode base register */
#endif
    int mem_idx;                /* MMU index for memory accesses */

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;

    /* The implver value for this CPU.  */
    int implver;

    /* The set of registers active in the current context:
       either cpu_std_ir or cpu_pal_ir.  */
    TCGv *ir;

    /* Temporaries for $31 and $f31 as source and destination.  */
    TCGv zero;
    TCGv sink;
    /* Temporary for immediate constants.  */
    TCGv lit;

    bool singlestep_enabled;
};
69
70
71
72
/* Return values from translate_one, indicating the state of the TB.
   NO_EXIT means translation should continue with the next insn.  */
typedef enum {
    NO_EXIT,

    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,

    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,

    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC.  cpu_pc is stale and must be refreshed by the
       caller before resuming execution.  */
    EXIT_PC_STALE,

    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;
92
93
/* TCG globals mapping onto fixed CPUAlphaState fields.  */
static TCGv_ptr cpu_env;
static TCGv cpu_std_ir[31];         /* integer registers, normal bank */
static TCGv cpu_fir[31];            /* floating point registers */
static TCGv cpu_pc;
static TCGv cpu_lock_addr;          /* LDx_L/STx_C lock state */
static TCGv cpu_lock_st_addr;
static TCGv cpu_lock_value;

#ifndef CONFIG_USER_ONLY
/* Integer register bank with the eight PALmode shadow registers
   spliced in (see alpha_translate_init).  */
static TCGv cpu_pal_ir[31];
#endif
105
106#include "exec/gen-icount.h"
107
108void alpha_translate_init(void)
109{
110#define DEF_VAR(V) { &cpu_##V, #V, offsetof(CPUAlphaState, V) }
111
112 typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
113 static const GlobalVar vars[] = {
114 DEF_VAR(pc),
115 DEF_VAR(lock_addr),
116 DEF_VAR(lock_st_addr),
117 DEF_VAR(lock_value),
118 };
119
120#undef DEF_VAR
121
122
123 static const char greg_names[31][4] = {
124 "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
125 "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
126 "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
127 "t10", "t11", "ra", "t12", "at", "gp", "sp"
128 };
129 static const char freg_names[31][4] = {
130 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
131 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
132 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
133 "f24", "f25", "f26", "f27", "f28", "f29", "f30"
134 };
135#ifndef CONFIG_USER_ONLY
136 static const char shadow_names[8][8] = {
137 "pal_t7", "pal_s0", "pal_s1", "pal_s2",
138 "pal_s3", "pal_s4", "pal_s5", "pal_t11"
139 };
140#endif
141
142 static bool done_init = 0;
143 int i;
144
145 if (done_init) {
146 return;
147 }
148 done_init = 1;
149
150 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
151
152 for (i = 0; i < 31; i++) {
153 cpu_std_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
154 offsetof(CPUAlphaState, ir[i]),
155 greg_names[i]);
156 }
157
158 for (i = 0; i < 31; i++) {
159 cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
160 offsetof(CPUAlphaState, fir[i]),
161 freg_names[i]);
162 }
163
164#ifndef CONFIG_USER_ONLY
165 memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
166 for (i = 0; i < 8; i++) {
167 int r = (i == 7 ? 25 : i + 8);
168 cpu_pal_ir[r] = tcg_global_mem_new_i64(TCG_AREG0,
169 offsetof(CPUAlphaState,
170 shadow[i]),
171 shadow_names[i]);
172 }
173#endif
174
175 for (i = 0; i < ARRAY_SIZE(vars); ++i) {
176 const GlobalVar *v = &vars[i];
177 *v->var = tcg_global_mem_new_i64(TCG_AREG0, v->ofs, v->name);
178 }
179}
180
181static TCGv load_zero(DisasContext *ctx)
182{
183 if (TCGV_IS_UNUSED_I64(ctx->zero)) {
184 ctx->zero = tcg_const_i64(0);
185 }
186 return ctx->zero;
187}
188
189static TCGv dest_sink(DisasContext *ctx)
190{
191 if (TCGV_IS_UNUSED_I64(ctx->sink)) {
192 ctx->sink = tcg_temp_new();
193 }
194 return ctx->sink;
195}
196
197static TCGv load_gpr(DisasContext *ctx, unsigned reg)
198{
199 if (likely(reg < 31)) {
200 return ctx->ir[reg];
201 } else {
202 return load_zero(ctx);
203 }
204}
205
206static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
207 uint8_t lit, bool islit)
208{
209 if (islit) {
210 ctx->lit = tcg_const_i64(lit);
211 return ctx->lit;
212 } else if (likely(reg < 31)) {
213 return ctx->ir[reg];
214 } else {
215 return load_zero(ctx);
216 }
217}
218
219static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
220{
221 if (likely(reg < 31)) {
222 return ctx->ir[reg];
223 } else {
224 return dest_sink(ctx);
225 }
226}
227
228static TCGv load_fpr(DisasContext *ctx, unsigned reg)
229{
230 if (likely(reg < 31)) {
231 return cpu_fir[reg];
232 } else {
233 return load_zero(ctx);
234 }
235}
236
237static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
238{
239 if (likely(reg < 31)) {
240 return cpu_fir[reg];
241 } else {
242 return dest_sink(ctx);
243 }
244}
245
/* Emit a call to the exception helper with EXCEPTION/ERROR_CODE as
   immediate arguments.  Does not write cpu_pc; use gen_excp for that.  */
static void gen_excp_1(int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(cpu_env, tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}
256
/* Raise EXCEPTION at the current insn.  cpu_pc is synced first so the
   exception handler sees the correct PC.  Ends the TB.  */
static ExitStatus gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_excp_1(exception, error_code);
    return EXIT_NORETURN;
}
263
/* Raise an illegal-instruction (OPCDEC) exception.  */
static inline ExitStatus gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}
268
/* Load a VAX F-float: 32-bit little-endian load, then expand the
   memory format to the 64-bit register format via helper.  */
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}
276
/* Load a VAX G-float: 64-bit little-endian load, then convert the
   memory format to the register format via helper.  */
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}
284
/* Load an IEEE S-float: 32-bit little-endian load, then expand to the
   64-bit register format via helper.  */
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}
292
/* LDL_L: load-locked longword.  Record the address and the loaded
   value for the matching store-conditional.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
299
/* LDQ_L: load-locked quadword.  Record the address and the loaded
   value for the matching store-conditional.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
306
/* Emit a load RA = *(RB + DISP16) using TCG_GEN_QEMU_LOAD for the
   access.  FP selects the fp register bank; CLEAR masks the low
   three address bits (LDQ_U).  */
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
339
/* Store a VAX F-float: compress the register format to the 32-bit
   memory format via helper, then little-endian store.  */
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}
347
/* Store a VAX G-float: convert the register format to the memory
   format via helper, then 64-bit little-endian store.  */
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
    tcg_temp_free(tmp);
}
355
/* Store an IEEE S-float: compress the register format to 32 bits via
   helper, then little-endian store.  */
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}
363
/* Emit a store *(RB + DISP16) = RA using TCG_GEN_QEMU_STORE for the
   access.  FP selects the fp register bank; CLEAR masks the low
   three address bits (STQ_U).  */
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
389
/* STL_C/STQ_C: store RA at RB+DISP16 iff the lock from the preceding
   load-locked is still valid; write success (1/0) back into RA.  */
static ExitStatus gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                        int32_t disp16, int quad)
{
    TCGv addr;

    if (ra == 31) {
        /* ??? Don't bother storing anything.  The user can't tell
           the difference, since the zero register always reads zero.  */
        return NO_EXIT;
    }

#if defined(CONFIG_USER_ONLY)
    addr = cpu_lock_st_addr;
#else
    addr = tcg_temp_local_new();
#endif

    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);

#if defined(CONFIG_USER_ONLY)
    /* ??? This is handled via a complicated version of compare-and-swap
       in the cpu loop.  Hopefully one day we'll have a real CAS opcode
       in TCG so that this isn't necessary.  */
    return gen_excp(ctx, quad ? EXCP_STQ_C : EXCP_STL_C, ra);
#else
    /* ??? In system mode we are never multi-threaded, so CAS can be
       implemented via a non-atomic load-compare-store sequence.  */
    {
        TCGLabel *lab_fail, *lab_done;
        TCGv val;

        lab_fail = gen_new_label();
        lab_done = gen_new_label();
        tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);

        val = tcg_temp_new();
        tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, quad ? MO_LEQ : MO_LESL);
        tcg_gen_brcond_i64(TCG_COND_NE, val, cpu_lock_value, lab_fail);

        tcg_gen_qemu_st_i64(ctx->ir[ra], addr, ctx->mem_idx,
                            quad ? MO_LEQ : MO_LEUL);
        tcg_gen_movi_i64(ctx->ir[ra], 1);
        tcg_gen_br(lab_done);

        gen_set_label(lab_fail);
        tcg_gen_movi_i64(ctx->ir[ra], 0);

        gen_set_label(lab_done);
        /* Invalidate the lock in all cases.  */
        tcg_gen_movi_i64(cpu_lock_addr, -1);

        tcg_temp_free(addr);
        return NO_EXIT;
    }
#endif
}
445
/* True if ADDR lies in the kernel-mode superpage region: we must not
   be in user mode, the address is negative (sign-extended), virtual
   address bits <42:41> equal 2, and the address is properly
   sign-extended from TARGET_VIRT_ADDR_SPACE_BITS.  */
static bool in_superpage(DisasContext *ctx, int64_t addr)
{
    return ((ctx->tb->flags & TB_FLAGS_USER_MODE) == 0
            && addr < 0
            && ((addr >> 41) & 3) == 2
            && addr >> TARGET_VIRT_ADDR_SPACE_BITS == addr >> 63);
}
453
/* Decide whether we may chain directly to DEST with goto_tb.  */
static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Suppress goto_tb in the case of single-stepping and IO.  */
    if ((ctx->tb->cflags & CF_LAST_IO)
        || ctx->singlestep_enabled || singlestep) {
        return false;
    }
    /* If the destination is in the superpage, the page perms can't change.  */
    if (in_superpage(ctx, dest)) {
        return true;
    }
    /* Check for the dest on the same page as the start of the TB.  */
    return ((ctx->tb->pc ^ dest) & TARGET_PAGE_MASK) == 0;
}
468
469static ExitStatus gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
470{
471 uint64_t dest = ctx->pc + (disp << 2);
472
473 if (ra != 31) {
474 tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
475 }
476
477
478 if (disp == 0) {
479 return 0;
480 } else if (use_goto_tb(ctx, dest)) {
481 tcg_gen_goto_tb(0);
482 tcg_gen_movi_i64(cpu_pc, dest);
483 tcg_gen_exit_tb((uintptr_t)ctx->tb);
484 return EXIT_GOTO_TB;
485 } else {
486 tcg_gen_movi_i64(cpu_pc, dest);
487 return EXIT_PC_UPDATED;
488 }
489}
490
491static ExitStatus gen_bcond_internal(DisasContext *ctx, TCGCond cond,
492 TCGv cmp, int32_t disp)
493{
494 uint64_t dest = ctx->pc + (disp << 2);
495 TCGLabel *lab_true = gen_new_label();
496
497 if (use_goto_tb(ctx, dest)) {
498 tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);
499
500 tcg_gen_goto_tb(0);
501 tcg_gen_movi_i64(cpu_pc, ctx->pc);
502 tcg_gen_exit_tb((uintptr_t)ctx->tb);
503
504 gen_set_label(lab_true);
505 tcg_gen_goto_tb(1);
506 tcg_gen_movi_i64(cpu_pc, dest);
507 tcg_gen_exit_tb((uintptr_t)ctx->tb + 1);
508
509 return EXIT_GOTO_TB;
510 } else {
511 TCGv_i64 z = tcg_const_i64(0);
512 TCGv_i64 d = tcg_const_i64(dest);
513 TCGv_i64 p = tcg_const_i64(ctx->pc);
514
515 tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);
516
517 tcg_temp_free_i64(z);
518 tcg_temp_free_i64(d);
519 tcg_temp_free_i64(p);
520 return EXIT_PC_UPDATED;
521 }
522}
523
/* Integer conditional branch.  MASK selects the BLBC/BLBS form,
   which tests only the low bit of RA.  */
static ExitStatus gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                            int32_t disp, int mask)
{
    TCGv cmp_tmp;

    if (mask) {
        cmp_tmp = tcg_temp_new();
        tcg_gen_andi_i64(cmp_tmp, load_gpr(ctx, ra), 1);
    } else {
        cmp_tmp = load_gpr(ctx, ra);
    }

    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}
538
539
540
/* Fold -0.0 for comparison with COND.  Alpha fp branches compare the
   raw register bits against zero, so -0.0 must be made to compare
   equal to +0.0.  */
static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
570
/* Floating-point conditional branch: compare FRA (with -0.0 folded
   per COND) against zero.  NOTE(review): cmp_tmp is never explicitly
   freed here; presumably TCG reclaims temporaries when the TB is
   finalized — verify against the TCG temp lifetime rules.  */
static ExitStatus gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}
578
/* FCMOVxx: if FRA (with -0.0 folded per COND) satisfies COND against
   zero, copy FRB into FRC, else leave FRC unchanged.  */
static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}
592
/* IEEE instruction qualifiers, encoded in fn11.  */
#define QUAL_RM_N 0x080 /* Round mode nearest even */
#define QUAL_RM_C 0x000 /* Round mode chopped */
#define QUAL_RM_M 0x040 /* Round mode minus infinity */
#define QUAL_RM_D 0x0c0 /* Round mode dynamic */
#define QUAL_RM_MASK 0x0c0

#define QUAL_U 0x100 /* Underflow enable (fp output) */
#define QUAL_V 0x100 /* Overflow enable (int output) */
#define QUAL_S 0x400 /* Software completion enable */
#define QUAL_I 0x200 /* Inexact detection enable */
603
/* Set the softfloat rounding mode according to the RM qualifier bits
   of FN11, caching the last value set in this TB to avoid redundant
   stores.  */
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        /* Dynamic rounding: read the mode from the FPCR.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "fpu/softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
643
/* Set the softfloat flush-to-zero flag according to the underflow
   qualifier of FN11, caching the last value set in this TB.  */
static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
673
/* Fetch fp register REG as an IEEE input, applying input-exception
   checks as required by the software-completion qualifier.  */
static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}
699
/* Raise any pending fp exceptions for the insn just emitted, masking
   off the exception classes disabled by the FN11 qualifiers.  */
static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}
734
/* CVTLQ: unpack the 32-bit longword from its register storage format
   (bits <63:62> and <58:29>) into a sign-extended 64-bit integer.  */
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_sari_i64(tmp, vb, 32);
    tcg_gen_shri_i64(vc, vb, 29);
    tcg_gen_andi_i64(tmp, tmp, (int32_t)0xc0000000);
    tcg_gen_andi_i64(vc, vc, 0x3fffffff);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(tmp);
}
749
/* Emit a one-operand IEEE arithmetic operation FRC = HELPER(FRB),
   honouring the rounding, flush-to-zero and exception qualifiers.  */
static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}
764
/* Expand one-operand IEEE wrappers around gen_ieee_arith2.  */
#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)
775
/* CVTTQ: convert T-float to quadword integer.  */
static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    /* No need to set flushzero, since we have an integer output.  */
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}
794
795static void gen_ieee_intcvt(DisasContext *ctx,
796 void (*helper)(TCGv, TCGv_ptr, TCGv),
797 int rb, int rc, int fn11)
798{
799 TCGv vb, vc;
800
801 gen_qual_roundmode(ctx, fn11);
802 vb = load_fpr(ctx, rb);
803 vc = dest_fpr(ctx, rc);
804
805
806
807
808 if (fn11 & QUAL_I) {
809 helper(vc, cpu_env, vb);
810 gen_fp_exc_raise(rc, fn11);
811 } else {
812 helper(vc, cpu_env, vb);
813 }
814}
815
/* Expand integer-to-float conversion wrappers around gen_ieee_intcvt.  */
#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)
824
/* VC = (VA & MASK) | (VB & ~MASK), with VA optionally complemented
   first (INV_A).  Building block for the fp sign-copy insns.  */
static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}
842
/* Emit a two-operand IEEE arithmetic operation FRC = HELPER(FRA, FRB),
   honouring the rounding, flush-to-zero and exception qualifiers.  */
static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
859
/* Expand two-operand IEEE wrappers around gen_ieee_arith3.  */
#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)
874
/* Emit an IEEE comparison FRC = HELPER(FRA, FRB).  Comparisons do not
   round or flush to zero, so only the input checks are applied.  */
static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
888
/* Expand IEEE comparison wrappers around gen_ieee_compare.  */
#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)
899
/* Expand the 8-bit ZAPNOT literal into a 64-bit byte mask: bit I of
   LIT selects byte I of the result (0xff if set, 0x00 if clear).  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int byte;

    for (byte = 7; byte >= 0; --byte) {
        mask <<= 8;
        if (lit & (1u << byte)) {
            mask |= 0xff;
        }
    }
    return mask;
}
912
913
914
915
/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}
939
940
/* EXTWH, EXTLH, EXTQH */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        /* Shift left by (64 - lit*8) mod 64.  */
        tcg_gen_shli_i64(vc, va, (64 - lit * 8) & 0x3f);
    } else {
        TCGv tmp = tcg_temp_new();
        /* Shift count is (64 - (RB & 7) * 8) mod 64 == -(RB * 8) & 63.  */
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}
956
957
/* EXTBL, EXTWL, EXTLL, EXTQL */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        tcg_gen_shri_i64(vc, va, (lit & 7) * 8);
    } else {
        TCGv tmp = tcg_temp_new();
        /* Shift right by (RB & 7) * 8 bits.  */
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}
972
973
/* INSWH, INSLH, INSQH */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask and extract
       bits <15:8> and apply that zap at the end.  This is equivalent to simply
       performing the zap first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        lit &= 7;
        if (unlikely(lit == 0)) {
            /* A shift of 64 is illegal in TCG; produce zero directly.  */
            tcg_gen_movi_i64(vc, 0);
        } else {
            tcg_gen_shri_i64(vc, tmp, 64 - lit * 8);
        }
    } else {
        TCGv shift = tcg_temp_new();

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.  Do this
           portably by splitting the shift into two parts: shift_count - 1
           and 1.  Arrange for the -1 by using ones-complement instead of
           twos-complement in the negation: ~(B * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}
1009
1010
/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask
       the same number of byte slots as the data and apply the zap
       at the end.  This is equivalent to simply performing the zap
       first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        tcg_gen_shli_i64(vc, tmp, (lit & 7) * 8);
    } else {
        TCGv shift = tcg_temp_new();
        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}
1033
1034
/* MSKWH, MSKLH, MSKQH */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1066
1067
/* MSKBL, MSKWL, MSKLL, MSKQL */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* Clear the bytes selected by byte_mask shifted by (RB & 7).  */
        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1088
/* RC/RS: read the interrupt flag into RA (unless RA is $31), then
   store SET as the new flag value.  */
static void gen_rx(DisasContext *ctx, int ra, int set)
{
    TCGv_i32 tmp;

    if (ra != 31) {
        tcg_gen_ld8u_i64(ctx->ir[ra], cpu_env,
                         offsetof(CPUAlphaState, intr_flag));
    }

    tmp = tcg_const_i32(set);
    tcg_gen_st8_i32(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
    tcg_temp_free_i32(tmp);
}
1102
/* CALL_PAL: emulate the well-known PALcode entry points inline where
   possible; otherwise vector through the PALcode image at palbr.  */
static ExitStatus gen_call_pal(DisasContext *ctx, int palcode)
{
    /* Unprivileged PAL call.  */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB: no-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return NO_EXIT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL call; requires kernel mode.  */
    if (palcode < 0x40 && (ctx->tb->flags & TB_FLAGS_USER_MODE) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH: no-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA: no-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35: {
            /* SWPIPL */
            TCGv tmp;

            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));

            /* But make sure and store only the 3 IPL bits from the user.  */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, ps));
            tcg_temp_free(tmp);
            break;
        }

        case 0x36:
            /* RDPS */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));
            break;
        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return NO_EXIT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->pc;
        uint64_t entry = ctx->palbr;

        /* Bit 0 of exc_addr records that we were already in PAL mode.  */
        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

        /* Privileged entries at palbr+0x1000, unprivileged at +0x2000.  */
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination is running in PALmode, we don't really
           need the page permissions check.  We'll see the existence of
           the page when we create the TB, and we'll flush all TBs if
           we change the PAL base register.  */
        if (!ctx->singlestep_enabled && !(ctx->tb->cflags & CF_LAST_IO)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb((uintptr_t)ctx->tb);
            return EXIT_GOTO_TB;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return EXIT_PC_UPDATED;
        }
    }
#endif
}
1247
1248#ifndef CONFIG_USER_ONLY
1249
/* Flags or'd into the env offset returned by cpu_pr_data to indicate
   a sub-quadword access size.  */
#define PR_BYTE 0x100000
#define PR_LONG 0x200000

/* Return the CPUAlphaState offset of processor register PR, tagged
   with PR_BYTE/PR_LONG for sub-quadword fields; 0 if unimplemented
   (read-zero, write-ignore).  */
static int cpu_pr_data(int pr)
{
    switch (pr) {
    case 0: return offsetof(CPUAlphaState, ps) | PR_BYTE;
    case 1: return offsetof(CPUAlphaState, fen) | PR_BYTE;
    case 2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
    case 3: return offsetof(CPUAlphaState, trap_arg0);
    case 4: return offsetof(CPUAlphaState, trap_arg1);
    case 5: return offsetof(CPUAlphaState, trap_arg2);
    case 6: return offsetof(CPUAlphaState, exc_addr);
    case 7: return offsetof(CPUAlphaState, palbr);
    case 8: return offsetof(CPUAlphaState, ptbr);
    case 9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 40 ... 63:
        return offsetof(CPUAlphaState, scratch[pr - 40]);

    case 251:
        return offsetof(CPUAlphaState, alarm_expire);
    }
    return 0;
}
1278
/* MFPR: read processor register REGNO into VA.  */
static ExitStatus gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (use_icount) {
            /* These are I/O-ish for icount: bracket with io_start/end
               and end the TB afterward.  */
            gen_io_start();
            helper(va);
            gen_io_end();
            return EXIT_PC_STALE;
        } else {
            helper(va);
        }
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_BYTE) {
            tcg_gen_ld8u_i64(va, cpu_env, data & ~PR_BYTE);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return NO_EXIT;
}
1325
/* MTPR: write VB into processor register REGNO.  */
static ExitStatus gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    TCGv tmp;
    int data;

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT: halt the CPU and raise EXCP_HLT to leave the cpu loop.  */
        tmp = tcg_const_i64(1);
        tcg_gen_st32_i64(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                       offsetof(CPUState, halted));
        return gen_excp(ctx, EXCP_HLT, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return EXIT_PC_STALE;

    case 251:
        /* ALARM */
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the TBs
           that ended with a CALL_PAL.  Since the base register usually only
           changes during boot, flushing everything works well.  */
        gen_helper_tb_flush(cpu_env);
        return EXIT_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_BYTE) {
                tcg_gen_st8_i64(vb, cpu_env, data & ~PR_BYTE);
            } else if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return NO_EXIT;
}
1392#endif
1393
/* Reject the insn if the literal form was used.  */
#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Reject the insn unless the TB flag FLAG is set (feature gating).  */
#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tb->flags & (FLAG)) == 0) {   \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Reject the insn unless the given register field is $31.  */
#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)
1414
/*
 * Translate one Alpha instruction to TCG ops.
 *
 * Decodes every field of INSN up front, then dispatches on the 6-bit
 * major opcode.  Returns an ExitStatus telling the caller whether
 * translation may continue (NO_EXIT) or how the TB must end.
 * Undecodable encodings funnel to the invalid_opc label at the bottom
 * of the switch, which raises the invalid-opcode exception.
 */
static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    ExitStatus ret;

    /* Decode all instruction fields */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);

    /* R31 always reads as zero; treat it as the literal 0.  */
    if (rb == 31 && !islit) {
        islit = true;
        lit = 0;
    }

    ret = NO_EXIT;
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        ret = gen_call_pal(ctx, insn & 0x03ffffff);
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;

    case 0x09:
        /* LDAH */
        disp16 = (uint32_t)disp16 << 16;
        /* fall through */
    case 0x08:
        /* LDA */
        va = dest_gpr(ctx, ra);
        /* It's worth special-casing immediate loads.  */
        if (rb == 31) {
            tcg_gen_movi_i64(va, disp16);
        } else {
            tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
        }
        break;

    case 0x0A:
        /* LDBU */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
        break;
    case 0x0E:
        /* STB */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
        break;

    case 0x10:
        /* Integer arithmetic */
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (ra == 31) {
            if (fn7 == 0x00) {
                /* Special case ADDL as SEXTL.  */
                tcg_gen_ext32s_i64(vc, vb);
                break;
            }
            if (fn7 == 0x29) {
                /* Special case SUBQ as NEGQ.  */
                tcg_gen_neg_i64(vc, vb);
                break;
            }
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* ADDL */
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x02:
            /* S4ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x09:
            /* SUBL */
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x0B:
            /* S4SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x0F:
            /* CMPBGE */
            if (ra == 31) {
                /* Special case 0 >= X as X == 0.  */
                gen_helper_cmpbe0(vc, vb);
            } else {
                gen_helper_cmpbge(vc, va, vb);
            }
            break;
        case 0x12:
            /* S8ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1B:
            /* S8SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1D:
            /* CMPULT */
            tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
            break;
        case 0x20:
            /* ADDQ */
            tcg_gen_add_i64(vc, va, vb);
            break;
        case 0x22:
            /* S4ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x29:
            /* SUBQ */
            tcg_gen_sub_i64(vc, va, vb);
            break;
        case 0x2B:
            /* S4SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x2D:
            /* CMPEQ */
            tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
            break;
        case 0x32:
            /* S8ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3B:
            /* S8SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3D:
            /* CMPULE */
            tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
            break;
        case 0x40:
            /* ADDL/V -- 32-bit add with overflow trap */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_add_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x49:
            /* SUBL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_sub_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x4D:
            /* CMPLT */
            tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
            break;
        case 0x60:
            /* ADDQ/V -- overflow iff operands agree in sign and the
               result differs; compute that sign bit into TMP.  */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_eqv_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x69:
            /* SUBQ/V -- overflow iff operands differ in sign and the
               result's sign differs from the minuend's.  */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_xor_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x6D:
            /* CMPLE */
            tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x11:
        /* Logical / conditional move */
        if (fn7 == 0x20) {
            if (rc == 31) {
                /* Special case BIS as NOP.  */
                break;
            }
            if (ra == 31) {
                /* Special case BIS as MOV.  */
                vc = dest_gpr(ctx, rc);
                if (islit) {
                    tcg_gen_movi_i64(vc, lit);
                } else {
                    tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
                }
                break;
            }
        }

        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (fn7 == 0x28 && ra == 31) {
            /* Special case ORNOT as NOT.  */
            tcg_gen_not_i64(vc, vb);
            break;
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* AND */
            tcg_gen_and_i64(vc, va, vb);
            break;
        case 0x08:
            /* BIC */
            tcg_gen_andc_i64(vc, va, vb);
            break;
        case 0x14:
            /* CMOVLBS -- move if low bit of RA set */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x16:
            /* CMOVLBC -- move if low bit of RA clear */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x20:
            /* BIS */
            tcg_gen_or_i64(vc, va, vb);
            break;
        case 0x24:
            /* CMOVEQ */
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x26:
            /* CMOVNE */
            tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x28:
            /* ORNOT */
            tcg_gen_orc_i64(vc, va, vb);
            break;
        case 0x40:
            /* XOR */
            tcg_gen_xor_i64(vc, va, vb);
            break;
        case 0x44:
            /* CMOVLT */
            tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x46:
            /* CMOVGE */
            tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x48:
            /* EQV */
            tcg_gen_eqv_i64(vc, va, vb);
            break;
        case 0x61:
            /* AMASK -- report implemented architecture extensions */
            REQUIRE_REG_31(ra);
            {
                uint64_t amask = ctx->tb->flags >> TB_FLAGS_AMASK_SHIFT;
                tcg_gen_andi_i64(vc, vb, ~amask);
            }
            break;
        case 0x64:
            /* CMOVLE */
            tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x66:
            /* CMOVGT */
            tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x6C:
            /* IMPLVER */
            REQUIRE_REG_31(ra);
            tcg_gen_movi_i64(vc, ctx->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x12:
        /* Byte-manipulation (shift/extract/insert/mask) */
        vc = dest_gpr(ctx, rc);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x0B:
            /* INSBL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x12:
            /* MSKWL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x1B:
            /* INSWL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x22:
            /* MSKLL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x2B:
            /* INSLL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x30:
            /* ZAP */
            if (islit) {
                gen_zapnoti(vc, va, ~lit);
            } else {
                gen_helper_zap(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x31:
            /* ZAPNOT */
            if (islit) {
                gen_zapnoti(vc, va, lit);
            } else {
                gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x32:
            /* MSKQL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x34:
            /* SRL -- shift count is the low 6 bits of RB */
            if (islit) {
                tcg_gen_shri_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shr_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x39:
            /* SLL */
            if (islit) {
                tcg_gen_shli_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shl_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x3C:
            /* SRA */
            if (islit) {
                tcg_gen_sari_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_sar_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x57:
            /* INSWH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x62:
            /* MSKLH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x67:
            /* INSLH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x72:
            /* MSKQH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x77:
            /* INSQH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x13:
        /* Multiplies */
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* MULL */
            tcg_gen_mul_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x20:
            /* MULQ */
            tcg_gen_mul_i64(vc, va, vb);
            break;
        case 0x30:
            /* UMULH -- keep only the high 64 bits of the product */
            tmp = tcg_temp_new();
            tcg_gen_mulu2_i64(tmp, vc, va, vb);
            tcg_temp_free(tmp);
            break;
        case 0x40:
            /* MULL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_mul_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x60:
            /* MULQ/V -- overflow iff the high half is not the sign
               extension of the low half.  */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_muls2_i64(vc, tmp, va, vb);
            tcg_gen_sari_i64(tmp2, vc, 63);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x14:
        /* Integer <-> FP register moves and square roots (FIX ext) */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
        vc = dest_fpr(ctx, rc);
        switch (fpfn) {    /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_s(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x0A:
            /* SQRTF */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtf(vc, cpu_env, vb);
            break;
        case 0x0B:
            /* SQRTS */
            REQUIRE_REG_31(ra);
            gen_sqrts(ctx, rb, rc, fn11);
            break;
        case 0x14:
            /* ITOFF */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_f(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x24:
            /* ITOFT */
            REQUIRE_REG_31(rb);
            va = load_gpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        case 0x2A:
            /* SQRTG */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtg(vc, cpu_env, vb);
            break;
        case 0x02B:
            /* SQRTT */
            REQUIRE_REG_31(ra);
            gen_sqrtt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        vc = dest_fpr(ctx, rc);
        vb = load_fpr(ctx, rb);
        va = load_fpr(ctx, ra);
        switch (fpfn) {    /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_helper_addf(vc, cpu_env, va, vb);
            break;
        case 0x01:
            /* SUBF */
            gen_helper_subf(vc, cpu_env, va, vb);
            break;
        case 0x02:
            /* MULF */
            gen_helper_mulf(vc, cpu_env, va, vb);
            break;
        case 0x03:
            /* DIVF */
            gen_helper_divf(vc, cpu_env, va, vb);
            break;
        case 0x1E:
            /* CVTDG -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x20:
            /* ADDG */
            gen_helper_addg(vc, cpu_env, va, vb);
            break;
        case 0x21:
            /* SUBG */
            gen_helper_subg(vc, cpu_env, va, vb);
            break;
        case 0x22:
            /* MULG */
            gen_helper_mulg(vc, cpu_env, va, vb);
            break;
        case 0x23:
            /* DIVG */
            gen_helper_divg(vc, cpu_env, va, vb);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_helper_cmpgeq(vc, cpu_env, va, vb);
            break;
        case 0x26:
            /* CMPGLT */
            gen_helper_cmpglt(vc, cpu_env, va, vb);
            break;
        case 0x27:
            /* CMPGLE */
            gen_helper_cmpgle(vc, cpu_env, va, vb);
            break;
        case 0x2C:
            /* CVTGF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgf(vc, cpu_env, vb);
            break;
        case 0x2D:
            /* CVTGD -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x2F:
            /* CVTGQ */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgq(vc, cpu_env, vb);
            break;
        case 0x3C:
            /* CVTQF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqf(vc, cpu_env, vb);
            break;
        case 0x3E:
            /* CVTQG */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqg(vc, cpu_env, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x16:
        /* IEEE floating-point */
        switch (fpfn) {    /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_adds(ctx, ra, rb, rc, fn11);
            break;
        case 0x01:
            /* SUBS */
            gen_subs(ctx, ra, rb, rc, fn11);
            break;
        case 0x02:
            /* MULS */
            gen_muls(ctx, ra, rb, rc, fn11);
            break;
        case 0x03:
            /* DIVS */
            gen_divs(ctx, ra, rb, rc, fn11);
            break;
        case 0x20:
            /* ADDT */
            gen_addt(ctx, ra, rb, rc, fn11);
            break;
        case 0x21:
            /* SUBT */
            gen_subt(ctx, ra, rb, rc, fn11);
            break;
        case 0x22:
            /* MULT */
            gen_mult(ctx, ra, rb, rc, fn11);
            break;
        case 0x23:
            /* DIVT */
            gen_divt(ctx, ra, rb, rc, fn11);
            break;
        case 0x24:
            /* CMPTUN */
            gen_cmptun(ctx, ra, rb, rc, fn11);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_cmpteq(ctx, ra, rb, rc, fn11);
            break;
        case 0x26:
            /* CMPTLT */
            gen_cmptlt(ctx, ra, rb, rc, fn11);
            break;
        case 0x27:
            /* CMPTLE */
            gen_cmptle(ctx, ra, rb, rc, fn11);
            break;
        case 0x2C:
            REQUIRE_REG_31(ra);
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_cvtst(ctx, rb, rc, fn11);
            } else {
                /* CVTTS */
                gen_cvtts(ctx, rb, rc, fn11);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            REQUIRE_REG_31(ra);
            gen_cvttq(ctx, rb, rc, fn11);
            break;
        case 0x3C:
            /* CVTQS */
            REQUIRE_REG_31(ra);
            gen_cvtqs(ctx, rb, rc, fn11);
            break;
        case 0x3E:
            /* CVTQT */
            REQUIRE_REG_31(ra);
            gen_cvtqt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x17:
        /* FP data movement and FPCR access */
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_cvtlq(vc, vb);
            break;
        case 0x020:
            /* CPYS */
            if (rc == 31) {
                /* Special case CPYS as FNOP.  */
            } else {
                vc = dest_fpr(ctx, rc);
                va = load_fpr(ctx, ra);
                if (ra == rb) {
                    /* Special case CPYS as FMOV.  */
                    tcg_gen_mov_i64(vc, va);
                } else {
                    vb = load_fpr(ctx, rb);
                    gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
                }
            }
            break;
        case 0x021:
            /* CPYSN */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
            break;
        case 0x022:
            /* CPYSE */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
            break;
        case 0x024:
            /* MT_FPCR */
            va = load_fpr(ctx, ra);
            gen_helper_store_fpcr(cpu_env, va);
            if (ctx->tb_rm == QUAL_RM_D) {
                /* Re-do the copy of the rounding mode to fp_status
                   the next time we use dynamic rounding.  */
                ctx->tb_rm = -1;
            }
            break;
        case 0x025:
            /* MF_FPCR */
            va = dest_fpr(ctx, ra);
            gen_helper_load_fpcr(va, cpu_env);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
            break;
        case 0x030: /* CVTQL */
        case 0x130: /* CVTQL/V */
        case 0x530: /* CVTQL/SV */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_helper_cvtql(vc, cpu_env, vb);
            gen_fp_exc_raise(rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x18:
        /* Miscellaneous */
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op */
            break;
        case 0x0400:
            /* EXCB */
            /* No-op */
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC -- wrap with io start/end under icount so the cycle
               counter read is properly accounted.  */
            va = dest_gpr(ctx, ra);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
                gen_io_start();
                gen_helper_load_pcc(va, cpu_env);
                gen_io_end();
                ret = EXIT_PC_STALE;
            } else {
                gen_helper_load_pcc(va, cpu_env);
            }
            break;
        case 0xE000:
            /* RC -- read and clear intr_flag */
            gen_rx(ctx, ra, 0);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS -- read and set intr_flag */
            gen_rx(ctx, ra, 1);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        case 0xFC00:
            /* WH64EN */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x19:
        /* HW_MFPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        va = dest_gpr(ctx, ra);
        ret = gen_mfpr(ctx, va, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1A:
        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the
           branch prediction stored in the hint field.  */
        vb = load_gpr(ctx, rb);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        if (ra != 31) {
            tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
        }
        ret = EXIT_PC_UPDATED;
        break;

    case 0x1B:
        /* HW_LD (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        {
            TCGv addr = tcg_temp_new();
            vb = load_gpr(ctx, rb);
            va = dest_gpr(ctx, ra);

            tcg_gen_addi_i64(addr, vb, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_phys(va, cpu_env, addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_phys(va, cpu_env, addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_phys(va, cpu_env, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_phys(va, cpu_env, addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                goto invalid_opc;
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                goto invalid_opc;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                goto invalid_opc;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                goto invalid_opc;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                   protection checks (hw_ldl/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                   protection checks (hw_ldq/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
                break;
            }
            tcg_temp_free(addr);
            break;
        }
#else
        goto invalid_opc;
#endif

    case 0x1C:
        /* FP <-> integer register moves and multimedia extensions */
        vc = dest_gpr(ctx, rc);
        if (fn7 == 0x70) {
            /* FTOIT */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
            REQUIRE_REG_31(rb);
            va = load_fpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        } else if (fn7 == 0x78) {
            /* FTOIS */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_fpr(ctx, ra);
            gen_helper_s_to_memory(t32, va);
            tcg_gen_ext_i32_i64(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        }

        vb = load_gpr_lit(ctx, rb, lit, islit);
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext8s_i64(vc, vb);
            break;
        case 0x01:
            /* SEXTW */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext16s_i64(vc, vb);
            break;
        case 0x30:
            /* CTPOP */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_ctpop(vc, vb);
            break;
        case 0x31:
            /* PERR */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_NO_LIT;
            va = load_gpr(ctx, ra);
            gen_helper_perr(vc, va, vb);
            break;
        case 0x32:
            /* CTLZ */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_ctlz(vc, vb);
            break;
        case 0x33:
            /* CTTZ */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_cttz(vc, vb);
            break;
        case 0x34:
            /* UNPKBW */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbw(vc, vb);
            break;
        case 0x35:
            /* UNPKBL */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbl(vc, vb);
            break;
        case 0x36:
            /* PKWB */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pkwb(vc, vb);
            break;
        case 0x37:
            /* PKLB */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pklb(vc, vb);
            break;
        case 0x38:
            /* MINSB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsb8(vc, va, vb);
            break;
        case 0x39:
            /* MINSW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsw4(vc, va, vb);
            break;
        case 0x3A:
            /* MINUB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minub8(vc, va, vb);
            break;
        case 0x3B:
            /* MINUW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minuw4(vc, va, vb);
            break;
        case 0x3C:
            /* MAXUB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxub8(vc, va, vb);
            break;
        case 0x3D:
            /* MAXUW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxuw4(vc, va, vb);
            break;
        case 0x3E:
            /* MAXSB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsb8(vc, va, vb);
            break;
        case 0x3F:
            /* MAXSW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsw4(vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x1D:
        /* HW_MTPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        vb = load_gpr(ctx, rb);
        ret = gen_mtpr(ctx, vb, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1E:
        /* HW_RET (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        if (rb == 31) {
            /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
               address from EXC_ADDR.  This turns out to be useful for our
               emulation PALcode, so continue to accept it.  */
            ctx->lit = vb = tcg_temp_new();
            tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
        } else {
            vb = load_gpr(ctx, rb);
        }
        tmp = tcg_temp_new();
        tcg_gen_movi_i64(tmp, 0);
        tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
        tcg_gen_movi_i64(cpu_lock_addr, -1);
        /* Bit 0 of the target selects PAL mode; low bits are then
           cleared from the new PC.  */
        tcg_gen_andi_i64(tmp, vb, 1);
        tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        ret = EXIT_PC_UPDATED;
        break;
#else
        goto invalid_opc;
#endif

    case 0x1F:
        /* HW_ST (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        {
            TCGv addr = tcg_temp_new();
            va = load_gpr(ctx, ra);
            vb = load_gpr(ctx, rb);

            tcg_gen_addi_i64(addr, vb, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_phys(cpu_env, addr, va);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_phys(cpu_env, addr, va);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_phys(dest_gpr(ctx, ra), cpu_env, addr, va);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_phys(dest_gpr(ctx, ra), cpu_env, addr, va);
                break;
            case 0x4:
                /* Longword virtual access */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual access */
                goto invalid_opc;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            tcg_temp_free(addr);
            break;
        }
#else
        goto invalid_opc;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16, 0);
        break;
    case 0x2F:
        /* STQ_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16, 1);
        break;
    case 0x30:
        /* BR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x31: /* FBEQ */
        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
        break;
    case 0x32: /* FBLT */
        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
        break;
    case 0x33: /* FBLE */
        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
        break;
    case 0x34:
        /* BSR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x35: /* FBNE */
        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
        break;
    case 0x36: /* FBGE */
        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
        break;
    case 0x37: /* FBGT */
        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
        break;
    case 0x38:
        /* BLBC -- branch on low bit clear */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        break;
    case 0x39:
        /* BEQ */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        break;
    case 0x3A:
        /* BLT */
        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        break;
    case 0x3B:
        /* BLE */
        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        break;
    case 0x3C:
        /* BLBS -- branch on low bit set */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        break;
    case 0x3D:
        /* BNE */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        break;
    case 0x3E:
        /* BGE */
        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        break;
    case 0x3F:
        /* BGT */
        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        break;
    invalid_opc:
        ret = gen_invalid(ctx);
        break;
    }

    return ret;
}
2860
/*
 * Translate a basic block of guest Alpha code into a TCG op stream.
 *
 * Repeatedly fetches and translates instructions via translate_one()
 * until a control-flow change, a page/superpage boundary, the insn
 * budget, or single-stepping forces the TB to end, then emits the
 * appropriate TB epilogue based on the final ExitStatus.
 */
void gen_intermediate_code(CPUAlphaState *env, struct TranslationBlock *tb)
{
    AlphaCPU *cpu = alpha_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    target_ulong pc_mask;
    uint32_t insn;
    ExitStatus ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;

    ctx.tb = tb;
    ctx.pc = pc_start;
    ctx.mem_idx = cpu_mmu_index(env, false);
    ctx.implver = env->implver;
    ctx.singlestep_enabled = cs->singlestep_enabled;

#ifdef CONFIG_USER_ONLY
    ctx.ir = cpu_std_ir;
#else
    ctx.palbr = env->palbr;
    /* In PAL mode the shadow registers replace part of the GPR file.  */
    ctx.ir = (tb->flags & TB_FLAGS_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
#endif

    /* ??? Every TB begins with unset rounding mode, to be initialized on
       the first fp insn of the TB.  Alternately we could define a proper
       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
       to reset the FP_STATUS to that default at the end of any TB that
       changes the default.  We could even (gasp) dynamically figure out
       what default would be most efficient given the running program.  */
    ctx.tb_rm = -1;
    /* Similarly for flush-to-zero.  */
    ctx.tb_ftz = -1;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    /* Superpages span a larger region than a normal page, so allow the
       TB to run until that larger boundary.  */
    if (in_superpage(&ctx, pc_start)) {
        pc_mask = (1ULL << 41) - 1;
    } else {
        pc_mask = ~TARGET_PAGE_MASK;
    }

    gen_tb_start(tb);
    do {
        tcg_gen_insn_start(ctx.pc);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            ret = gen_excp(&ctx, EXCP_DEBUG, 0);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order to for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing.  */
            ctx.pc += 4;
            break;
        }
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }
        insn = cpu_ldl_code(env, ctx.pc);

        /* Per-insn scratch globals; freed below if translate_one used them.  */
        TCGV_UNUSED_I64(ctx.zero);
        TCGV_UNUSED_I64(ctx.sink);
        TCGV_UNUSED_I64(ctx.lit);

        ctx.pc += 4;
        ret = translate_one(ctxp, insn);

        if (!TCGV_IS_UNUSED_I64(ctx.sink)) {
            tcg_gen_discard_i64(ctx.sink);
            tcg_temp_free(ctx.sink);
        }
        if (!TCGV_IS_UNUSED_I64(ctx.zero)) {
            tcg_temp_free(ctx.zero);
        }
        if (!TCGV_IS_UNUSED_I64(ctx.lit)) {
            tcg_temp_free(ctx.lit);
        }

        /* If we reach a page boundary, are single stepping,
           or exhaust instruction count, stop generation.  */
        if (ret == NO_EXIT
            && ((ctx.pc & pc_mask) == 0
                || tcg_op_buf_full()
                || num_insns >= max_insns
                || singlestep
                || ctx.singlestep_enabled)) {
            ret = EXIT_PC_STALE;
        }
    } while (ret == NO_EXIT);

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }

    switch (ret) {
    case EXIT_GOTO_TB:
    case EXIT_NORETURN:
        break;
    case EXIT_PC_STALE:
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
        /* FALLTHRU */
    case EXIT_PC_UPDATED:
        if (ctx.singlestep_enabled) {
            gen_excp_1(EXCP_DEBUG, 0);
        } else {
            tcg_gen_exit_tb(0);
        }
        break;
    default:
        abort();
    }

    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
2997
2998void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
2999 target_ulong *data)
3000{
3001 env->pc = data[0];
3002}
3003