1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include "qemu/osdep.h"
21#include "cpu.h"
22#include "sysemu/cpus.h"
23#include "sysemu/cpu-timers.h"
24#include "disas/disas.h"
25#include "qemu/host-utils.h"
26#include "exec/exec-all.h"
27#include "tcg/tcg-op.h"
28#include "exec/cpu_ldst.h"
29#include "exec/helper-proto.h"
30#include "exec/helper-gen.h"
31#include "trace-tcg.h"
32#include "exec/translator.h"
33#include "exec/log.h"
34
35
36#undef ALPHA_DEBUG_DISAS
37#define CONFIG_SOFTFLOAT_INLINE
38
39#ifdef ALPHA_DEBUG_DISAS
40# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
41#else
42# define LOG_DISAS(...) do { } while (0)
43#endif
44
typedef struct DisasContext DisasContext;
/* Per-translation-block state carried through the Alpha translator.  */
struct DisasContext {
    DisasContextBase base;

#ifndef CONFIG_USER_ONLY
    /* PAL base register value, latched when translation begins.  */
    uint64_t palbr;
#endif
    /* Copy of the TB flags under which this block was translated.  */
    uint32_t tbflags;
    /* MMU index used for memory accesses within this block.  */
    int mem_idx;

    /* CPU capability fields (implementation version and AMASK bits);
       amask is tested by REQUIRE_AMASK below.  */
    int implver;
    int amask;

    /* Rounding mode currently installed in fp_status for this TB
       (see gen_qual_roundmode).  */
    int tb_rm;
    /* Flush-to-zero setting currently installed for this TB
       (see gen_qual_flushzero).  */
    int tb_ftz;

    /* Active integer register set: cpu_std_ir or cpu_pal_ir.  */
    TCGv *ir;

    /* Lazily-allocated temporaries standing in for $31/$f31 as a
       zero source and as a discard destination, respectively.  */
    TCGv zero;
    TCGv sink;
    /* Lazily-allocated temporary holding an instruction literal.  */
    TCGv lit;
};
73
74
75
76
77#define DISAS_PC_UPDATED_NOCHAIN DISAS_TARGET_0
78#define DISAS_PC_UPDATED DISAS_TARGET_1
79#define DISAS_PC_STALE DISAS_TARGET_2
80
81
82static TCGv cpu_std_ir[31];
83static TCGv cpu_fir[31];
84static TCGv cpu_pc;
85static TCGv cpu_lock_addr;
86static TCGv cpu_lock_value;
87
88#ifndef CONFIG_USER_ONLY
89static TCGv cpu_pal_ir[31];
90#endif
91
92#include "exec/gen-icount.h"
93
/*
 * One-time initialization of the TCG globals that mirror CPUAlphaState
 * fields: the 31 integer registers, the 31 FP registers, the 8 PAL
 * shadow registers (system mode only), and pc/lock_addr/lock_value.
 */
void alpha_translate_init(void)
{
#define DEF_VAR(V) { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    int i;

    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    /* PAL mode uses the standard registers except that eight of them
       (t7, s0-s5, t11) are replaced by shadow registers.  */
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}
157
/* Return a temporary holding constant zero, allocating it on first use.  */
static TCGv load_zero(DisasContext *ctx)
{
    if (!ctx->zero) {
        ctx->zero = tcg_const_i64(0);
    }
    return ctx->zero;
}

/* Return a discard destination for writes to $31/$f31, allocating it
   on first use.  */
static TCGv dest_sink(DisasContext *ctx)
{
    if (!ctx->sink) {
        ctx->sink = tcg_temp_new();
    }
    return ctx->sink;
}

/* Release the lazily-allocated per-instruction temporaries.  The sink
   is explicitly discarded so TCG knows its value is dead.  */
static void free_context_temps(DisasContext *ctx)
{
    if (ctx->sink) {
        tcg_gen_discard_i64(ctx->sink);
        tcg_temp_free(ctx->sink);
        ctx->sink = NULL;
    }
    if (ctx->zero) {
        tcg_temp_free(ctx->zero);
        ctx->zero = NULL;
    }
    if (ctx->lit) {
        tcg_temp_free(ctx->lit);
        ctx->lit = NULL;
    }
}
190
/* Source operand for integer register REG; $31 reads as zero.  */
static TCGv load_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

/* Source operand for the Rb field: either the 8-bit literal LIT
   (when ISLIT), or register REG, or zero for $31.  */
static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
                         uint8_t lit, bool islit)
{
    if (islit) {
        ctx->lit = tcg_const_i64(lit);
        return ctx->lit;
    } else if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

/* Destination operand for integer register REG; writes to $31 go to
   the discard sink.  */
static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return dest_sink(ctx);
    }
}

/* Source operand for FP register REG; $f31 reads as zero.  */
static TCGv load_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return load_zero(ctx);
    }
}

/* Destination operand for FP register REG; writes to $f31 go to the
   discard sink.  */
static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return dest_sink(ctx);
    }
}
239
240static int get_flag_ofs(unsigned shift)
241{
242 int ofs = offsetof(CPUAlphaState, flags);
243#ifdef HOST_WORDS_BIGENDIAN
244 ofs += 3 - (shift / 8);
245#else
246 ofs += shift / 8;
247#endif
248 return ofs;
249}
250
/* Load the flag byte selected by SHIFT into VAL (zero-extended).  */
static void ld_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_ld8u_i64(val, cpu_env, get_flag_ofs(shift));
}

/* Store the low byte of VAL into the flag byte selected by SHIFT.  */
static void st_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_st8_i64(val, cpu_env, get_flag_ofs(shift));
}
260
/* Emit a call to the exception helper with constant arguments.  */
static void gen_excp_1(int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(cpu_env, tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

/* Raise EXCEPTION at the current pc; the helper does not return,
   so the TB ends here.  */
static DisasJumpType gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
    gen_excp_1(exception, error_code);
    return DISAS_NORETURN;
}

/* Raise an illegal-opcode (OPCDEC) exception.  */
static inline DisasJumpType gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}
283
/* Load an F-float: 32-bit memory format expanded to register format
   by the memory_to_f helper.  */
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

/* Load a G-float: 64-bit memory format converted by memory_to_g.  */
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

/* Load an S-float: 32-bit memory format expanded by memory_to_s.  */
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

/* LDL_L: load-locked longword; record the address and value for the
   matching store-conditional.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

/* LDQ_L: load-locked quadword; record the address and value for the
   matching store-conditional.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
321
/*
 * Common code for memory loads.  Applies the 16-bit displacement and,
 * for LDQ_U-style accesses (CLEAR), masks the low three address bits.
 */
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* Loads to $31/$f31 are prefetches, which we can treat as nops.
       (A load-locked to $31 would lose its lock side effect here —
       NOTE(review): matches the behavior visible in this file.)  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        /* LDQ_U: align the effective address to 8 bytes.  */
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
354
/* Store an F-float: register format packed to 32-bit memory format.  */
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

/* Store a G-float: register format packed to 64-bit memory format.  */
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
    tcg_temp_free(tmp);
}

/* Store an S-float: register format packed to 32-bit memory format.  */
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}
378
/*
 * Common code for memory stores.  Applies the 16-bit displacement and,
 * for STQ_U-style accesses (CLEAR), masks the low three address bits.
 * Stores from $31/$f31 write zero.
 */
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        /* STQ_U: align the effective address to 8 bytes.  */
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
404
/*
 * STL_C/STQ_C: store-conditional, implemented with an atomic cmpxchg
 * against the value remembered by the last load-locked.  Ra receives
 * 1 on success, 0 on failure, and the lock is cleared either way.
 */
static DisasJumpType gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                           int32_t disp16, int mem_idx,
                                           MemOp op)
{
    TCGLabel *lab_fail, *lab_done;
    TCGv addr, val;

    addr = tcg_temp_new_i64();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    free_context_temps(ctx);

    /* Fail immediately if the address does not match the locked one.  */
    lab_fail = gen_new_label();
    lab_done = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);
    tcg_temp_free_i64(addr);

    /* Atomically store the new value iff memory still holds the value
       observed by the load-locked.  */
    val = tcg_temp_new_i64();
    tcg_gen_atomic_cmpxchg_i64(val, cpu_lock_addr, cpu_lock_value,
                               load_gpr(ctx, ra), mem_idx, op);
    free_context_temps(ctx);

    if (ra != 31) {
        /* Success iff the cmpxchg saw the expected old value.  */
        tcg_gen_setcond_i64(TCG_COND_EQ, ctx->ir[ra], val, cpu_lock_value);
    }
    tcg_temp_free_i64(val);
    tcg_gen_br(lab_done);

    gen_set_label(lab_fail);
    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], 0);
    }

    gen_set_label(lab_done);
    /* Either way, the lock is now invalid.  */
    tcg_gen_movi_i64(cpu_lock_addr, -1);
    return DISAS_NEXT;
}
441
/* True if ADDR lies in the kernel superpage region (system mode,
   kernel PS, address in the sign-extended region with bits <42:41>
   == 2), where translations cannot change.  */
static bool in_superpage(DisasContext *ctx, int64_t addr)
{
#ifndef CONFIG_USER_ONLY
    return ((ctx->tbflags & ENV_FLAG_PS_USER) == 0
            && addr >> TARGET_VIRT_ADDR_SPACE_BITS == -1
            && ((addr >> 41) & 3) == 2);
#else
    return false;
#endif
}

/* True if the TB must end with exit_tb rather than chaining:
   last-IO instruction, or any form of single-stepping.  */
static bool use_exit_tb(DisasContext *ctx)
{
    return ((tb_cflags(ctx->base.tb) & CF_LAST_IO)
            || ctx->base.singlestep_enabled
            || singlestep);
}
459
/* Decide whether a direct branch to DEST may use goto_tb chaining.  */
static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Suppress goto_tb when single-stepping or handling IO.  */
    if (unlikely(use_exit_tb(ctx))) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    /* If the destination is in the superpage, the page perms can't change.  */
    if (in_superpage(ctx, dest)) {
        return true;
    }
    /* Otherwise chain only within the page containing the TB start.  */
    return ((ctx->base.tb->pc ^ dest) & TARGET_PAGE_MASK) == 0;
#else
    return true;
#endif
}
477
478static DisasJumpType gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
479{
480 uint64_t dest = ctx->base.pc_next + (disp << 2);
481
482 if (ra != 31) {
483 tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
484 }
485
486
487 if (disp == 0) {
488 return 0;
489 } else if (use_goto_tb(ctx, dest)) {
490 tcg_gen_goto_tb(0);
491 tcg_gen_movi_i64(cpu_pc, dest);
492 tcg_gen_exit_tb(ctx->base.tb, 0);
493 return DISAS_NORETURN;
494 } else {
495 tcg_gen_movi_i64(cpu_pc, dest);
496 return DISAS_PC_UPDATED;
497 }
498}
499
/*
 * Common code for conditional branches: branch to DEST when
 * (CMP cond 0) holds.  Uses a two-way goto_tb when chaining is
 * allowed, otherwise selects the new pc with movcond.
 */
static DisasJumpType gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                        TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->base.pc_next + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        /* Fall-through: continue at the next instruction.  */
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        tcg_gen_exit_tb(ctx->base.tb, 0);

        /* Taken: continue at the branch target.  */
        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb(ctx->base.tb, 1);

        return DISAS_NORETURN;
    } else {
        TCGv_i64 z = tcg_const_i64(0);
        TCGv_i64 d = tcg_const_i64(dest);
        TCGv_i64 p = tcg_const_i64(ctx->base.pc_next);

        /* pc = (cmp cond 0) ? dest : next.  */
        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);

        tcg_temp_free_i64(z);
        tcg_temp_free_i64(d);
        tcg_temp_free_i64(p);
        return DISAS_PC_UPDATED;
    }
}
532
/* Integer conditional branch.  When MASK is set (BLBC/BLBS), test only
   the low bit of Ra; otherwise test the full register against zero.  */
static DisasJumpType gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                               int32_t disp, int mask)
{
    if (mask) {
        TCGv tmp = tcg_temp_new();
        DisasJumpType ret;

        tcg_gen_andi_i64(tmp, load_gpr(ctx, ra), 1);
        ret = gen_bcond_internal(ctx, cond, tmp, disp);
        tcg_temp_free(tmp);
        return ret;
    }
    return gen_bcond_internal(ctx, cond, load_gpr(ctx, ra), disp);
}
547
548
549
/* Fold -0.0 for comparison with COND: massage SRC into DEST so that a
   plain integer comparison against zero gives the IEEE result, where
   -0.0 must compare equal to +0.0.  */
static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0: zero DEST iff SRC == -0.0.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
579
/* Floating-point conditional branch on Ra, with IEEE -0.0 folding.  */
static DisasJumpType gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                                int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    DisasJumpType ret;

    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    ret = gen_bcond_internal(ctx, cond, cmp_tmp, disp);
    tcg_temp_free(cmp_tmp);
    return ret;
}

/* FCMOVxx: Rc = (Ra cond 0.0) ? Rb : Rc, with IEEE -0.0 folding.  */
static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}
605
606#define QUAL_RM_N 0x080
607#define QUAL_RM_C 0x000
608#define QUAL_RM_M 0x040
609#define QUAL_RM_D 0x0c0
610#define QUAL_RM_MASK 0x0c0
611
612#define QUAL_U 0x100
613#define QUAL_V 0x100
614#define QUAL_S 0x400
615#define QUAL_I 0x200
616
/* Install the rounding mode requested by the FN11 qualifier bits,
   skipping the store when it matches what this TB last installed.  */
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        /* Dynamic rounding: read the mode from the FPCR.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* Write directly into fp_status.float_rounding_mode, relying on
       its in-memory representation being a single byte.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
656
/* Install the flush-to-zero setting requested by the FN11 /U qualifier,
   skipping the store when it matches what this TB last installed.  */
static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
686
/* Fetch FP register REG as an IEEE input operand, raising input
   exceptions per the /S qualifier.  IS_CMP selects the comparison
   variant of the input check.  */
static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}
712
/* Raise any pending IEEE exceptions for the instruction just emitted,
   masking out the traps disabled by the FN11 qualifiers.  */
static void gen_fp_exc_raise(int rc, int fn11)
{
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Without /U, underflow (and integer overflow) do not trap.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        /* Without /I, inexact does not trap.  */
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* Pass the destination register number (+32 selects the FP bank)
       so the helper can record it in EXC_MASK for the guest's
       arithmetic trap handler.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}
747
/* CVTLQ: convert the longword held in FP register format (bits spread
   across <63:62> and <58:29>) into a sign-extended quadword.  */
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the deposit of the low 30
       bits below, yields a sign-extended result without an explicit
       32-bit sign extension.  */
    tcg_gen_shri_i64(tmp, vb, 29);
    tcg_gen_sari_i64(vc, vb, 32);
    tcg_gen_deposit_i64(vc, vc, tmp, 0, 30);

    tcg_temp_free(tmp);
}
760
/* Common code for two-operand IEEE instructions (SQRT/CVT): install
   qualifiers, check the input, call HELPER, then raise exceptions.  */
static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}
775
776#define IEEE_ARITH2(name) \
777static inline void glue(gen_, name)(DisasContext *ctx, \
778 int rb, int rc, int fn11) \
779{ \
780 gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11); \
781}
782IEEE_ARITH2(sqrts)
783IEEE_ARITH2(sqrtt)
784IEEE_ARITH2(cvtst)
785IEEE_ARITH2(cvtts)
786
/* CVTTQ: T-float to quadword.  No flush-to-zero handling is needed
   since the output is an integer.  */
static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special-case that with a dedicated helper.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}
805
/* Common code for integer-to-float conversions (CVTQS/CVTQT).  */
static void gen_ieee_intcvt(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb, vc;

    gen_qual_roundmode(ctx, fn11);
    vb = load_fpr(ctx, rb);
    vc = dest_fpr(ctx, rc);

    /* The only exception possible from this conversion is inexact;
       thus exceptions need raising only when /I is requested.  */
    if (fn11 & QUAL_I) {
        helper(vc, cpu_env, vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(vc, cpu_env, vb);
    }
}
826
827#define IEEE_INTCVT(name) \
828static inline void glue(gen_, name)(DisasContext *ctx, \
829 int rb, int rc, int fn11) \
830{ \
831 gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11); \
832}
833IEEE_INTCVT(cvtqs)
834IEEE_INTCVT(cvtqt)
835
/* VC = (VA & MASK) | (VB & ~MASK), optionally inverting VA first.
   Used for the CPYS family of sign-copy instructions.  */
static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}
853
/* Common code for three-operand IEEE arithmetic (ADD/SUB/MUL/DIV):
   install qualifiers, check inputs, call HELPER, raise exceptions.  */
static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
870
871#define IEEE_ARITH3(name) \
872static inline void glue(gen_, name)(DisasContext *ctx, \
873 int ra, int rb, int rc, int fn11) \
874{ \
875 gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11); \
876}
877IEEE_ARITH3(adds)
878IEEE_ARITH3(subs)
879IEEE_ARITH3(muls)
880IEEE_ARITH3(divs)
881IEEE_ARITH3(addt)
882IEEE_ARITH3(subt)
883IEEE_ARITH3(mult)
884IEEE_ARITH3(divt)
885
/* Common code for IEEE comparisons (CMPTxx).  No rounding-mode or
   flush-to-zero setup is required for comparisons.  */
static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
899
900#define IEEE_CMP3(name) \
901static inline void glue(gen_, name)(DisasContext *ctx, \
902 int ra, int rb, int rc, int fn11) \
903{ \
904 gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11); \
905}
906IEEE_CMP3(cmptun)
907IEEE_CMP3(cmpteq)
908IEEE_CMP3(cmptlt)
909IEEE_CMP3(cmptle)
910
/*
 * Expand an 8-bit ZAPNOT byte-selector into the corresponding 64-bit
 * mask: bit i of LIT set means byte i of the result is 0xff.
 */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;

    /* Build the mask from the most significant byte downward.  */
    for (int i = 7; i >= 0; --i) {
        mask <<= 8;
        if (lit & (1u << i)) {
            mask |= 0xffull;
        }
    }
    return mask;
}
923
924
925
926
/* ZAPNOT with an immediate byte selector.  Recognize the standard
   zero-extension patterns and use the cheaper dedicated ops.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        /* No bytes kept: result is zero.  */
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        /* Keep byte 0: zero-extend from 8 bits.  */
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        /* Keep bytes 0-1: zero-extend from 16 bits.  */
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        /* Keep bytes 0-3: zero-extend from 32 bits.  */
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        /* All bytes kept: plain move.  */
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}
950
951
/* EXTWH, EXTLH, EXTQH: extract the high part of the datum selected by
   BYTE_MASK, shifted left by the negated byte offset.  */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (64 - lit * 8) & 0x3f;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_deposit_z_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        /* Shift left by (64 - Rb*8) mod 64, computed as (-Rb*8) & 63.  */
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* EXTBL, EXTWL, EXTLL, EXTQL: extract the low part of the datum
   selected by BYTE_MASK, shifted right by the byte offset.  */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len >= 64) {
            len = 64 - pos;
        }
        tcg_gen_extract_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
        gen_zapnoti(vc, vc, byte_mask);
    }
}
994
995
/* INSWH, INSLH, INSQH: insert into the high part.  */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = 64 - (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_extract_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8>; that is equivalent to applying the
           zap first and shifting afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        /* When (Rb & 7) == 0 a full 64-bit shift is needed to yield
           zero.  Shifting by 64 is not expressible, so split the shift
           into (count - 1) and 1; the -1 is obtained by using ones'
           complement: ~(Rb * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}

/* INSBL, INSWL, INSLL, INSQL: insert into the low part.  */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len > 64) {
            len = 64 - pos;
        }
        tcg_gen_deposit_z_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* Zap the unwanted bytes first, then shift into position by
           the byte offset from Rb.  */
        gen_zapnoti(tmp, va, byte_mask);

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
1061
1062
/* MSKWH, MSKLH, MSKQH: clear the high-part bytes selected by the
   shifted byte mask.  */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction left-shifts the byte mask and uses bits
           <15:8>; emulate that with a right shift of the expanded
           64-bit mask.  When (Rb & 7) == 0 a 64-bit shift is required
           to produce zero, so split the shift into (count - 1) and 1,
           with the -1 from ones' complement: ~(Rb * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

/* MSKBL, MSKWL, MSKLL, MSKQL: clear the low-part bytes selected by the
   shifted byte mask.  */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1116
1117static void gen_rx(DisasContext *ctx, int ra, int set)
1118{
1119 TCGv tmp;
1120
1121 if (ra != 31) {
1122 ld_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
1123 }
1124
1125 tmp = tcg_const_i64(set);
1126 st_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
1127 tcg_temp_free(tmp);
1128}
1129
/*
 * CALL_PAL: emulate the OSF/1 PALcode entry points.  The trivial
 * internal-register accessors are open-coded; everything else jumps
 * to the guest's PALcode image via do_call_pal.
 */
static DisasJumpType gen_call_pal(DisasContext *ctx, int palcode)
{
    /* Unprivileged PAL call.  */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB: no-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE: v0 = env->unique.  */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE: env->unique = a0.  */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code, valid only in kernel mode.  */
    if (palcode < 0x40 && (ctx->tbflags & ENV_FLAG_PS_USER) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH: no-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA: no-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR: env->vptptr = a0.  */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL: env->sysval = a0.  */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL: v0 = env->sysval.  */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35:
            /* SWPIPL: return the old PS in v0...  We are already in
               kernel mode, so PS holds only the IPL bits.  */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);

            /* ... and store only the IPL bits from a0.  */
            {
                TCGv tmp = tcg_temp_new();
                tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
                st_flag_byte(tmp, ENV_FLAG_PS_SHIFT);
                tcg_temp_free(tmp);
            }

            /* End the TB without chaining so that a newly unmasked
               interrupt can be recognized immediately.  */
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            return DISAS_PC_UPDATED_NOCHAIN;

        case 0x36:
            /* RDPS: v0 = PS byte.  */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);
            break;

        case 0x38:
            /* WRUSP: env->usp = a0.  */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP: v0 = env->usp.  */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI: v0 = cpu_index of the current CPU.  */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        case 0x3E:
            /* WTINT: halt until interrupt, then return 0 in v0.  */
            {
                TCGv_i32 tmp = tcg_const_i32(1);
                tcg_gen_st_i32(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                               offsetof(CPUState, halted));
                tcg_temp_free_i32(tmp);
            }
            tcg_gen_movi_i64(ctx->ir[IR_V0], 0);
            return gen_excp(ctx, EXCP_HALTED, 0);

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->base.pc_next;
        uint64_t entry = ctx->palbr;

        /* Record the return address; bit 0 of exc_addr indicates that
           we were already in PAL mode, otherwise enter PAL mode now.  */
        if (ctx->tbflags & ENV_FLAG_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

        /* Dispatch to the per-palcode entry point within the PAL
           image: privileged calls at base+0x1000, unprivileged at
           base+0x2000, 64 bytes per entry.  */
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination runs in PAL mode, no page-permission
           check is needed; existence of the page is checked when the
           TB is created, and all TBs are flushed if PALBR changes.  */
        if (!use_exit_tb(ctx)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb(ctx->base.tb, 0);
            return DISAS_NORETURN;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return DISAS_PC_UPDATED;
        }
    }
#endif
}
1286
1287#ifndef CONFIG_USER_ONLY
1288
1289#define PR_LONG 0x200000
1290
1291static int cpu_pr_data(int pr)
1292{
1293 switch (pr) {
1294 case 2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
1295 case 3: return offsetof(CPUAlphaState, trap_arg0);
1296 case 4: return offsetof(CPUAlphaState, trap_arg1);
1297 case 5: return offsetof(CPUAlphaState, trap_arg2);
1298 case 6: return offsetof(CPUAlphaState, exc_addr);
1299 case 7: return offsetof(CPUAlphaState, palbr);
1300 case 8: return offsetof(CPUAlphaState, ptbr);
1301 case 9: return offsetof(CPUAlphaState, vptptr);
1302 case 10: return offsetof(CPUAlphaState, unique);
1303 case 11: return offsetof(CPUAlphaState, sysval);
1304 case 12: return offsetof(CPUAlphaState, usp);
1305
1306 case 40 ... 63:
1307 return offsetof(CPUAlphaState, scratch[pr - 40]);
1308
1309 case 251:
1310 return offsetof(CPUAlphaState, alarm_expire);
1311 }
1312 return 0;
1313}
1314
/* MFPR: read a processor register into VA.  */
static DisasJumpType gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250:
        /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249:
        /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (icount_enabled()) {
            /* Under icount this is an I/O-like access: mark the start
               and end the TB so timing stays consistent.  */
            gen_io_start();
            helper(va);
            return DISAS_PC_STALE;
        } else {
            helper(va);
        }
        break;

    case 0:
        /* PS */
        ld_flag_byte(va, ENV_FLAG_PS_SHIFT);
        break;
    case 1:
        /* FEN */
        ld_flag_byte(va, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return DISAS_NEXT;
}
1365
/* MTPR: write VB to a processor register.  */
static DisasJumpType gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    int data;

    switch (regno) {
    case 255:
        /* TBIA: invalidate all TLB entries.  */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS: invalidate the TLB entry for the given address.  */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT: halt the CPU until the next interrupt.  */
        {
            TCGv_i32 tmp = tcg_const_i32(1);
            tcg_gen_st_i32(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                           offsetof(CPUState, halted));
            tcg_temp_free_i32(tmp);
        }
        return gen_excp(ctx, EXCP_HALTED, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return DISAS_PC_STALE;

    case 251:
        /* ALARM */
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all TBs
           that ended with CALL_PAL; since it usually changes only
           during boot, flushing everything works well enough.  */
        gen_helper_tb_flush(cpu_env);
        return DISAS_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    case 0:
        /* PS */
        st_flag_byte(vb, ENV_FLAG_PS_SHIFT);
        break;
    case 1:
        /* FEN */
        st_flag_byte(vb, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return DISAS_NEXT;
}
1439#endif
1440
/* Decode-constraint helpers for translate_one.  Each checks one
   requirement of the instruction being decoded and jumps to the
   function's local invalid_opc label on failure; they are therefore
   only usable inside translate_one.  */

#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Require the CPU to implement the AMASK_<FLAG> architecture extension.  */
#define REQUIRE_AMASK(FLAG)                     \
    do {                                        \
        if ((ctx->amask & AMASK_##FLAG) == 0) { \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Require the given TB flag (e.g. PAL mode) to be set.  */
#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tbflags & (FLAG)) == 0) {     \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Require the given register field to be R31 (the zero register).
   Note: WHICH is parenthesized for macro hygiene, matching the other
   REQUIRE_* macros above.  */
#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if ((WHICH) != 31) {                    \
            goto invalid_opc;                   \
        }                                       \
    } while (0)
1468
/* Decode and translate one Alpha instruction INSN, emitting TCG ops
   into the current TB.  Returns the disassembly status: DISAS_NEXT to
   continue, or one of the DISAS_PC_* codes when the PC has been (or
   must be) updated.  */
static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    DisasJumpType ret;

    /* Decode all instruction fields.  */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);

    /* RB == 31 with no literal means operand B is the zero register;
       treat it as a literal zero.  */
    if (rb == 31 && !islit) {
        islit = true;
        lit = 0;
    }

    ret = DISAS_NEXT;
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        ret = gen_call_pal(ctx, insn & 0x03ffffff);
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;

    case 0x09:
        /* LDAH */
        disp16 = (uint32_t)disp16 << 16;
        /* fall through */
    case 0x08:
        /* LDA */
        va = dest_gpr(ctx, ra);
        /* It's worth special-casing immediate loads.  */
        if (rb == 31) {
            tcg_gen_movi_i64(va, disp16);
        } else {
            tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
        }
        break;

    case 0x0A:
        /* LDBU */
        REQUIRE_AMASK(BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        REQUIRE_AMASK(BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        REQUIRE_AMASK(BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
        break;
    case 0x0E:
        /* STB */
        REQUIRE_AMASK(BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
        break;

    case 0x10:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (ra == 31) {
            if (fn7 == 0x00) {
                /* Special case ADDL as SEXTL.  */
                tcg_gen_ext32s_i64(vc, vb);
                break;
            }
            if (fn7 == 0x29) {
                /* Special case SUBQ as NEGQ.  */
                tcg_gen_neg_i64(vc, vb);
                break;
            }
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* ADDL */
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x02:
            /* S4ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x09:
            /* SUBL */
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x0B:
            /* S4SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x0F:
            /* CMPBGE */
            if (ra == 31) {
                /* Special case 0 >= X as X == 0.  */
                gen_helper_cmpbe0(vc, vb);
            } else {
                gen_helper_cmpbge(vc, va, vb);
            }
            break;
        case 0x12:
            /* S8ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1B:
            /* S8SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1D:
            /* CMPULT */
            tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
            break;
        case 0x20:
            /* ADDQ */
            tcg_gen_add_i64(vc, va, vb);
            break;
        case 0x22:
            /* S4ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x29:
            /* SUBQ */
            tcg_gen_sub_i64(vc, va, vb);
            break;
        case 0x2B:
            /* S4SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x2D:
            /* CMPEQ */
            tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
            break;
        case 0x32:
            /* S8ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3B:
            /* S8SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3D:
            /* CMPULE */
            tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
            break;
        case 0x40:
            /* ADDL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_add_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x49:
            /* SUBL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_sub_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x4D:
            /* CMPLT */
            tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
            break;
        case 0x60:
            /* ADDQ/V */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_eqv_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x69:
            /* SUBQ/V */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_xor_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x6D:
            /* CMPLE */
            tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x11:
        if (fn7 == 0x20) {
            if (rc == 31) {
                /* Special case BIS as NOP.  */
                break;
            }
            if (ra == 31) {
                /* Special case BIS as MOV.  */
                vc = dest_gpr(ctx, rc);
                if (islit) {
                    tcg_gen_movi_i64(vc, lit);
                } else {
                    tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
                }
                break;
            }
        }

        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (fn7 == 0x28 && ra == 31) {
            /* Special case ORNOT as NOT.  */
            tcg_gen_not_i64(vc, vb);
            break;
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* AND */
            tcg_gen_and_i64(vc, va, vb);
            break;
        case 0x08:
            /* BIC */
            tcg_gen_andc_i64(vc, va, vb);
            break;
        case 0x14:
            /* CMOVLBS */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x16:
            /* CMOVLBC */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x20:
            /* BIS */
            tcg_gen_or_i64(vc, va, vb);
            break;
        case 0x24:
            /* CMOVEQ */
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x26:
            /* CMOVNE */
            tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x28:
            /* ORNOT */
            tcg_gen_orc_i64(vc, va, vb);
            break;
        case 0x40:
            /* XOR */
            tcg_gen_xor_i64(vc, va, vb);
            break;
        case 0x44:
            /* CMOVLT */
            tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x46:
            /* CMOVGE */
            tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x48:
            /* EQV */
            tcg_gen_eqv_i64(vc, va, vb);
            break;
        case 0x61:
            /* AMASK */
            REQUIRE_REG_31(ra);
            tcg_gen_andi_i64(vc, vb, ~ctx->amask);
            break;
        case 0x64:
            /* CMOVLE */
            tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x66:
            /* CMOVGT */
            tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x6C:
            /* IMPLVER */
            REQUIRE_REG_31(ra);
            tcg_gen_movi_i64(vc, ctx->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x12:
        vc = dest_gpr(ctx, rc);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x0B:
            /* INSBL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x12:
            /* MSKWL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x1B:
            /* INSWL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x22:
            /* MSKLL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x2B:
            /* INSLL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x30:
            /* ZAP */
            if (islit) {
                gen_zapnoti(vc, va, ~lit);
            } else {
                gen_helper_zap(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x31:
            /* ZAPNOT */
            if (islit) {
                gen_zapnoti(vc, va, lit);
            } else {
                gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x32:
            /* MSKQL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x34:
            /* SRL */
            if (islit) {
                tcg_gen_shri_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shr_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x39:
            /* SLL */
            if (islit) {
                tcg_gen_shli_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shl_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x3C:
            /* SRA */
            if (islit) {
                tcg_gen_sari_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_sar_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x57:
            /* INSWH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x62:
            /* MSKLH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x67:
            /* INSLH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x72:
            /* MSKQH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x77:
            /* INSQH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x13:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* MULL */
            tcg_gen_mul_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x20:
            /* MULQ */
            tcg_gen_mul_i64(vc, va, vb);
            break;
        case 0x30:
            /* UMULH */
            tmp = tcg_temp_new();
            tcg_gen_mulu2_i64(tmp, vc, va, vb);
            tcg_temp_free(tmp);
            break;
        case 0x40:
            /* MULL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_mul_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x60:
            /* MULQ/V */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_muls2_i64(vc, tmp, va, vb);
            tcg_gen_sari_i64(tmp2, vc, 63);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x14:
        REQUIRE_AMASK(FIX);
        vc = dest_fpr(ctx, rc);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_s(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x0A:
            /* SQRTF */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtf(vc, cpu_env, vb);
            break;
        case 0x0B:
            /* SQRTS */
            REQUIRE_REG_31(ra);
            gen_sqrts(ctx, rb, rc, fn11);
            break;
        case 0x14:
            /* ITOFF */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_f(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x24:
            /* ITOFT */
            REQUIRE_REG_31(rb);
            va = load_gpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        case 0x2A:
            /* SQRTG */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtg(vc, cpu_env, vb);
            break;
        case 0x02B:
            /* SQRTT */
            REQUIRE_REG_31(ra);
            gen_sqrtt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        vc = dest_fpr(ctx, rc);
        vb = load_fpr(ctx, rb);
        va = load_fpr(ctx, ra);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_helper_addf(vc, cpu_env, va, vb);
            break;
        case 0x01:
            /* SUBF */
            gen_helper_subf(vc, cpu_env, va, vb);
            break;
        case 0x02:
            /* MULF */
            gen_helper_mulf(vc, cpu_env, va, vb);
            break;
        case 0x03:
            /* DIVF */
            gen_helper_divf(vc, cpu_env, va, vb);
            break;
        case 0x1E:
            /* CVTDG -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x20:
            /* ADDG */
            gen_helper_addg(vc, cpu_env, va, vb);
            break;
        case 0x21:
            /* SUBG */
            gen_helper_subg(vc, cpu_env, va, vb);
            break;
        case 0x22:
            /* MULG */
            gen_helper_mulg(vc, cpu_env, va, vb);
            break;
        case 0x23:
            /* DIVG */
            gen_helper_divg(vc, cpu_env, va, vb);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_helper_cmpgeq(vc, cpu_env, va, vb);
            break;
        case 0x26:
            /* CMPGLT */
            gen_helper_cmpglt(vc, cpu_env, va, vb);
            break;
        case 0x27:
            /* CMPGLE */
            gen_helper_cmpgle(vc, cpu_env, va, vb);
            break;
        case 0x2C:
            /* CVTGF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgf(vc, cpu_env, vb);
            break;
        case 0x2D:
            /* CVTGD -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x2F:
            /* CVTGQ */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgq(vc, cpu_env, vb);
            break;
        case 0x3C:
            /* CVTQF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqf(vc, cpu_env, vb);
            break;
        case 0x3E:
            /* CVTQG */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqg(vc, cpu_env, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x16:
        /* IEEE floating-point */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_adds(ctx, ra, rb, rc, fn11);
            break;
        case 0x01:
            /* SUBS */
            gen_subs(ctx, ra, rb, rc, fn11);
            break;
        case 0x02:
            /* MULS */
            gen_muls(ctx, ra, rb, rc, fn11);
            break;
        case 0x03:
            /* DIVS */
            gen_divs(ctx, ra, rb, rc, fn11);
            break;
        case 0x20:
            /* ADDT */
            gen_addt(ctx, ra, rb, rc, fn11);
            break;
        case 0x21:
            /* SUBT */
            gen_subt(ctx, ra, rb, rc, fn11);
            break;
        case 0x22:
            /* MULT */
            gen_mult(ctx, ra, rb, rc, fn11);
            break;
        case 0x23:
            /* DIVT */
            gen_divt(ctx, ra, rb, rc, fn11);
            break;
        case 0x24:
            /* CMPTUN */
            gen_cmptun(ctx, ra, rb, rc, fn11);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_cmpteq(ctx, ra, rb, rc, fn11);
            break;
        case 0x26:
            /* CMPTLT */
            gen_cmptlt(ctx, ra, rb, rc, fn11);
            break;
        case 0x27:
            /* CMPTLE */
            gen_cmptle(ctx, ra, rb, rc, fn11);
            break;
        case 0x2C:
            REQUIRE_REG_31(ra);
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_cvtst(ctx, rb, rc, fn11);
            } else {
                /* CVTTS */
                gen_cvtts(ctx, rb, rc, fn11);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            REQUIRE_REG_31(ra);
            gen_cvttq(ctx, rb, rc, fn11);
            break;
        case 0x3C:
            /* CVTQS */
            REQUIRE_REG_31(ra);
            gen_cvtqs(ctx, rb, rc, fn11);
            break;
        case 0x3E:
            /* CVTQT */
            REQUIRE_REG_31(ra);
            gen_cvtqt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_cvtlq(vc, vb);
            break;
        case 0x020:
            /* CPYS */
            if (rc == 31) {
                /* Special case CPYS as FNOP.  */
            } else {
                vc = dest_fpr(ctx, rc);
                va = load_fpr(ctx, ra);
                if (ra == rb) {
                    /* Special case CPYS as FMOV.  */
                    tcg_gen_mov_i64(vc, va);
                } else {
                    vb = load_fpr(ctx, rb);
                    gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
                }
            }
            break;
        case 0x021:
            /* CPYSN */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
            break;
        case 0x022:
            /* CPYSE */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
            break;
        case 0x024:
            /* MT_FPCR */
            va = load_fpr(ctx, ra);
            gen_helper_store_fpcr(cpu_env, va);
            if (ctx->tb_rm == QUAL_RM_D) {
                /* Re-do the copy of the rounding mode to fp_status
                   the next time we use dynamic rounding.  */
                ctx->tb_rm = -1;
            }
            break;
        case 0x025:
            /* MF_FPCR */
            va = dest_fpr(ctx, ra);
            gen_helper_load_fpcr(va, cpu_env);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
            break;
        case 0x030: /* CVTQL */
        case 0x130: /* CVTQL/V */
        case 0x530: /* CVTQL/SV */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_helper_cvtql(vc, cpu_env, vb);
            gen_fp_exc_raise(rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op */
            break;
        case 0x0400:
            /* EXCB */
            /* No-op */
            break;
        case 0x4000:
            /* MB */
            tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
            break;
        case 0x4400:
            /* WMB */
            tcg_gen_mb(TCG_MO_ST_ST | TCG_BAR_SC);
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            va = dest_gpr(ctx, ra);
            if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
                gen_io_start();
                gen_helper_load_pcc(va, cpu_env);
                ret = DISAS_PC_STALE;
            } else {
                gen_helper_load_pcc(va, cpu_env);
            }
            break;
        case 0xE000:
            /* RC */
            gen_rx(ctx, ra, 0);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            gen_rx(ctx, ra, 1);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        case 0xFC00:
            /* WH64EN */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x19:
        /* HW_MFPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        va = dest_gpr(ctx, ra);
        ret = gen_mfpr(ctx, va, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1A:
        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
           prediction stored in the branch prediction hint, which we do
           not model.  */
        vb = load_gpr(ctx, rb);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        if (ra != 31) {
            tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
        }
        ret = DISAS_PC_UPDATED;
        break;

    case 0x1B:
        /* HW_LD (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        {
            TCGv addr = tcg_temp_new();
            vb = load_gpr(ctx, rb);
            va = dest_gpr(ctx, ra);

            tcg_gen_addi_i64(addr, vb, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEQ);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_qemu_ldl_l(va, addr, MMU_PHYS_IDX);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_qemu_ldq_l(va, addr, MMU_PHYS_IDX);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                goto invalid_opc;
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                goto invalid_opc;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                goto invalid_opc;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                goto invalid_opc;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                   protection checks (hw_ldl/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                   protection checks (hw_ldq/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
                break;
            }
            tcg_temp_free(addr);
            break;
        }
#else
        goto invalid_opc;
#endif

    case 0x1C:
        vc = dest_gpr(ctx, rc);
        if (fn7 == 0x70) {
            /* FTOIT */
            REQUIRE_AMASK(FIX);
            REQUIRE_REG_31(rb);
            va = load_fpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        } else if (fn7 == 0x78) {
            /* FTOIS */
            REQUIRE_AMASK(FIX);
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_fpr(ctx, ra);
            gen_helper_s_to_memory(t32, va);
            tcg_gen_ext_i32_i64(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        }

        vb = load_gpr_lit(ctx, rb, lit, islit);
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            REQUIRE_AMASK(BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext8s_i64(vc, vb);
            break;
        case 0x01:
            /* SEXTW */
            REQUIRE_AMASK(BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext16s_i64(vc, vb);
            break;
        case 0x30:
            /* CTPOP */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_ctpop_i64(vc, vb);
            break;
        case 0x31:
            /* PERR */
            REQUIRE_AMASK(MVI);
            REQUIRE_NO_LIT;
            va = load_gpr(ctx, ra);
            gen_helper_perr(vc, va, vb);
            break;
        case 0x32:
            /* CTLZ */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_clzi_i64(vc, vb, 64);
            break;
        case 0x33:
            /* CTTZ */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_ctzi_i64(vc, vb, 64);
            break;
        case 0x34:
            /* UNPKBW */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbw(vc, vb);
            break;
        case 0x35:
            /* UNPKBL */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbl(vc, vb);
            break;
        case 0x36:
            /* PKWB */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pkwb(vc, vb);
            break;
        case 0x37:
            /* PKLB */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pklb(vc, vb);
            break;
        case 0x38:
            /* MINSB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsb8(vc, va, vb);
            break;
        case 0x39:
            /* MINSW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsw4(vc, va, vb);
            break;
        case 0x3A:
            /* MINUB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minub8(vc, va, vb);
            break;
        case 0x3B:
            /* MINUW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minuw4(vc, va, vb);
            break;
        case 0x3C:
            /* MAXUB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxub8(vc, va, vb);
            break;
        case 0x3D:
            /* MAXUW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxuw4(vc, va, vb);
            break;
        case 0x3E:
            /* MAXSB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsb8(vc, va, vb);
            break;
        case 0x3F:
            /* MAXSW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsw4(vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x1D:
        /* HW_MTPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        vb = load_gpr(ctx, rb);
        ret = gen_mtpr(ctx, vb, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1E:
        /* HW_RET (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        if (rb == 31) {
            /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
               address from EXC_ADDR.  This turns out to be useful for our
               emulation PALcode, so continue to accept it.  */
            ctx->lit = vb = tcg_temp_new();
            tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
        } else {
            vb = load_gpr(ctx, rb);
        }
        tcg_gen_movi_i64(cpu_lock_addr, -1);
        tmp = tcg_temp_new();
        tcg_gen_movi_i64(tmp, 0);
        st_flag_byte(tmp, ENV_FLAG_RX_SHIFT);
        tcg_gen_andi_i64(tmp, vb, 1);
        st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        tcg_temp_free(tmp);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        /* Allow interrupts to be recognized right away.  */
        ret = DISAS_PC_UPDATED_NOCHAIN;
        break;
#else
        goto invalid_opc;
#endif

    case 0x1F:
        /* HW_ST (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        {
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                va = load_gpr(ctx, ra);
                vb = load_gpr(ctx, rb);
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, vb, disp12);
                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
                tcg_temp_free(tmp);
                break;
            case 0x1:
                /* Quadword physical access */
                va = load_gpr(ctx, ra);
                vb = load_gpr(ctx, rb);
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, vb, disp12);
                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEQ);
                tcg_temp_free(tmp);
                break;
            case 0x2:
                /* Longword physical access with lock */
                ret = gen_store_conditional(ctx, ra, rb, disp12,
                                            MMU_PHYS_IDX, MO_LESL);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                ret = gen_store_conditional(ctx, ra, rb, disp12,
                                            MMU_PHYS_IDX, MO_LEQ);
                break;
            case 0x4:
                /* Longword virtual access */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual access */
                goto invalid_opc;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            break;
        }
#else
        goto invalid_opc;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16,
                                    ctx->mem_idx, MO_LESL);
        break;
    case 0x2F:
        /* STQ_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16,
                                    ctx->mem_idx, MO_LEQ);
        break;
    case 0x30:
        /* BR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x31: /* FBEQ */
        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
        break;
    case 0x32: /* FBLT */
        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
        break;
    case 0x33: /* FBLE */
        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
        break;
    case 0x34:
        /* BSR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x35: /* FBNE */
        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
        break;
    case 0x36: /* FBGE */
        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
        break;
    case 0x37: /* FBGT */
        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
        break;
    case 0x38:
        /* BLBC */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        break;
    case 0x39:
        /* BEQ */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        break;
    case 0x3A:
        /* BLT */
        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        break;
    case 0x3B:
        /* BLE */
        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        break;
    case 0x3C:
        /* BLBS */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        break;
    case 0x3D:
        /* BNE */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        break;
    case 0x3E:
        /* BGE */
        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        break;
    case 0x3F:
        /* BGT */
        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        break;
    invalid_opc:
        ret = gen_invalid(ctx);
        break;
    }

    return ret;
}
2920
/* Per-TB initialization: capture the CPU/TB state that decode depends
   on (flags, mmu index, implver/amask, register file selection) and
   bound the TB to the current page/superpage.  */
static void alpha_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUAlphaState *env = cpu->env_ptr;
    int64_t bound, mask;

    ctx->tbflags = ctx->base.tb->flags;
    ctx->mem_idx = cpu_mmu_index(env, false);
    ctx->implver = env->implver;
    ctx->amask = env->amask;

#ifdef CONFIG_USER_ONLY
    ctx->ir = cpu_std_ir;
#else
    /* In PALmode, the shadow registers replace IR 8-14 and 25.  */
    ctx->palbr = env->palbr;
    ctx->ir = (ctx->tbflags & ENV_FLAG_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
#endif

    /* ??? Every TB begins with unset rounding mode, to be initialized on
       the first fp insn of the TB.  Alternately we could define a proper
       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
       to reset the FP_STATUS to that default at the end of any TB that
       changes the default.  We could even (gasp) dynamically figure out
       what default would be most efficient given the running program.  */
    ctx->tb_rm = -1;
    /* Similarly for flush-to-zero.  */
    ctx->tb_ftz = -1;

    ctx->zero = NULL;
    ctx->sink = NULL;
    ctx->lit = NULL;

    /* Bound the number of insns to execute to those left on the page.  */
    if (in_superpage(ctx, ctx->base.pc_first)) {
        mask = -1ULL << 41;
    } else {
        mask = TARGET_PAGE_MASK;
    }
    bound = -(ctx->base.pc_first | mask) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}
2962
static void alpha_tr_tb_start(DisasContextBase *db, CPUState *cpu)
{
    /* Nothing to do at TB start for Alpha.  */
}
2966
/* Record the PC of the insn about to be translated, for unwinding
   (see restore_state_to_opc).  */
static void alpha_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
2971
/* Raise EXCP_DEBUG for a guest breakpoint at the current PC.  */
static bool alpha_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
                                      const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    ctx->base.is_jmp = gen_excp(ctx, EXCP_DEBUG, 0);

    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order to for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing.  */
    ctx->base.pc_next += 4;
    return true;
}
2986
/* Fetch and translate one instruction, then release any per-insn
   temporaries held in the context.  */
static void alpha_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUAlphaState *env = cpu->env_ptr;
    uint32_t insn = translator_ldl(env, ctx->base.pc_next);

    /* Advance pc_next before translating: branch-and-link insns use it
       as the return address.  */
    ctx->base.pc_next += 4;
    ctx->base.is_jmp = translate_one(ctx, insn);

    free_context_temps(ctx);
    translator_loop_temp_check(&ctx->base);
}
2999
/* Emit the TB epilogue according to how translation ended.  The cases
   deliberately fall through: each later case handles a strictly weaker
   guarantee about the state of cpu_pc.  */
static void alpha_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    switch (ctx->base.is_jmp) {
    case DISAS_NORETURN:
        break;
    case DISAS_TOO_MANY:
        if (use_goto_tb(ctx, ctx->base.pc_next)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            tcg_gen_exit_tb(ctx->base.tb, 0);
        }
        /* FALLTHRU */
    case DISAS_PC_STALE:
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        /* FALLTHRU */
    case DISAS_PC_UPDATED:
        if (!use_exit_tb(ctx)) {
            tcg_gen_lookup_and_goto_ptr();
            break;
        }
        /* FALLTHRU */
    case DISAS_PC_UPDATED_NOCHAIN:
        if (ctx->base.singlestep_enabled) {
            gen_excp_1(EXCP_DEBUG, 0);
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}
3034
3035static void alpha_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
3036{
3037 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
3038 log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
3039}
3040
/* Hooks plugged into the generic translator loop.  */
static const TranslatorOps alpha_tr_ops = {
    .init_disas_context = alpha_tr_init_disas_context,
    .tb_start           = alpha_tr_tb_start,
    .insn_start         = alpha_tr_insn_start,
    .breakpoint_check   = alpha_tr_breakpoint_check,
    .translate_insn     = alpha_tr_translate_insn,
    .tb_stop            = alpha_tr_tb_stop,
    .disas_log          = alpha_tr_disas_log,
};
3050
/* Entry point for TB translation: run the generic translator loop with
   the Alpha hooks.  */
void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
{
    DisasContext dc;
    translator_loop(&alpha_tr_ops, &dc.base, cpu, tb, max_insns);
}
3056
/* Restore CPU state from the insn_start data recorded at translation
   time; Alpha records only the PC (see alpha_tr_insn_start).  */
void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
}
3062