1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include "qemu/osdep.h"
21#include "cpu.h"
22#include "sysemu/cpus.h"
23#include "sysemu/cpu-timers.h"
24#include "disas/disas.h"
25#include "qemu/host-utils.h"
26#include "exec/exec-all.h"
27#include "tcg/tcg-op.h"
28#include "exec/cpu_ldst.h"
29#include "exec/helper-proto.h"
30#include "exec/helper-gen.h"
31#include "trace-tcg.h"
32#include "exec/translator.h"
33#include "exec/log.h"
34
35
36#undef ALPHA_DEBUG_DISAS
37#define CONFIG_SOFTFLOAT_INLINE
38
39#ifdef ALPHA_DEBUG_DISAS
40# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
41#else
42# define LOG_DISAS(...) do { } while (0)
43#endif
44
typedef struct DisasContext DisasContext;
/* Per-translation-block disassembly state for the Alpha front end.  */
struct DisasContext {
    DisasContextBase base;

#ifndef CONFIG_USER_ONLY
    /* PALcode base register, copied out of the env at TB start.  */
    uint64_t palbr;
#endif
    /* Snapshot of env flag bits that affect translation (PS, PAL mode).  */
    uint32_t tbflags;
    /* MMU index used for all memory accesses generated in this TB.  */
    int mem_idx;

    /* implver and amask values for this CPU (ISA feature selection).  */
    int implver;
    int amask;

    /* Current FP rounding mode for this TB, as a QUAL_RM_* value.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB, as a QUAL_U value.  */
    int tb_ftz;

    /* Integer register file in effect: cpu_std_ir or cpu_pal_ir.  */
    TCGv *ir;

    /* Lazily-allocated temporaries, released per-insn by
       free_context_temps():  */
    TCGv zero;      /* constant 0, stands in for reads of $31/$f31 */
    TCGv sink;      /* write-only destination for writes to $31/$f31 */
    TCGv lit;       /* holds an 8-bit operand literal */
};
73
74
75
76
77#define DISAS_PC_UPDATED_NOCHAIN DISAS_TARGET_0
78#define DISAS_PC_UPDATED DISAS_TARGET_1
79#define DISAS_PC_STALE DISAS_TARGET_2
80
81
82static TCGv cpu_std_ir[31];
83static TCGv cpu_fir[31];
84static TCGv cpu_pc;
85static TCGv cpu_lock_addr;
86static TCGv cpu_lock_value;
87
88#ifndef CONFIG_USER_ONLY
89static TCGv cpu_pal_ir[31];
90#endif
91
92#include "exec/gen-icount.h"
93
/* Register all CPUAlphaState fields that the translator accesses as
   TCG globals.  Called once at CPU creation.  */
void alpha_translate_init(void)
{
#define DEF_VAR(V) { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the OSF/1 calling-convention names for the integer registers.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    /* Names for the eight PALmode shadow registers.  */
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    int i;

    /* $31 and $f31 are hardwired zero and have no backing global.  */
    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    /* The PALmode register file aliases the standard one, except for
       the eight shadow registers (t7, s0-s5, t11).  */
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}
157
158static TCGv load_zero(DisasContext *ctx)
159{
160 if (!ctx->zero) {
161 ctx->zero = tcg_const_i64(0);
162 }
163 return ctx->zero;
164}
165
166static TCGv dest_sink(DisasContext *ctx)
167{
168 if (!ctx->sink) {
169 ctx->sink = tcg_temp_new();
170 }
171 return ctx->sink;
172}
173
/* Release the lazily-allocated per-instruction temporaries.  */
static void free_context_temps(DisasContext *ctx)
{
    if (ctx->sink) {
        /* Discard before freeing so the optimizer can drop any dead
           stores made into the sink.  */
        tcg_gen_discard_i64(ctx->sink);
        tcg_temp_free(ctx->sink);
        ctx->sink = NULL;
    }
    if (ctx->zero) {
        tcg_temp_free(ctx->zero);
        ctx->zero = NULL;
    }
    if (ctx->lit) {
        tcg_temp_free(ctx->lit);
        ctx->lit = NULL;
    }
}
190
191static TCGv load_gpr(DisasContext *ctx, unsigned reg)
192{
193 if (likely(reg < 31)) {
194 return ctx->ir[reg];
195 } else {
196 return load_zero(ctx);
197 }
198}
199
200static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
201 uint8_t lit, bool islit)
202{
203 if (islit) {
204 ctx->lit = tcg_const_i64(lit);
205 return ctx->lit;
206 } else if (likely(reg < 31)) {
207 return ctx->ir[reg];
208 } else {
209 return load_zero(ctx);
210 }
211}
212
213static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
214{
215 if (likely(reg < 31)) {
216 return ctx->ir[reg];
217 } else {
218 return dest_sink(ctx);
219 }
220}
221
222static TCGv load_fpr(DisasContext *ctx, unsigned reg)
223{
224 if (likely(reg < 31)) {
225 return cpu_fir[reg];
226 } else {
227 return load_zero(ctx);
228 }
229}
230
231static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
232{
233 if (likely(reg < 31)) {
234 return cpu_fir[reg];
235 } else {
236 return dest_sink(ctx);
237 }
238}
239
/* Return the env offset of the byte within CPUAlphaState.flags (a
   32-bit word) that holds flag bit SHIFT, adjusting for host
   byte order so a single-byte load/store works on both endians.  */
static int get_flag_ofs(unsigned shift)
{
    int ofs = offsetof(CPUAlphaState, flags);
#ifdef HOST_WORDS_BIGENDIAN
    ofs += 3 - (shift / 8);
#else
    ofs += shift / 8;
#endif
    return ofs;
}
250
/* Load the env flag byte containing bit SHIFT, zero-extended, into VAL.  */
static void ld_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_ld8u_i64(val, cpu_env, get_flag_ofs(shift));
}
255
/* Store the low byte of VAL into the env flag byte containing bit SHIFT.  */
static void st_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_st8_i64(val, cpu_env, get_flag_ofs(shift));
}
260
261static void gen_excp_1(int exception, int error_code)
262{
263 TCGv_i32 tmp1, tmp2;
264
265 tmp1 = tcg_const_i32(exception);
266 tmp2 = tcg_const_i32(error_code);
267 gen_helper_excp(cpu_env, tmp1, tmp2);
268 tcg_temp_free_i32(tmp2);
269 tcg_temp_free_i32(tmp1);
270}
271
/* Raise EXCEPTION at the current instruction, first committing pc_next
   to cpu_pc so the exception handler sees the right restart address.  */
static DisasJumpType gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
    gen_excp_1(exception, error_code);
    return DISAS_NORETURN;
}
278
/* Raise an OPCDEC (illegal instruction) exception.  */
static inline DisasJumpType gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}
283
284static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
285{
286 TCGv_i32 tmp32 = tcg_temp_new_i32();
287 tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
288 gen_helper_memory_to_f(t0, tmp32);
289 tcg_temp_free_i32(tmp32);
290}
291
292static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
293{
294 TCGv tmp = tcg_temp_new();
295 tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
296 gen_helper_memory_to_g(t0, tmp);
297 tcg_temp_free(tmp);
298}
299
300static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
301{
302 TCGv_i32 tmp32 = tcg_temp_new_i32();
303 tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
304 gen_helper_memory_to_s(t0, tmp32);
305 tcg_temp_free_i32(tmp32);
306}
307
/* LDL_L: load-locked longword.  Besides the sign-extended load, record
   the address and loaded value for the matching store-conditional.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
314
/* LDQ_L: load-locked quadword; records lock address and value as above.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
321
/* Common code for memory loads: RA <- mem[RB + disp16], optionally an
   FP register (FP) and optionally with the low 3 address bits cleared
   (CLEAR, for the unaligned LDQ_U form).  */
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        /* Align the effective address for the unaligned-access insns.  */
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
354
355static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
356{
357 TCGv_i32 tmp32 = tcg_temp_new_i32();
358 gen_helper_f_to_memory(tmp32, t0);
359 tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
360 tcg_temp_free_i32(tmp32);
361}
362
363static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
364{
365 TCGv tmp = tcg_temp_new();
366 gen_helper_g_to_memory(tmp, t0);
367 tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
368 tcg_temp_free(tmp);
369}
370
371static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
372{
373 TCGv_i32 tmp32 = tcg_temp_new_i32();
374 gen_helper_s_to_memory(tmp32, t0);
375 tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
376 tcg_temp_free_i32(tmp32);
377}
378
/* Common code for memory stores: mem[RB + disp16] <- RA, optionally an
   FP register (FP) and optionally with the low 3 address bits cleared
   (CLEAR, for the unaligned STQ_U form).  */
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        /* Align the effective address for the unaligned-access insns.  */
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
404
/* STL_C/STQ_C: store-conditional, implemented as a compare-and-swap
   against the value recorded by the last load-locked.  RA receives 1
   on success, 0 on failure; the lock is always invalidated.  */
static DisasJumpType gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                           int32_t disp16, int mem_idx,
                                           MemOp op)
{
    TCGLabel *lab_fail, *lab_done;
    TCGv addr, val;

    addr = tcg_temp_new_i64();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    /* The cmpxchg below may take an exception; clean up first.  */
    free_context_temps(ctx);

    lab_fail = gen_new_label();
    lab_done = gen_new_label();
    /* Fail immediately if the address does not match the lock.  */
    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);
    tcg_temp_free_i64(addr);

    /* Atomically store the new value iff memory still holds the value
       observed by the load-locked.  */
    val = tcg_temp_new_i64();
    tcg_gen_atomic_cmpxchg_i64(val, cpu_lock_addr, cpu_lock_value,
                               load_gpr(ctx, ra), mem_idx, op);
    free_context_temps(ctx);

    if (ra != 31) {
        /* Success iff the memory value matched the lock value.  */
        tcg_gen_setcond_i64(TCG_COND_EQ, ctx->ir[ra], val, cpu_lock_value);
    }
    tcg_temp_free_i64(val);
    tcg_gen_br(lab_done);

    gen_set_label(lab_fail);
    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], 0);
    }

    gen_set_label(lab_done);
    /* Invalidate the lock regardless of outcome.  */
    tcg_gen_movi_i64(cpu_lock_addr, -1);
    return DISAS_NEXT;
}
441
/* True if ADDR lies within the kernel superpage (KSEG), which is only
   accessible in kernel mode and whose mapping can never change.  */
static bool in_superpage(DisasContext *ctx, int64_t addr)
{
#ifndef CONFIG_USER_ONLY
    return ((ctx->tbflags & ENV_FLAG_PS_USER) == 0
            && addr >> TARGET_VIRT_ADDR_SPACE_BITS == -1
            && ((addr >> 41) & 3) == 2);
#else
    return false;
#endif
}
452
/* True if the TB must end with exit_tb rather than chaining: last
   insn of an icount I/O window, or any form of single-stepping.  */
static bool use_exit_tb(DisasContext *ctx)
{
    return ((tb_cflags(ctx->base.tb) & CF_LAST_IO)
            || ctx->base.singlestep_enabled
            || singlestep);
}
459
/* True if a direct branch to DEST may be chained with goto_tb.  */
static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Suppress goto_tb in the case of single-stepping and IO.  */
    if (unlikely(use_exit_tb(ctx))) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    /* If the destination is in the superpage, the page perms can't change.  */
    if (in_superpage(ctx, dest)) {
        return true;
    }
    /* Chaining is only safe within the same guest page as the TB start.  */
    return ((ctx->base.tb->pc ^ dest) & TARGET_PAGE_MASK) == 0;
#else
    return true;
#endif
}
477
/* BR/BSR: unconditional direct branch, optionally writing the return
   address (pc_next) into RA.  */
static DisasJumpType gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
{
    uint64_t dest = ctx->base.pc_next + (disp << 2);

    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
    }

    /* Notice branch-to-next; used to initialize RA with the PC.  */
    if (disp == 0) {
        return 0;
    } else if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb(ctx->base.tb, 0);
        return DISAS_NORETURN;
    } else {
        tcg_gen_movi_i64(cpu_pc, dest);
        return DISAS_PC_UPDATED;
    }
}
499
/* Conditional branch taken when COND holds for CMP (compared against
   zero).  Uses a two-way goto_tb when chaining is allowed, otherwise
   selects the new PC with movcond.  */
static DisasJumpType gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                        TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->base.pc_next + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        /* Not-taken path: fall through to the next insn.  */
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        tcg_gen_exit_tb(ctx->base.tb, 0);

        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb(ctx->base.tb, 1);

        return DISAS_NORETURN;
    } else {
        TCGv_i64 z = tcg_const_i64(0);
        TCGv_i64 d = tcg_const_i64(dest);
        TCGv_i64 p = tcg_const_i64(ctx->base.pc_next);

        /* cpu_pc = (cmp COND 0) ? dest : pc_next.  */
        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);

        tcg_temp_free_i64(z);
        tcg_temp_free_i64(d);
        tcg_temp_free_i64(p);
        return DISAS_PC_UPDATED;
    }
}
532
/* Integer conditional branch.  MASK selects the BLBC/BLBS forms which
   test only the low bit of RA.  */
static DisasJumpType gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                               int32_t disp, int mask)
{
    if (mask) {
        TCGv tmp = tcg_temp_new();
        DisasJumpType ret;

        tcg_gen_andi_i64(tmp, load_gpr(ctx, ra), 1);
        ret = gen_bcond_internal(ctx, cond, tmp, disp);
        tcg_temp_free(tmp);
        return ret;
    }
    return gen_bcond_internal(ctx, cond, load_gpr(ctx, ra), disp);
}
547
548
549
/* Fold -0.0 for comparison with COND.  Alpha FP branches/cmoves compare
   the raw register bits against zero, so IEEE minus-zero (sign bit set,
   rest clear) must be canonicalized per condition.  */
static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0 bit pattern (most negative value) already
           compares like +0, so no folding is required.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, ignore the sign bit entirely.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0 to +0:  dest = (src != mzero ? -1 : 0) & src.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
579
/* FP conditional branch: fold minus-zero, then branch on COND.  */
static DisasJumpType gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                                int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    DisasJumpType ret;

    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    ret = gen_bcond_internal(ctx, cond, cmp_tmp, disp);
    tcg_temp_free(cmp_tmp);
    return ret;
}
591
/* FCMOVxx: RC <- RB if (RA COND 0) else RC, with minus-zero folded.  */
static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}
605
606#define QUAL_RM_N 0x080
607#define QUAL_RM_C 0x000
608#define QUAL_RM_M 0x040
609#define QUAL_RM_D 0x0c0
610#define QUAL_RM_MASK 0x0c0
611
612#define QUAL_U 0x100
613#define QUAL_V 0x100
614#define QUAL_S 0x400
615#define QUAL_I 0x200
616
/* Emit code to set the softfloat rounding mode demanded by the insn
   qualifier FN11, if it differs from the mode currently in effect
   within this TB.  */
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        /* Already set earlier in the TB; nothing to do.  */
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    /* fn11 is masked to QUAL_RM_MASK above, so exactly one of the four
       cases applies; no default is needed.  */
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        /* Dynamic rounding: read the mode from the FPCR at runtime.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* Store directly into fp_status rather than calling a helper;
       assumes float_rounding_mode is a byte-sized field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
656
/* Emit code to set the softfloat flush-to-zero flag demanded by the
   insn qualifier FN11, if it differs from the current TB setting.  */
static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled: flushing depends on the FPCR UNFD bit.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled: force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
686
/* Return the FP source register REG, emitting the input checks that the
   insn qualifiers require (SNaN traps, and denormal handling when /S is
   absent).  IS_CMP selects the relaxed check used by compares.  */
static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* With /S, system mode still checks for denormal inputs so
               that the OS completion handler can be invoked; user mode
               behaves as if completion already happened.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}
712
/* Raise any FP exceptions accumulated by the preceding operation,
   honoring the insn qualifiers: exceptions not enabled by the
   qualifiers are masked out via IGNORE.  */
static void gen_fp_exc_raise(int rc, int fn11)
{
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Without /U (or /V for integer ops), ignore underflow and
           integer overflow.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        /* Without /I, ignore inexact.  */
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* The helper needs the destination register number to record which
       register was being written when a software-completion trap is
       taken; +32 encodes "FP register" as opposed to integer.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}
747
/* CVTLQ: convert the longword register format (bits spread at <63:62>
   and <58:29>) back into a sign-extended 64-bit integer.  */
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* Take the low 30 bits from <58:29>, and the sign (with its
       extension) from an arithmetic shift of the top bits; the deposit
       below merges them.  */
    tcg_gen_shri_i64(tmp, vb, 29);
    tcg_gen_sari_i64(vc, vb, 32);
    tcg_gen_deposit_i64(vc, vc, tmp, 0, 30);

    tcg_temp_free(tmp);
}
760
/* Common code for two-operand IEEE helpers (sqrt, conversions):
   apply rounding/flush qualifiers, check the input, call the helper,
   then raise any enabled exceptions.  */
static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}
775
776#define IEEE_ARITH2(name) \
777static inline void glue(gen_, name)(DisasContext *ctx, \
778 int rb, int rc, int fn11) \
779{ \
780 gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11); \
781}
782IEEE_ARITH2(sqrts)
783IEEE_ARITH2(sqrtt)
784IEEE_ARITH2(cvtst)
785IEEE_ARITH2(cvtts)
786
/* CVTTQ: convert T-float to quadword integer.  Handled separately from
   gen_ieee_arith2 because the round-to-zero (/C) form has a dedicated
   helper and flush-to-zero does not apply.  */
static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}
805
/* Common code for integer-to-float conversions (CVTQS/CVTQT).  The
   only exception these can raise is inexact, so exception raising is
   only emitted when /I is requested.  */
static void gen_ieee_intcvt(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb, vc;

    gen_qual_roundmode(ctx, fn11);
    vb = load_fpr(ctx, rb);
    vc = dest_fpr(ctx, rc);

    if (fn11 & QUAL_I) {
        helper(vc, cpu_env, vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(vc, cpu_env, vb);
    }
}
826
827#define IEEE_INTCVT(name) \
828static inline void glue(gen_, name)(DisasContext *ctx, \
829 int rb, int rc, int fn11) \
830{ \
831 gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11); \
832}
833IEEE_INTCVT(cvtqs)
834IEEE_INTCVT(cvtqt)
835
/* CPYS and friends: VC = (VA & MASK) | (VB & ~MASK), optionally with
   VA inverted first (INV_A), used for FP sign-copy operations.  */
static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}
853
/* Common code for three-operand IEEE arithmetic (add/sub/mul/div):
   apply qualifiers, check both inputs, call the helper, raise
   enabled exceptions.  */
static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
870
871#define IEEE_ARITH3(name) \
872static inline void glue(gen_, name)(DisasContext *ctx, \
873 int ra, int rb, int rc, int fn11) \
874{ \
875 gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11); \
876}
877IEEE_ARITH3(adds)
878IEEE_ARITH3(subs)
879IEEE_ARITH3(muls)
880IEEE_ARITH3(divs)
881IEEE_ARITH3(addt)
882IEEE_ARITH3(subt)
883IEEE_ARITH3(mult)
884IEEE_ARITH3(divt)
885
/* Common code for IEEE compares (CMPTxx).  Rounding and flush-to-zero
   qualifiers do not apply; inputs use the relaxed compare check.  */
static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
899
900#define IEEE_CMP3(name) \
901static inline void glue(gen_, name)(DisasContext *ctx, \
902 int ra, int rb, int rc, int fn11) \
903{ \
904 gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11); \
905}
906IEEE_CMP3(cmptun)
907IEEE_CMP3(cmpteq)
908IEEE_CMP3(cmptlt)
909IEEE_CMP3(cmptle)
910
/* Expand the 8-bit ZAPNOT byte-select literal into a 64-bit mask:
   bit i of LIT set => byte i of the result is 0xff.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int i;

    /* Build from the top byte down so each iteration shifts the
       accumulated mask into place.  */
    for (i = 7; i >= 0; --i) {
        mask <<= 8;
        if (lit & (1u << i)) {
            mask |= 0xffull;
        }
    }
    return mask;
}
923
924
925
926
/* ZAPNOT with an immediate byte mask: keep bytes selected by LIT and
   zero the rest, using the cheapest equivalent TCG op for the common
   literal values.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}
950
951
/* EXTWH, EXTLH, EXTQH: extract the high part of a value spanning an
   unaligned boundary, i.e. shift VA left by (64 - 8*(RB&7)) mod 64,
   then apply the byte mask.  */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (64 - lit * 8) & 0x3f;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_deposit_z_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        /* Shift count is (-8 * rb) & 63, so rb == 0 shifts by zero.  */
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}
973
974
/* EXTBL, EXTWL, EXTLL, EXTQL: extract the low part, i.e. shift VA
   right by 8*(RB&7) bits and apply the byte mask.  */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len >= 64) {
            /* Clamp so extract stays within the 64-bit operand.  */
            len = 64 - pos;
        }
        tcg_gen_extract_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
        gen_zapnoti(vc, vc, byte_mask);
    }
}
994
995
/* INSWH, INSLH, INSQH: insert the high part — the bytes of VA that
   would shift out past bit 63 when inserted at byte offset RB&7.  */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = 64 - (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_extract_i64(vc, va, pos, len - pos);
        } else {
            /* Inserting at offset 0 leaves no high part.  */
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description applies the byte-mask zap after
           the shift; doing the zap first and shifting afterward is
           equivalent and cheaper.  */
        gen_zapnoti(tmp, va, byte_mask);

        /* The desired count is 64 - 8*(rb&7), but a shift of 64 is
           undefined in TCG.  Shift by (~(8*rb)) & 63 == 63 - 8*(rb&7),
           then by one more bit, so rb == 0 correctly yields zero.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
1032
1033
/* INSBL, INSWL, INSLL, INSQL: insert the low part — VA's masked bytes
   shifted left to byte offset RB&7.  */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len > 64) {
            /* Clamp so the deposit stays within the 64-bit operand.  */
            len = 64 - pos;
        }
        tcg_gen_deposit_z_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* Zap first, then shift; equivalent to the architected
           shift-then-mask order.  */
        gen_zapnoti(tmp, va, byte_mask);

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
1061
1062
/* MSKWH, MSKLH, MSKQH: clear the bytes of VA that the corresponding
   insert-high instruction would write.  */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        /* The high half of the shifted byte mask, inverted.  */
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* Build the runtime byte mask:
             mask = zapnot_mask(byte_mask) >> (64 - 8*(rb&7)).
           As in gen_ins_h, a shift of 64 is undefined, so shift by
           (~(8*rb)) & 63 and then by one more bit, which gives an
           all-zero mask for rb == 0.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1094
1095
/* MSKBL, MSKWL, MSKLL, MSKQL: clear the bytes of VA that the
   corresponding insert-low instruction would write.  */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* mask = zapnot_mask(byte_mask) << 8*(rb&7); clear those bytes.  */
        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1116
1117static void gen_rx(DisasContext *ctx, int ra, int set)
1118{
1119 TCGv tmp;
1120
1121 if (ra != 31) {
1122 ld_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
1123 }
1124
1125 tmp = tcg_const_i64(set);
1126 st_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
1127 tcg_temp_free(tmp);
1128}
1129
/* Translate a CALL_PAL instruction.  We emulate OSF/1 PALcode, so many
   of the calls are simple accesses to internal CPU registers; anything
   else falls through to a real dispatch into the PALcode image.  */
static DisasJumpType gen_call_pal(DisasContext *ctx, int palcode)
{
    /* Unprivileged PAL call.  */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB: instruction memory barrier — no-op under QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code, only usable in kernel mode.  */
    if (palcode < 0x40 && (ctx->tbflags & ENV_FLAG_PS_USER) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH — no-op.  */
            break;
        case 0x02:
            /* DRAINA — no-op.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35:
            /* SWPIPL */
            /* We're in kernel mode, so PS only contains the 3 IPL bits;
               return the old value in v0.  */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);

            /* But store only the 3 IPL bits from the user.  */
            {
                TCGv tmp = tcg_temp_new();
                tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
                st_flag_byte(tmp, ENV_FLAG_PS_SHIFT);
                tcg_temp_free(tmp);
            }

            /* End the TB without chaining so that a lowered IPL lets a
               pending interrupt be recognized right away.  */
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            return DISAS_PC_UPDATED_NOCHAIN;

        case 0x36:
            /* RDPS */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);
            break;

        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI: return the CPU index.  */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        case 0x3E:
            /* WTINT: wait for interrupt — halt the vCPU.  */
            {
                TCGv_i32 tmp = tcg_const_i32(1);
                tcg_gen_st_i32(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                             offsetof(CPUState, halted));
                tcg_temp_free_i32(tmp);
            }
            tcg_gen_movi_i64(ctx->ir[IR_V0], 0);
            return gen_excp(ctx, EXCP_HALTED, 0);

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->base.pc_next;
        uint64_t entry = ctx->palbr;

        /* Bit 0 of exc_addr records whether we were already in PAL
           mode; otherwise enter PAL mode now.  */
        if (ctx->tbflags & ENV_FLAG_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

        /* Dispatch: privileged entries at palbr+0x1000, unprivileged
           at palbr+0x2000, 64 bytes per vector.  */
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination runs in PALmode, we don't need the page
           permissions check; we'll see whether the page exists when the
           TB is created, and all TBs are flushed if PALBR changes.  */
        if (!use_exit_tb(ctx)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb(ctx->base.tb, 0);
            return DISAS_NORETURN;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return DISAS_PC_UPDATED;
        }
    }
#endif
}
1286
1287#ifndef CONFIG_USER_ONLY
1288
1289#define PR_LONG 0x200000
1290
/* Map a processor-register number to its env offset, or 0 if the
   register is unknown (read-zero, write-ignore).  PR_LONG marks
   32-bit fields.  */
static int cpu_pr_data(int pr)
{
    switch (pr) {
    case 2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;  /* PCC_OFS */
    case 3: return offsetof(CPUAlphaState, trap_arg0);
    case 4: return offsetof(CPUAlphaState, trap_arg1);
    case 5: return offsetof(CPUAlphaState, trap_arg2);
    case 6: return offsetof(CPUAlphaState, exc_addr);
    case 7: return offsetof(CPUAlphaState, palbr);
    case 8: return offsetof(CPUAlphaState, ptbr);
    case 9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 40 ... 63:
        /* PALcode scratch registers.  */
        return offsetof(CPUAlphaState, scratch[pr - 40]);

    case 251:
        /* ALARM expiration time.  */
        return offsetof(CPUAlphaState, alarm_expire);
    }
    return 0;
}
1314
/* HW_MFPR: move from processor register REGNO into VA.  */
static DisasJumpType gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers while in PALmode.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* Reading the clock is an I/O operation under icount; end
               the TB so the icount window stays accurate.  */
            gen_io_start();
            helper(va);
            return DISAS_PC_STALE;
        } else {
            helper(va);
        }
        break;

    case 0: /* PS */
        ld_flag_byte(va, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        ld_flag_byte(va, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only; unknown registers are
           read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return DISAS_NEXT;
}
1365
/* HW_MTPR: move VB into processor register REGNO.  */
static DisasJumpType gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    int data;
    DisasJumpType ret = DISAS_NEXT;

    switch (regno) {
    case 255:
        /* TBIA: invalidate all TLB entries.  */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS: invalidate a single TLB entry.  */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT: halt the vCPU until the next interrupt.  */
        {
            TCGv_i32 tmp = tcg_const_i32(1);
            tcg_gen_st_i32(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                         offsetof(CPUState, halted));
            tcg_temp_free_i32(tmp);
        }
        return gen_excp(ctx, EXCP_HALTED, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return DISAS_PC_STALE;

    case 251:
        /* ALARM */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* Timer access is I/O under icount; end the TB.  */
            gen_io_start();
            ret = DISAS_PC_STALE;
        }
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all TBs
           that ended with a CALL_PAL; since it usually only changes at
           boot, flushing everything is acceptable.  */
        gen_helper_tb_flush(cpu_env);
        return DISAS_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers while in PALmode.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    case 0: /* PS */
        st_flag_byte(vb, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        st_flag_byte(vb, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only; unknown registers are
           read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return ret;
}
1444#endif
1445
/* These helper macros are only usable inside translate_one(), where the
   "invalid_opc" label and the decoded fields (real_islit, ctx, ...) are in
   scope.  Each rejects the instruction by jumping to the ILLOPC path.  */

/* The opcode only accepts a register Rb operand, not an 8-bit literal.  */
#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* The opcode belongs to an architecture extension (BWX, FIX, CIX, MVI);
   reject it if the CPU's AMASK says the extension is not implemented.  */
#define REQUIRE_AMASK(FLAG)                     \
    do {                                        \
        if ((ctx->amask & AMASK_##FLAG) == 0) { \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* The opcode is only valid when the given TB flag (e.g. PAL mode) is set.  */
#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tbflags & (FLAG)) == 0) {     \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* The encoding requires the given register field to be R31/F31 (zero).  */
#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)
1473
/* Decode and translate a single Alpha instruction INSN, emitting TCG ops.
   Returns the disassembly status telling the translator loop whether to
   continue, or how the TB ends.  */
static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    DisasJumpType ret;

    /* Decode all instruction fields */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);

    /* Treat Rb == R31 as the literal zero, simplifying the cases below.  */
    if (rb == 31 && !islit) {
        islit = true;
        lit = 0;
    }

    ret = DISAS_NEXT;
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        ret = gen_call_pal(ctx, insn & 0x03ffffff);
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;

    case 0x09:
        /* LDAH */
        disp16 = (uint32_t)disp16 << 16;
        /* fall through */
    case 0x08:
        /* LDA */
        va = dest_gpr(ctx, ra);
        /* It's worth special-casing immediate loads.  */
        if (rb == 31) {
            tcg_gen_movi_i64(va, disp16);
        } else {
            tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
        }
        break;

    case 0x0A:
        /* LDBU */
        REQUIRE_AMASK(BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        REQUIRE_AMASK(BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        REQUIRE_AMASK(BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
        break;
    case 0x0E:
        /* STB */
        REQUIRE_AMASK(BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
        break;

    case 0x10:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (ra == 31) {
            if (fn7 == 0x00) {
                /* Special case ADDL as SEXTL.  */
                tcg_gen_ext32s_i64(vc, vb);
                break;
            }
            if (fn7 == 0x29) {
                /* Special case SUBQ as NEGQ.  */
                tcg_gen_neg_i64(vc, vb);
                break;
            }
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* ADDL */
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x02:
            /* S4ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x09:
            /* SUBL */
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x0B:
            /* S4SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x0F:
            /* CMPBGE */
            if (ra == 31) {
                /* Special case 0 >= X as X == 0.  */
                gen_helper_cmpbe0(vc, vb);
            } else {
                gen_helper_cmpbge(vc, va, vb);
            }
            break;
        case 0x12:
            /* S8ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1B:
            /* S8SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1D:
            /* CMPULT */
            tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
            break;
        case 0x20:
            /* ADDQ */
            tcg_gen_add_i64(vc, va, vb);
            break;
        case 0x22:
            /* S4ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x29:
            /* SUBQ */
            tcg_gen_sub_i64(vc, va, vb);
            break;
        case 0x2B:
            /* S4SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x2D:
            /* CMPEQ */
            tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
            break;
        case 0x32:
            /* S8ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3B:
            /* S8SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3D:
            /* CMPULE */
            tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
            break;
        case 0x40:
            /* ADDL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_add_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x49:
            /* SUBL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_sub_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x4D:
            /* CMPLT */
            tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
            break;
        case 0x60:
            /* ADDQ/V */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_eqv_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x69:
            /* SUBQ/V */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_xor_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x6D:
            /* CMPLE */
            tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x11:
        if (fn7 == 0x20) {
            if (rc == 31) {
                /* Special case BIS as NOP.  */
                break;
            }
            if (ra == 31) {
                /* Special case BIS as MOV.  */
                vc = dest_gpr(ctx, rc);
                if (islit) {
                    tcg_gen_movi_i64(vc, lit);
                } else {
                    tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
                }
                break;
            }
        }

        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (fn7 == 0x28 && ra == 31) {
            /* Special case ORNOT as NOT.  */
            tcg_gen_not_i64(vc, vb);
            break;
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* AND */
            tcg_gen_and_i64(vc, va, vb);
            break;
        case 0x08:
            /* BIC */
            tcg_gen_andc_i64(vc, va, vb);
            break;
        case 0x14:
            /* CMOVLBS */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x16:
            /* CMOVLBC */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x20:
            /* BIS */
            tcg_gen_or_i64(vc, va, vb);
            break;
        case 0x24:
            /* CMOVEQ */
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x26:
            /* CMOVNE */
            tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x28:
            /* ORNOT */
            tcg_gen_orc_i64(vc, va, vb);
            break;
        case 0x40:
            /* XOR */
            tcg_gen_xor_i64(vc, va, vb);
            break;
        case 0x44:
            /* CMOVLT */
            tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x46:
            /* CMOVGE */
            tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x48:
            /* EQV */
            tcg_gen_eqv_i64(vc, va, vb);
            break;
        case 0x61:
            /* AMASK */
            REQUIRE_REG_31(ra);
            tcg_gen_andi_i64(vc, vb, ~ctx->amask);
            break;
        case 0x64:
            /* CMOVLE */
            tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x66:
            /* CMOVGT */
            tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x6C:
            /* IMPLVER */
            REQUIRE_REG_31(ra);
            tcg_gen_movi_i64(vc, ctx->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x12:
        vc = dest_gpr(ctx, rc);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x0B:
            /* INSBL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x12:
            /* MSKWL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x1B:
            /* INSWL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x22:
            /* MSKLL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x2B:
            /* INSLL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x30:
            /* ZAP */
            if (islit) {
                gen_zapnoti(vc, va, ~lit);
            } else {
                gen_helper_zap(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x31:
            /* ZAPNOT */
            if (islit) {
                gen_zapnoti(vc, va, lit);
            } else {
                gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x32:
            /* MSKQL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x34:
            /* SRL */
            if (islit) {
                tcg_gen_shri_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shr_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x39:
            /* SLL */
            if (islit) {
                tcg_gen_shli_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shl_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x3C:
            /* SRA */
            if (islit) {
                tcg_gen_sari_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_sar_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x57:
            /* INSWH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x62:
            /* MSKLH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x67:
            /* INSLH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x72:
            /* MSKQH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x77:
            /* INSQH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x13:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* MULL */
            tcg_gen_mul_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x20:
            /* MULQ */
            tcg_gen_mul_i64(vc, va, vb);
            break;
        case 0x30:
            /* UMULH */
            tmp = tcg_temp_new();
            tcg_gen_mulu2_i64(tmp, vc, va, vb);
            tcg_temp_free(tmp);
            break;
        case 0x40:
            /* MULL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_mul_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x60:
            /* MULQ/V */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_muls2_i64(vc, tmp, va, vb);
            tcg_gen_sari_i64(tmp2, vc, 63);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x14:
        REQUIRE_AMASK(FIX);
        vc = dest_fpr(ctx, rc);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_s(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x0A:
            /* SQRTF */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtf(vc, cpu_env, vb);
            break;
        case 0x0B:
            /* SQRTS */
            REQUIRE_REG_31(ra);
            gen_sqrts(ctx, rb, rc, fn11);
            break;
        case 0x14:
            /* ITOFF */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_f(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x24:
            /* ITOFT */
            REQUIRE_REG_31(rb);
            va = load_gpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        case 0x2A:
            /* SQRTG */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtg(vc, cpu_env, vb);
            break;
        case 0x02B:
            /* SQRTT */
            REQUIRE_REG_31(ra);
            gen_sqrtt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        vc = dest_fpr(ctx, rc);
        vb = load_fpr(ctx, rb);
        va = load_fpr(ctx, ra);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_helper_addf(vc, cpu_env, va, vb);
            break;
        case 0x01:
            /* SUBF */
            gen_helper_subf(vc, cpu_env, va, vb);
            break;
        case 0x02:
            /* MULF */
            gen_helper_mulf(vc, cpu_env, va, vb);
            break;
        case 0x03:
            /* DIVF */
            gen_helper_divf(vc, cpu_env, va, vb);
            break;
        case 0x1E:
            /* CVTDG -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x20:
            /* ADDG */
            gen_helper_addg(vc, cpu_env, va, vb);
            break;
        case 0x21:
            /* SUBG */
            gen_helper_subg(vc, cpu_env, va, vb);
            break;
        case 0x22:
            /* MULG */
            gen_helper_mulg(vc, cpu_env, va, vb);
            break;
        case 0x23:
            /* DIVG */
            gen_helper_divg(vc, cpu_env, va, vb);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_helper_cmpgeq(vc, cpu_env, va, vb);
            break;
        case 0x26:
            /* CMPGLT */
            gen_helper_cmpglt(vc, cpu_env, va, vb);
            break;
        case 0x27:
            /* CMPGLE */
            gen_helper_cmpgle(vc, cpu_env, va, vb);
            break;
        case 0x2C:
            /* CVTGF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgf(vc, cpu_env, vb);
            break;
        case 0x2D:
            /* CVTGD -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x2F:
            /* CVTGQ */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgq(vc, cpu_env, vb);
            break;
        case 0x3C:
            /* CVTQF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqf(vc, cpu_env, vb);
            break;
        case 0x3E:
            /* CVTQG */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqg(vc, cpu_env, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x16:
        /* IEEE floating-point */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_adds(ctx, ra, rb, rc, fn11);
            break;
        case 0x01:
            /* SUBS */
            gen_subs(ctx, ra, rb, rc, fn11);
            break;
        case 0x02:
            /* MULS */
            gen_muls(ctx, ra, rb, rc, fn11);
            break;
        case 0x03:
            /* DIVS */
            gen_divs(ctx, ra, rb, rc, fn11);
            break;
        case 0x20:
            /* ADDT */
            gen_addt(ctx, ra, rb, rc, fn11);
            break;
        case 0x21:
            /* SUBT */
            gen_subt(ctx, ra, rb, rc, fn11);
            break;
        case 0x22:
            /* MULT */
            gen_mult(ctx, ra, rb, rc, fn11);
            break;
        case 0x23:
            /* DIVT */
            gen_divt(ctx, ra, rb, rc, fn11);
            break;
        case 0x24:
            /* CMPTUN */
            gen_cmptun(ctx, ra, rb, rc, fn11);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_cmpteq(ctx, ra, rb, rc, fn11);
            break;
        case 0x26:
            /* CMPTLT */
            gen_cmptlt(ctx, ra, rb, rc, fn11);
            break;
        case 0x27:
            /* CMPTLE */
            gen_cmptle(ctx, ra, rb, rc, fn11);
            break;
        case 0x2C:
            REQUIRE_REG_31(ra);
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_cvtst(ctx, rb, rc, fn11);
            } else {
                /* CVTTS */
                gen_cvtts(ctx, rb, rc, fn11);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            REQUIRE_REG_31(ra);
            gen_cvttq(ctx, rb, rc, fn11);
            break;
        case 0x3C:
            /* CVTQS */
            REQUIRE_REG_31(ra);
            gen_cvtqs(ctx, rb, rc, fn11);
            break;
        case 0x3E:
            /* CVTQT */
            REQUIRE_REG_31(ra);
            gen_cvtqt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_cvtlq(vc, vb);
            break;
        case 0x020:
            /* CPYS */
            if (rc == 31) {
                /* Special case CPYS as FNOP.  */
            } else {
                vc = dest_fpr(ctx, rc);
                va = load_fpr(ctx, ra);
                if (ra == rb) {
                    /* Special case CPYS as FMOV.  */
                    tcg_gen_mov_i64(vc, va);
                } else {
                    vb = load_fpr(ctx, rb);
                    gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
                }
            }
            break;
        case 0x021:
            /* CPYSN */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
            break;
        case 0x022:
            /* CPYSE */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
            break;
        case 0x024:
            /* MT_FPCR */
            va = load_fpr(ctx, ra);
            gen_helper_store_fpcr(cpu_env, va);
            if (ctx->tb_rm == QUAL_RM_D) {
                /* Re-do the copy of the rounding mode to fp_status
                   the next time we use dynamic rounding.  */
                ctx->tb_rm = -1;
            }
            break;
        case 0x025:
            /* MF_FPCR */
            va = dest_fpr(ctx, ra);
            gen_helper_load_fpcr(va, cpu_env);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
            break;
        case 0x030: /* CVTQL */
        case 0x130: /* CVTQL/V */
        case 0x530: /* CVTQL/SV */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_helper_cvtql(vc, cpu_env, vb);
            gen_fp_exc_raise(rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op.  */
            break;
        case 0x0400:
            /* EXCB */
            /* No-op.  */
            break;
        case 0x4000:
            /* MB */
            tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
            break;
        case 0x4400:
            /* WMB */
            tcg_gen_mb(TCG_MO_ST_ST | TCG_BAR_SC);
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            va = dest_gpr(ctx, ra);
            if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
                gen_io_start();
                gen_helper_load_pcc(va, cpu_env);
                ret = DISAS_PC_STALE;
            } else {
                gen_helper_load_pcc(va, cpu_env);
            }
            break;
        case 0xE000:
            /* RC */
            gen_rx(ctx, ra, 0);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            gen_rx(ctx, ra, 1);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        case 0xFC00:
            /* WH64EN */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x19:
        /* HW_MFPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        va = dest_gpr(ctx, ra);
        ret = gen_mfpr(ctx, va, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1A:
        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
           prediction stack action, which of course we don't implement.  */
        vb = load_gpr(ctx, rb);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        if (ra != 31) {
            tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
        }
        ret = DISAS_PC_UPDATED;
        break;

    case 0x1B:
        /* HW_LD (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        {
            TCGv addr = tcg_temp_new();
            vb = load_gpr(ctx, rb);
            va = dest_gpr(ctx, ra);

            tcg_gen_addi_i64(addr, vb, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEQ);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_qemu_ldl_l(va, addr, MMU_PHYS_IDX);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_qemu_ldq_l(va, addr, MMU_PHYS_IDX);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                goto invalid_opc;
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invaliid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a)*/
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                goto invalid_opc;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                   protection checks (hw_ldl/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                   protection checks (hw_ldq/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
                break;
            }
            tcg_temp_free(addr);
            break;
        }
#else
        goto invalid_opc;
#endif

    case 0x1C:
        vc = dest_gpr(ctx, rc);
        if (fn7 == 0x70) {
            /* FTOIT */
            REQUIRE_AMASK(FIX);
            REQUIRE_REG_31(rb);
            va = load_fpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        } else if (fn7 == 0x78) {
            /* FTOIS */
            REQUIRE_AMASK(FIX);
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_fpr(ctx, ra);
            gen_helper_s_to_memory(t32, va);
            tcg_gen_ext_i32_i64(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        }

        vb = load_gpr_lit(ctx, rb, lit, islit);
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            REQUIRE_AMASK(BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext8s_i64(vc, vb);
            break;
        case 0x01:
            /* SEXTW */
            REQUIRE_AMASK(BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext16s_i64(vc, vb);
            break;
        case 0x30:
            /* CTPOP */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_ctpop_i64(vc, vb);
            break;
        case 0x31:
            /* PERR */
            REQUIRE_AMASK(MVI);
            REQUIRE_NO_LIT;
            va = load_gpr(ctx, ra);
            gen_helper_perr(vc, va, vb);
            break;
        case 0x32:
            /* CTLZ */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_clzi_i64(vc, vb, 64);
            break;
        case 0x33:
            /* CTTZ */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_ctzi_i64(vc, vb, 64);
            break;
        case 0x34:
            /* UNPKBW */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbw(vc, vb);
            break;
        case 0x35:
            /* UNPKBL */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbl(vc, vb);
            break;
        case 0x36:
            /* PKWB */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pkwb(vc, vb);
            break;
        case 0x37:
            /* PKLB */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pklb(vc, vb);
            break;
        case 0x38:
            /* MINSB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsb8(vc, va, vb);
            break;
        case 0x39:
            /* MINSW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsw4(vc, va, vb);
            break;
        case 0x3A:
            /* MINUB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minub8(vc, va, vb);
            break;
        case 0x3B:
            /* MINUW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minuw4(vc, va, vb);
            break;
        case 0x3C:
            /* MAXUB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxub8(vc, va, vb);
            break;
        case 0x3D:
            /* MAXUW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxuw4(vc, va, vb);
            break;
        case 0x3E:
            /* MAXSB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsb8(vc, va, vb);
            break;
        case 0x3F:
            /* MAXSW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsw4(vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x1D:
        /* HW_MTPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        vb = load_gpr(ctx, rb);
        ret = gen_mtpr(ctx, vb, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1E:
        /* HW_RET (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        if (rb == 31) {
            /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
               address from EXC_ADDR.  This turns out to be useful for our
               emulation PALcode, so continue to accept it.  */
            ctx->lit = vb = tcg_temp_new();
            tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
        } else {
            vb = load_gpr(ctx, rb);
        }
        tcg_gen_movi_i64(cpu_lock_addr, -1);
        tmp = tcg_temp_new();
        tcg_gen_movi_i64(tmp, 0);
        st_flag_byte(tmp, ENV_FLAG_RX_SHIFT);
        tcg_gen_andi_i64(tmp, vb, 1);
        st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        tcg_temp_free(tmp);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        /* Allow interrupts to be recognized right away.  */
        ret = DISAS_PC_UPDATED_NOCHAIN;
        break;
#else
        goto invalid_opc;
#endif

    case 0x1F:
        /* HW_ST (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        {
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                va = load_gpr(ctx, ra);
                vb = load_gpr(ctx, rb);
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, vb, disp12);
                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
                tcg_temp_free(tmp);
                break;
            case 0x1:
                /* Quadword physical access */
                va = load_gpr(ctx, ra);
                vb = load_gpr(ctx, rb);
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, vb, disp12);
                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEQ);
                tcg_temp_free(tmp);
                break;
            case 0x2:
                /* Longword physical access with lock */
                ret = gen_store_conditional(ctx, ra, rb, disp12,
                                            MMU_PHYS_IDX, MO_LESL);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                ret = gen_store_conditional(ctx, ra, rb, disp12,
                                            MMU_PHYS_IDX, MO_LEQ);
                break;
            case 0x4:
                /* Longword virtual access */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual access */
                goto invalid_opc;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            break;
        }
#else
        goto invalid_opc;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16,
                                    ctx->mem_idx, MO_LESL);
        break;
    case 0x2F:
        /* STQ_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16,
                                    ctx->mem_idx, MO_LEQ);
        break;
    case 0x30:
        /* BR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x31: /* FBEQ */
        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
        break;
    case 0x32: /* FBLT */
        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
        break;
    case 0x33: /* FBLE */
        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
        break;
    case 0x34:
        /* BSR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x35: /* FBNE */
        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
        break;
    case 0x36: /* FBGE */
        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
        break;
    case 0x37: /* FBGT */
        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
        break;
    case 0x38:
        /* BLBC */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        break;
    case 0x39:
        /* BEQ */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        break;
    case 0x3A:
        /* BLT */
        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        break;
    case 0x3B:
        /* BLE */
        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        break;
    case 0x3C:
        /* BLBS */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        break;
    case 0x3D:
        /* BNE */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        break;
    case 0x3E:
        /* BGE */
        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        break;
    case 0x3F:
        /* BGT */
        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        break;
    invalid_opc:
        ret = gen_invalid(ctx);
        break;
    }

    return ret;
}
2925
/* Translator hook: initialize the DisasContext from the CPU state and TB
   flags before translating a new TB.  */
static void alpha_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUAlphaState *env = cpu->env_ptr;
    int64_t bound, mask;

    ctx->tbflags = ctx->base.tb->flags;
    ctx->mem_idx = cpu_mmu_index(env, false);
    ctx->implver = env->implver;
    ctx->amask = env->amask;

#ifdef CONFIG_USER_ONLY
    ctx->ir = cpu_std_ir;
#else
    ctx->palbr = env->palbr;
    /* In PAL mode the shadow registers replace some of the standard ones.  */
    ctx->ir = (ctx->tbflags & ENV_FLAG_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
#endif

    /* ??? Every TB begins with unset rounding mode, to be initialized on
       the first fp insn of the TB.  Alternately we could define a proper
       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
       to reset the FP_STATUS to that default at the end of any TB that
       changes the default.  We could even (gasp) dynamically figure out
       what default would be most efficient given the running program.  */
    ctx->tb_rm = -1;
    /* Similarly for flush-to-zero.  */
    ctx->tb_ftz = -1;

    ctx->zero = NULL;
    ctx->sink = NULL;
    ctx->lit = NULL;

    /* Bound the number of insns to execute to those left on the page.  */
    if (in_superpage(ctx, ctx->base.pc_first)) {
        mask = -1ULL << 41;
    } else {
        mask = TARGET_PAGE_MASK;
    }
    bound = -(ctx->base.pc_first | mask) / 4;
    ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
}
2967
/* Translator hook: nothing to emit at the start of a TB for Alpha.  */
static void alpha_tr_tb_start(DisasContextBase *db, CPUState *cpu)
{
}
2971
/* Translator hook: record the PC of the insn about to be translated.  */
static void alpha_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
2976
/* Translator hook: emit a debug exception in place of the insn at a
   breakpoint.  Always returns true (breakpoint handled).  */
static bool alpha_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
                                      const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    ctx->base.is_jmp = gen_excp(ctx, EXCP_DEBUG, 0);

    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order to for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing.  */
    ctx->base.pc_next += 4;
    return true;
}
2991
/* Translator hook: fetch and translate one instruction, then release any
   per-insn TCG temporaries held in the context.  */
static void alpha_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUAlphaState *env = cpu->env_ptr;
    uint32_t insn = translator_ldl(env, ctx->base.pc_next);

    /* Advance before translating: branch/exception paths use pc_next as
       the address of the *next* insn.  */
    ctx->base.pc_next += 4;
    ctx->base.is_jmp = translate_one(ctx, insn);

    free_context_temps(ctx);
    translator_loop_temp_check(&ctx->base);
}
3004
/* Translator hook: emit the TB epilogue appropriate to how the TB
   ended.  Note the cases below deliberately fall through, each one
   handling progressively less-known PC state. */
static void alpha_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    switch (ctx->base.is_jmp) {
    case DISAS_NORETURN:
        /* The TB already ended (e.g. via an exception); nothing to do. */
        break;
    case DISAS_TOO_MANY:
        /* Stopped on the insn/page limit: chain directly to the next
           TB when allowed, else fall through to the generic exits. */
        if (use_goto_tb(ctx, ctx->base.pc_next)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            tcg_gen_exit_tb(ctx->base.tb, 0);
        }
        /* FALLTHRU */
    case DISAS_PC_STALE:
        /* cpu_pc does not yet hold the next PC; store it now. */
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        /* FALLTHRU */
    case DISAS_PC_UPDATED:
        /* cpu_pc is up to date; try a chained indirect jump. */
        if (!use_exit_tb(ctx)) {
            tcg_gen_lookup_and_goto_ptr();
            break;
        }
        /* FALLTHRU */
    case DISAS_PC_UPDATED_NOCHAIN:
        /* Must return to the main loop (or raise the single-step
           debug exception) without chaining. */
        if (ctx->base.singlestep_enabled) {
            gen_excp_1(EXCP_DEBUG, 0);
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}
3039
3040static void alpha_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
3041{
3042 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
3043 log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
3044}
3045
/* Hooks handed to the generic translator loop for the Alpha target. */
static const TranslatorOps alpha_tr_ops = {
    .init_disas_context = alpha_tr_init_disas_context,
    .tb_start           = alpha_tr_tb_start,
    .insn_start         = alpha_tr_insn_start,
    .breakpoint_check   = alpha_tr_breakpoint_check,
    .translate_insn     = alpha_tr_translate_insn,
    .tb_stop            = alpha_tr_tb_stop,
    .disas_log          = alpha_tr_disas_log,
};
3055
3056void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
3057{
3058 DisasContext dc;
3059 translator_loop(&alpha_tr_ops, &dc.base, cpu, tb, max_insns);
3060}
3061
/* Rebuild CPU state for an insn within a TB from the per-insn data
   recorded by tcg_gen_insn_start(): data[0] is the insn's PC. */
void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
}
3067