1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include "qemu/osdep.h"
21#include "cpu.h"
22#include "sysemu/cpus.h"
23#include "disas/disas.h"
24#include "qemu/host-utils.h"
25#include "exec/exec-all.h"
26#include "tcg/tcg-op.h"
27#include "exec/cpu_ldst.h"
28#include "exec/helper-proto.h"
29#include "exec/helper-gen.h"
30#include "exec/translator.h"
31#include "exec/log.h"
32
33
/* Define ALPHA_DEBUG_DISAS to enable per-instruction disassembly tracing.  */
#undef ALPHA_DEBUG_DISAS
/* Store softfloat status fields directly inline instead of via helpers.  */
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
42
typedef struct DisasContext DisasContext;
/* Per-translation-block state for the Alpha front end.  */
struct DisasContext {
    DisasContextBase base;

#ifdef CONFIG_USER_ONLY
    /* Extra MemOp flag applied to non-locked accesses in user mode.  */
    MemOp unalign;
#else
    /* PALcode base register; entry-point base for CALL_PAL dispatch.  */
    uint64_t palbr;
#endif
    uint32_t tbflags;   /* copy of the env flags this TB was compiled with */
    int mem_idx;        /* MMU index used for memory accesses */

    /* CPU capability values: implementation version and AMASK bits.  */
    int implver;
    int amask;

    /* Rounding mode currently installed in fp_status for this TB,
       or -1 when unknown; lets us skip redundant mode switches.  */
    int tb_rm;
    /* Flush-to-zero setting currently installed, or -1 when unknown.  */
    int tb_ftz;

    /* The register set active in this context (standard or PAL shadow).  */
    TCGv *ir;

    /* Lazily-created temporaries standing in for $31 (reads as zero)
       and for a discarded destination (writes are sunk).  */
    TCGv zero;
    TCGv sink;
};
71
#ifdef CONFIG_USER_ONLY
/* In user mode, honour the guest's unaligned-access preference.  */
#define UNALIGN(C) (C)->unalign
#else
/* In system mode, unaligned accesses take the architectural trap.  */
#define UNALIGN(C) 0
#endif

/* Target-specific return values from translate_one, indicating the
   state of the TB.  Note that DISAS_NEXT indicates that we are not
   exiting the TB.  */
#define DISAS_PC_UPDATED_NOCHAIN DISAS_TARGET_0
#define DISAS_PC_UPDATED DISAS_TARGET_1
#define DISAS_PC_STALE DISAS_TARGET_2
84
85
/* Global TCG variables mapping onto CPUAlphaState fields.  */
static TCGv cpu_std_ir[31];     /* standard integer registers */
static TCGv cpu_fir[31];        /* floating-point registers */
static TCGv cpu_pc;
static TCGv cpu_lock_addr;      /* LL/SC lock address */
static TCGv cpu_lock_value;     /* LL/SC expected value */

#ifndef CONFIG_USER_ONLY
/* Integer register set seen while in PALmode (8 entries shadowed).  */
static TCGv cpu_pal_ir[31];
#endif
95
96#include "exec/gen-icount.h"
97
/* Register all Alpha CPU state with TCG as global memory variables.
   Called once at target initialization.  */
void alpha_translate_init(void)
{
#define DEF_VAR(V) { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    int i;

    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    /* The PALmode register file aliases the standard one except for
       eight shadow registers: t7, s0-s5 and t11 (r25).  */
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}
161
162static TCGv load_zero(DisasContext *ctx)
163{
164 if (!ctx->zero) {
165 ctx->zero = tcg_constant_i64(0);
166 }
167 return ctx->zero;
168}
169
170static TCGv dest_sink(DisasContext *ctx)
171{
172 if (!ctx->sink) {
173 ctx->sink = tcg_temp_new();
174 }
175 return ctx->sink;
176}
177
178static void free_context_temps(DisasContext *ctx)
179{
180 if (ctx->sink) {
181 tcg_gen_discard_i64(ctx->sink);
182 tcg_temp_free(ctx->sink);
183 ctx->sink = NULL;
184 }
185}
186
187static TCGv load_gpr(DisasContext *ctx, unsigned reg)
188{
189 if (likely(reg < 31)) {
190 return ctx->ir[reg];
191 } else {
192 return load_zero(ctx);
193 }
194}
195
196static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
197 uint8_t lit, bool islit)
198{
199 if (islit) {
200 return tcg_constant_i64(lit);
201 } else if (likely(reg < 31)) {
202 return ctx->ir[reg];
203 } else {
204 return load_zero(ctx);
205 }
206}
207
208static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
209{
210 if (likely(reg < 31)) {
211 return ctx->ir[reg];
212 } else {
213 return dest_sink(ctx);
214 }
215}
216
217static TCGv load_fpr(DisasContext *ctx, unsigned reg)
218{
219 if (likely(reg < 31)) {
220 return cpu_fir[reg];
221 } else {
222 return load_zero(ctx);
223 }
224}
225
226static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
227{
228 if (likely(reg < 31)) {
229 return cpu_fir[reg];
230 } else {
231 return dest_sink(ctx);
232 }
233}
234
/* Compute the byte offset within env of the flag byte selected by SHIFT
   (a bit position within the 32-bit 'flags' word), accounting for host
   endianness so a single-byte load/store reaches the right lane.  */
static int get_flag_ofs(unsigned shift)
{
    int ofs = offsetof(CPUAlphaState, flags);
#if HOST_BIG_ENDIAN
    ofs += 3 - (shift / 8);
#else
    ofs += shift / 8;
#endif
    return ofs;
}

/* Load one flag byte (zero-extended) into VAL.  */
static void ld_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_ld8u_i64(val, cpu_env, get_flag_ofs(shift));
}

/* Store the low byte of VAL into one flag byte.  */
static void st_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_st8_i64(val, cpu_env, get_flag_ofs(shift));
}
255
/* Emit a call to the exception helper with the given exception number
   and error code; does not update cpu_pc.  */
static void gen_excp_1(int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tmp1 = tcg_constant_i32(exception);
    tmp2 = tcg_constant_i32(error_code);
    gen_helper_excp(cpu_env, tmp1, tmp2);
}

/* Raise an exception at the current instruction's PC and end the TB.  */
static DisasJumpType gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
    gen_excp_1(exception, error_code);
    return DISAS_NORETURN;
}

/* Raise the illegal-opcode (OPCDEC) exception.  */
static inline DisasJumpType gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}
276
/* LDF: load 32-bit VAX F-format, converting memory to register layout.  */
static void gen_ldf(DisasContext *ctx, TCGv dest, TCGv addr)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, addr, ctx->mem_idx, MO_LEUL | UNALIGN(ctx));
    gen_helper_memory_to_f(dest, tmp32);
    tcg_temp_free_i32(tmp32);
}

/* LDG: load 64-bit VAX G-format, converting memory to register layout.  */
static void gen_ldg(DisasContext *ctx, TCGv dest, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, addr, ctx->mem_idx, MO_LEUQ | UNALIGN(ctx));
    gen_helper_memory_to_g(dest, tmp);
    tcg_temp_free(tmp);
}

/* LDS: load 32-bit IEEE single, converting memory to register layout.  */
static void gen_lds(DisasContext *ctx, TCGv dest, TCGv addr)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, addr, ctx->mem_idx, MO_LEUL | UNALIGN(ctx));
    gen_helper_memory_to_s(dest, tmp32);
    tcg_temp_free_i32(tmp32);
}

/* LDT: load 64-bit IEEE double; register layout matches memory.  */
static void gen_ldt(DisasContext *ctx, TCGv dest, TCGv addr)
{
    tcg_gen_qemu_ld_i64(dest, addr, ctx->mem_idx, MO_LEUQ | UNALIGN(ctx));
}
305
/* Emit an FP load of the form  Fra, disp16(Rb)  via FUNC.  */
static void gen_load_fp(DisasContext *ctx, int ra, int rb, int32_t disp16,
                        void (*func)(DisasContext *, TCGv, TCGv))
{
    /* Loads to $f31 are prefetches, which we can treat as nops.  */
    if (likely(ra != 31)) {
        TCGv addr = tcg_temp_new();
        tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
        func(ctx, cpu_fir[ra], addr);
        tcg_temp_free(addr);
    }
}
317
/* Emit an integer load  Ra, disp16(Rb)  with memory op OP.
   CLEAR aligns the address to 8 bytes (LDQ_U); LOCKED records the
   address/value pair for a following store-conditional (LDL_L/LDQ_L).  */
static void gen_load_int(DisasContext *ctx, int ra, int rb, int32_t disp16,
                         MemOp op, bool clear, bool locked)
{
    TCGv addr, dest;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  */
    if (unlikely(ra == 31)) {
        return;
    }

    addr = tcg_temp_new();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    if (clear) {
        tcg_gen_andi_i64(addr, addr, ~0x7);
    } else if (!locked) {
        op |= UNALIGN(ctx);
    }

    dest = ctx->ir[ra];
    tcg_gen_qemu_ld_i64(dest, addr, ctx->mem_idx, op);

    if (locked) {
        /* Remember the address and loaded value for the paired STx_C.  */
        tcg_gen_mov_i64(cpu_lock_addr, addr);
        tcg_gen_mov_i64(cpu_lock_value, dest);
    }
    tcg_temp_free(addr);
}
347
348static void gen_stf(DisasContext *ctx, TCGv src, TCGv addr)
349{
350 TCGv_i32 tmp32 = tcg_temp_new_i32();
351 gen_helper_f_to_memory(tmp32, addr);
352 tcg_gen_qemu_st_i32(tmp32, addr, ctx->mem_idx, MO_LEUL | UNALIGN(ctx));
353 tcg_temp_free_i32(tmp32);
354}
355
/* STG: store 64-bit VAX G-format, converting register to memory layout.  */
static void gen_stg(DisasContext *ctx, TCGv src, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, src);
    tcg_gen_qemu_st_i64(tmp, addr, ctx->mem_idx, MO_LEUQ | UNALIGN(ctx));
    tcg_temp_free(tmp);
}

/* STS: store 32-bit IEEE single, converting register to memory layout.  */
static void gen_sts(DisasContext *ctx, TCGv src, TCGv addr)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, src);
    tcg_gen_qemu_st_i32(tmp32, addr, ctx->mem_idx, MO_LEUL | UNALIGN(ctx));
    tcg_temp_free_i32(tmp32);
}

/* STT: store 64-bit IEEE double; register layout matches memory.  */
static void gen_stt(DisasContext *ctx, TCGv src, TCGv addr)
{
    tcg_gen_qemu_st_i64(src, addr, ctx->mem_idx, MO_LEUQ | UNALIGN(ctx));
}
376
/* Emit an FP store of the form  Fra, disp16(Rb)  via FUNC.  */
static void gen_store_fp(DisasContext *ctx, int ra, int rb, int32_t disp16,
                         void (*func)(DisasContext *, TCGv, TCGv))
{
    TCGv addr = tcg_temp_new();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    func(ctx, load_fpr(ctx, ra), addr);
    tcg_temp_free(addr);
}
385
/* Emit an integer store  Ra, disp16(Rb)  with memory op OP.
   CLEAR aligns the address to 8 bytes (STQ_U).  */
static void gen_store_int(DisasContext *ctx, int ra, int rb, int32_t disp16,
                          MemOp op, bool clear)
{
    TCGv addr, src;

    addr = tcg_temp_new();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    if (clear) {
        tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        op |= UNALIGN(ctx);
    }

    src = load_gpr(ctx, ra);
    tcg_gen_qemu_st_i64(src, addr, ctx->mem_idx, op);

    tcg_temp_free(addr);
}
404
/* STL_C/STQ_C: store-conditional.  Succeeds only if the address matches
   the recorded lock address and memory still holds the recorded value,
   implemented with an atomic compare-and-swap.  Ra receives 1 on
   success, 0 on failure; the lock is always invalidated afterward.  */
static DisasJumpType gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                           int32_t disp16, int mem_idx,
                                           MemOp op)
{
    TCGLabel *lab_fail, *lab_done;
    TCGv addr, val;

    addr = tcg_temp_new_i64();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    /* Temps are discarded before the branch so all paths agree.  */
    free_context_temps(ctx);

    lab_fail = gen_new_label();
    lab_done = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);
    tcg_temp_free_i64(addr);

    val = tcg_temp_new_i64();
    tcg_gen_atomic_cmpxchg_i64(val, cpu_lock_addr, cpu_lock_value,
                               load_gpr(ctx, ra), mem_idx, op);
    free_context_temps(ctx);

    if (ra != 31) {
        /* Success iff memory still contained the value loaded by LDx_L.  */
        tcg_gen_setcond_i64(TCG_COND_EQ, ctx->ir[ra], val, cpu_lock_value);
    }
    tcg_temp_free_i64(val);
    tcg_gen_br(lab_done);

    gen_set_label(lab_fail);
    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], 0);
    }

    gen_set_label(lab_done);
    /* Invalidate the lock regardless of outcome.  */
    tcg_gen_movi_i64(cpu_lock_addr, -1);
    return DISAS_NEXT;
}
441
/* May we chain directly to DEST with goto_tb?  Defers to the common
   translator policy (same page, not single-stepping, etc.).  */
static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}
446
447static DisasJumpType gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
448{
449 uint64_t dest = ctx->base.pc_next + (disp << 2);
450
451 if (ra != 31) {
452 tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
453 }
454
455
456 if (disp == 0) {
457 return 0;
458 } else if (use_goto_tb(ctx, dest)) {
459 tcg_gen_goto_tb(0);
460 tcg_gen_movi_i64(cpu_pc, dest);
461 tcg_gen_exit_tb(ctx->base.tb, 0);
462 return DISAS_NORETURN;
463 } else {
464 tcg_gen_movi_i64(cpu_pc, dest);
465 return DISAS_PC_UPDATED;
466 }
467}
468
/* Conditional branch on (CMP cond 0) to pc + 4 + disp*4.  When both
   targets can be chained, emit a two-way goto_tb; otherwise select the
   new PC with a movcond and exit indirectly.  */
static DisasJumpType gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                        TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->base.pc_next + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        /* Fall-through: branch not taken.  */
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        tcg_gen_exit_tb(ctx->base.tb, 0);

        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb(ctx->base.tb, 1);

        return DISAS_NORETURN;
    } else {
        TCGv_i64 z = load_zero(ctx);
        TCGv_i64 d = tcg_constant_i64(dest);
        TCGv_i64 p = tcg_constant_i64(ctx->base.pc_next);

        /* cpu_pc = (cmp cond 0) ? dest : next.  */
        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);
        return DISAS_PC_UPDATED;
    }
}
497
/* Integer conditional branch.  With MASK set (BLBC/BLBS), test only the
   low bit of Ra; otherwise test the full register against zero.  */
static DisasJumpType gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                               int32_t disp, int mask)
{
    if (mask) {
        TCGv tmp = tcg_temp_new();
        DisasJumpType ret;

        tcg_gen_andi_i64(tmp, load_gpr(ctx, ra), 1);
        ret = gen_bcond_internal(ctx, cond, tmp, disp);
        tcg_temp_free(tmp);
        return ret;
    }
    return gen_bcond_internal(ctx, cond, load_gpr(ctx, ra), disp);
}
512
513
514
/* Fold -0.0 for comparison with COND.  An FP register comparison against
   zero must treat the bit pattern of -0.0 (sign bit only) as equal to
   +0.0; transform SRC so an integer comparison against 0 is correct.  */
static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0: zero the value iff it equals
           mzero, via setcond/neg/and.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
544
/* Floating-point conditional branch: fold -0.0 in Fra, then branch on
   the resulting integer comparison against zero.  */
static DisasJumpType gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                                int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    DisasJumpType ret;

    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    ret = gen_bcond_internal(ctx, cond, cmp_tmp, disp);
    tcg_temp_free(cmp_tmp);
    return ret;
}

/* FCMOVxx: Frc = (Fra cond 0.0) ? Frb : Frc, with -0.0 folded in Fra.  */
static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}
570
/* FP instruction qualifier bits from fn11.  */
#define QUAL_RM_N 0x080 /* Round mode nearest even */
#define QUAL_RM_C 0x000 /* Round mode chopped */
#define QUAL_RM_M 0x040 /* Round mode minus infinity */
#define QUAL_RM_D 0x0c0 /* Round mode dynamic (from FPCR) */
#define QUAL_RM_MASK 0x0c0

#define QUAL_U 0x100 /* Underflow enable (fp output) */
#define QUAL_V 0x100 /* Overflow enable (int output); same bit as QUAL_U */
#define QUAL_S 0x400 /* Software completion enable */
#define QUAL_I 0x200 /* Inexact detection enable */
581
/* Install the rounding mode requested by the instruction's /RM qualifier,
   skipping the store if the mode is already in effect for this TB.  */
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        /* Dynamic mode: read the current FPCR setting at runtime.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* The "softfloat.h" interface is set_float_rounding_mode, which is
       an out-of-line call that just sets this one field; store it
       directly instead.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
621
/* Install the flush-to-zero behavior implied by the /U qualifier,
   skipping the store if already in effect for this TB.  */
static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled: use the FPCR's UNFD/UNDZ setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled: force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
651
/* Fetch an IEEE input operand, raising the appropriate exception for
   non-finite inputs depending on the /S (software completion) qualifier.
   IS_CMP selects the relaxed checking used by compare instructions.  */
static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal as per spec.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}
677
/* Raise any pending FP exceptions accumulated by the preceding helper,
   ignoring the exception classes disabled by the instruction's
   qualifiers.  RC identifies the destination register for EXC_MASK.  */
static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions,
       e.g. notice we're still in the trap shadow of something within the
       TB and skip generating the exception-raise until it is forced to
       arrive by TRAPB/EXCB or register consumption.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_constant_i32(ignore);

    /* Pass in the regno of the destination so that the helper can set
       EXC_MASK, the bitmask of destination registers that have caused
       arithmetic traps.  A simple userspace emulation does not require
       this, but a guest kernel's entArith does.  The +32 marks an FP
       register.  */
    reg = tcg_constant_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }
}
709
/* CVTLQ: convert the longword held in T-register layout (split across
   bits <63:62,58:29>) back into a sign-extended 64-bit integer.  */
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_shri_i64(tmp, vb, 29);
    tcg_gen_sari_i64(vc, vb, 32);
    tcg_gen_deposit_i64(vc, vc, tmp, 0, 30);

    tcg_temp_free(tmp);
}
722
/* Common expansion for two-operand IEEE operations (sqrt, cvt): apply
   the rounding and flush-to-zero qualifiers, call HELPER, then raise
   any enabled exceptions.  */
static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}

/* Instantiate gen_<name> wrappers around gen_ieee_arith2.  */
#define IEEE_ARITH2(name) \
static inline void glue(gen_, name)(DisasContext *ctx, \
                                    int rb, int rc, int fn11) \
{ \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11); \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)
748
/* CVTTQ: convert T-float to quadword integer.  No flush-to-zero setup
   is needed since the output is an integer.  */
static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}
767
/* Common expansion for integer-to-float conversions (CVTQS/CVTQT).  */
static void gen_ieee_intcvt(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb, vc;

    gen_qual_roundmode(ctx, fn11);
    vb = load_fpr(ctx, rb);
    vc = dest_fpr(ctx, rc);

    /* The only exception that can be raised by integer conversion
       is inexact.  Thus we only need to worry about exceptions when
       inexact handling is requested.  */
    if (fn11 & QUAL_I) {
        helper(vc, cpu_env, vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(vc, cpu_env, vb);
    }
}

/* Instantiate gen_<name> wrappers around gen_ieee_intcvt.  */
#define IEEE_INTCVT(name) \
static inline void glue(gen_, name)(DisasContext *ctx, \
                                    int rb, int rc, int fn11) \
{ \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11); \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)
797
/* CPYS/CPYSN/CPYSE expansion: vc = (va & mask) | (vb & ~mask), where
   INV_A complements the bits taken from va (used by CPYSN).  */
static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_constant_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(tmp);
}
814
/* Common expansion for three-operand IEEE arithmetic (add/sub/mul/div).  */
static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

/* Instantiate gen_<name> wrappers around gen_ieee_arith3.  */
#define IEEE_ARITH3(name) \
static inline void glue(gen_, name)(DisasContext *ctx, \
                                    int ra, int rb, int rc, int fn11) \
{ \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11); \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)
846
/* Common expansion for IEEE comparisons; no rounding/FTZ setup needed
   since comparisons do not round, and inputs use the relaxed checks.  */
static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

/* Instantiate gen_<name> wrappers around gen_ieee_compare.  */
#define IEEE_CMP3(name) \
static inline void glue(gen_, name)(DisasContext *ctx, \
                                    int ra, int rb, int rc, int fn11) \
{ \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11); \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)
871
/* Expand an 8-bit ZAPNOT byte-select into a 64-bit mask: bit i of LIT
   set means byte i of the result is 0xff, otherwise 0x00.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;

    /* Build from the most significant byte down, shifting as we go.  */
    for (int byte = 7; byte >= 0; --byte) {
        mask <<= 8;
        if (lit & (1u << byte)) {
            mask |= 0xff;
        }
    }
    return mask;
}
884
885
886
887
/* ZAPNOT with an immediate byte mask; special-case the common masks
   that correspond to simple zero-extensions or moves.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}
911
912
/* EXTWH, EXTLH, EXTQH: extract the high part of a datum spanning an
   unaligned boundary, i.e. shift left by (64 - B*8) then zap.  */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (64 - lit * 8) & 0x3f;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_deposit_z_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        /* Shift amount is (64 - B*8) mod 64 == (-B*8) & 63.  */
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}
934
935
/* EXTBL, EXTWL, EXTLL, EXTQL: extract the low part of a datum, i.e.
   shift right by B*8 then zap to the operand width.  */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len >= 64) {
            len = 64 - pos;
        }
        tcg_gen_extract_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
        gen_zapnoti(vc, vc, byte_mask);
    }
}
955
956
/* INSWH, INSLH, INSQH: insert the high part of a datum.  */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = 64 - (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_extract_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.
           Do this portably by splitting the shift into two parts:
           shift_count - 1 and then 1.  Arrange for the "- 1" by using
           ones-complement instead of twos-complement in the negation:
           ~(B * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
993
994
/* INSBL, INSWL, INSLL, INSQL: insert the low part of a datum.  */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len > 64) {
            len = 64 - pos;
        }
        tcg_gen_deposit_z_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
1022
1023
/* MSKWH, MSKLH, MSKQH: mask out the high part of a datum.  */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1055
1056
/* MSKBL, MSKWL, MSKLL, MSKQL: mask out the low part of a datum.  */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* Clear the bytes selected by (byte_mask << (B & 7)).  */
        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1077
/* RC/RS: read the interrupt-flag (RX) byte into Ra (unless $31), then
   set it to SET (0 for RC, 1 for RS).  */
static void gen_rx(DisasContext *ctx, int ra, int set)
{
    if (ra != 31) {
        ld_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
    }

    st_flag_byte(tcg_constant_i64(set), ENV_FLAG_RX_SHIFT);
}
1086
/* CALL_PAL: emulate the OSF/1 PALcode entry points.  Many are trivial
   accesses to internal CPU registers and are expanded inline; anything
   else falls through to a real transfer into the PALcode image.  */
static DisasJumpType gen_call_pal(DisasContext *ctx, int palcode)
{
    /* Unprivileged PAL call.  */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code; only valid outside user mode.  */
    if (palcode < 0x40 && (ctx->tbflags & ENV_FLAG_PS_USER) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35:
            /* SWPIPL */
            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);

            /* But make sure and store only the 3 IPL bits from the user.  */
            {
                TCGv tmp = tcg_temp_new();
                tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
                st_flag_byte(tmp, ENV_FLAG_PS_SHIFT);
                tcg_temp_free(tmp);
            }

            /* Allow interrupts to be recognized right away.  */
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            return DISAS_PC_UPDATED_NOCHAIN;

        case 0x36:
            /* RDPS */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);
            break;

        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        case 0x3E:
            /* WTINT: wait for interrupt -- halt the vCPU.  */
            tcg_gen_st_i32(tcg_constant_i32(1), cpu_env,
                           -offsetof(AlphaCPU, env) +
                           offsetof(CPUState, halted));
            tcg_gen_movi_i64(ctx->ir[IR_V0], 0);
            return gen_excp(ctx, EXCP_HALTED, 0);

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    /* We don't actually implement PALcode in user mode; raise it as an
       exception for the main loop to handle.  */
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->base.pc_next;
        uint64_t entry = ctx->palbr;

        /* Bit 0 of exc_addr records whether we were already in PALmode;
           otherwise enter PALmode now.  */
        if (ctx->tbflags & ENV_FLAG_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

        /* Entry points: privileged calls at palbr + 0x1000 + code * 64,
           unprivileged at palbr + 0x2000 + (code - 0x80) * 64.  */
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        tcg_gen_movi_i64(cpu_pc, entry);
        return DISAS_PC_UPDATED;
    }
#endif
}
1229
1230#ifndef CONFIG_USER_ONLY
1231
/* Flag within the cpu_pr_data return marking a 32-bit (long) field.  */
#define PR_LONG 0x200000

/* Return the env offset (possibly | PR_LONG) of the named processor
   register, or 0 for registers that are read-zero/write-ignore.  */
static int cpu_pr_data(int pr)
{
    switch (pr) {
    case 2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
    case 3: return offsetof(CPUAlphaState, trap_arg0);
    case 4: return offsetof(CPUAlphaState, trap_arg1);
    case 5: return offsetof(CPUAlphaState, trap_arg2);
    case 6: return offsetof(CPUAlphaState, exc_addr);
    case 7: return offsetof(CPUAlphaState, palbr);
    case 8: return offsetof(CPUAlphaState, ptbr);
    case 9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 40 ... 63:
        /* PALcode scratch registers.  */
        return offsetof(CPUAlphaState, scratch[pr - 40]);

    case 251:
        return offsetof(CPUAlphaState, alarm_expire);
    }
    return 0;
}
1257
/* MFPR (hw_mfpr): read processor register REGNO into VA.  */
static DisasJumpType gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* Timer reads are I/O under icount; end the TB after.  */
            gen_io_start();
            helper(va);
            return DISAS_PC_STALE;
        } else {
            helper(va);
        }
        break;

    case 0: /* PS */
        ld_flag_byte(va, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        ld_flag_byte(va, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return DISAS_NEXT;
}
1308
/* MTPR (hw_mtpr): write VB to processor register REGNO.  */
static DisasJumpType gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    int data;
    DisasJumpType ret = DISAS_NEXT;

    switch (regno) {
    case 255:
        /* TBIA: flush all TLB entries.  */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS: flush the TLB entry for the given address.  */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT: halt the vCPU until the next interrupt.  */
        tcg_gen_st_i32(tcg_constant_i32(1), cpu_env,
                       -offsetof(AlphaCPU, env) + offsetof(CPUState, halted));
        return gen_excp(ctx, EXCP_HALTED, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return DISAS_PC_STALE;

    case 251:
        /* ALARM: arming a timer is I/O under icount.  */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            gen_io_start();
            ret = DISAS_PC_STALE;
        }
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the
           TBs that ended with a CALL_PAL.  Since the base register
           usually only changes during boot, flushing everything works
           well.  */
        gen_helper_tb_flush(cpu_env);
        return DISAS_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    case 0: /* PS */
        st_flag_byte(vb, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        st_flag_byte(vb, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return ret;
}
1383#endif
1384
/* Reject an encoding that used the literal form of operand Rb.  */
#define REQUIRE_NO_LIT \
    do { \
        if (real_islit) { \
            goto invalid_opc; \
        } \
    } while (0)

/* Reject an instruction whose AMASK feature bit is not implemented.  */
#define REQUIRE_AMASK(FLAG) \
    do { \
        if ((ctx->amask & AMASK_##FLAG) == 0) { \
            goto invalid_opc; \
        } \
    } while (0)

/* Reject an instruction unless the given TB flag is set.  */
#define REQUIRE_TB_FLAG(FLAG) \
    do { \
        if ((ctx->tbflags & (FLAG)) == 0) { \
            goto invalid_opc; \
        } \
    } while (0)

/* Reject an encoding unless the given register field is $31.  */
#define REQUIRE_REG_31(WHICH) \
    do { \
        if (WHICH != 31) { \
            goto invalid_opc; \
        } \
    } while (0)

/* Raise the FP-disabled fault if the FPU is not enabled.  */
#define REQUIRE_FEN \
    do { \
        if (!(ctx->tbflags & ENV_FLAG_FEN)) { \
            goto raise_fen; \
        } \
    } while (0)
1419
/*
 * Translate a single Alpha instruction into TCG ops.
 *
 * Returns the DisasJumpType describing how translation should continue:
 * DISAS_NEXT to keep going, DISAS_PC_UPDATED / DISAS_PC_STALE /
 * DISAS_PC_UPDATED_NOCHAIN when cpu_pc has been (or must be) updated,
 * or DISAS_NORETURN from the exception generators.
 */
static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    DisasJumpType ret;

    /* Decode all instruction fields.  */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);

    /* Register $31 reads as zero: treat it as a literal 0 operand.
       "real_islit" above remembers whether the literal bit was truly set.  */
    if (rb == 31 && !islit) {
        islit = true;
        lit = 0;
    }

    ret = DISAS_NEXT;
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        ret = gen_call_pal(ctx, insn & 0x03ffffff);
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;

    case 0x09:
        /* LDAH */
        disp16 = (uint32_t)disp16 << 16;
        /* fall through */
    case 0x08:
        /* LDA */
        va = dest_gpr(ctx, ra);
        /* It's worth special-casing immediate loads.  */
        if (rb == 31) {
            tcg_gen_movi_i64(va, disp16);
        } else {
            tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
        }
        break;

    case 0x0A:
        /* LDBU */
        REQUIRE_AMASK(BWX);
        gen_load_int(ctx, ra, rb, disp16, MO_UB, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_int(ctx, ra, rb, disp16, MO_LEUQ, 1, 0);
        break;
    case 0x0C:
        /* LDWU */
        REQUIRE_AMASK(BWX);
        gen_load_int(ctx, ra, rb, disp16, MO_LEUW, 0, 0);
        break;
    case 0x0D:
        /* STW */
        REQUIRE_AMASK(BWX);
        gen_store_int(ctx, ra, rb, disp16, MO_LEUW, 0);
        break;
    case 0x0E:
        /* STB */
        REQUIRE_AMASK(BWX);
        gen_store_int(ctx, ra, rb, disp16, MO_UB, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_int(ctx, ra, rb, disp16, MO_LEUQ, 1);
        break;

    case 0x10:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);
        /* With ra as zero, a few common idioms simplify.  */
        if (ra == 31) {
            if (fn7 == 0x00) {
                /* Special case ADDL as SEXTL.  */
                tcg_gen_ext32s_i64(vc, vb);
                break;
            }
            if (fn7 == 0x29) {
                /* Special case SUBQ as NEGQ.  */
                tcg_gen_neg_i64(vc, vb);
                break;
            }
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* ADDL */
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x02:
            /* S4ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x09:
            /* SUBL */
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x0B:
            /* S4SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x0F:
            /* CMPBGE */
            if (ra == 31) {
                /* Special case 0 >= X as X == 0.  */
                gen_helper_cmpbe0(vc, vb);
            } else {
                gen_helper_cmpbge(vc, va, vb);
            }
            break;
        case 0x12:
            /* S8ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1B:
            /* S8SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1D:
            /* CMPULT */
            tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
            break;
        case 0x20:
            /* ADDQ */
            tcg_gen_add_i64(vc, va, vb);
            break;
        case 0x22:
            /* S4ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x29:
            /* SUBQ */
            tcg_gen_sub_i64(vc, va, vb);
            break;
        case 0x2B:
            /* S4SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x2D:
            /* CMPEQ */
            tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
            break;
        case 0x32:
            /* S8ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3B:
            /* S8SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3D:
            /* CMPULE */
            tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
            break;
        case 0x40:
            /* ADDL/V -- 32-bit add with signed-overflow trap check.  */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_add_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x49:
            /* SUBL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_sub_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x4D:
            /* CMPLT */
            tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
            break;
        case 0x60:
            /* ADDQ/V -- overflow iff operands agree in sign but the
               result's sign differs; reduce that to bit 63 and compare
               against zero in the helper.  */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_eqv_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x69:
            /* SUBQ/V */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_xor_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x6D:
            /* CMPLE */
            tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x11:
        if (fn7 == 0x20) {
            if (rc == 31) {
                /* Special case BIS as NOP.  */
                break;
            }
            if (ra == 31) {
                /* Special case BIS as MOV.  */
                vc = dest_gpr(ctx, rc);
                if (islit) {
                    tcg_gen_movi_i64(vc, lit);
                } else {
                    tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
                }
                break;
            }
        }

        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (fn7 == 0x28 && ra == 31) {
            /* Special case ORNOT as NOT.  */
            tcg_gen_not_i64(vc, vb);
            break;
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* AND */
            tcg_gen_and_i64(vc, va, vb);
            break;
        case 0x08:
            /* BIC */
            tcg_gen_andc_i64(vc, va, vb);
            break;
        case 0x14:
            /* CMOVLBS -- conditional move if low bit of ra set.  */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x16:
            /* CMOVLBC */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x20:
            /* BIS */
            tcg_gen_or_i64(vc, va, vb);
            break;
        case 0x24:
            /* CMOVEQ */
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x26:
            /* CMOVNE */
            tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x28:
            /* ORNOT */
            tcg_gen_orc_i64(vc, va, vb);
            break;
        case 0x40:
            /* XOR */
            tcg_gen_xor_i64(vc, va, vb);
            break;
        case 0x44:
            /* CMOVLT */
            tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x46:
            /* CMOVGE */
            tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x48:
            /* EQV */
            tcg_gen_eqv_i64(vc, va, vb);
            break;
        case 0x61:
            /* AMASK -- clear the bits of the CPU's feature mask.  */
            REQUIRE_REG_31(ra);
            tcg_gen_andi_i64(vc, vb, ~ctx->amask);
            break;
        case 0x64:
            /* CMOVLE */
            tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x66:
            /* CMOVGT */
            tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x6C:
            /* IMPLVER */
            REQUIRE_REG_31(ra);
            tcg_gen_movi_i64(vc, ctx->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x12:
        vc = dest_gpr(ctx, rc);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x0B:
            /* INSBL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x12:
            /* MSKWL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x1B:
            /* INSWL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x22:
            /* MSKLL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x2B:
            /* INSLL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x30:
            /* ZAP */
            if (islit) {
                gen_zapnoti(vc, va, ~lit);
            } else {
                gen_helper_zap(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x31:
            /* ZAPNOT */
            if (islit) {
                gen_zapnoti(vc, va, lit);
            } else {
                gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x32:
            /* MSKQL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x34:
            /* SRL -- only the low 6 bits of the shift count are used.  */
            if (islit) {
                tcg_gen_shri_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shr_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x39:
            /* SLL */
            if (islit) {
                tcg_gen_shli_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shl_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x3C:
            /* SRA */
            if (islit) {
                tcg_gen_sari_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_sar_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x57:
            /* INSWH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x62:
            /* MSKLH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x67:
            /* INSLH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x72:
            /* MSKQH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x77:
            /* INSQH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x13:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* MULL */
            tcg_gen_mul_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x20:
            /* MULQ */
            tcg_gen_mul_i64(vc, va, vb);
            break;
        case 0x30:
            /* UMULH -- high 64 bits of the unsigned 128-bit product.  */
            tmp = tcg_temp_new();
            tcg_gen_mulu2_i64(tmp, vc, va, vb);
            tcg_temp_free(tmp);
            break;
        case 0x40:
            /* MULL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_mul_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x60:
            /* MULQ/V -- overflow iff the high half is not the sign
               extension of the low half.  */
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_muls2_i64(vc, tmp, va, vb);
            tcg_gen_sari_i64(tmp2, vc, 63);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x14:
        REQUIRE_AMASK(FIX);
        vc = dest_fpr(ctx, rc);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            REQUIRE_REG_31(rb);
            REQUIRE_FEN;
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_s(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x0A:
            /* SQRTF */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtf(vc, cpu_env, vb);
            break;
        case 0x0B:
            /* SQRTS */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_sqrts(ctx, rb, rc, fn11);
            break;
        case 0x14:
            /* ITOFF */
            REQUIRE_REG_31(rb);
            REQUIRE_FEN;
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_f(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x24:
            /* ITOFT */
            REQUIRE_REG_31(rb);
            REQUIRE_FEN;
            va = load_gpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        case 0x2A:
            /* SQRTG */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtg(vc, cpu_env, vb);
            break;
        case 0x02B:
            /* SQRTT */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_sqrtt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        vc = dest_fpr(ctx, rc);
        vb = load_fpr(ctx, rb);
        va = load_fpr(ctx, ra);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            REQUIRE_FEN;
            gen_helper_addf(vc, cpu_env, va, vb);
            break;
        case 0x01:
            /* SUBF */
            REQUIRE_FEN;
            gen_helper_subf(vc, cpu_env, va, vb);
            break;
        case 0x02:
            /* MULF */
            REQUIRE_FEN;
            gen_helper_mulf(vc, cpu_env, va, vb);
            break;
        case 0x03:
            /* DIVF */
            REQUIRE_FEN;
            gen_helper_divf(vc, cpu_env, va, vb);
            break;
        case 0x1E:
            /* CVTDG -- not implemented.  */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x20:
            /* ADDG */
            REQUIRE_FEN;
            gen_helper_addg(vc, cpu_env, va, vb);
            break;
        case 0x21:
            /* SUBG */
            REQUIRE_FEN;
            gen_helper_subg(vc, cpu_env, va, vb);
            break;
        case 0x22:
            /* MULG */
            REQUIRE_FEN;
            gen_helper_mulg(vc, cpu_env, va, vb);
            break;
        case 0x23:
            /* DIVG */
            REQUIRE_FEN;
            gen_helper_divg(vc, cpu_env, va, vb);
            break;
        case 0x25:
            /* CMPGEQ */
            REQUIRE_FEN;
            gen_helper_cmpgeq(vc, cpu_env, va, vb);
            break;
        case 0x26:
            /* CMPGLT */
            REQUIRE_FEN;
            gen_helper_cmpglt(vc, cpu_env, va, vb);
            break;
        case 0x27:
            /* CMPGLE */
            REQUIRE_FEN;
            gen_helper_cmpgle(vc, cpu_env, va, vb);
            break;
        case 0x2C:
            /* CVTGF */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_helper_cvtgf(vc, cpu_env, vb);
            break;
        case 0x2D:
            /* CVTGD -- not implemented.  */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x2F:
            /* CVTGQ */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_helper_cvtgq(vc, cpu_env, vb);
            break;
        case 0x3C:
            /* CVTQF */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_helper_cvtqf(vc, cpu_env, vb);
            break;
        case 0x3E:
            /* CVTQG */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_helper_cvtqg(vc, cpu_env, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x16:
        /* IEEE floating-point */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            REQUIRE_FEN;
            gen_adds(ctx, ra, rb, rc, fn11);
            break;
        case 0x01:
            /* SUBS */
            REQUIRE_FEN;
            gen_subs(ctx, ra, rb, rc, fn11);
            break;
        case 0x02:
            /* MULS */
            REQUIRE_FEN;
            gen_muls(ctx, ra, rb, rc, fn11);
            break;
        case 0x03:
            /* DIVS */
            REQUIRE_FEN;
            gen_divs(ctx, ra, rb, rc, fn11);
            break;
        case 0x20:
            /* ADDT */
            REQUIRE_FEN;
            gen_addt(ctx, ra, rb, rc, fn11);
            break;
        case 0x21:
            /* SUBT */
            REQUIRE_FEN;
            gen_subt(ctx, ra, rb, rc, fn11);
            break;
        case 0x22:
            /* MULT */
            REQUIRE_FEN;
            gen_mult(ctx, ra, rb, rc, fn11);
            break;
        case 0x23:
            /* DIVT */
            REQUIRE_FEN;
            gen_divt(ctx, ra, rb, rc, fn11);
            break;
        case 0x24:
            /* CMPTUN */
            REQUIRE_FEN;
            gen_cmptun(ctx, ra, rb, rc, fn11);
            break;
        case 0x25:
            /* CMPTEQ */
            REQUIRE_FEN;
            gen_cmpteq(ctx, ra, rb, rc, fn11);
            break;
        case 0x26:
            /* CMPTLT */
            REQUIRE_FEN;
            gen_cmptlt(ctx, ra, rb, rc, fn11);
            break;
        case 0x27:
            /* CMPTLE */
            REQUIRE_FEN;
            gen_cmptle(ctx, ra, rb, rc, fn11);
            break;
        case 0x2C:
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_cvtst(ctx, rb, rc, fn11);
            } else {
                /* CVTTS */
                gen_cvtts(ctx, rb, rc, fn11);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_cvttq(ctx, rb, rc, fn11);
            break;
        case 0x3C:
            /* CVTQS */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_cvtqs(ctx, rb, rc, fn11);
            break;
        case 0x3E:
            /* CVTQT */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            gen_cvtqt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_cvtlq(vc, vb);
            break;
        case 0x020:
            /* CPYS */
            REQUIRE_FEN;
            if (rc == 31) {
                /* Special case CPYS as FNOP.  */
            } else {
                vc = dest_fpr(ctx, rc);
                va = load_fpr(ctx, ra);
                if (ra == rb) {
                    /* Special case CPYS as FMOV.  */
                    tcg_gen_mov_i64(vc, va);
                } else {
                    vb = load_fpr(ctx, rb);
                    gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
                }
            }
            break;
        case 0x021:
            /* CPYSN */
            REQUIRE_FEN;
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
            break;
        case 0x022:
            /* CPYSE */
            REQUIRE_FEN;
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
            break;
        case 0x024:
            /* MT_FPCR */
            REQUIRE_FEN;
            va = load_fpr(ctx, ra);
            gen_helper_store_fpcr(cpu_env, va);
            if (ctx->tb_rm == QUAL_RM_D) {
                /* Re-do the copy of the rounding mode to fp_status
                   the next time we use dynamic rounding.  */
                ctx->tb_rm = -1;
            }
            break;
        case 0x025:
            /* MF_FPCR */
            REQUIRE_FEN;
            va = dest_fpr(ctx, ra);
            gen_helper_load_fpcr(va, cpu_env);
            break;
        case 0x02A:
            /* FCMOVEQ */
            REQUIRE_FEN;
            gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            REQUIRE_FEN;
            gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            REQUIRE_FEN;
            gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            REQUIRE_FEN;
            gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            REQUIRE_FEN;
            gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            REQUIRE_FEN;
            gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
            break;
        case 0x030: /* CVTQL */
        case 0x130: /* CVTQL/V */
        case 0x530: /* CVTQL/SV */
            REQUIRE_REG_31(ra);
            REQUIRE_FEN;
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_helper_cvtql(vc, cpu_env, vb);
            gen_fp_exc_raise(rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB -- no-op.  */
            break;
        case 0x0400:
            /* EXCB -- no-op.  */
            break;
        case 0x4000:
            /* MB */
            tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
            break;
        case 0x4400:
            /* WMB */
            tcg_gen_mb(TCG_MO_ST_ST | TCG_BAR_SC);
            break;
        case 0x8000:
            /* FETCH -- prefetch hint, no-op.  */
            break;
        case 0xA000:
            /* FETCH_M -- prefetch hint, no-op.  */
            break;
        case 0xC000:
            /* RPCC */
            va = dest_gpr(ctx, ra);
            if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
                gen_io_start();
                gen_helper_load_pcc(va, cpu_env);
                ret = DISAS_PC_STALE;
            } else {
                gen_helper_load_pcc(va, cpu_env);
            }
            break;
        case 0xE000:
            /* RC */
            gen_rx(ctx, ra, 0);
            break;
        case 0xE800:
            /* ECB -- no-op.  */
            break;
        case 0xF000:
            /* RS */
            gen_rx(ctx, ra, 1);
            break;
        case 0xF800:
            /* WH64 -- cache hint, no-op.  */
            break;
        case 0xFC00:
            /* WH64EN -- cache hint, no-op.  */
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x19:
        /* HW_MFPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        va = dest_gpr(ctx, ra);
        ret = gen_mfpr(ctx, va, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1A:
        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the
           branch prediction hint, which we ignore.  */
        vb = load_gpr(ctx, rb);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        if (ra != 31) {
            tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
        }
        ret = DISAS_PC_UPDATED;
        break;

    case 0x1B:
        /* HW_LD (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        {
            TCGv addr = tcg_temp_new();
            vb = load_gpr(ctx, rb);
            va = dest_gpr(ctx, ra);

            tcg_gen_addi_i64(addr, vb, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
                tcg_gen_mov_i64(cpu_lock_addr, addr);
                tcg_gen_mov_i64(cpu_lock_value, va);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ);
                tcg_gen_mov_i64(cpu_lock_addr, addr);
                tcg_gen_mov_i64(cpu_lock_value, va);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) -- not impl.  */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) -- not impl.  */
                goto invalid_opc;
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) -- not impl.  */
                goto invalid_opc;
            case 0x9:
                /* Quadword virtual access (hw_ldq) -- not impl.  */
                goto invalid_opc;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEUQ);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode -- not impl. */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alt access mode -- not impl. */
                goto invalid_opc;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                   protection checks (hw_ldl/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                   protection checks (hw_ldq/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEUQ);
                break;
            }
            tcg_temp_free(addr);
            break;
        }
#else
        goto invalid_opc;
#endif

    case 0x1C:
        vc = dest_gpr(ctx, rc);
        if (fn7 == 0x70) {
            /* FTOIT */
            REQUIRE_AMASK(FIX);
            REQUIRE_REG_31(rb);
            va = load_fpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        } else if (fn7 == 0x78) {
            /* FTOIS */
            REQUIRE_AMASK(FIX);
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_fpr(ctx, ra);
            gen_helper_s_to_memory(t32, va);
            tcg_gen_ext_i32_i64(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        }

        vb = load_gpr_lit(ctx, rb, lit, islit);
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            REQUIRE_AMASK(BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext8s_i64(vc, vb);
            break;
        case 0x01:
            /* SEXTW */
            REQUIRE_AMASK(BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext16s_i64(vc, vb);
            break;
        case 0x30:
            /* CTPOP */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_ctpop_i64(vc, vb);
            break;
        case 0x31:
            /* PERR */
            REQUIRE_AMASK(MVI);
            REQUIRE_NO_LIT;
            va = load_gpr(ctx, ra);
            gen_helper_perr(vc, va, vb);
            break;
        case 0x32:
            /* CTLZ */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_clzi_i64(vc, vb, 64);
            break;
        case 0x33:
            /* CTTZ */
            REQUIRE_AMASK(CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            tcg_gen_ctzi_i64(vc, vb, 64);
            break;
        case 0x34:
            /* UNPKBW */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbw(vc, vb);
            break;
        case 0x35:
            /* UNPKBL */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbl(vc, vb);
            break;
        case 0x36:
            /* PKWB */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pkwb(vc, vb);
            break;
        case 0x37:
            /* PKLB */
            REQUIRE_AMASK(MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pklb(vc, vb);
            break;
        case 0x38:
            /* MINSB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsb8(vc, va, vb);
            break;
        case 0x39:
            /* MINSW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsw4(vc, va, vb);
            break;
        case 0x3A:
            /* MINUB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minub8(vc, va, vb);
            break;
        case 0x3B:
            /* MINUW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minuw4(vc, va, vb);
            break;
        case 0x3C:
            /* MAXUB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxub8(vc, va, vb);
            break;
        case 0x3D:
            /* MAXUW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxuw4(vc, va, vb);
            break;
        case 0x3E:
            /* MAXSB8 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsb8(vc, va, vb);
            break;
        case 0x3F:
            /* MAXSW4 */
            REQUIRE_AMASK(MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsw4(vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;

    case 0x1D:
        /* HW_MTPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        vb = load_gpr(ctx, rb);
        ret = gen_mtpr(ctx, vb, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif

    case 0x1E:
        /* HW_RET (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        if (rb == 31) {
            /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
               address from EXC_ADDR.  This turns out to be an important
               part of several emulation BIOSes.  */
            vb = dest_sink(ctx);
            tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
        } else {
            vb = load_gpr(ctx, rb);
        }
        tcg_gen_movi_i64(cpu_lock_addr, -1);
        st_flag_byte(load_zero(ctx), ENV_FLAG_RX_SHIFT);
        tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, vb, 1);
        st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        tcg_temp_free(tmp);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        /* Allow interrupts to be recognized right away.  */
        ret = DISAS_PC_UPDATED_NOCHAIN;
        break;
#else
        goto invalid_opc;
#endif

    case 0x1F:
        /* HW_ST (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
        {
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                va = load_gpr(ctx, ra);
                vb = load_gpr(ctx, rb);
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, vb, disp12);
                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
                tcg_temp_free(tmp);
                break;
            case 0x1:
                /* Quadword physical access */
                va = load_gpr(ctx, ra);
                vb = load_gpr(ctx, rb);
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, vb, disp12);
                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEUQ);
                tcg_temp_free(tmp);
                break;
            case 0x2:
                /* Longword physical access with lock */
                ret = gen_store_conditional(ctx, ra, rb, disp12,
                                            MMU_PHYS_IDX, MO_LESL);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                ret = gen_store_conditional(ctx, ra, rb, disp12,
                                            MMU_PHYS_IDX, MO_LEUQ);
                break;
            case 0x4:
                /* Longword virtual access -- not impl.  */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual access -- not impl.  */
                goto invalid_opc;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alt access mode -- not impl. */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alt access mode -- not impl. */
                goto invalid_opc;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            break;
        }
#else
        goto invalid_opc;
#endif
    case 0x20:
        /* LDF */
        REQUIRE_FEN;
        gen_load_fp(ctx, ra, rb, disp16, gen_ldf);
        break;
    case 0x21:
        /* LDG */
        REQUIRE_FEN;
        gen_load_fp(ctx, ra, rb, disp16, gen_ldg);
        break;
    case 0x22:
        /* LDS */
        REQUIRE_FEN;
        gen_load_fp(ctx, ra, rb, disp16, gen_lds);
        break;
    case 0x23:
        /* LDT */
        REQUIRE_FEN;
        gen_load_fp(ctx, ra, rb, disp16, gen_ldt);
        break;
    case 0x24:
        /* STF */
        REQUIRE_FEN;
        gen_store_fp(ctx, ra, rb, disp16, gen_stf);
        break;
    case 0x25:
        /* STG */
        REQUIRE_FEN;
        gen_store_fp(ctx, ra, rb, disp16, gen_stg);
        break;
    case 0x26:
        /* STS */
        REQUIRE_FEN;
        gen_store_fp(ctx, ra, rb, disp16, gen_sts);
        break;
    case 0x27:
        /* STT */
        REQUIRE_FEN;
        gen_store_fp(ctx, ra, rb, disp16, gen_stt);
        break;
    case 0x28:
        /* LDL */
        gen_load_int(ctx, ra, rb, disp16, MO_LESL, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_int(ctx, ra, rb, disp16, MO_LEUQ, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_int(ctx, ra, rb, disp16, MO_LESL, 0, 1);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_int(ctx, ra, rb, disp16, MO_LEUQ, 0, 1);
        break;
    case 0x2C:
        /* STL */
        gen_store_int(ctx, ra, rb, disp16, MO_LEUL, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_int(ctx, ra, rb, disp16, MO_LEUQ, 0);
        break;
    case 0x2E:
        /* STL_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16,
                                    ctx->mem_idx, MO_LESL);
        break;
    case 0x2F:
        /* STQ_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16,
                                    ctx->mem_idx, MO_LEUQ);
        break;
    case 0x30:
        /* BR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x31: /* FBEQ */
        REQUIRE_FEN;
        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
        break;
    case 0x32: /* FBLT */
        REQUIRE_FEN;
        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
        break;
    case 0x33: /* FBLE */
        REQUIRE_FEN;
        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
        break;
    case 0x34:
        /* BSR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x35: /* FBNE */
        REQUIRE_FEN;
        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
        break;
    case 0x36: /* FBGE */
        REQUIRE_FEN;
        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
        break;
    case 0x37: /* FBGT */
        REQUIRE_FEN;
        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
        break;
    case 0x38:
        /* BLBC */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        break;
    case 0x39:
        /* BEQ */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        break;
    case 0x3A:
        /* BLT */
        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        break;
    case 0x3B:
        /* BLE */
        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        break;
    case 0x3C:
        /* BLBS */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        break;
    case 0x3D:
        /* BNE */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        break;
    case 0x3E:
        /* BGE */
        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        break;
    case 0x3F:
        /* BGT */
        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        break;
    invalid_opc:
        ret = gen_invalid(ctx);
        break;
    raise_fen:
        ret = gen_excp(ctx, EXCP_FEN, 0);
        break;
    }

    return ret;
}
2942
2943static void alpha_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
2944{
2945 DisasContext *ctx = container_of(dcbase, DisasContext, base);
2946 CPUAlphaState *env = cpu->env_ptr;
2947 int64_t bound;
2948
2949 ctx->tbflags = ctx->base.tb->flags;
2950 ctx->mem_idx = cpu_mmu_index(env, false);
2951 ctx->implver = env->implver;
2952 ctx->amask = env->amask;
2953
2954#ifdef CONFIG_USER_ONLY
2955 ctx->ir = cpu_std_ir;
2956 ctx->unalign = (ctx->tbflags & TB_FLAG_UNALIGN ? MO_UNALN : MO_ALIGN);
2957#else
2958 ctx->palbr = env->palbr;
2959 ctx->ir = (ctx->tbflags & ENV_FLAG_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
2960#endif
2961
2962
2963
2964
2965
2966
2967
2968 ctx->tb_rm = -1;
2969
2970 ctx->tb_ftz = -1;
2971
2972 ctx->zero = NULL;
2973 ctx->sink = NULL;
2974
2975
2976 bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
2977 ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
2978}
2979
/* TranslatorOps hook: no per-TB prologue is needed for Alpha.  */
static void alpha_tr_tb_start(DisasContextBase *db, CPUState *cpu)
{
}
2983
/* TranslatorOps hook: record the guest PC at the start of each insn.  */
static void alpha_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
2988
2989static void alpha_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
2990{
2991 DisasContext *ctx = container_of(dcbase, DisasContext, base);
2992 CPUAlphaState *env = cpu->env_ptr;
2993 uint32_t insn = translator_ldl(env, &ctx->base, ctx->base.pc_next);
2994
2995 ctx->base.pc_next += 4;
2996 ctx->base.is_jmp = translate_one(ctx, insn);
2997
2998 free_context_temps(ctx);
2999 translator_loop_temp_check(&ctx->base);
3000}
3001
/* TranslatorOps hook: emit the TB epilogue according to how the last
   insn left the PC.  Note the deliberate fall-through chain:
   TOO_MANY -> PC_STALE -> PC_UPDATED.  */
static void alpha_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    switch (ctx->base.is_jmp) {
    case DISAS_NORETURN:
        /* An exception was raised; nothing more to emit.  */
        break;
    case DISAS_TOO_MANY:
        /* Chain directly to the next TB when possible.  */
        if (use_goto_tb(ctx, ctx->base.pc_next)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            tcg_gen_exit_tb(ctx->base.tb, 0);
        }
        /* FALLTHRU */
    case DISAS_PC_STALE:
        /* cpu_pc has not been written yet; do so now.  */
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        /* FALLTHRU */
    case DISAS_PC_UPDATED:
        /* cpu_pc is valid; try a TB lookup, else return to the loop.  */
        tcg_gen_lookup_and_goto_ptr();
        break;
    case DISAS_PC_UPDATED_NOCHAIN:
        /* Exit without chaining (e.g. HW_RET), returning to the main
           loop so pending interrupts can be recognized.  */
        tcg_gen_exit_tb(NULL, 0);
        break;
    default:
        g_assert_not_reached();
    }
}
3029
/* TranslatorOps hook: log the disassembly of the translated TB.  */
static void alpha_tr_disas_log(const DisasContextBase *dcbase,
                               CPUState *cpu, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
}
3036
/* Hook table consumed by the generic translator_loop().  */
static const TranslatorOps alpha_tr_ops = {
    .init_disas_context = alpha_tr_init_disas_context,
    .tb_start           = alpha_tr_tb_start,
    .insn_start         = alpha_tr_insn_start,
    .translate_insn     = alpha_tr_translate_insn,
    .tb_stop            = alpha_tr_tb_stop,
    .disas_log          = alpha_tr_disas_log,
};
3045
3046void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns,
3047 target_ulong pc, void *host_pc)
3048{
3049 DisasContext dc;
3050 translator_loop(cpu, tb, max_insns, pc, host_pc, &alpha_tr_ops, &dc.base);
3051}
3052