1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#define DEBUG_DISAS
21
22#include "qemu/osdep.h"
23#include "cpu.h"
24#include "disas/disas.h"
25#include "exec/exec-all.h"
26#include "tcg-op.h"
27#include "exec/cpu_ldst.h"
28
29#include "exec/helper-proto.h"
30#include "exec/helper-gen.h"
31
32#include "trace-tcg.h"
33#include "exec/log.h"
34
35
/* Per-translation-block state of the SH4 instruction decoder. */
typedef struct DisasContext {
    struct TranslationBlock *tb;   /* TB currently being translated */
    target_ulong pc;               /* address of the instruction being decoded */
    uint16_t opcode;               /* raw 16-bit instruction word */
    uint32_t tbflags;              /* TB flags; only read during translation */
    uint32_t envflags;             /* decoder's running copy of env->flags */
    int bstate;                    /* BS_* state describing how the TB ends */
    int memidx;                    /* MMU index for qemu_ld/st operations */
    int gbank;                     /* XOR mask selecting the active greg bank */
    int fbank;                     /* XOR mask selecting the active freg bank */
    uint32_t delayed_pc;           /* delayed-branch target; (uint32_t)-1 when
                                      the target is only known at run time */
    int singlestep_enabled;        /* CPU single-stepping requested */
    uint32_t features;             /* SH_FEATURE_* bits of this CPU model */
    int has_movcal;                /* a movca.l backup may still be pending */
} DisasContext;
51
/* True when translating user-mode code: always in user-only builds,
   otherwise when SR.MD is clear in the TB flags. */
#if defined(CONFIG_USER_ONLY)
#define IS_USER(ctx) 1
#else
#define IS_USER(ctx) (!(ctx->tbflags & (1u << SR_MD)))
#endif
57
/* Values for DisasContext::bstate, describing why/how the TB ended. */
enum {
    BS_NONE = 0,    /* fell off the end with no branch or exception */
    /* The remaining states all terminate the translation loop. */

    BS_STOP = 1,    /* stop translation for some other reason */
    BS_BRANCH = 2,  /* a branch condition was reached */
    BS_EXCP = 3,    /* an exception condition was reached */
};
66
67
/* TCG globals backed by CPUSH4State fields (set up in sh4_translate_init). */
static TCGv_env cpu_env;
static TCGv cpu_gregs[32];
static TCGv cpu_sr, cpu_sr_m, cpu_sr_q, cpu_sr_t;
static TCGv cpu_pc, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
static TCGv cpu_fregs[32];

/* Internal translator state, not architectural registers. */
static TCGv cpu_flags, cpu_delayed_pc, cpu_delayed_cond;
78
79#include "exec/gen-icount.h"
80
/* Create the TCG globals mirroring CPUSH4State fields.  Idempotent:
   the done_init guard makes repeated calls no-ops. */
void sh4_translate_init(void)
{
    int i;
    static int done_init = 0;
    /* 24 distinct general registers: R0-R7 of BANK0, the shared R8-R15,
       then R0-R7 of BANK1. */
    static const char * const gregnames[24] = {
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
    };
    static const char * const fregnames[32] = {
        "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
        "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
        "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
        "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
        "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
        "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
        "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
        "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
    };

    if (done_init) {
        return;
    }

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    for (i = 0; i < 24; i++) {
        cpu_gregs[i] = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State, gregs[i]),
                                              gregnames[i]);
    }
    /* Entries 24..31 alias entries 8..15 so that the bank-selecting XOR
       used by REG()/ALTREG() (index ^ 0x10) still resolves R8-R15 to the
       same shared registers for either bank. */
    memcpy(cpu_gregs + 24, cpu_gregs + 8, 8 * sizeof(TCGv));

    cpu_pc = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, pc), "PC");
    cpu_sr = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, sr), "SR");
    /* Q, M and T are tracked in separate globals; see gen_read_sr and
       gen_write_sr for how they combine with cpu_sr. */
    cpu_sr_m = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_m), "SR_M");
    cpu_sr_q = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_q), "SR_Q");
    cpu_sr_t = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, sr_t), "SR_T");
    cpu_ssr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, ssr), "SSR");
    cpu_spc = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, spc), "SPC");
    cpu_gbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, gbr), "GBR");
    cpu_vbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, vbr), "VBR");
    cpu_sgr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, sgr), "SGR");
    cpu_dbr = tcg_global_mem_new_i32(cpu_env,
                                     offsetof(CPUSH4State, dbr), "DBR");
    cpu_mach = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, mach), "MACH");
    cpu_macl = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, macl), "MACL");
    cpu_pr = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUSH4State, pr), "PR");
    cpu_fpscr = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUSH4State, fpscr), "FPSCR");
    cpu_fpul = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, fpul), "FPUL");

    cpu_flags = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUSH4State, flags), "_flags_");
    cpu_delayed_pc = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUSH4State, delayed_pc),
                                            "_delayed_pc_");
    cpu_delayed_cond = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State,
                                                       delayed_cond),
                                              "_delayed_cond_");
    cpu_ldst = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUSH4State, ldst), "_ldst_");

    for (i = 0; i < 32; i++)
        cpu_fregs[i] = tcg_global_mem_new_i32(cpu_env,
                                              offsetof(CPUSH4State, fregs[i]),
                                              fregnames[i]);

    done_init = 1;
}
169
/* Dump architectural CPU state to @f (e.g. for "info registers").
   @flags is unused here. */
void superh_cpu_dump_state(CPUState *cs, FILE *f,
                           fprintf_function cpu_fprintf, int flags)
{
    SuperHCPU *cpu = SUPERH_CPU(cs);
    CPUSH4State *env = &cpu->env;
    int i;
    cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
                env->pc, cpu_read_sr(env), env->pr, env->fpscr);
    cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
                env->spc, env->ssr, env->gbr, env->vbr);
    cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
                env->sgr, env->dbr, env->delayed_pc, env->fpul);
    /* The 24 distinct general registers, four per line. */
    for (i = 0; i < 24; i += 4) {
        cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
    }
    /* At most one delay-slot flag is reported; order matches the
       original output and must be preserved. */
    if (env->flags & DELAY_SLOT) {
        cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
        cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_RTE) {
        cpu_fprintf(f, "in rte delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    }
}
198
199static void gen_read_sr(TCGv dst)
200{
201 TCGv t0 = tcg_temp_new();
202 tcg_gen_shli_i32(t0, cpu_sr_q, SR_Q);
203 tcg_gen_or_i32(dst, dst, t0);
204 tcg_gen_shli_i32(t0, cpu_sr_m, SR_M);
205 tcg_gen_or_i32(dst, dst, t0);
206 tcg_gen_shli_i32(t0, cpu_sr_t, SR_T);
207 tcg_gen_or_i32(dst, cpu_sr, t0);
208 tcg_temp_free_i32(t0);
209}
210
/* Scatter an architectural SR value into the split representation:
   cpu_sr keeps everything except Q, M and T, which are extracted into
   their dedicated one-bit globals (gen_read_sr performs the inverse). */
static void gen_write_sr(TCGv src)
{
    tcg_gen_andi_i32(cpu_sr, src,
                     ~((1u << SR_Q) | (1u << SR_M) | (1u << SR_T)));
    tcg_gen_extract_i32(cpu_sr_q, src, SR_Q, 1);
    tcg_gen_extract_i32(cpu_sr_m, src, SR_M, 1);
    tcg_gen_extract_i32(cpu_sr_t, src, SR_T, 1);
}
219
/* Flush translation-time cached state back into the CPU globals before
   a helper call or TB exit; @save_pc additionally synchronizes PC. */
static inline void gen_save_cpu_state(DisasContext *ctx, bool save_pc)
{
    if (save_pc) {
        tcg_gen_movi_i32(cpu_pc, ctx->pc);
    }
    /* delayed_pc == -1 means the value already lives in cpu_delayed_pc
       (loaded at run time), so nothing needs storing. */
    if (ctx->delayed_pc != (uint32_t) -1) {
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
    }
    /* Only store flags when they differ from what the TB started with. */
    if ((ctx->tbflags & TB_FLAG_ENVFLAGS_MASK) != ctx->envflags) {
        tcg_gen_movi_i32(cpu_flags, ctx->envflags);
    }
}
232
233static inline bool use_exit_tb(DisasContext *ctx)
234{
235 return (ctx->tbflags & GUSA_EXCLUSIVE) != 0;
236}
237
238static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
239{
240
241 if (unlikely(ctx->singlestep_enabled || use_exit_tb(ctx))) {
242 return false;
243 }
244#ifndef CONFIG_USER_ONLY
245 return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
246#else
247 return true;
248#endif
249}
250
/* Emit a jump to @dest using chain slot @n, chaining directly to the
   next TB when use_goto_tb allows it. */
static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        /* Direct chaining: goto_tb must be emitted before the PC store. */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
    } else {
        tcg_gen_movi_i32(cpu_pc, dest);
        if (ctx->singlestep_enabled) {
            /* Report the step to the debugger instead of jumping. */
            gen_helper_debug(cpu_env);
        } else if (use_exit_tb(ctx)) {
            tcg_gen_exit_tb(0);
        } else {
            /* Try an indirect TB lookup on the new PC. */
            tcg_gen_lookup_and_goto_ptr(cpu_pc);
        }
    }
}
268
/* Emit the jump for a (possibly delayed) branch whose target is in
   ctx->delayed_pc, or in cpu_delayed_pc when that is (uint32_t)-1. */
static void gen_jump(DisasContext * ctx)
{
    if (ctx->delayed_pc == -1) {
        /* Target unknown at translation time: it was computed into
           cpu_delayed_pc at run time (e.g. rts/rte/jmp/jsr). */
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
        tcg_gen_discard_i32(cpu_delayed_pc);
        if (ctx->singlestep_enabled) {
            gen_helper_debug(cpu_env);
        } else if (use_exit_tb(ctx)) {
            tcg_gen_exit_tb(0);
        } else {
            tcg_gen_lookup_and_goto_ptr(cpu_pc);
        }
    } else {
        /* Static target: chain if possible. */
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
    }
}
287
288
/* Immediate conditional jump (bt or bf): branch to @dest when T matches
   @jump_if_true, otherwise fall through to the next instruction. */
static void gen_conditional_jump(DisasContext *ctx, target_ulong dest,
                                 bool jump_if_true)
{
    TCGLabel *l1 = gen_new_label();
    TCGCond cond_not_taken = jump_if_true ? TCG_COND_EQ : TCG_COND_NE;

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* When in an exclusive region, we must continue to the end of the
           region, so exit the region on a taken branch, but otherwise
           fall through to the next instruction. */
        tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
        /* Leave the gUSA region before jumping away. */
        tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK);

        gen_goto_tb(ctx, 0, dest);
        gen_set_label(l1);
        return;
    }

    gen_save_cpu_state(ctx, false);
    tcg_gen_brcondi_i32(cond_not_taken, cpu_sr_t, 0, l1);
    gen_goto_tb(ctx, 0, dest);
    gen_set_label(l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    ctx->bstate = BS_BRANCH;
}
315
316
/* Delayed conditional jump (bt/s or bf/s): the branch condition was
   latched into cpu_delayed_cond before the delay slot executed. */
static void gen_delayed_conditional_jump(DisasContext * ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv ds = tcg_temp_new();

    /* Consume the latched condition. */
    tcg_gen_mov_i32(ds, cpu_delayed_cond);
    tcg_gen_discard_i32(cpu_delayed_cond);

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* When in an exclusive region, we must continue to the end of the
           region, so exit the region on a taken branch, but otherwise
           fall through to the next instruction. */
        tcg_gen_brcondi_i32(TCG_COND_EQ, ds, 0, l1);

        /* Leave the gUSA region before jumping away. */
        tcg_gen_movi_i32(cpu_flags, ctx->envflags & ~GUSA_MASK);
        gen_jump(ctx);

        gen_set_label(l1);
        return;
    }

    tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    gen_set_label(l1);
    gen_jump(ctx);
}
344
/* Read the 64-bit FP register pair starting at even index @reg into @t.
   The odd-numbered FPR supplies the low half of the i64. */
static inline void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    /* Callers must only pass even register numbers. */
    tcg_debug_assert((reg & 1) == 0);
    reg ^= ctx->fbank;   /* select the active FP register bank */
    tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
}
352
/* Write @t into the 64-bit FP register pair starting at even index @reg.
   Inverse of gen_load_fpr64: low half goes to the odd-numbered FPR. */
static inline void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    /* Callers must only pass even register numbers. */
    tcg_debug_assert((reg & 1) == 0);
    reg ^= ctx->fbank;   /* select the active FP register bank */
    tcg_gen_extr_i64_i32(cpu_fregs[reg + 1], cpu_fregs[reg], t);
}
360
/* Instruction field extractors; ctx->opcode is the raw 16-bit word.
   BHI_LO names the bit range; a trailing 's' marks sign extension. */
#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
/* 12-bit branch displacement, sign-extended by hand. */
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

/* Register accessors honouring the active register bank; ALTREG selects
   the opposite bank (only valid for R0-R7). */
#define REG(x) cpu_gregs[(x) ^ ctx->gbank]
#define ALTREG(x) cpu_gregs[(x) ^ ctx->gbank ^ 0x10]
#define FREG(x) cpu_fregs[(x) ^ ctx->fbank]

/* Remap an XD/DR pair number: moves bit 0 into bit 4 so that odd pair
   numbers address the other FP bank (used with FPSCR.SZ set). */
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
376
/* Decoder guard macros: each bails out of _decode_opc by jumping to the
   corresponding error label when the check fails. */

/* Instruction is not allowed inside a delay slot. */
#define CHECK_NOT_DELAY_SLOT \
    if (ctx->envflags & DELAY_SLOT_MASK) {  \
        goto do_illegal_slot;               \
    }

/* Instruction is privileged (SR.MD must be set). */
#define CHECK_PRIVILEGED \
    if (IS_USER(ctx)) {                     \
        goto do_illegal;                    \
    }

/* FPU instructions fault when SR.FD (FPU disable) is set. */
#define CHECK_FPU_ENABLED \
    if (ctx->tbflags & (1u << SR_FD)) {     \
        goto do_fpu_disabled;               \
    }

/* Instruction only valid with FPSCR.PR clear (single precision). */
#define CHECK_FPSCR_PR_0 \
    if (ctx->tbflags & FPSCR_PR) {          \
        goto do_illegal;                    \
    }

/* Instruction only valid with FPSCR.PR set (double precision). */
#define CHECK_FPSCR_PR_1 \
    if (!(ctx->tbflags & FPSCR_PR)) {       \
        goto do_illegal;                    \
    }

/* Instruction only exists on SH4A cores. */
#define CHECK_SH4A \
    if (!(ctx->features & SH_FEATURE_SH4A)) { \
        goto do_illegal;                      \
    }
406
407static void _decode_opc(DisasContext * ctx)
408{
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429 if (ctx->has_movcal)
430 {
431 int opcode = ctx->opcode & 0xf0ff;
432 if (opcode != 0x0093
433 && opcode != 0x00c3 )
434 {
435 gen_helper_discard_movcal_backup(cpu_env);
436 ctx->has_movcal = 0;
437 }
438 }
439
440#if 0
441 fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
442#endif
443
444 switch (ctx->opcode) {
445 case 0x0019:
446 tcg_gen_movi_i32(cpu_sr_m, 0);
447 tcg_gen_movi_i32(cpu_sr_q, 0);
448 tcg_gen_movi_i32(cpu_sr_t, 0);
449 return;
450 case 0x000b:
451 CHECK_NOT_DELAY_SLOT
452 tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
453 ctx->envflags |= DELAY_SLOT;
454 ctx->delayed_pc = (uint32_t) - 1;
455 return;
456 case 0x0028:
457 tcg_gen_movi_i32(cpu_mach, 0);
458 tcg_gen_movi_i32(cpu_macl, 0);
459 return;
460 case 0x0048:
461 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(1u << SR_S));
462 return;
463 case 0x0008:
464 tcg_gen_movi_i32(cpu_sr_t, 0);
465 return;
466 case 0x0038:
467 CHECK_PRIVILEGED
468 gen_helper_ldtlb(cpu_env);
469 return;
470 case 0x002b:
471 CHECK_PRIVILEGED
472 CHECK_NOT_DELAY_SLOT
473 gen_write_sr(cpu_ssr);
474 tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
475 ctx->envflags |= DELAY_SLOT_RTE;
476 ctx->delayed_pc = (uint32_t) - 1;
477 ctx->bstate = BS_STOP;
478 return;
479 case 0x0058:
480 tcg_gen_ori_i32(cpu_sr, cpu_sr, (1u << SR_S));
481 return;
482 case 0x0018:
483 tcg_gen_movi_i32(cpu_sr_t, 1);
484 return;
485 case 0xfbfd:
486 CHECK_FPSCR_PR_0
487 tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
488 ctx->bstate = BS_STOP;
489 return;
490 case 0xf3fd:
491 CHECK_FPSCR_PR_0
492 tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
493 ctx->bstate = BS_STOP;
494 return;
495 case 0xf7fd:
496 CHECK_SH4A
497 tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_PR);
498 ctx->bstate = BS_STOP;
499 return;
500 case 0x0009:
501 return;
502 case 0x001b:
503 CHECK_PRIVILEGED
504 tcg_gen_movi_i32(cpu_pc, ctx->pc + 2);
505 gen_helper_sleep(cpu_env);
506 return;
507 }
508
509 switch (ctx->opcode & 0xf000) {
510 case 0x1000:
511 {
512 TCGv addr = tcg_temp_new();
513 tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
514 tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
515 tcg_temp_free(addr);
516 }
517 return;
518 case 0x5000:
519 {
520 TCGv addr = tcg_temp_new();
521 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
522 tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
523 tcg_temp_free(addr);
524 }
525 return;
526 case 0xe000:
527#ifdef CONFIG_USER_ONLY
528
529
530
531 if (B11_8 == 15 && B7_0s < 0 && parallel_cpus) {
532 ctx->envflags = deposit32(ctx->envflags, GUSA_SHIFT, 8, B7_0s);
533 ctx->bstate = BS_STOP;
534 }
535#endif
536 tcg_gen_movi_i32(REG(B11_8), B7_0s);
537 return;
538 case 0x9000:
539 {
540 TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
541 tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW);
542 tcg_temp_free(addr);
543 }
544 return;
545 case 0xd000:
546 {
547 TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
548 tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
549 tcg_temp_free(addr);
550 }
551 return;
552 case 0x7000:
553 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
554 return;
555 case 0xa000:
556 CHECK_NOT_DELAY_SLOT
557 ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
558 ctx->envflags |= DELAY_SLOT;
559 return;
560 case 0xb000:
561 CHECK_NOT_DELAY_SLOT
562 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
563 ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
564 ctx->envflags |= DELAY_SLOT;
565 return;
566 }
567
568 switch (ctx->opcode & 0xf00f) {
569 case 0x6003:
570 tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
571 return;
572 case 0x2000:
573 tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_UB);
574 return;
575 case 0x2001:
576 tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUW);
577 return;
578 case 0x2002:
579 tcg_gen_qemu_st_i32(REG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL);
580 return;
581 case 0x6000:
582 tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB);
583 return;
584 case 0x6001:
585 tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW);
586 return;
587 case 0x6002:
588 tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL);
589 return;
590 case 0x2004:
591 {
592 TCGv addr = tcg_temp_new();
593 tcg_gen_subi_i32(addr, REG(B11_8), 1);
594
595 tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB);
596 tcg_gen_mov_i32(REG(B11_8), addr);
597 tcg_temp_free(addr);
598 }
599 return;
600 case 0x2005:
601 {
602 TCGv addr = tcg_temp_new();
603 tcg_gen_subi_i32(addr, REG(B11_8), 2);
604 tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW);
605 tcg_gen_mov_i32(REG(B11_8), addr);
606 tcg_temp_free(addr);
607 }
608 return;
609 case 0x2006:
610 {
611 TCGv addr = tcg_temp_new();
612 tcg_gen_subi_i32(addr, REG(B11_8), 4);
613 tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
614 tcg_gen_mov_i32(REG(B11_8), addr);
615 }
616 return;
617 case 0x6004:
618 tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_SB);
619 if ( B11_8 != B7_4 )
620 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
621 return;
622 case 0x6005:
623 tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESW);
624 if ( B11_8 != B7_4 )
625 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
626 return;
627 case 0x6006:
628 tcg_gen_qemu_ld_i32(REG(B11_8), REG(B7_4), ctx->memidx, MO_TESL);
629 if ( B11_8 != B7_4 )
630 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
631 return;
632 case 0x0004:
633 {
634 TCGv addr = tcg_temp_new();
635 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
636 tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_UB);
637 tcg_temp_free(addr);
638 }
639 return;
640 case 0x0005:
641 {
642 TCGv addr = tcg_temp_new();
643 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
644 tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUW);
645 tcg_temp_free(addr);
646 }
647 return;
648 case 0x0006:
649 {
650 TCGv addr = tcg_temp_new();
651 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
652 tcg_gen_qemu_st_i32(REG(B7_4), addr, ctx->memidx, MO_TEUL);
653 tcg_temp_free(addr);
654 }
655 return;
656 case 0x000c:
657 {
658 TCGv addr = tcg_temp_new();
659 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
660 tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_SB);
661 tcg_temp_free(addr);
662 }
663 return;
664 case 0x000d:
665 {
666 TCGv addr = tcg_temp_new();
667 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
668 tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESW);
669 tcg_temp_free(addr);
670 }
671 return;
672 case 0x000e:
673 {
674 TCGv addr = tcg_temp_new();
675 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
676 tcg_gen_qemu_ld_i32(REG(B11_8), addr, ctx->memidx, MO_TESL);
677 tcg_temp_free(addr);
678 }
679 return;
680 case 0x6008:
681 {
682 TCGv low = tcg_temp_new();;
683 tcg_gen_ext16u_i32(low, REG(B7_4));
684 tcg_gen_bswap16_i32(low, low);
685 tcg_gen_deposit_i32(REG(B11_8), REG(B7_4), low, 0, 16);
686 tcg_temp_free(low);
687 }
688 return;
689 case 0x6009:
690 tcg_gen_rotli_i32(REG(B11_8), REG(B7_4), 16);
691 return;
692 case 0x200d:
693 {
694 TCGv high, low;
695 high = tcg_temp_new();
696 tcg_gen_shli_i32(high, REG(B7_4), 16);
697 low = tcg_temp_new();
698 tcg_gen_shri_i32(low, REG(B11_8), 16);
699 tcg_gen_or_i32(REG(B11_8), high, low);
700 tcg_temp_free(low);
701 tcg_temp_free(high);
702 }
703 return;
704 case 0x300c:
705 tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
706 return;
707 case 0x300e:
708 {
709 TCGv t0, t1;
710 t0 = tcg_const_tl(0);
711 t1 = tcg_temp_new();
712 tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0);
713 tcg_gen_add2_i32(REG(B11_8), cpu_sr_t,
714 REG(B11_8), t0, t1, cpu_sr_t);
715 tcg_temp_free(t0);
716 tcg_temp_free(t1);
717 }
718 return;
719 case 0x300f:
720 {
721 TCGv t0, t1, t2;
722 t0 = tcg_temp_new();
723 tcg_gen_add_i32(t0, REG(B7_4), REG(B11_8));
724 t1 = tcg_temp_new();
725 tcg_gen_xor_i32(t1, t0, REG(B11_8));
726 t2 = tcg_temp_new();
727 tcg_gen_xor_i32(t2, REG(B7_4), REG(B11_8));
728 tcg_gen_andc_i32(cpu_sr_t, t1, t2);
729 tcg_temp_free(t2);
730 tcg_gen_shri_i32(cpu_sr_t, cpu_sr_t, 31);
731 tcg_temp_free(t1);
732 tcg_gen_mov_i32(REG(B7_4), t0);
733 tcg_temp_free(t0);
734 }
735 return;
736 case 0x2009:
737 tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
738 return;
739 case 0x3000:
740 tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), REG(B7_4));
741 return;
742 case 0x3003:
743 tcg_gen_setcond_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), REG(B7_4));
744 return;
745 case 0x3007:
746 tcg_gen_setcond_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), REG(B7_4));
747 return;
748 case 0x3006:
749 tcg_gen_setcond_i32(TCG_COND_GTU, cpu_sr_t, REG(B11_8), REG(B7_4));
750 return;
751 case 0x3002:
752 tcg_gen_setcond_i32(TCG_COND_GEU, cpu_sr_t, REG(B11_8), REG(B7_4));
753 return;
754 case 0x200c:
755 {
756 TCGv cmp1 = tcg_temp_new();
757 TCGv cmp2 = tcg_temp_new();
758 tcg_gen_xor_i32(cmp2, REG(B7_4), REG(B11_8));
759 tcg_gen_subi_i32(cmp1, cmp2, 0x01010101);
760 tcg_gen_andc_i32(cmp1, cmp1, cmp2);
761 tcg_gen_andi_i32(cmp1, cmp1, 0x80808080);
762 tcg_gen_setcondi_i32(TCG_COND_NE, cpu_sr_t, cmp1, 0);
763 tcg_temp_free(cmp2);
764 tcg_temp_free(cmp1);
765 }
766 return;
767 case 0x2007:
768 tcg_gen_shri_i32(cpu_sr_q, REG(B11_8), 31);
769 tcg_gen_shri_i32(cpu_sr_m, REG(B7_4), 31);
770 tcg_gen_xor_i32(cpu_sr_t, cpu_sr_q, cpu_sr_m);
771 return;
772 case 0x3004:
773 {
774 TCGv t0 = tcg_temp_new();
775 TCGv t1 = tcg_temp_new();
776 TCGv t2 = tcg_temp_new();
777 TCGv zero = tcg_const_i32(0);
778
779
780
781 tcg_gen_shri_i32(t0, REG(B11_8), 31);
782 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
783 tcg_gen_or_i32(REG(B11_8), REG(B11_8), cpu_sr_t);
784
785
786
787
788
789 tcg_gen_xor_i32(t1, cpu_sr_q, cpu_sr_m);
790 tcg_gen_subi_i32(t1, t1, 1);
791 tcg_gen_neg_i32(t2, REG(B7_4));
792 tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, zero, REG(B7_4), t2);
793 tcg_gen_add2_i32(REG(B11_8), t1, REG(B11_8), zero, t2, t1);
794
795
796 tcg_gen_andi_i32(t1, t1, 1);
797 tcg_gen_xor_i32(t1, t1, t0);
798 tcg_gen_xori_i32(cpu_sr_t, t1, 1);
799 tcg_gen_xor_i32(cpu_sr_q, cpu_sr_m, t1);
800
801 tcg_temp_free(zero);
802 tcg_temp_free(t2);
803 tcg_temp_free(t1);
804 tcg_temp_free(t0);
805 }
806 return;
807 case 0x300d:
808 tcg_gen_muls2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
809 return;
810 case 0x3005:
811 tcg_gen_mulu2_i32(cpu_macl, cpu_mach, REG(B7_4), REG(B11_8));
812 return;
813 case 0x600e:
814 tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
815 return;
816 case 0x600f:
817 tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
818 return;
819 case 0x600c:
820 tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
821 return;
822 case 0x600d:
823 tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
824 return;
825 case 0x000f:
826 {
827 TCGv arg0, arg1;
828 arg0 = tcg_temp_new();
829 tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL);
830 arg1 = tcg_temp_new();
831 tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL);
832 gen_helper_macl(cpu_env, arg0, arg1);
833 tcg_temp_free(arg1);
834 tcg_temp_free(arg0);
835 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
836 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
837 }
838 return;
839 case 0x400f:
840 {
841 TCGv arg0, arg1;
842 arg0 = tcg_temp_new();
843 tcg_gen_qemu_ld_i32(arg0, REG(B7_4), ctx->memidx, MO_TESL);
844 arg1 = tcg_temp_new();
845 tcg_gen_qemu_ld_i32(arg1, REG(B11_8), ctx->memidx, MO_TESL);
846 gen_helper_macw(cpu_env, arg0, arg1);
847 tcg_temp_free(arg1);
848 tcg_temp_free(arg0);
849 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
850 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
851 }
852 return;
853 case 0x0007:
854 tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
855 return;
856 case 0x200f:
857 {
858 TCGv arg0, arg1;
859 arg0 = tcg_temp_new();
860 tcg_gen_ext16s_i32(arg0, REG(B7_4));
861 arg1 = tcg_temp_new();
862 tcg_gen_ext16s_i32(arg1, REG(B11_8));
863 tcg_gen_mul_i32(cpu_macl, arg0, arg1);
864 tcg_temp_free(arg1);
865 tcg_temp_free(arg0);
866 }
867 return;
868 case 0x200e:
869 {
870 TCGv arg0, arg1;
871 arg0 = tcg_temp_new();
872 tcg_gen_ext16u_i32(arg0, REG(B7_4));
873 arg1 = tcg_temp_new();
874 tcg_gen_ext16u_i32(arg1, REG(B11_8));
875 tcg_gen_mul_i32(cpu_macl, arg0, arg1);
876 tcg_temp_free(arg1);
877 tcg_temp_free(arg0);
878 }
879 return;
880 case 0x600b:
881 tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
882 return;
883 case 0x600a:
884 {
885 TCGv t0 = tcg_const_i32(0);
886 tcg_gen_add2_i32(REG(B11_8), cpu_sr_t,
887 REG(B7_4), t0, cpu_sr_t, t0);
888 tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t,
889 t0, t0, REG(B11_8), cpu_sr_t);
890 tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1);
891 tcg_temp_free(t0);
892 }
893 return;
894 case 0x6007:
895 tcg_gen_not_i32(REG(B11_8), REG(B7_4));
896 return;
897 case 0x200b:
898 tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
899 return;
900 case 0x400c:
901 {
902 TCGv t0 = tcg_temp_new();
903 TCGv t1 = tcg_temp_new();
904 TCGv t2 = tcg_temp_new();
905
906 tcg_gen_andi_i32(t0, REG(B7_4), 0x1f);
907
908
909 tcg_gen_shl_i32(t1, REG(B11_8), t0);
910
911
912
913 tcg_gen_xori_i32(t0, t0, 0x1f);
914 tcg_gen_sar_i32(t2, REG(B11_8), t0);
915 tcg_gen_sari_i32(t2, t2, 1);
916
917
918 tcg_gen_movi_i32(t0, 0);
919 tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2);
920
921 tcg_temp_free(t0);
922 tcg_temp_free(t1);
923 tcg_temp_free(t2);
924 }
925 return;
926 case 0x400d:
927 {
928 TCGv t0 = tcg_temp_new();
929 TCGv t1 = tcg_temp_new();
930 TCGv t2 = tcg_temp_new();
931
932 tcg_gen_andi_i32(t0, REG(B7_4), 0x1f);
933
934
935 tcg_gen_shl_i32(t1, REG(B11_8), t0);
936
937
938
939 tcg_gen_xori_i32(t0, t0, 0x1f);
940 tcg_gen_shr_i32(t2, REG(B11_8), t0);
941 tcg_gen_shri_i32(t2, t2, 1);
942
943
944 tcg_gen_movi_i32(t0, 0);
945 tcg_gen_movcond_i32(TCG_COND_GE, REG(B11_8), REG(B7_4), t0, t1, t2);
946
947 tcg_temp_free(t0);
948 tcg_temp_free(t1);
949 tcg_temp_free(t2);
950 }
951 return;
952 case 0x3008:
953 tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
954 return;
955 case 0x300a:
956 {
957 TCGv t0, t1;
958 t0 = tcg_const_tl(0);
959 t1 = tcg_temp_new();
960 tcg_gen_add2_i32(t1, cpu_sr_t, cpu_sr_t, t0, REG(B7_4), t0);
961 tcg_gen_sub2_i32(REG(B11_8), cpu_sr_t,
962 REG(B11_8), t0, t1, cpu_sr_t);
963 tcg_gen_andi_i32(cpu_sr_t, cpu_sr_t, 1);
964 tcg_temp_free(t0);
965 tcg_temp_free(t1);
966 }
967 return;
968 case 0x300b:
969 {
970 TCGv t0, t1, t2;
971 t0 = tcg_temp_new();
972 tcg_gen_sub_i32(t0, REG(B11_8), REG(B7_4));
973 t1 = tcg_temp_new();
974 tcg_gen_xor_i32(t1, t0, REG(B7_4));
975 t2 = tcg_temp_new();
976 tcg_gen_xor_i32(t2, REG(B11_8), REG(B7_4));
977 tcg_gen_and_i32(t1, t1, t2);
978 tcg_temp_free(t2);
979 tcg_gen_shri_i32(cpu_sr_t, t1, 31);
980 tcg_temp_free(t1);
981 tcg_gen_mov_i32(REG(B11_8), t0);
982 tcg_temp_free(t0);
983 }
984 return;
985 case 0x2008:
986 {
987 TCGv val = tcg_temp_new();
988 tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
989 tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0);
990 tcg_temp_free(val);
991 }
992 return;
993 case 0x200a:
994 tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
995 return;
996 case 0xf00c:
997 CHECK_FPU_ENABLED
998 if (ctx->tbflags & FPSCR_SZ) {
999 int xsrc = XHACK(B7_4);
1000 int xdst = XHACK(B11_8);
1001 tcg_gen_mov_i32(FREG(xdst), FREG(xsrc));
1002 tcg_gen_mov_i32(FREG(xdst + 1), FREG(xsrc + 1));
1003 } else {
1004 tcg_gen_mov_i32(FREG(B11_8), FREG(B7_4));
1005 }
1006 return;
1007 case 0xf00a:
1008 CHECK_FPU_ENABLED
1009 if (ctx->tbflags & FPSCR_SZ) {
1010 TCGv_i64 fp = tcg_temp_new_i64();
1011 gen_load_fpr64(ctx, fp, XHACK(B7_4));
1012 tcg_gen_qemu_st_i64(fp, REG(B11_8), ctx->memidx, MO_TEQ);
1013 tcg_temp_free_i64(fp);
1014 } else {
1015 tcg_gen_qemu_st_i32(FREG(B7_4), REG(B11_8), ctx->memidx, MO_TEUL);
1016 }
1017 return;
1018 case 0xf008:
1019 CHECK_FPU_ENABLED
1020 if (ctx->tbflags & FPSCR_SZ) {
1021 TCGv_i64 fp = tcg_temp_new_i64();
1022 tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEQ);
1023 gen_store_fpr64(ctx, fp, XHACK(B11_8));
1024 tcg_temp_free_i64(fp);
1025 } else {
1026 tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL);
1027 }
1028 return;
1029 case 0xf009:
1030 CHECK_FPU_ENABLED
1031 if (ctx->tbflags & FPSCR_SZ) {
1032 TCGv_i64 fp = tcg_temp_new_i64();
1033 tcg_gen_qemu_ld_i64(fp, REG(B7_4), ctx->memidx, MO_TEQ);
1034 gen_store_fpr64(ctx, fp, XHACK(B11_8));
1035 tcg_temp_free_i64(fp);
1036 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1037 } else {
1038 tcg_gen_qemu_ld_i32(FREG(B11_8), REG(B7_4), ctx->memidx, MO_TEUL);
1039 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1040 }
1041 return;
1042 case 0xf00b:
1043 CHECK_FPU_ENABLED
1044 {
1045 TCGv addr = tcg_temp_new_i32();
1046 if (ctx->tbflags & FPSCR_SZ) {
1047 TCGv_i64 fp = tcg_temp_new_i64();
1048 gen_load_fpr64(ctx, fp, XHACK(B7_4));
1049 tcg_gen_subi_i32(addr, REG(B11_8), 8);
1050 tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEQ);
1051 tcg_temp_free_i64(fp);
1052 } else {
1053 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1054 tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL);
1055 }
1056 tcg_gen_mov_i32(REG(B11_8), addr);
1057 tcg_temp_free(addr);
1058 }
1059 return;
1060 case 0xf006:
1061 CHECK_FPU_ENABLED
1062 {
1063 TCGv addr = tcg_temp_new_i32();
1064 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1065 if (ctx->tbflags & FPSCR_SZ) {
1066 TCGv_i64 fp = tcg_temp_new_i64();
1067 tcg_gen_qemu_ld_i64(fp, addr, ctx->memidx, MO_TEQ);
1068 gen_store_fpr64(ctx, fp, XHACK(B11_8));
1069 tcg_temp_free_i64(fp);
1070 } else {
1071 tcg_gen_qemu_ld_i32(FREG(B11_8), addr, ctx->memidx, MO_TEUL);
1072 }
1073 tcg_temp_free(addr);
1074 }
1075 return;
1076 case 0xf007:
1077 CHECK_FPU_ENABLED
1078 {
1079 TCGv addr = tcg_temp_new();
1080 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1081 if (ctx->tbflags & FPSCR_SZ) {
1082 TCGv_i64 fp = tcg_temp_new_i64();
1083 gen_load_fpr64(ctx, fp, XHACK(B7_4));
1084 tcg_gen_qemu_st_i64(fp, addr, ctx->memidx, MO_TEQ);
1085 tcg_temp_free_i64(fp);
1086 } else {
1087 tcg_gen_qemu_st_i32(FREG(B7_4), addr, ctx->memidx, MO_TEUL);
1088 }
1089 tcg_temp_free(addr);
1090 }
1091 return;
1092 case 0xf000:
1093 case 0xf001:
1094 case 0xf002:
1095 case 0xf003:
1096 case 0xf004:
1097 case 0xf005:
1098 {
1099 CHECK_FPU_ENABLED
1100 if (ctx->tbflags & FPSCR_PR) {
1101 TCGv_i64 fp0, fp1;
1102
1103 if (ctx->opcode & 0x0110) {
1104 goto do_illegal;
1105 }
1106 fp0 = tcg_temp_new_i64();
1107 fp1 = tcg_temp_new_i64();
1108 gen_load_fpr64(ctx, fp0, B11_8);
1109 gen_load_fpr64(ctx, fp1, B7_4);
1110 switch (ctx->opcode & 0xf00f) {
1111 case 0xf000:
1112 gen_helper_fadd_DT(fp0, cpu_env, fp0, fp1);
1113 break;
1114 case 0xf001:
1115 gen_helper_fsub_DT(fp0, cpu_env, fp0, fp1);
1116 break;
1117 case 0xf002:
1118 gen_helper_fmul_DT(fp0, cpu_env, fp0, fp1);
1119 break;
1120 case 0xf003:
1121 gen_helper_fdiv_DT(fp0, cpu_env, fp0, fp1);
1122 break;
1123 case 0xf004:
1124 gen_helper_fcmp_eq_DT(cpu_sr_t, cpu_env, fp0, fp1);
1125 return;
1126 case 0xf005:
1127 gen_helper_fcmp_gt_DT(cpu_sr_t, cpu_env, fp0, fp1);
1128 return;
1129 }
1130 gen_store_fpr64(ctx, fp0, B11_8);
1131 tcg_temp_free_i64(fp0);
1132 tcg_temp_free_i64(fp1);
1133 } else {
1134 switch (ctx->opcode & 0xf00f) {
1135 case 0xf000:
1136 gen_helper_fadd_FT(FREG(B11_8), cpu_env,
1137 FREG(B11_8), FREG(B7_4));
1138 break;
1139 case 0xf001:
1140 gen_helper_fsub_FT(FREG(B11_8), cpu_env,
1141 FREG(B11_8), FREG(B7_4));
1142 break;
1143 case 0xf002:
1144 gen_helper_fmul_FT(FREG(B11_8), cpu_env,
1145 FREG(B11_8), FREG(B7_4));
1146 break;
1147 case 0xf003:
1148 gen_helper_fdiv_FT(FREG(B11_8), cpu_env,
1149 FREG(B11_8), FREG(B7_4));
1150 break;
1151 case 0xf004:
1152 gen_helper_fcmp_eq_FT(cpu_sr_t, cpu_env,
1153 FREG(B11_8), FREG(B7_4));
1154 return;
1155 case 0xf005:
1156 gen_helper_fcmp_gt_FT(cpu_sr_t, cpu_env,
1157 FREG(B11_8), FREG(B7_4));
1158 return;
1159 }
1160 }
1161 }
1162 return;
1163 case 0xf00e:
1164 CHECK_FPU_ENABLED
1165 CHECK_FPSCR_PR_0
1166 gen_helper_fmac_FT(FREG(B11_8), cpu_env,
1167 FREG(0), FREG(B7_4), FREG(B11_8));
1168 return;
1169 }
1170
1171 switch (ctx->opcode & 0xff00) {
1172 case 0xc900:
1173 tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1174 return;
1175 case 0xcd00:
1176 {
1177 TCGv addr, val;
1178 addr = tcg_temp_new();
1179 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1180 val = tcg_temp_new();
1181 tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB);
1182 tcg_gen_andi_i32(val, val, B7_0);
1183 tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB);
1184 tcg_temp_free(val);
1185 tcg_temp_free(addr);
1186 }
1187 return;
1188 case 0x8b00:
1189 CHECK_NOT_DELAY_SLOT
1190 gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2, false);
1191 return;
1192 case 0x8f00:
1193 CHECK_NOT_DELAY_SLOT
1194 tcg_gen_xori_i32(cpu_delayed_cond, cpu_sr_t, 1);
1195 ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2;
1196 ctx->envflags |= DELAY_SLOT_CONDITIONAL;
1197 return;
1198 case 0x8900:
1199 CHECK_NOT_DELAY_SLOT
1200 gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2, true);
1201 return;
1202 case 0x8d00:
1203 CHECK_NOT_DELAY_SLOT
1204 tcg_gen_mov_i32(cpu_delayed_cond, cpu_sr_t);
1205 ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2;
1206 ctx->envflags |= DELAY_SLOT_CONDITIONAL;
1207 return;
1208 case 0x8800:
1209 tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(0), B7_0s);
1210 return;
1211 case 0xc400:
1212 {
1213 TCGv addr = tcg_temp_new();
1214 tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1215 tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB);
1216 tcg_temp_free(addr);
1217 }
1218 return;
1219 case 0xc500:
1220 {
1221 TCGv addr = tcg_temp_new();
1222 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1223 tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW);
1224 tcg_temp_free(addr);
1225 }
1226 return;
1227 case 0xc600:
1228 {
1229 TCGv addr = tcg_temp_new();
1230 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1231 tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESL);
1232 tcg_temp_free(addr);
1233 }
1234 return;
1235 case 0xc000:
1236 {
1237 TCGv addr = tcg_temp_new();
1238 tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1239 tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB);
1240 tcg_temp_free(addr);
1241 }
1242 return;
1243 case 0xc100:
1244 {
1245 TCGv addr = tcg_temp_new();
1246 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1247 tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW);
1248 tcg_temp_free(addr);
1249 }
1250 return;
1251 case 0xc200:
1252 {
1253 TCGv addr = tcg_temp_new();
1254 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1255 tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUL);
1256 tcg_temp_free(addr);
1257 }
1258 return;
1259 case 0x8000:
1260 {
1261 TCGv addr = tcg_temp_new();
1262 tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1263 tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_UB);
1264 tcg_temp_free(addr);
1265 }
1266 return;
1267 case 0x8100:
1268 {
1269 TCGv addr = tcg_temp_new();
1270 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1271 tcg_gen_qemu_st_i32(REG(0), addr, ctx->memidx, MO_TEUW);
1272 tcg_temp_free(addr);
1273 }
1274 return;
1275 case 0x8400:
1276 {
1277 TCGv addr = tcg_temp_new();
1278 tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1279 tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_SB);
1280 tcg_temp_free(addr);
1281 }
1282 return;
1283 case 0x8500:
1284 {
1285 TCGv addr = tcg_temp_new();
1286 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1287 tcg_gen_qemu_ld_i32(REG(0), addr, ctx->memidx, MO_TESW);
1288 tcg_temp_free(addr);
1289 }
1290 return;
1291 case 0xc700:
1292 tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1293 return;
1294 case 0xcb00:
1295 tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1296 return;
1297 case 0xcf00:
1298 {
1299 TCGv addr, val;
1300 addr = tcg_temp_new();
1301 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1302 val = tcg_temp_new();
1303 tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB);
1304 tcg_gen_ori_i32(val, val, B7_0);
1305 tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB);
1306 tcg_temp_free(val);
1307 tcg_temp_free(addr);
1308 }
1309 return;
1310 case 0xc300:
1311 {
1312 TCGv imm;
1313 CHECK_NOT_DELAY_SLOT
1314 gen_save_cpu_state(ctx, true);
1315 imm = tcg_const_i32(B7_0);
1316 gen_helper_trapa(cpu_env, imm);
1317 tcg_temp_free(imm);
1318 ctx->bstate = BS_EXCP;
1319 }
1320 return;
1321 case 0xc800:
1322 {
1323 TCGv val = tcg_temp_new();
1324 tcg_gen_andi_i32(val, REG(0), B7_0);
1325 tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0);
1326 tcg_temp_free(val);
1327 }
1328 return;
1329 case 0xcc00:
1330 {
1331 TCGv val = tcg_temp_new();
1332 tcg_gen_add_i32(val, REG(0), cpu_gbr);
1333 tcg_gen_qemu_ld_i32(val, val, ctx->memidx, MO_UB);
1334 tcg_gen_andi_i32(val, val, B7_0);
1335 tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0);
1336 tcg_temp_free(val);
1337 }
1338 return;
1339 case 0xca00:
1340 tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1341 return;
1342 case 0xce00:
1343 {
1344 TCGv addr, val;
1345 addr = tcg_temp_new();
1346 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1347 val = tcg_temp_new();
1348 tcg_gen_qemu_ld_i32(val, addr, ctx->memidx, MO_UB);
1349 tcg_gen_xori_i32(val, val, B7_0);
1350 tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_UB);
1351 tcg_temp_free(val);
1352 tcg_temp_free(addr);
1353 }
1354 return;
1355 }
1356
1357 switch (ctx->opcode & 0xf08f) {
1358 case 0x408e:
1359 CHECK_PRIVILEGED
1360 tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1361 return;
1362 case 0x4087:
1363 CHECK_PRIVILEGED
1364 tcg_gen_qemu_ld_i32(ALTREG(B6_4), REG(B11_8), ctx->memidx, MO_TESL);
1365 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1366 return;
1367 case 0x0082:
1368 CHECK_PRIVILEGED
1369 tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1370 return;
1371 case 0x4083:
1372 CHECK_PRIVILEGED
1373 {
1374 TCGv addr = tcg_temp_new();
1375 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1376 tcg_gen_qemu_st_i32(ALTREG(B6_4), addr, ctx->memidx, MO_TEUL);
1377 tcg_gen_mov_i32(REG(B11_8), addr);
1378 tcg_temp_free(addr);
1379 }
1380 return;
1381 }
1382
1383 switch (ctx->opcode & 0xf0ff) {
1384 case 0x0023:
1385 CHECK_NOT_DELAY_SLOT
1386 tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1387 ctx->envflags |= DELAY_SLOT;
1388 ctx->delayed_pc = (uint32_t) - 1;
1389 return;
1390 case 0x0003:
1391 CHECK_NOT_DELAY_SLOT
1392 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1393 tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1394 ctx->envflags |= DELAY_SLOT;
1395 ctx->delayed_pc = (uint32_t) - 1;
1396 return;
1397 case 0x4015:
1398 tcg_gen_setcondi_i32(TCG_COND_GT, cpu_sr_t, REG(B11_8), 0);
1399 return;
1400 case 0x4011:
1401 tcg_gen_setcondi_i32(TCG_COND_GE, cpu_sr_t, REG(B11_8), 0);
1402 return;
1403 case 0x4010:
1404 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1405 tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, REG(B11_8), 0);
1406 return;
1407 case 0x402b:
1408 CHECK_NOT_DELAY_SLOT
1409 tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1410 ctx->envflags |= DELAY_SLOT;
1411 ctx->delayed_pc = (uint32_t) - 1;
1412 return;
1413 case 0x400b:
1414 CHECK_NOT_DELAY_SLOT
1415 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1416 tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1417 ctx->envflags |= DELAY_SLOT;
1418 ctx->delayed_pc = (uint32_t) - 1;
1419 return;
1420 case 0x400e:
1421 CHECK_PRIVILEGED
1422 {
1423 TCGv val = tcg_temp_new();
1424 tcg_gen_andi_i32(val, REG(B11_8), 0x700083f3);
1425 gen_write_sr(val);
1426 tcg_temp_free(val);
1427 ctx->bstate = BS_STOP;
1428 }
1429 return;
1430 case 0x4007:
1431 CHECK_PRIVILEGED
1432 {
1433 TCGv val = tcg_temp_new();
1434 tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TESL);
1435 tcg_gen_andi_i32(val, val, 0x700083f3);
1436 gen_write_sr(val);
1437 tcg_temp_free(val);
1438 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1439 ctx->bstate = BS_STOP;
1440 }
1441 return;
1442 case 0x0002:
1443 CHECK_PRIVILEGED
1444 gen_read_sr(REG(B11_8));
1445 return;
1446 case 0x4003:
1447 CHECK_PRIVILEGED
1448 {
1449 TCGv addr = tcg_temp_new();
1450 TCGv val = tcg_temp_new();
1451 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1452 gen_read_sr(val);
1453 tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL);
1454 tcg_gen_mov_i32(REG(B11_8), addr);
1455 tcg_temp_free(val);
1456 tcg_temp_free(addr);
1457 }
1458 return;
1459#define LD(reg,ldnum,ldpnum,prechk) \
1460 case ldnum: \
1461 prechk \
1462 tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \
1463 return; \
1464 case ldpnum: \
1465 prechk \
1466 tcg_gen_qemu_ld_i32(cpu_##reg, REG(B11_8), ctx->memidx, MO_TESL); \
1467 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \
1468 return;
1469#define ST(reg,stnum,stpnum,prechk) \
1470 case stnum: \
1471 prechk \
1472 tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \
1473 return; \
1474 case stpnum: \
1475 prechk \
1476 { \
1477 TCGv addr = tcg_temp_new(); \
1478 tcg_gen_subi_i32(addr, REG(B11_8), 4); \
1479 tcg_gen_qemu_st_i32(cpu_##reg, addr, ctx->memidx, MO_TEUL); \
1480 tcg_gen_mov_i32(REG(B11_8), addr); \
1481 tcg_temp_free(addr); \
1482 } \
1483 return;
1484#define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \
1485 LD(reg,ldnum,ldpnum,prechk) \
1486 ST(reg,stnum,stpnum,prechk)
1487 LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {})
1488 LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1489 LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1490 LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
1491 ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED)
1492 LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED CHECK_SH4A)
1493 LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1494 LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1495 LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1496 LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {})
1497 LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
1498 case 0x406a:
1499 CHECK_FPU_ENABLED
1500 gen_helper_ld_fpscr(cpu_env, REG(B11_8));
1501 ctx->bstate = BS_STOP;
1502 return;
1503 case 0x4066:
1504 CHECK_FPU_ENABLED
1505 {
1506 TCGv addr = tcg_temp_new();
1507 tcg_gen_qemu_ld_i32(addr, REG(B11_8), ctx->memidx, MO_TESL);
1508 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1509 gen_helper_ld_fpscr(cpu_env, addr);
1510 tcg_temp_free(addr);
1511 ctx->bstate = BS_STOP;
1512 }
1513 return;
1514 case 0x006a:
1515 CHECK_FPU_ENABLED
1516 tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1517 return;
1518 case 0x4062:
1519 CHECK_FPU_ENABLED
1520 {
1521 TCGv addr, val;
1522 val = tcg_temp_new();
1523 tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1524 addr = tcg_temp_new();
1525 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1526 tcg_gen_qemu_st_i32(val, addr, ctx->memidx, MO_TEUL);
1527 tcg_gen_mov_i32(REG(B11_8), addr);
1528 tcg_temp_free(addr);
1529 tcg_temp_free(val);
1530 }
1531 return;
1532 case 0x00c3:
1533 {
1534 TCGv val = tcg_temp_new();
1535 tcg_gen_qemu_ld_i32(val, REG(B11_8), ctx->memidx, MO_TEUL);
1536 gen_helper_movcal(cpu_env, REG(B11_8), val);
1537 tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL);
1538 }
1539 ctx->has_movcal = 1;
1540 return;
1541 case 0x40a9:
1542 CHECK_SH4A
1543
1544 tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx,
1545 MO_TEUL | MO_UNALN);
1546 return;
1547 break;
1548 case 0x40e9:
1549 CHECK_SH4A
1550
1551 tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx,
1552 MO_TEUL | MO_UNALN);
1553 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1554 return;
1555 break;
1556 case 0x0029:
1557 tcg_gen_mov_i32(REG(B11_8), cpu_sr_t);
1558 return;
1559 case 0x0073:
1560
1561
1562
1563
1564
1565 CHECK_SH4A
1566 {
1567 TCGLabel *label = gen_new_label();
1568 tcg_gen_mov_i32(cpu_sr_t, cpu_ldst);
1569 tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
1570 tcg_gen_qemu_st_i32(REG(0), REG(B11_8), ctx->memidx, MO_TEUL);
1571 gen_set_label(label);
1572 tcg_gen_movi_i32(cpu_ldst, 0);
1573 return;
1574 }
1575 case 0x0063:
1576
1577
1578
1579
1580
1581
1582 CHECK_SH4A
1583 tcg_gen_movi_i32(cpu_ldst, 0);
1584 tcg_gen_qemu_ld_i32(REG(0), REG(B11_8), ctx->memidx, MO_TESL);
1585 tcg_gen_movi_i32(cpu_ldst, 1);
1586 return;
1587 case 0x0093:
1588 {
1589 gen_helper_ocbi(cpu_env, REG(B11_8));
1590 }
1591 return;
1592 case 0x00a3:
1593 case 0x00b3:
1594
1595
1596
1597 return;
1598 case 0x0083:
1599 return;
1600 case 0x00d3:
1601 CHECK_SH4A
1602 return;
1603 case 0x00e3:
1604 CHECK_SH4A
1605 return;
1606 case 0x00ab:
1607 CHECK_SH4A
1608 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
1609 return;
1610 break;
1611 case 0x4024:
1612 {
1613 TCGv tmp = tcg_temp_new();
1614 tcg_gen_mov_i32(tmp, cpu_sr_t);
1615 tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31);
1616 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1617 tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp);
1618 tcg_temp_free(tmp);
1619 }
1620 return;
1621 case 0x4025:
1622 {
1623 TCGv tmp = tcg_temp_new();
1624 tcg_gen_shli_i32(tmp, cpu_sr_t, 31);
1625 tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1);
1626 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1627 tcg_gen_or_i32(REG(B11_8), REG(B11_8), tmp);
1628 tcg_temp_free(tmp);
1629 }
1630 return;
1631 case 0x4004:
1632 tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
1633 tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0);
1634 return;
1635 case 0x4005:
1636 tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 0);
1637 tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
1638 return;
1639 case 0x4000:
1640 case 0x4020:
1641 tcg_gen_shri_i32(cpu_sr_t, REG(B11_8), 31);
1642 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1643 return;
1644 case 0x4021:
1645 tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1);
1646 tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1647 return;
1648 case 0x4001:
1649 tcg_gen_andi_i32(cpu_sr_t, REG(B11_8), 1);
1650 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1651 return;
1652 case 0x4008:
1653 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1654 return;
1655 case 0x4018:
1656 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1657 return;
1658 case 0x4028:
1659 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1660 return;
1661 case 0x4009:
1662 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1663 return;
1664 case 0x4019:
1665 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1666 return;
1667 case 0x4029:
1668 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1669 return;
1670 case 0x401b:
1671 {
1672 TCGv val = tcg_const_i32(0x80);
1673 tcg_gen_atomic_fetch_or_i32(val, REG(B11_8), val,
1674 ctx->memidx, MO_UB);
1675 tcg_gen_setcondi_i32(TCG_COND_EQ, cpu_sr_t, val, 0);
1676 tcg_temp_free(val);
1677 }
1678 return;
1679 case 0xf00d:
1680 CHECK_FPU_ENABLED
1681 tcg_gen_mov_i32(FREG(B11_8), cpu_fpul);
1682 return;
1683 case 0xf01d:
1684 CHECK_FPU_ENABLED
1685 tcg_gen_mov_i32(cpu_fpul, FREG(B11_8));
1686 return;
1687 case 0xf02d:
1688 CHECK_FPU_ENABLED
1689 if (ctx->tbflags & FPSCR_PR) {
1690 TCGv_i64 fp;
1691 if (ctx->opcode & 0x0100) {
1692 goto do_illegal;
1693 }
1694 fp = tcg_temp_new_i64();
1695 gen_helper_float_DT(fp, cpu_env, cpu_fpul);
1696 gen_store_fpr64(ctx, fp, B11_8);
1697 tcg_temp_free_i64(fp);
1698 }
1699 else {
1700 gen_helper_float_FT(FREG(B11_8), cpu_env, cpu_fpul);
1701 }
1702 return;
1703 case 0xf03d:
1704 CHECK_FPU_ENABLED
1705 if (ctx->tbflags & FPSCR_PR) {
1706 TCGv_i64 fp;
1707 if (ctx->opcode & 0x0100) {
1708 goto do_illegal;
1709 }
1710 fp = tcg_temp_new_i64();
1711 gen_load_fpr64(ctx, fp, B11_8);
1712 gen_helper_ftrc_DT(cpu_fpul, cpu_env, fp);
1713 tcg_temp_free_i64(fp);
1714 }
1715 else {
1716 gen_helper_ftrc_FT(cpu_fpul, cpu_env, FREG(B11_8));
1717 }
1718 return;
1719 case 0xf04d:
1720 CHECK_FPU_ENABLED
1721 tcg_gen_xori_i32(FREG(B11_8), FREG(B11_8), 0x80000000);
1722 return;
1723 case 0xf05d:
1724 CHECK_FPU_ENABLED
1725 tcg_gen_andi_i32(FREG(B11_8), FREG(B11_8), 0x7fffffff);
1726 return;
1727 case 0xf06d:
1728 CHECK_FPU_ENABLED
1729 if (ctx->tbflags & FPSCR_PR) {
1730 if (ctx->opcode & 0x0100) {
1731 goto do_illegal;
1732 }
1733 TCGv_i64 fp = tcg_temp_new_i64();
1734 gen_load_fpr64(ctx, fp, B11_8);
1735 gen_helper_fsqrt_DT(fp, cpu_env, fp);
1736 gen_store_fpr64(ctx, fp, B11_8);
1737 tcg_temp_free_i64(fp);
1738 } else {
1739 gen_helper_fsqrt_FT(FREG(B11_8), cpu_env, FREG(B11_8));
1740 }
1741 return;
1742 case 0xf07d:
1743 CHECK_FPU_ENABLED
1744 CHECK_FPSCR_PR_0
1745 gen_helper_fsrra_FT(FREG(B11_8), cpu_env, FREG(B11_8));
1746 break;
1747 case 0xf08d:
1748 CHECK_FPU_ENABLED
1749 CHECK_FPSCR_PR_0
1750 tcg_gen_movi_i32(FREG(B11_8), 0);
1751 return;
1752 case 0xf09d:
1753 CHECK_FPU_ENABLED
1754 CHECK_FPSCR_PR_0
1755 tcg_gen_movi_i32(FREG(B11_8), 0x3f800000);
1756 return;
1757 case 0xf0ad:
1758 CHECK_FPU_ENABLED
1759 {
1760 TCGv_i64 fp = tcg_temp_new_i64();
1761 gen_helper_fcnvsd_FT_DT(fp, cpu_env, cpu_fpul);
1762 gen_store_fpr64(ctx, fp, B11_8);
1763 tcg_temp_free_i64(fp);
1764 }
1765 return;
1766 case 0xf0bd:
1767 CHECK_FPU_ENABLED
1768 {
1769 TCGv_i64 fp = tcg_temp_new_i64();
1770 gen_load_fpr64(ctx, fp, B11_8);
1771 gen_helper_fcnvds_DT_FT(cpu_fpul, cpu_env, fp);
1772 tcg_temp_free_i64(fp);
1773 }
1774 return;
1775 case 0xf0ed:
1776 CHECK_FPU_ENABLED
1777 CHECK_FPSCR_PR_1
1778 {
1779 TCGv m = tcg_const_i32((ctx->opcode >> 8) & 3);
1780 TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3);
1781 gen_helper_fipr(cpu_env, m, n);
1782 tcg_temp_free(m);
1783 tcg_temp_free(n);
1784 return;
1785 }
1786 break;
1787 case 0xf0fd:
1788 CHECK_FPU_ENABLED
1789 CHECK_FPSCR_PR_1
1790 {
1791 if ((ctx->opcode & 0x0300) != 0x0100) {
1792 goto do_illegal;
1793 }
1794 TCGv n = tcg_const_i32((ctx->opcode >> 10) & 3);
1795 gen_helper_ftrv(cpu_env, n);
1796 tcg_temp_free(n);
1797 return;
1798 }
1799 break;
1800 }
1801#if 0
1802 fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1803 ctx->opcode, ctx->pc);
1804 fflush(stderr);
1805#endif
1806 do_illegal:
1807 if (ctx->envflags & DELAY_SLOT_MASK) {
1808 do_illegal_slot:
1809 gen_save_cpu_state(ctx, true);
1810 gen_helper_raise_slot_illegal_instruction(cpu_env);
1811 } else {
1812 gen_save_cpu_state(ctx, true);
1813 gen_helper_raise_illegal_instruction(cpu_env);
1814 }
1815 ctx->bstate = BS_EXCP;
1816 return;
1817
1818 do_fpu_disabled:
1819 gen_save_cpu_state(ctx, true);
1820 if (ctx->envflags & DELAY_SLOT_MASK) {
1821 gen_helper_raise_slot_fpu_disable(cpu_env);
1822 } else {
1823 gen_helper_raise_fpu_disable(cpu_env);
1824 }
1825 ctx->bstate = BS_EXCP;
1826 return;
1827}
1828
1829static void decode_opc(DisasContext * ctx)
1830{
1831 uint32_t old_flags = ctx->envflags;
1832
1833 _decode_opc(ctx);
1834
1835 if (old_flags & DELAY_SLOT_MASK) {
1836
1837 ctx->envflags &= ~DELAY_SLOT_MASK;
1838
1839
1840
1841 if (ctx->tbflags & GUSA_EXCLUSIVE
1842 && old_flags & DELAY_SLOT_CONDITIONAL) {
1843 gen_delayed_conditional_jump(ctx);
1844 return;
1845 }
1846
1847
1848 ctx->envflags &= ~GUSA_MASK;
1849
1850 tcg_gen_movi_i32(cpu_flags, ctx->envflags);
1851 ctx->bstate = BS_BRANCH;
1852 if (old_flags & DELAY_SLOT_CONDITIONAL) {
1853 gen_delayed_conditional_jump(ctx);
1854 } else {
1855 gen_jump(ctx);
1856 }
1857 }
1858}
1859
1860#ifdef CONFIG_USER_ONLY
1861
1862
1863
1864
1865
1866
1867
1868
1869
/* Try to recognize a gUSA ("gcc user-space atomic") restartable sequence
 * and replace the whole sequence with a single host atomic operation.
 * The expected shape is load / (optional move) / (optional op) / store,
 * all against the same memory address.
 *
 * Returns the number of guest insns consumed on success; returns 0 when
 * the region should instead be translated normally (with *pmax_insns
 * possibly adjusted); on an unrecognized sequence, emits a call to the
 * exclusive-execution helper and returns 1.
 *
 * NOTE(review): assumes tb->cs_base carries the end address of the gUSA
 * region and that GUSA_SHIFT extracts the (negative) restart backup
 * offset from tbflags -- confirm against the flag setup in cpu.h.
 */
static int decode_gusa(DisasContext *ctx, CPUSH4State *env, int *pmax_insns)
{
    uint16_t insns[5];
    int ld_adr, ld_dst, ld_mop;
    int op_dst, op_src, op_opc;
    int mv_src, mt_dst, st_src, st_mop;
    TCGv op_arg;

    uint32_t pc = ctx->pc;
    uint32_t pc_end = ctx->tb->cs_base;
    int backup = sextract32(ctx->tbflags, GUSA_SHIFT, 8);
    int max_insns = (pc_end - pc) / 2;
    int i;

    if (pc != pc_end + backup || max_insns < 2) {
        /* This is a malformed gUSA region.  Don't do anything special,
           since the interpreter is likely to get confused.  */
        ctx->envflags &= ~GUSA_MASK;
        return 0;
    }

    if (ctx->tbflags & GUSA_EXCLUSIVE) {
        /* Regardless of single-stepping or the end of the page,
           we must complete execution of the gUSA region while
           holding the exclusive lock.  */
        *pmax_insns = max_insns;
        return 0;
    }

    /* The state machine below will consume only a few insns.
       If there are more than that in a region, fail now.  */
    if (max_insns > ARRAY_SIZE(insns)) {
        goto fail;
    }

    /* Read all of the insns for the region.  */
    for (i = 0; i < max_insns; ++i) {
        insns[i] = cpu_lduw_code(env, pc + i * 2);
    }

    ld_adr = ld_dst = ld_mop = -1;
    mv_src = -1;
    op_dst = op_src = op_opc = -1;
    mt_dst = -1;
    st_src = st_mop = -1;
    TCGV_UNUSED(op_arg);
    i = 0;

#define NEXT_INSN \
    do { if (i >= max_insns) goto fail; ctx->opcode = insns[i++]; } while (0)

    /*
     * Expect a load to begin the region.
     */
    NEXT_INSN;
    switch (ctx->opcode & 0xf00f) {
    case 0x6000:                                 /* mov.b @Rm,Rn */
        ld_mop = MO_SB;
        break;
    case 0x6001:                                 /* mov.w @Rm,Rn */
        ld_mop = MO_TESW;
        break;
    case 0x6002:                                 /* mov.l @Rm,Rn */
        ld_mop = MO_TESL;
        break;
    default:
        goto fail;
    }
    ld_adr = B7_4;
    ld_dst = B11_8;
    if (ld_adr == ld_dst) {
        goto fail;
    }
    /* Unless we see a mov, any two-operand operation must use ld_dst.  */
    op_dst = ld_dst;

    /*
     * Expect an optional register move.
     */
    NEXT_INSN;
    switch (ctx->opcode & 0xf00f) {
    case 0x6003:                                 /* mov Rm,Rn */
        /* Here we want to recognize ld_dst being saved for later
           consumption, or for another input register being copied
           so that ld_dst need not be clobbered during the operation.  */
        op_dst = B11_8;
        mv_src = B7_4;
        if (op_dst == ld_dst) {
            /* Overwriting the load output.  */
            goto fail;
        }
        if (mv_src != ld_dst) {
            /* Copying a new input; constrain op_src to match the load.  */
            op_src = ld_dst;
        }
        break;

    default:
        /* Put back and re-examine as operation.  */
        --i;
    }

    /*
     * Expect the operation.
     */
    NEXT_INSN;
    switch (ctx->opcode & 0xf00f) {
    case 0x300c:                                 /* add Rm,Rn */
        op_opc = INDEX_op_add_i32;
        goto do_reg_op;
    case 0x2009:                                 /* and Rm,Rn */
        op_opc = INDEX_op_and_i32;
        goto do_reg_op;
    case 0x200a:                                 /* xor Rm,Rn */
        op_opc = INDEX_op_xor_i32;
        goto do_reg_op;
    case 0x200b:                                 /* or Rm,Rn */
        op_opc = INDEX_op_or_i32;
    do_reg_op:
        /* The operation register should be as expected, and the
           other input cannot depend on the load.  */
        if (op_dst != B11_8) {
            goto fail;
        }
        if (op_src < 0) {
            /* Unconstrainted input.  */
            op_src = B7_4;
        } else if (op_src == B7_4) {
            /* Constrained input matched load.  All operations are
               commutative; "swap" them by "moving" the load output
               to the (implicit) first argument and the move source
               to the (explicit) second argument.  */
            op_src = mv_src;
        } else {
            goto fail;
        }
        op_arg = REG(op_src);
        break;

    case 0x6007:                                 /* not Rm,Rn */
        if (ld_dst != B7_4 || mv_src >= 0) {
            goto fail;
        }
        op_dst = B11_8;
        op_opc = INDEX_op_xor_i32;
        op_arg = tcg_const_i32(-1);
        break;

    case 0x7000 ... 0x700f:                      /* add #imm,Rn */
        if (op_dst != B11_8 || mv_src >= 0) {
            goto fail;
        }
        op_opc = INDEX_op_add_i32;
        op_arg = tcg_const_i32(B7_0s);
        break;

    case 0x3000:                                 /* cmp/eq Rm,Rn */
        /* Looking for the middle of a compare-and-swap sequence,
           beginning with the compare.  Operands can be either of
           the pair, and the result is in T.  */
        if ((ld_dst == B11_8) + (ld_dst == B7_4) != 1 || mv_src >= 0) {
            goto fail;
        }
        op_opc = INDEX_op_setcond_i32;  /* placeholder for CAS */
        op_src = (ld_dst == B11_8 ? B7_4 : B11_8);
        op_arg = REG(op_src);

        NEXT_INSN;
        switch (ctx->opcode & 0xff00) {
        case 0x8b00:                             /* bf label */
        case 0x8f00:                             /* bf/s label */
            if (pc + (i + 1 + B7_0s) * 2 != pc_end) {
                goto fail;
            }
            if ((ctx->opcode & 0xff00) == 0x8b00) { /* bf label */
                break;
            }
            /* We're looking to unconditionally modify Rn with the
               result of the comparison, within the delay slot of
               the branch.  This is used by older gcc.  */
            NEXT_INSN;
            if ((ctx->opcode & 0xf0ff) == 0x0029) { /* movt Rn */
                mt_dst = B11_8;
            } else {
                goto fail;
            }
            break;

        default:
            goto fail;
        }
        break;

    case 0x2008:                                 /* tst Rm,Rn */
        /* Looking for a compare-and-swap against zero.  */
        if (ld_dst != B11_8 || ld_dst != B7_4 || mv_src >= 0) {
            goto fail;
        }
        op_opc = INDEX_op_setcond_i32;
        op_arg = tcg_const_i32(0);

        NEXT_INSN;
        if ((ctx->opcode & 0xff00) != 0x8900    /* bt label */
            || pc + (i + 1 + B7_0s) * 2 != pc_end) {
            goto fail;
        }
        break;

    default:
        /* Put back and re-examine as store.  */
        --i;
    }

    /*
     * Expect the store to end the region; it must be the last insn
     * (NEXT_INSN below consumes it).
     */
    if (i != max_insns - 1) {
        goto fail;
    }
    NEXT_INSN;
    switch (ctx->opcode & 0xf00f) {
    case 0x2000:                                 /* mov.b Rm,@Rn */
        st_mop = MO_UB;
        break;
    case 0x2001:                                 /* mov.w Rm,@Rn */
        st_mop = MO_UW;
        break;
    case 0x2002:                                 /* mov.l Rm,@Rn */
        st_mop = MO_UL;
        break;
    default:
        goto fail;
    }
    /* The store must be to the same address as the load, of the same size.  */
    if (ld_adr != B11_8 || st_mop != (ld_mop & MO_SIZE)) {
        goto fail;
    }
    st_src = B7_4;

#undef NEXT_INSN

    /*
     * Emit the operation as a single host atomic op.
     */
    tcg_gen_insn_start(pc, ctx->envflags);
    switch (op_opc) {
    case -1:
        /* No operation found.  Look for exchange pattern.  */
        if (st_src == ld_dst || mv_src >= 0) {
            goto fail;
        }
        tcg_gen_atomic_xchg_i32(REG(ld_dst), REG(ld_adr), REG(st_src),
                                ctx->memidx, ld_mop);
        break;

    case INDEX_op_add_i32:
        if (op_dst != st_src) {
            goto fail;
        }
        if (op_dst == ld_dst && st_mop == MO_UL) {
            tcg_gen_atomic_add_fetch_i32(REG(ld_dst), REG(ld_adr),
                                         op_arg, ctx->memidx, ld_mop);
        } else {
            tcg_gen_atomic_fetch_add_i32(REG(ld_dst), REG(ld_adr),
                                         op_arg, ctx->memidx, ld_mop);
            if (op_dst != ld_dst) {
                /* Note that mop sizes < 4 cannot use add_fetch
                   because it won't carry into the higher bits.  */
                tcg_gen_add_i32(REG(op_dst), REG(ld_dst), op_arg);
            }
        }
        break;

    case INDEX_op_and_i32:
        if (op_dst != st_src) {
            goto fail;
        }
        if (op_dst == ld_dst) {
            tcg_gen_atomic_and_fetch_i32(REG(ld_dst), REG(ld_adr),
                                         op_arg, ctx->memidx, ld_mop);
        } else {
            tcg_gen_atomic_fetch_and_i32(REG(ld_dst), REG(ld_adr),
                                         op_arg, ctx->memidx, ld_mop);
            tcg_gen_and_i32(REG(op_dst), REG(ld_dst), op_arg);
        }
        break;

    case INDEX_op_or_i32:
        if (op_dst != st_src) {
            goto fail;
        }
        if (op_dst == ld_dst) {
            tcg_gen_atomic_or_fetch_i32(REG(ld_dst), REG(ld_adr),
                                        op_arg, ctx->memidx, ld_mop);
        } else {
            tcg_gen_atomic_fetch_or_i32(REG(ld_dst), REG(ld_adr),
                                        op_arg, ctx->memidx, ld_mop);
            tcg_gen_or_i32(REG(op_dst), REG(ld_dst), op_arg);
        }
        break;

    case INDEX_op_xor_i32:
        if (op_dst != st_src) {
            goto fail;
        }
        if (op_dst == ld_dst) {
            tcg_gen_atomic_xor_fetch_i32(REG(ld_dst), REG(ld_adr),
                                         op_arg, ctx->memidx, ld_mop);
        } else {
            tcg_gen_atomic_fetch_xor_i32(REG(ld_dst), REG(ld_adr),
                                         op_arg, ctx->memidx, ld_mop);
            tcg_gen_xor_i32(REG(op_dst), REG(ld_dst), op_arg);
        }
        break;

    case INDEX_op_setcond_i32:
        /* Compare-and-swap: store only if the old value equals op_arg;
           T gets the comparison result.  */
        if (st_src == ld_dst) {
            goto fail;
        }
        tcg_gen_atomic_cmpxchg_i32(REG(ld_dst), REG(ld_adr), op_arg,
                                   REG(st_src), ctx->memidx, ld_mop);
        tcg_gen_setcond_i32(TCG_COND_EQ, cpu_sr_t, REG(ld_dst), op_arg);
        if (mt_dst >= 0) {
            tcg_gen_mov_i32(REG(mt_dst), cpu_sr_t);
        }
        break;

    default:
        g_assert_not_reached();
    }

    /* If op_src is not a real register, release the temporary holding
       the constant operand.  */
    if (op_src < 0) {
        tcg_temp_free_i32(op_arg);
    }

    /* The entire region has been translated.  */
    ctx->envflags &= ~GUSA_MASK;
    ctx->pc = pc_end;
    return max_insns;

 fail:
    qemu_log_mask(LOG_UNIMP, "Unrecognized gUSA sequence %08x-%08x\n",
                  pc, pc_end);

    /* Restart with the EXCLUSIVE bit set, within a TB run via
       cpu_exec_step_atomic holding the exclusive lock.  */
    tcg_gen_insn_start(pc, ctx->envflags);
    ctx->envflags |= GUSA_EXCLUSIVE;
    gen_save_cpu_state(ctx, false);
    gen_helper_exclusive(cpu_env);
    ctx->bstate = BS_EXCP;

    /* We're not executing an instruction, but we must report one for the
       purposes of accounting within the TB.  At which point we might as
       well report the entire region consumed via ctx->pc so that it's
       immediately available in the disassembly dump.  */
    ctx->pc = pc_end;
    return 1;
}
2231#endif
2232
/* Translate a guest translation block starting at tb->pc into TCG ops.
 * Fills in tb->size and tb->icount when done.  */
void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
{
    CPUSH4State *env = cs->env_ptr;
    DisasContext ctx;
    target_ulong pc_start;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    ctx.pc = pc_start;
    ctx.tbflags = (uint32_t)tb->flags;
    ctx.envflags = tb->flags & TB_FLAG_ENVFLAGS_MASK;
    ctx.bstate = BS_NONE;
    /* Kernel mode (SR_MD set) uses mmu index 0, user mode index 1.  */
    ctx.memidx = (ctx.tbflags & (1u << SR_MD)) == 0 ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch.  */
    ctx.delayed_pc = -1;
    ctx.tb = tb;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.features = env->features;
    ctx.has_movcal = (ctx.tbflags & TB_FLAG_PENDING_MOVCA);
    /* Bank 1 of the general registers is active only in privileged
       mode with SR_RB set.  */
    ctx.gbank = ((ctx.tbflags & (1 << SR_MD)) &&
                 (ctx.tbflags & (1 << SR_RB))) * 0x10;
    ctx.fbank = ctx.tbflags & FPSCR_FR ? 0x10 : 0;

    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    max_insns = MIN(max_insns, TCG_MAX_INSNS);

    /* Since the ISA is fixed-width, we can bound by the number
       of instructions remaining on the page.  */
    num_insns = -(ctx.pc | TARGET_PAGE_MASK) / 2;
    max_insns = MIN(max_insns, num_insns);

    /* Single stepping means just that.  */
    if (ctx.singlestep_enabled || singlestep) {
        max_insns = 1;
    }

    gen_tb_start(tb);
    num_insns = 0;

#ifdef CONFIG_USER_ONLY
    if (ctx.tbflags & GUSA_MASK) {
        /* Try to collapse a gUSA restartable sequence into host atomics.  */
        num_insns = decode_gusa(&ctx, env, &max_insns);
    }
#endif

    while (ctx.bstate == BS_NONE
           && num_insns < max_insns
           && !tcg_op_buf_full()) {
        tcg_gen_insn_start(ctx.pc, ctx.envflags);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            /* We have hit a breakpoint - make sure PC is up-to-date.  */
            gen_save_cpu_state(&ctx, true);
            gen_helper_debug(cpu_env);
            ctx.bstate = BS_EXCP;
            /* The address covered by the breakpoint must be included
               in [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so
               that the "tb->size = ctx.pc - pc_start" below does the
               right thing.  */
            ctx.pc += 2;
            break;
        }

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        ctx.opcode = cpu_lduw_code(env, ctx.pc);
        decode_opc(&ctx);
        ctx.pc += 2;
    }
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }

    if (ctx.tbflags & GUSA_EXCLUSIVE) {
        /* Ending the region of exclusivity.  Clear the bits.  */
        ctx.envflags &= ~GUSA_MASK;
    }

    if (cs->singlestep_enabled) {
        gen_save_cpu_state(&ctx, true);
        gen_helper_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            /* State changed (e.g. SR/FPSCR write): leave the TB.  */
            gen_save_cpu_state(&ctx, true);
            tcg_gen_exit_tb(0);
            break;
        case BS_NONE:
            /* Fell off the end of the page/insn budget: chain to next TB.  */
            gen_save_cpu_state(&ctx, false);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            /* Exception already raised; nothing more to emit.  */
        case BS_BRANCH:
        default:
            break;
        }
    }

    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("IN:\n");
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}
2356
2357void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb,
2358 target_ulong *data)
2359{
2360 env->pc = data[0];
2361 env->flags = data[1];
2362
2363
2364
2365}
2366