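/*
 * Helpers to read/write the two 64-bit halves of VSR n in CPUPPCState:
 * "h" is the upper doubleword, "l" the lower.
 */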
static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

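/*
 * X-form scalar loads: EA = (RA|0) + RB.  Only the upper doubleword of
 * VSR[XT] is written; the lower doubleword is left untouched.
 */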
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
                                                              \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

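/* lxvd2x: load the two doublewords of VSR[XT] from EA and EA + 8. */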
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

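/* lxvdsx: load one doubleword and splat it into both halves of VSR[XT]. */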
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

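/*
 * lxvw4x: load four word elements.  In little-endian mode the two words
 * within each doubleword are swapped back into element order with a
 * shift/deposit pair after the 64-bit loads.
 */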
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

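/*
 * Byte-swap each of the eight 16-bit lanes held in the inh:inl pair,
 * using the usual mask/shift/or trick on 64-bit values.
 */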
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

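    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */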
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

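    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */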
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

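/*
 * Byte-swap each of the four 32-bit lanes: bswap64 reverses all eight
 * bytes of a doubleword, then the two words are swapped back into place.
 */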
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
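
/*
 * lxvh8x: load eight halfwords.  Storage is read as two big-endian
 * doublewords; in little-endian mode the 16-bit lanes are then swapped.
 */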
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

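/*
 * lxv/stxv (DQ-form) and lxvx/stxvx (X-form).  VSRs 0-31 require VSX;
 * VSRs 32-63 overlay the Altivec registers and require Altivec.  In
 * little-endian mode the low doubleword is accessed first.
 */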
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)              \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    int xt;                                                   \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
    TCGv_i64 xtl;                                             \
                                                              \
    if (indexed) {                                            \
        xt = xT(ctx->opcode);                                 \
    } else {                                                  \
        xt = DQxT(ctx->opcode);                               \
    }                                                         \
                                                              \
    if (xt < 32) {                                            \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    xtl = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, xt);                                    \
    get_cpu_vsrl(xtl, xt);                                    \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    if (indexed) {                                            \
        gen_addr_reg_index(ctx, EA);                          \
    } else {                                                  \
        gen_addr_imm_index(ctx, EA, 0x0F);                    \
    }                                                         \
    if (ctx->le_mode) {                                       \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);     \
        set_cpu_vsrh(xt, xth);                                \
    } else {                                                  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrh(xt, xth);                                \
        tcg_gen_addi_tl(EA, EA, 8);                           \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);     \
        set_cpu_vsrl(xt, xtl);                                \
    }                                                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
    tcg_temp_free_i64(xtl);                                   \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

#ifdef TARGET_PPC64
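/*
 * lxvl/lxvll/stxvl/stxvll: load/store with length.  The variable-length
 * access is done entirely in the out-of-line helper, which receives the
 * target register number and the length operand from RB.
 */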
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                    \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA, xt;                                              \
                                                              \
    if (xT(ctx->opcode) < 32) {                               \
        if (unlikely(!ctx->vsx_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VSXU);            \
            return;                                           \
        }                                                     \
    } else {                                                  \
        if (unlikely(!ctx->altivec_enabled)) {                \
            gen_exception(ctx, POWERPC_EXCP_VPU);             \
            return;                                           \
        }                                                     \
    }                                                         \
    EA = tcg_temp_new();                                      \
    xt = tcg_const_tl(xT(ctx->opcode));                       \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    gen_addr_register(ctx, EA);                               \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
    tcg_temp_free(EA);                                        \
    tcg_temp_free(xt);                                        \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

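/*
 * DS-form scalar loads (lxsd, lxssp).  These target the Altivec
 * registers (VSR[VRT + 32]), hence the VPU check.
 */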
#define VSX_LOAD_SCALAR_DS(name, operation)                   \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                  \
                                                              \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

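/*
 * X-form scalar stores: store the upper doubleword of VSR[XS] (or a
 * truncation of it) to EA = (RA|0) + RB.
 */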
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

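/* Vector stores: the mirror images of the lxv* loads above. */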
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

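/*
 * DS-form scalar stores (stxsd, stxssp), operating on the Altivec
 * registers like their load counterparts.
 */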
#define VSX_STORE_SCALAR_DS(name, operation)                  \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 xth;                                             \
                                                              \
    if (unlikely(!ctx->altivec_enabled)) {                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                 \
        return;                                               \
    }                                                         \
    xth = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_imm_index(ctx, EA, 0x03);                        \
    gen_qemu_##operation(ctx, xth, EA);                       \
                                                              \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(xth);                                   \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

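/*
 * Moves between GPRs and VSR doublewords.  VSRs 0-31 overlay the FPRs,
 * so the fpu_enabled check applies there; VSRs 32-63 overlay the
 * Altivec registers.
 */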
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

#endif

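/*
 * xxpermdi: build VSR[XT] from a doubleword of VSR[XA] (selected by
 * DM[0]) and one of VSR[XB] (selected by DM[1]).  When XT overlaps a
 * source, both inputs are read before anything is written back.
 */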
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

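/*
 * Scalar sign manipulation: abs clears the sign bit (andc), nabs sets
 * it (or), neg flips it (xor), and cpsgn copies it over from XA.
 */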
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xb, sgm;                                         \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xb = tcg_temp_new_i64();                                  \
    sgm = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xb, xB(ctx->opcode));                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xb, xb, sgm);                          \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xb, xb, sgm);                         \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xa = tcg_temp_new_i64();                     \
        get_cpu_vsrh(xa, xA(ctx->opcode));                    \
        tcg_gen_and_i64(xa, xa, sgm);                         \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        tcg_gen_or_i64(xb, xb, xa);                           \
        tcg_temp_free_i64(xa);                                \
        break;                                                \
    }                                                         \
    }                                                         \
    set_cpu_vsrh(xT(ctx->opcode), xb);                        \
    tcg_temp_free_i64(xb);                                    \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    int xa;                                                   \
    int xt = rD(ctx->opcode) + 32;                            \
    int xb = rB(ctx->opcode) + 32;                            \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                         \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tmp = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xb);                                    \
    get_cpu_vsrl(xbl, xb);                                    \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS:                                              \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        break;                                                \
    case OP_NABS:                                             \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        break;                                                \
    case OP_NEG:                                              \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        break;                                                \
    case OP_CPSGN:                                            \
        xah = tcg_temp_new_i64();                             \
        xa = rA(ctx->opcode) + 32;                            \
        get_cpu_vsrh(tmp, xa);                                \
        tcg_gen_and_i64(xah, tmp, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_temp_free_i64(xah);                               \
        break;                                                \
    }                                                         \
    set_cpu_vsrh(xt, xbh);                                    \
    set_cpu_vsrl(xt, xbl);                                    \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(sgm);                                   \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 xbh, xbl, sgm;                                   \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    get_cpu_vsrh(xbh, xB(ctx->opcode));                       \
    get_cpu_vsrl(xbl, xB(ctx->opcode));                       \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        tcg_gen_or_i64(xbl, xbl, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        tcg_gen_xor_i64(xbl, xbl, sgm);                       \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xah = tcg_temp_new_i64();                    \
        TCGv_i64 xal = tcg_temp_new_i64();                    \
        get_cpu_vsrh(xah, xA(ctx->opcode));                   \
        get_cpu_vsrl(xal, xA(ctx->opcode));                   \
        tcg_gen_and_i64(xah, xah, sgm);                       \
        tcg_gen_and_i64(xal, xal, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_gen_or_i64(xbl, xbl, xal);                        \
        tcg_temp_free_i64(xah);                               \
        tcg_temp_free_i64(xal);                               \
        break;                                                \
    }                                                         \
    }                                                         \
    set_cpu_vsrh(xT(ctx->opcode), xbh);                       \
    set_cpu_vsrl(xT(ctx->opcode), xbl);                       \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

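/*
 * Most VSX arithmetic is done out of line: GEN_VSX_HELPER_2 emits a
 * call to the like-named helper with the raw opcode, while the
 * XT_XB_ENV variant passes the source doubleword by value.
 */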
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsrh(t0, xB(ctx->opcode));                        \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsrh(xT(ctx->opcode), t1);                        \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

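/*
 * xxbr*: byte-reverse the 8-byte (xxbrd), 2-byte (xxbrh), 16-byte
 * (xxbrq) or 4-byte (xxbrw) elements of VSR[XB] into VSR[XT].
 */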
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

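/*
 * 128-bit bitwise operations, expanded inline over the full register
 * with the generic vector infrastructure.
 */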
#define VSX_LOGICAL(name, vece, tcg_op)                       \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),            \
           vsr_full_offset(xA(ctx->opcode)),                  \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);         \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

#define VSX_XXMRG(name, high)                                 \
static void glue(gen_, name)(DisasContext *ctx)               \
{                                                             \
    TCGv_i64 a0, a1, b0, b1, tmp;                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    a0 = tcg_temp_new_i64();                                  \
    a1 = tcg_temp_new_i64();                                  \
    b0 = tcg_temp_new_i64();                                  \
    b1 = tcg_temp_new_i64();                                  \
    tmp = tcg_temp_new_i64();                                 \
    if (high) {                                               \
        get_cpu_vsrh(a0, xA(ctx->opcode));                    \
        get_cpu_vsrh(a1, xA(ctx->opcode));                    \
        get_cpu_vsrh(b0, xB(ctx->opcode));                    \
        get_cpu_vsrh(b1, xB(ctx->opcode));                    \
    } else {                                                  \
        get_cpu_vsrl(a0, xA(ctx->opcode));                    \
        get_cpu_vsrl(a1, xA(ctx->opcode));                    \
        get_cpu_vsrl(b0, xB(ctx->opcode));                    \
        get_cpu_vsrl(b1, xB(ctx->opcode));                    \
    }                                                         \
    tcg_gen_shri_i64(a0, a0, 32);                             \
    tcg_gen_shri_i64(b0, b0, 32);                             \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                 \
    set_cpu_vsrh(xT(ctx->opcode), tmp);                       \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                 \
    set_cpu_vsrl(xT(ctx->opcode), tmp);                       \
    tcg_temp_free_i64(a0);                                    \
    tcg_temp_free_i64(a1);                                    \
    tcg_temp_free_i64(b0);                                    \
    tcg_temp_free_i64(b1);                                    \
    tcg_temp_free_i64(tmp);                                   \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

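/* xxsel: bitwise select, t = (a & ~c) | (b & c). */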
static void xxsel_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c)
{
    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(t, a, b);
}

static void xxsel_vec(unsigned vece, TCGv_vec t, TCGv_vec a,
                      TCGv_vec b, TCGv_vec c)
{
    tcg_gen_and_vec(vece, b, b, c);
    tcg_gen_andc_vec(vece, a, a, c);
    tcg_gen_or_vec(vece, t, a, b);
}

static void gen_xxsel(DisasContext *ctx)
{
    static const GVecGen4 g = {
        .fni8 = xxsel_i64,
        .fniv = xxsel_vec,
        .vece = MO_64,
    };
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_4(vsr_full_offset(rt), vsr_full_offset(ra),
                   vsr_full_offset(rb), vsr_full_offset(rc), 16, 16, &g);
}

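/*
 * xxspltw: splat word UIM of VSR[XB] across VSR[XT].  The byte offset
 * is flipped on little-endian hosts so that it indexes the same
 * architectural word.
 */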
static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
}

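/*
 * xxsldwi: treat XA:XB as an eight-word vector and copy the four
 * consecutive words starting at word index SHW into VSR[XT].
 */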
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsrh(xth, xA(ctx->opcode));
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrh(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrl(t0, xA(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsrl(xth, xA(ctx->opcode));
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrl(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrl(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

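/*
 * xxextractuw/xxinsertw: done in a helper, with the byte index passed
 * as an immediate.  An out-of-range index (uimm > 15) zeroes VSR[XT].
 */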
#define VSX_EXTRACT_INSERT(name)                              \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv xt, xb;                                              \
    TCGv_i32 t0;                                              \
    TCGv_i64 t1;                                              \
    uint8_t uimm = UIMM4(ctx->opcode);                        \
                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xt = tcg_const_tl(xT(ctx->opcode));                       \
    xb = tcg_const_tl(xB(ctx->opcode));                       \
    t0 = tcg_temp_new_i32();                                  \
    t1 = tcg_temp_new_i64();                                  \
    /*                                                        \
     * uimm > 15 is out of bounds: the target register is     \
     * simply zeroed; all other cases go to the helper.       \
     */                                                       \
    if (uimm > 15) {                                          \
        tcg_gen_movi_i64(t1, 0);                              \
        set_cpu_vsrh(xT(ctx->opcode), t1);                    \
        set_cpu_vsrl(xT(ctx->opcode), t1);                    \
        tcg_temp_free(xb);                                    \
        tcg_temp_free(xt);                                    \
        tcg_temp_free_i32(t0);                                \
        tcg_temp_free_i64(t1);                                \
        return;                                               \
    }                                                         \
    tcg_gen_movi_i32(t0, uimm);                               \
    gen_helper_##name(cpu_env, xt, xb, t0);                   \
    tcg_temp_free(xb);                                        \
    tcg_temp_free(xt);                                        \
    tcg_temp_free_i32(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
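/*
 * Exponent extraction/insertion.  For double precision the biased
 * exponent is an 11-bit field at bit 52; for quad precision, a 15-bit
 * field at bit 48 of the most significant doubleword.
 */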
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

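/*
 * Significand extraction: the implicit integer bit is 1 unless the
 * biased exponent is 0 (zero/denormal) or all-ones (inf/NaN).
 */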
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

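/*
 * Vector variants of exponent insert/extract: one lane per word
 * (8-bit exponents at bit 23) or per doubleword (11-bit at bit 52).
 */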
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

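/*
 * Per-doubleword significand extraction, with the same implicit-bit
 * rule as the scalar form above.
 */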
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL