/*
 * align.c - handle alignment exceptions for the PowerPC.
 *
 * Fixes up alignment faults by emulating the offending load or
 * store in software, for the cases the hardware cannot handle.
 */
#include <linux/kernel.h>
#include <linux/mm.h>
#include <asm/processor.h>
#include <asm/uaccess.h>
#include <asm/system.h>
#include <asm/cache.h>
#include <asm/cputable.h>
#include <asm/emulated_ops.h>

struct aligninfo {
	unsigned char len;
	unsigned char flags;
};

#define IS_XFORM(inst)	(((inst) >> 26) == 31)
#define IS_DSFORM(inst)	(((inst) >> 26) >= 56)

#define INVALID	{ 0, 0 }

/* Bits in the flags field */
#define LD	0	/* load */
#define ST	1	/* store */
#define SE	2	/* sign-extend value */
#define F	4	/* to/from fp regs */
#define U	8	/* update index register */
#define M	0x10	/* multiple load/store */
#define SW	0x20	/* byte swap */
#define S	0x40	/* single-precision fp, or... */
#define SX	0x40	/* ...byte count in XER (string ops) */
#define HARD	0x80	/* string ops and dcbz: special handling */
#define E4	0x40	/* SPE endianness is word */
#define E8	0x80	/* SPE endianness is double word */
#define SPLT	0x80	/* VSX SPLAT load */

/* DSISR bits reported for a DCBZ instruction: */
#define DCBZ	0x5f	/* aligninfo index that a dcbz fault produces */

/* SWAP expects the caller to declare a local scratch variable t */
#define SWAP(a, b)	(t = (a), (a) = (b), (b) = t)

/*
 * The PowerPC stores certain bits of the instruction that caused the
 * alignment exception in the DSISR register.  This array maps those
 * bits to information about the operand length and what the
 * instruction would do.
 */
static struct aligninfo aligninfo[128] = {
	{ 4, LD },		/* 0x00: lwz / lwarx */
	INVALID,		/* 0x01 */
	{ 4, ST },		/* 0x02: stw */
	INVALID,		/* 0x03 */
	{ 2, LD },		/* 0x04: lhz */
	{ 2, LD+SE },		/* 0x05: lha */
	{ 2, ST },		/* 0x06: sth */
	{ 4, LD+M },		/* 0x07: lmw */
	{ 4, LD+F+S },		/* 0x08: lfs */
	{ 8, LD+F },		/* 0x09: lfd */
	{ 4, ST+F+S },		/* 0x0a: stfs */
	{ 8, ST+F },		/* 0x0b: stfd */
	INVALID,		/* 0x0c */
	{ 8, LD },		/* 0x0d: ld */
	INVALID,		/* 0x0e */
	{ 8, ST },		/* 0x0f: std */
	{ 4, LD+U },		/* 0x10: lwzu */
	INVALID,		/* 0x11 */
	{ 4, ST+U },		/* 0x12: stwu */
	INVALID,		/* 0x13 */
	{ 2, LD+U },		/* 0x14: lhzu */
	{ 2, LD+SE+U },		/* 0x15: lhau */
	{ 2, ST+U },		/* 0x16: sthu */
	{ 4, ST+M },		/* 0x17: stmw */
	{ 4, LD+F+S+U },	/* 0x18: lfsu */
	{ 8, LD+F+U },		/* 0x19: lfdu */
	{ 4, ST+F+S+U },	/* 0x1a: stfsu */
	{ 8, ST+F+U },		/* 0x1b: stfdu */
	{ 16, LD+F },		/* 0x1c: lfdp */
	INVALID,		/* 0x1d */
	{ 16, ST+F },		/* 0x1e: stfdp */
	INVALID,		/* 0x1f */
	{ 8, LD },		/* 0x20: ldx */
	INVALID,		/* 0x21 */
	{ 8, ST },		/* 0x22: stdx */
	INVALID,		/* 0x23 */
	INVALID,		/* 0x24 */
	{ 4, LD+SE },		/* 0x25: lwax */
	INVALID,		/* 0x26 */
	INVALID,		/* 0x27 */
	{ 4, LD+M+HARD+SX },	/* 0x28: lswx */
	{ 4, LD+M+HARD },	/* 0x29: lswi */
	{ 4, ST+M+HARD+SX },	/* 0x2a: stswx */
	{ 4, ST+M+HARD },	/* 0x2b: stswi */
	INVALID,		/* 0x2c */
	{ 8, LD+U },		/* 0x2d: ldu */
	INVALID,		/* 0x2e */
	{ 8, ST+U },		/* 0x2f: stdu */
	{ 8, LD+U },		/* 0x30: ldux */
	INVALID,		/* 0x31 */
	{ 8, ST+U },		/* 0x32: stdux */
	INVALID,		/* 0x33 */
	INVALID,		/* 0x34 */
	{ 4, LD+SE+U },		/* 0x35: lwaux */
	INVALID,		/* 0x36 */
	INVALID,		/* 0x37 */
	INVALID,		/* 0x38 */
	INVALID,		/* 0x39 */
	INVALID,		/* 0x3a */
	INVALID,		/* 0x3b */
	INVALID,		/* 0x3c */
	INVALID,		/* 0x3d */
	INVALID,		/* 0x3e */
	INVALID,		/* 0x3f */
	INVALID,		/* 0x40 */
	INVALID,		/* 0x41 */
	INVALID,		/* 0x42 */
	INVALID,		/* 0x43 */
	INVALID,		/* 0x44 */
	INVALID,		/* 0x45 */
	INVALID,		/* 0x46 */
	INVALID,		/* 0x47 */
	{ 4, LD+SW },		/* 0x48: lwbrx */
	INVALID,		/* 0x49 */
	{ 4, ST+SW },		/* 0x4a: stwbrx */
	INVALID,		/* 0x4b */
	{ 2, LD+SW },		/* 0x4c: lhbrx */
	{ 4, LD+SE },		/* 0x4d: lwa */
	{ 2, ST+SW },		/* 0x4e: sthbrx */
	INVALID,		/* 0x4f */
	INVALID,		/* 0x50 */
	INVALID,		/* 0x51 */
	INVALID,		/* 0x52 */
	INVALID,		/* 0x53 */
	INVALID,		/* 0x54 */
	INVALID,		/* 0x55 */
	INVALID,		/* 0x56 */
	INVALID,		/* 0x57 */
	INVALID,		/* 0x58 */
	INVALID,		/* 0x59 */
	INVALID,		/* 0x5a */
	INVALID,		/* 0x5b */
	INVALID,		/* 0x5c */
	INVALID,		/* 0x5d */
	INVALID,		/* 0x5e */
	{ 0, ST+HARD },		/* 0x5f: dcbz */
	{ 4, LD },		/* 0x60: lwzx */
	INVALID,		/* 0x61 */
	{ 4, ST },		/* 0x62: stwx */
	INVALID,		/* 0x63 */
	{ 2, LD },		/* 0x64: lhzx */
	{ 2, LD+SE },		/* 0x65: lhax */
	{ 2, ST },		/* 0x66: sthx */
	INVALID,		/* 0x67 */
	{ 4, LD+F+S },		/* 0x68: lfsx */
	{ 8, LD+F },		/* 0x69: lfdx */
	{ 4, ST+F+S },		/* 0x6a: stfsx */
	{ 8, ST+F },		/* 0x6b: stfdx */
	{ 16, LD+F },		/* 0x6c: lfdpx */
	{ 4, LD+F+SE },		/* 0x6d: lfiwax */
	{ 16, ST+F },		/* 0x6e: stfdpx */
	{ 4, ST+F },		/* 0x6f: stfiwx */
	{ 4, LD+U },		/* 0x70: lwzux */
	INVALID,		/* 0x71 */
	{ 4, ST+U },		/* 0x72: stwux */
	INVALID,		/* 0x73 */
	{ 2, LD+U },		/* 0x74: lhzux */
	{ 2, LD+SE+U },		/* 0x75: lhaux */
	{ 2, ST+U },		/* 0x76: sthux */
	INVALID,		/* 0x77 */
	{ 4, LD+F+S+U },	/* 0x78: lfsux */
	{ 8, LD+F+U },		/* 0x79: lfdux */
	{ 4, ST+F+S+U },	/* 0x7a: stfsux */
	{ 8, ST+F+U },		/* 0x7b: stfdux */
	INVALID,		/* 0x7c */
	{ 4, LD+F },		/* 0x7d: lfiwzx */
	INVALID,		/* 0x7e */
	INVALID,		/* 0x7f */
};

/*
 * Create a DSISR value from the instruction
 */
static inline unsigned make_dsisr(unsigned instr)
{
	unsigned dsisr;

	/* bits 6:15 --> 22:31 */
	dsisr = (instr & 0x03ff0000) >> 16;

	if (IS_XFORM(instr)) {
		/* bits 29:30 --> 15:16 */
		dsisr |= (instr & 0x00000006) << 14;
		/* bit     25 -->    17 */
		dsisr |= (instr & 0x00000040) << 8;
		/* bits 21:24 --> 18:21 */
		dsisr |= (instr & 0x00000780) << 3;
	} else {
		/* bit      5 -->    17 */
		dsisr |= (instr & 0x04000000) >> 12;
		/* bits  1: 4 --> 18:21 */
		dsisr |= (instr & 0x78000000) >> 17;
		/* bits 30:31 --> 12:13 */
		if (IS_DSFORM(instr))
			dsisr |= (instr & 0x00000003) << 18;
	}

	return dsisr;
}
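
/*
 * For illustration (not from the original source): for lwzx r5,r3,r4
 * (instruction word 0x7ca3202e, an X-form load), make_dsisr() returns
 * 0x000180a3.  fix_alignment() then extracts reg = (dsisr >> 5) & 0x1f
 * = 5 (rT), areg = dsisr & 0x1f = 3 (rA), and the table index
 * ((dsisr >> 10) & 0x7f) | ((dsisr >> 13) & 0x60) = 0x60, whose
 * aligninfo entry is { 4, LD }: a 4-byte load, as expected.
 */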

/*
 * The dcbz (data cache block zero) instruction
 * gives an alignment fault if used on non-cacheable
 * memory.  We handle the fault mainly for the
 * case when we are running with the cache disabled
 * for debugging.
 */
static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
{
	long __user *p;
	int i, size;

#ifdef __powerpc64__
	size = ppc64_caches.dline_size;
#else
	size = L1_CACHE_BYTES;
#endif
	p = (long __user *) (regs->dar & -size);
	if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
		return -EFAULT;
	for (i = 0; i < size / sizeof(long); ++i)
		if (__put_user_inatomic(0, p+i))
			return -EFAULT;
	return 1;
}

/*
 * Emulate load & store multiple instructions
 * On 64-bit machines, these instructions only affect/use the
 * bottom 4 bytes of each register, and the loads clear the
 * top 4 bytes of the affected register.
 */
#ifdef CONFIG_PPC64
#define REG_BYTE(rp, i)		*((u8 *)((rp) + ((i) >> 2)) + ((i) & 3) + 4)
#else
#define REG_BYTE(rp, i)		*((u8 *)(rp) + (i))
#endif

#define SWIZ_PTR(p)		((unsigned char __user *)((p) ^ swiz))
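
/*
 * For illustration (not from the original source): on CONFIG_PPC64,
 * REG_BYTE(rp, 5) expands to *((u8 *)(rp + 1) + 1 + 4), i.e. the second
 * byte of the low 32-bit half of the second register, so successive i
 * walk the low words of consecutive registers in big-endian byte order.
 * SWIZ_PTR() XORs the user address with swiz (7 when emulating "PPC
 * little-endian" address swizzling, 0 otherwise), which reverses the
 * byte order within an aligned doubleword.
 */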
266
267static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
268 unsigned int reg, unsigned int nb,
269 unsigned int flags, unsigned int instr,
270 unsigned long swiz)
271{
272 unsigned long *rptr;
273 unsigned int nb0, i, bswiz;
274 unsigned long p;
275
276
277
278
279
280
281
282 if (unlikely((nb > 4) || !user_mode(regs)))
283 return 0;
284
285
286 nb0 = 0;
287 if (flags & HARD) {
288 if (flags & SX) {
289 nb = regs->xer & 127;
290 if (nb == 0)
291 return 1;
292 } else {
293 unsigned long pc = regs->nip ^ (swiz & 4);
294
295 if (__get_user_inatomic(instr,
296 (unsigned int __user *)pc))
297 return -EFAULT;
298 if (swiz == 0 && (flags & SW))
299 instr = cpu_to_le32(instr);
300 nb = (instr >> 11) & 0x1f;
301 if (nb == 0)
302 nb = 32;
303 }
304 if (nb + reg * 4 > 128) {
305 nb0 = nb + reg * 4 - 128;
306 nb = 128 - reg * 4;
307 }
308 } else {
309
310 nb = (32 - reg) * 4;
311 }
312
313 if (!access_ok((flags & ST ? VERIFY_WRITE: VERIFY_READ), addr, nb+nb0))
314 return -EFAULT;
315
316 rptr = ®s->gpr[reg];
317 p = (unsigned long) addr;
318 bswiz = (flags & SW)? 3: 0;
319
320 if (!(flags & ST)) {
321
322
323
324
325
326 memset(rptr, 0, ((nb + 3) / 4) * sizeof(unsigned long));
327 if (nb0 > 0)
328 memset(®s->gpr[0], 0,
329 ((nb0 + 3) / 4) * sizeof(unsigned long));
330
331 for (i = 0; i < nb; ++i, ++p)
332 if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
333 SWIZ_PTR(p)))
334 return -EFAULT;
335 if (nb0 > 0) {
336 rptr = ®s->gpr[0];
337 addr += nb;
338 for (i = 0; i < nb0; ++i, ++p)
339 if (__get_user_inatomic(REG_BYTE(rptr,
340 i ^ bswiz),
341 SWIZ_PTR(p)))
342 return -EFAULT;
343 }
344
345 } else {
346 for (i = 0; i < nb; ++i, ++p)
347 if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
348 SWIZ_PTR(p)))
349 return -EFAULT;
350 if (nb0 > 0) {
351 rptr = ®s->gpr[0];
352 addr += nb;
353 for (i = 0; i < nb0; ++i, ++p)
354 if (__put_user_inatomic(REG_BYTE(rptr,
355 i ^ bswiz),
356 SWIZ_PTR(p)))
357 return -EFAULT;
358 }
359 }
360 return 1;
361}
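
/*
 * For illustration (not from the original source): an unaligned
 * lmw r29,0(r1) arrives here with flags = LD+M and no HARD bit, so
 * nb = (32 - 29) * 4 = 12 bytes are copied into r29..r31.  A string
 * op whose length runs past r31 wraps into r0, with nb0 holding the
 * wrapped byte count.
 */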

/*
 * Emulate floating-point pair loads and stores.
 * Only POWER6 has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_fp_pair(unsigned char __user *addr, unsigned int reg,
			   unsigned int flags)
{
	char *ptr0 = (char *) &current->thread.TS_FPR(reg);
	char *ptr1 = (char *) &current->thread.TS_FPR(reg+1);
	int i, ret, sw = 0;

	if (!(flags & F))
		return 0;
	/* lfdp/stfdp require an even register pair */
	if (reg & 1)
		return 0;
	if (flags & SW)
		sw = 7;
	ret = 0;
	for (i = 0; i < 8; ++i) {
		if (!(flags & ST)) {
			ret |= __get_user(ptr0[i^sw], addr + i);
			ret |= __get_user(ptr1[i^sw], addr + i + 8);
		} else {
			ret |= __put_user(ptr0[i^sw], addr + i);
			ret |= __put_user(ptr1[i^sw], addr + i + 8);
		}
	}
	if (ret)
		return -EFAULT;
	return 1;
}
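
/*
 * For illustration (not from the original source): an unaligned
 * lfdp f4,0(r3) reaches here with reg = 4 and a 16-byte operand; the
 * loop copies bytes 0..7 into FPR 4 and bytes 8..15 into FPR 5.  Odd
 * register numbers are rejected above because lfdp/stfdp name an
 * even/odd register pair.
 */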

#ifdef CONFIG_SPE

static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },		/* 0x00: evldd[x] */
	{ 8, LD+E4 },		/* 0x01: evldw[x] */
	{ 8, LD },		/* 0x02: evldh[x] */
	INVALID,		/* 0x03 */
	{ 2, LD },		/* 0x04: evlhhesplat[x] */
	INVALID,		/* 0x05 */
	{ 2, LD },		/* 0x06: evlhhousplat[x] */
	{ 2, LD+SE },		/* 0x07: evlhhossplat[x] */
	{ 4, LD },		/* 0x08: evlwhe[x] */
	INVALID,		/* 0x09 */
	{ 4, LD },		/* 0x0a: evlwhou[x] */
	{ 4, LD+SE },		/* 0x0b: evlwhos[x] */
	{ 4, LD+E4 },		/* 0x0c: evlwwsplat[x] */
	INVALID,		/* 0x0d */
	{ 4, LD },		/* 0x0e: evlwhsplat[x] */
	INVALID,		/* 0x0f */

	{ 8, ST+E8 },		/* 0x10: evstdd[x] */
	{ 8, ST+E4 },		/* 0x11: evstdw[x] */
	{ 8, ST },		/* 0x12: evstdh[x] */
	INVALID,		/* 0x13 */
	INVALID,		/* 0x14 */
	INVALID,		/* 0x15 */
	INVALID,		/* 0x16 */
	INVALID,		/* 0x17 */
	{ 4, ST },		/* 0x18: evstwhe[x] */
	INVALID,		/* 0x19 */
	{ 4, ST },		/* 0x1a: evstwho[x] */
	INVALID,		/* 0x1b */
	{ 4, ST+E4 },		/* 0x1c: evstwwe[x] */
	INVALID,		/* 0x1d */
	{ 4, ST+E4 },		/* 0x1e: evstwwo[x] */
	INVALID,		/* 0x1f */
};

#define EVLDD		0x00
#define EVLDW		0x01
#define EVLDH		0x02
#define EVLHHESPLAT	0x04
#define EVLHHOUSPLAT	0x06
#define EVLHHOSSPLAT	0x07
#define EVLWHE		0x08
#define EVLWHOU		0x0A
#define EVLWHOS		0x0B
#define EVLWWSPLAT	0x0C
#define EVLWHSPLAT	0x0E
#define EVSTDD		0x10
#define EVSTDW		0x11
#define EVSTDH		0x12
#define EVSTWHE		0x18
#define EVSTWHO		0x1A
#define EVSTWWE		0x1C
#define EVSTWWO		0x1E
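
/*
 * For illustration (not from the original source): emulate_spe() below
 * indexes spe_aligninfo[] with bits 1..5 of the EVX extended opcode,
 * i.e. (instr >> 1) & 0x1f.  evldd has extended opcode 0x301, so
 * (0x301 >> 1) & 0x1f = 0x00 = EVLDD, an 8-byte load with E8 swapping;
 * evstdd (0x321) likewise yields 0x10 = EVSTDD.
 */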

/*
 * Emulate SPE loads and stores.
 * Only Book-E has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_spe(struct pt_regs *regs, unsigned int reg,
		       unsigned int instr)
{
	int t, ret;
	union {
		u64 ll;
		u32 w[2];
		u16 h[4];
		u8 v[8];
	} data, temp;
	unsigned char __user *p, *addr;
	unsigned long *evr = &current->thread.evr[reg];
	unsigned int nb, flags;

	instr = (instr >> 1) & 0x1f;

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

	nb = spe_aligninfo[instr].len;
	flags = spe_aligninfo[instr].flags;

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* userland only */
	if (unlikely(!user_mode(regs)))
		return 0;

	flush_spe_to_thread(current);

	/*
	 * If we are loading, get the data from user space, else
	 * get it from register values
	 */
	if (flags & ST) {
		data.ll = 0;
		switch (instr) {
		case EVSTDD:
		case EVSTDW:
		case EVSTDH:
			data.w[0] = *evr;
			data.w[1] = regs->gpr[reg];
			break;
		case EVSTWHE:
			data.h[2] = *evr >> 16;
			data.h[3] = regs->gpr[reg] >> 16;
			break;
		case EVSTWHO:
			data.h[2] = *evr & 0xffff;
			data.h[3] = regs->gpr[reg] & 0xffff;
			break;
		case EVSTWWE:
			data.w[1] = *evr;
			break;
		case EVSTWWO:
			data.w[1] = regs->gpr[reg];
			break;
		default:
			return -EINVAL;
		}
	} else {
		temp.ll = data.ll = 0;
		ret = 0;
		p = addr;

		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(temp.v[0], p++);
			ret |= __get_user_inatomic(temp.v[1], p++);
			ret |= __get_user_inatomic(temp.v[2], p++);
			ret |= __get_user_inatomic(temp.v[3], p++);
			/* fall through */
		case 4:
			ret |= __get_user_inatomic(temp.v[4], p++);
			ret |= __get_user_inatomic(temp.v[5], p++);
			/* fall through */
		case 2:
			ret |= __get_user_inatomic(temp.v[6], p++);
			ret |= __get_user_inatomic(temp.v[7], p++);
			if (unlikely(ret))
				return -EFAULT;
		}

		switch (instr) {
		case EVLDD:
		case EVLDW:
		case EVLDH:
			data.ll = temp.ll;
			break;
		case EVLHHESPLAT:
			data.h[0] = temp.h[3];
			data.h[2] = temp.h[3];
			break;
		case EVLHHOUSPLAT:
		case EVLHHOSSPLAT:
			data.h[1] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		case EVLWHE:
			data.h[0] = temp.h[2];
			data.h[2] = temp.h[3];
			break;
		case EVLWHOU:
		case EVLWHOS:
			data.h[1] = temp.h[2];
			data.h[3] = temp.h[3];
			break;
		case EVLWWSPLAT:
			data.w[0] = temp.w[1];
			data.w[1] = temp.w[1];
			break;
		case EVLWHSPLAT:
			data.h[0] = temp.h[2];
			data.h[1] = temp.h[2];
			data.h[2] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		default:
			return -EINVAL;
		}
	}

	if (flags & SW) {
		switch (flags & 0xf0) {
		case E8:
			SWAP(data.v[0], data.v[7]);
			SWAP(data.v[1], data.v[6]);
			SWAP(data.v[2], data.v[5]);
			SWAP(data.v[3], data.v[4]);
			break;
		case E4:
			SWAP(data.v[0], data.v[3]);
			SWAP(data.v[1], data.v[2]);
			SWAP(data.v[4], data.v[7]);
			SWAP(data.v[5], data.v[6]);
			break;
		/* Are these used? */
		default:
			SWAP(data.v[0], data.v[1]);
			SWAP(data.v[2], data.v[3]);
			SWAP(data.v[4], data.v[5]);
			SWAP(data.v[6], data.v[7]);
			break;
		}
	}

	if (flags & SE) {
		data.w[0] = (s16)data.h[1];
		data.w[1] = (s16)data.h[3];
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		ret = 0;
		p = addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], p++);
			ret |= __put_user_inatomic(data.v[1], p++);
			ret |= __put_user_inatomic(data.v[2], p++);
			ret |= __put_user_inatomic(data.v[3], p++);
			/* fall through */
		case 4:
			ret |= __put_user_inatomic(data.v[4], p++);
			ret |= __put_user_inatomic(data.v[5], p++);
			/* fall through */
		case 2:
			ret |= __put_user_inatomic(data.v[6], p++);
			ret |= __put_user_inatomic(data.v[7], p++);
		}
		if (unlikely(ret))
			return -EFAULT;
	} else {
		*evr = data.w[0];
		regs->gpr[reg] = data.w[1];
	}

	return 1;
}
#endif /* CONFIG_SPE */

#ifdef CONFIG_VSX
/*
 * Emulate VSX loads and stores that take alignment faults.
 */
static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
		       unsigned int areg, struct pt_regs *regs,
		       unsigned int flags, unsigned int length)
{
	char *ptr;
	int ret = 0;

	flush_vsx_to_thread(current);

	/* VSX registers 0-31 overlay the FPRs, 32-63 the VRs */
	if (reg < 32)
		ptr = (char *) &current->thread.TS_FPR(reg);
	else
		ptr = (char *) &current->thread.vr[reg - 32];

	if (flags & ST)
		ret = __copy_to_user(addr, ptr, length);
	else {
		/* a splat load copies the same doubleword into
		 * both halves of the vector register */
		if (flags & SPLT) {
			ret = __copy_from_user(ptr, addr, length);
			ptr += length;
		}
		ret |= __copy_from_user(ptr, addr, length);
	}
	if (flags & U)
		regs->gpr[areg] = regs->dar;
	if (ret)
		return -EFAULT;
	return 1;
}
#endif /* CONFIG_VSX */

/*
 * Called on alignment exception. Attempts to fix it up.
 *
 * Return 1 on success
 * Return 0 if unable to handle the interrupt
 * Return -EFAULT if data address is bad
 */

int fix_alignment(struct pt_regs *regs)
{
	unsigned int instr, nb, flags, instruction = 0;
	unsigned int reg, areg;
	unsigned int dsisr;
	unsigned char __user *addr;
	unsigned long p, swiz;
	int ret, t;
	union {
		u64 ll;
		double dd;
		unsigned char v[8];
		struct {
			unsigned hi32;
			int	 low32;
		} x32;
		struct {
			unsigned char hi48[6];
			short	      low16;
		} x16;
	} data;

	/*
	 * We require a complete register set, if not, then our assembly
	 * is broken
	 */
	CHECK_FULL_REGS(regs);

	dsisr = regs->dsisr;

	/*
	 * Some processors don't provide us with a DSISR we can use here,
	 * let's make one up from the instruction
	 */
	if (cpu_has_feature(CPU_FTR_NODSISRALIGN)) {
		unsigned long pc = regs->nip;

		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
			pc ^= 4;
		if (unlikely(__get_user_inatomic(instr,
						 (unsigned int __user *)pc)))
			return -EFAULT;
		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
			instr = cpu_to_le32(instr);
		dsisr = make_dsisr(instr);
		instruction = instr;
	}

	/* extract the operation and registers from the dsisr */
	reg = (dsisr >> 5) & 0x1f;	/* source/dest register */
	areg = dsisr & 0x1f;		/* register to update */

#ifdef CONFIG_SPE
	/* SPE instructions (primary opcode 4) have their own emulator */
	if ((instr >> 26) == 0x4) {
		PPC_WARN_EMULATED(spe);
		return emulate_spe(regs, reg, instr);
	}
#endif

	/* assemble the 7-bit aligninfo index from the dsisr */
	instr = (dsisr >> 10) & 0x7f;
	instr |= (dsisr >> 13) & 0x60;

	/* Lookup the operation in our table */
	nb = aligninfo[instr].len;
	flags = aligninfo[instr].flags;

	/* Byteswap little endian loads and stores */
	swiz = 0;
	if (regs->msr & MSR_LE) {
		flags ^= SW;
		/*
		 * So-called "PowerPC little endian" mode works by
		 * swizzling addresses rather than by actually doing
		 * any byte-swapping.  To emulate this, we XOR each
		 * byte address with 7 (via SWIZ_PTR) on processors
		 * that implement the swizzled access mode, and also
		 * byte-swap the data (the SW flag toggled above);
		 * together these reproduce the access the hardware
		 * would have performed.  True little-endian CPUs
		 * need only the byte swap.
		 */
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			swiz = 7;
	}

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

#ifdef CONFIG_VSX
	if ((instruction & 0xfc00003e) == 0x7c000018) {
		/* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
		reg |= (instruction & 0x1) << 5;
		/* Simple inline decoder instead of a table */
		if (instruction & 0x200)
			nb = 16;
		else if (instruction & 0x080)
			nb = 8;
		else
			nb = 4;
		flags = 0;
		if (instruction & 0x100)
			flags |= ST;
		if (instruction & 0x040)
			flags |= U;
		/* splat load needs a size of 8 bytes, copied twice */
		if ((instruction & 0x400) == 0) {
			flags |= SPLT;
			nb = 8;
		}
		PPC_WARN_EMULATED(vsx);
		return emulate_vsx(addr, reg, areg, regs, flags, nb);
	}
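
	/*
	 * For illustration (not from the original source): lxvd2x has
	 * primary opcode 31 and extended opcode 844 (bits 0x698 in the
	 * instruction), so it matches the 0x7c000018 pattern above and
	 * decodes to nb = 16 (bit 0x200 set), a load (bit 0x100 clear),
	 * no update (bit 0x040 clear) and no SPLT (bit 0x400 set).
	 * lxvdsx (extended opcode 332, bits 0x298) has bit 0x400 clear
	 * and so takes the SPLT path with nb forced to 8.
	 */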
#endif

	/*
	 * A size of 0 indicates an instruction we don't support, with
	 * the exception of DCBZ which is handled as a special case here
	 */
	if (instr == DCBZ) {
		PPC_WARN_EMULATED(dcbz);
		return emulate_dcbz(regs, addr);
	}
	if (unlikely(nb == 0))
		return 0;

	/*
	 * Load/Store Multiple instructions are handled in their own
	 * function
	 */
	if (flags & M) {
		PPC_WARN_EMULATED(multiple);
		return emulate_multiple(regs, addr, reg, nb,
					flags, instr, swiz);
	}

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* Force the fprs into the save area so we can reference them */
	if (flags & F) {
		/* userland only */
		if (unlikely(!user_mode(regs)))
			return 0;
		flush_fp_to_thread(current);
	}

	/* Special case for 16-byte FP loads and stores */
	if (nb == 16) {
		PPC_WARN_EMULATED(fp_pair);
		return emulate_fp_pair(addr, reg, flags);
	}

	PPC_WARN_EMULATED(unaligned);

	/*
	 * If we are loading, get the data from user space, else
	 * get it from register values
	 */
	if (!(flags & ST)) {
		data.ll = 0;
		ret = 0;
		p = (unsigned long) addr;
		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(data.v[0], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[1], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[2], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[3], SWIZ_PTR(p++));
			/* fall through */
		case 4:
			ret |= __get_user_inatomic(data.v[4], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[5], SWIZ_PTR(p++));
			/* fall through */
		case 2:
			ret |= __get_user_inatomic(data.v[6], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[7], SWIZ_PTR(p++));
			if (unlikely(ret))
				return -EFAULT;
		}
	} else if (flags & F) {
		data.dd = current->thread.TS_FPR(reg);
		if (flags & S) {
			/* Single-precision FP store requires conversion */
#ifdef CONFIG_PPC_FPU
			preempt_disable();
			enable_kernel_fp();
			cvt_df(&data.dd, (float *)&data.v[4],
			       &current->thread);
			preempt_enable();
#else
			return 0;
#endif
		}
	} else
		data.ll = regs->gpr[reg];

	if (flags & SW) {
		switch (nb) {
		case 8:
			SWAP(data.v[0], data.v[7]);
			SWAP(data.v[1], data.v[6]);
			SWAP(data.v[2], data.v[5]);
			SWAP(data.v[3], data.v[4]);
			break;
		case 4:
			SWAP(data.v[4], data.v[7]);
			SWAP(data.v[5], data.v[6]);
			break;
		case 2:
			SWAP(data.v[6], data.v[7]);
			break;
		}
	}

	/*
	 * Perform other misc operations like sign extension
	 * or floating point single precision conversion
	 */
	switch (flags & ~(U|SW)) {
	case LD+SE:	/* sign extending integer loads */
	case LD+F+SE:	/* sign extend for lfiwax */
		if (nb == 2)
			data.ll = data.x16.low16;
		else	/* nb must be 4 */
			data.ll = data.x32.low32;
		break;

	/* Single-precision FP load requires conversion */
	case LD+F+S:
#ifdef CONFIG_PPC_FPU
		preempt_disable();
		enable_kernel_fp();
		cvt_fd((float *)&data.v[4], &data.dd, &current->thread);
		preempt_enable();
#else
		return 0;
#endif
		break;
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		ret = 0;
		p = (unsigned long) addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[1], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[2], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[3], SWIZ_PTR(p++));
			/* fall through */
		case 4:
			ret |= __put_user_inatomic(data.v[4], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[5], SWIZ_PTR(p++));
			/* fall through */
		case 2:
			ret |= __put_user_inatomic(data.v[6], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[7], SWIZ_PTR(p++));
		}
		if (unlikely(ret))
			return -EFAULT;
	} else if (flags & F)
		current->thread.TS_FPR(reg) = data.dd;
	else
		regs->gpr[reg] = data.ll;

	/* Update RA as needed */
	if (flags & U)
		regs->gpr[areg] = regs->dar;

	return 1;
}