1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include <linux/kernel.h>
21#include <linux/mm.h>
22#include <asm/processor.h>
23#include <asm/uaccess.h>
24#include <asm/cache.h>
25#include <asm/cputable.h>
26#include <asm/emulated_ops.h>
27#include <asm/switch_to.h>
28#include <asm/disassemble.h>
29
/*
 * One entry per decoded alignment-fault opcode: the transfer size in
 * bytes plus a combination of the LD/ST/SE/F/U/M/... flag bits below
 * describing how the access must be emulated.
 */
struct aligninfo {
	unsigned char len;
	unsigned char flags;
};
34
35
#define INVALID	{ 0, 0 }	/* unsupported instruction: caller returns 0 */

/* Bits in the flags field of struct aligninfo */
#define LD	0	/* load */
#define ST	1	/* store */
#define SE	2	/* sign-extend value */
#define F	4	/* to/from fp regs */
#define U	8	/* update index register */
#define M	0x10	/* multiple load/store */
#define SW	0x20	/* byte swap */
#define S	0x40	/* single-precision fp conversion */
#define SX	0x40	/* byte count in XER (string ops) */
#define HARD	0x80	/* string ops: need the instruction image */
#define E4	0x40	/* SPE endianness is word */
#define E8	0x80	/* SPE endianness is double word */
#define SPLT	0x80	/* VSX SPLAT load */

/* DSISR-derived table index reported for a dcbz instruction */
#define DCBZ	0x5f
55
56
57
58
59
60
61
/*
 * The PowerPC stores certain bits of the instruction that caused the
 * alignment exception in the DSISR register.  This table maps the
 * combination of these bits (the index is built in fix_alignment())
 * to information about the operand: length and what the processor
 * would do to it.  The opcode names in the comments are presumed from
 * the standard DSISR encoding — TODO confirm against the ISA tables.
 */
static struct aligninfo aligninfo[128] = {
	{ 4, LD },		/* 00 0 0000: lwz / lwarx */
	INVALID,		/* 00 0 0001 */
	{ 4, ST },		/* 00 0 0010: stw */
	INVALID,		/* 00 0 0011 */
	{ 2, LD },		/* 00 0 0100: lhz */
	{ 2, LD+SE },		/* 00 0 0101: lha */
	{ 2, ST },		/* 00 0 0110: sth */
	{ 4, LD+M },		/* 00 0 0111: lmw */
	{ 4, LD+F+S },		/* 00 0 1000: lfs */
	{ 8, LD+F },		/* 00 0 1001: lfd */
	{ 4, ST+F+S },		/* 00 0 1010: stfs */
	{ 8, ST+F },		/* 00 0 1011: stfd */
	{ 16, LD },		/* 00 0 1100: lq */
	{ 8, LD },		/* 00 0 1101: ld/ldu/lwa */
	INVALID,		/* 00 0 1110 */
	{ 8, ST },		/* 00 0 1111: std/stdu */
	{ 4, LD+U },		/* 00 1 0000: lwzu */
	INVALID,		/* 00 1 0001 */
	{ 4, ST+U },		/* 00 1 0010: stwu */
	INVALID,		/* 00 1 0011 */
	{ 2, LD+U },		/* 00 1 0100: lhzu */
	{ 2, LD+SE+U },		/* 00 1 0101: lhau */
	{ 2, ST+U },		/* 00 1 0110: sthu */
	{ 4, ST+M },		/* 00 1 0111: stmw */
	{ 4, LD+F+S+U },	/* 00 1 1000: lfsu */
	{ 8, LD+F+U },		/* 00 1 1001: lfdu */
	{ 4, ST+F+S+U },	/* 00 1 1010: stfsu */
	{ 8, ST+F+U },		/* 00 1 1011: stfdu */
	{ 16, LD+F },		/* 00 1 1100: lfdp */
	INVALID,		/* 00 1 1101 */
	{ 16, ST+F },		/* 00 1 1110: stfdp */
	INVALID,		/* 00 1 1111 */
	{ 8, LD },		/* 01 0 0000: ldx */
	INVALID,		/* 01 0 0001 */
	{ 8, ST },		/* 01 0 0010: stdx */
	INVALID,		/* 01 0 0011 */
	INVALID,		/* 01 0 0100 */
	{ 4, LD+SE },		/* 01 0 0101: lwax */
	INVALID,		/* 01 0 0110 */
	INVALID,		/* 01 0 0111 */
	{ 4, LD+M+HARD+SX },	/* 01 0 1000: lswx */
	{ 4, LD+M+HARD },	/* 01 0 1001: lswi */
	{ 4, ST+M+HARD+SX },	/* 01 0 1010: stswx */
	{ 4, ST+M+HARD },	/* 01 0 1011: stswi */
	INVALID,		/* 01 0 1100 */
	{ 8, LD+U },		/* 01 0 1101: ldu */
	INVALID,		/* 01 0 1110 */
	{ 8, ST+U },		/* 01 0 1111: stdu */
	{ 8, LD+U },		/* 01 1 0000: ldux */
	INVALID,		/* 01 1 0001 */
	{ 8, ST+U },		/* 01 1 0010: stdux */
	INVALID,		/* 01 1 0011 */
	INVALID,		/* 01 1 0100 */
	{ 4, LD+SE+U },		/* 01 1 0101: lwaux */
	INVALID,		/* 01 1 0110 */
	INVALID,		/* 01 1 0111 */
	INVALID,		/* 01 1 1000 */
	INVALID,		/* 01 1 1001 */
	INVALID,		/* 01 1 1010 */
	INVALID,		/* 01 1 1011 */
	INVALID,		/* 01 1 1100 */
	INVALID,		/* 01 1 1101 */
	INVALID,		/* 01 1 1110 */
	INVALID,		/* 01 1 1111 */
	INVALID,		/* 10 0 0000 */
	INVALID,		/* 10 0 0001 */
	INVALID,		/* 10 0 0010 */
	INVALID,		/* 10 0 0011 */
	INVALID,		/* 10 0 0100 */
	INVALID,		/* 10 0 0101 */
	INVALID,		/* 10 0 0110 */
	INVALID,		/* 10 0 0111 */
	{ 4, LD+SW },		/* 10 0 1000: lwbrx */
	INVALID,		/* 10 0 1001 */
	{ 4, ST+SW },		/* 10 0 1010: stwbrx */
	INVALID,		/* 10 0 1011 */
	{ 2, LD+SW },		/* 10 0 1100: lhbrx */
	{ 4, LD+SE },		/* 10 0 1101: lwa */
	{ 2, ST+SW },		/* 10 0 1110: sthbrx */
	{ 16, ST },		/* 10 0 1111: stq */
	INVALID,		/* 10 1 0000 */
	INVALID,		/* 10 1 0001 */
	INVALID,		/* 10 1 0010 */
	INVALID,		/* 10 1 0011 */
	INVALID,		/* 10 1 0100 */
	INVALID,		/* 10 1 0101 */
	INVALID,		/* 10 1 0110 */
	INVALID,		/* 10 1 0111 */
	INVALID,		/* 10 1 1000 */
	INVALID,		/* 10 1 1001 */
	INVALID,		/* 10 1 1010 */
	INVALID,		/* 10 1 1011 */
	INVALID,		/* 10 1 1100 */
	INVALID,		/* 10 1 1101 */
	INVALID,		/* 10 1 1110 */
	{ 0, ST+HARD },		/* 10 1 1111: dcbz */
	{ 4, LD },		/* 11 0 0000: lwzx */
	INVALID,		/* 11 0 0001 */
	{ 4, ST },		/* 11 0 0010: stwx */
	INVALID,		/* 11 0 0011 */
	{ 2, LD },		/* 11 0 0100: lhzx */
	{ 2, LD+SE },		/* 11 0 0101: lhax */
	{ 2, ST },		/* 11 0 0110: sthx */
	INVALID,		/* 11 0 0111 */
	{ 4, LD+F+S },		/* 11 0 1000: lfsx */
	{ 8, LD+F },		/* 11 0 1001: lfdx */
	{ 4, ST+F+S },		/* 11 0 1010: stfsx */
	{ 8, ST+F },		/* 11 0 1011: stfdx */
	{ 16, LD+F },		/* 11 0 1100: lfdpx */
	{ 4, LD+F+SE },		/* 11 0 1101: lfiwax */
	{ 16, ST+F },		/* 11 0 1110: stfdpx */
	{ 4, ST+F },		/* 11 0 1111: stfiwx */
	{ 4, LD+U },		/* 11 1 0000: lwzux */
	INVALID,		/* 11 1 0001 */
	{ 4, ST+U },		/* 11 1 0010: stwux */
	INVALID,		/* 11 1 0011 */
	{ 2, LD+U },		/* 11 1 0100: lhzux */
	{ 2, LD+SE+U },		/* 11 1 0101: lhaux */
	{ 2, ST+U },		/* 11 1 0110: sthux */
	INVALID,		/* 11 1 0111 */
	{ 4, LD+F+S+U },	/* 11 1 1000: lfsux */
	{ 8, LD+F+U },		/* 11 1 1001: lfdux */
	{ 4, ST+F+S+U },	/* 11 1 1010: stfsux */
	{ 8, ST+F+U },		/* 11 1 1011: stfdux */
	INVALID,		/* 11 1 1100 */
	{ 4, LD+F },		/* 11 1 1101: lfiwzx */
	INVALID,		/* 11 1 1110 */
	INVALID,		/* 11 1 1111 */
};
192
193
194
195
196
197
198
199
200static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
201{
202 long __user *p;
203 int i, size;
204
205#ifdef __powerpc64__
206 size = ppc64_caches.dline_size;
207#else
208 size = L1_CACHE_BYTES;
209#endif
210 p = (long __user *) (regs->dar & -size);
211 if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
212 return -EFAULT;
213 for (i = 0; i < size / sizeof(long); ++i)
214 if (__put_user_inatomic(0, p+i))
215 return -EFAULT;
216 return 1;
217}
218
219
220
221
222
223
224
/*
 * REG_BYTE(rp, i) maps byte index i of the packed 32-bit register image
 * used by the multiple load/store emulation onto the right byte of the
 * right pt_regs gpr[] slot.  On 64-bit big-endian kernels the 32-bit
 * payload lives in the low half of each 64-bit register, hence the +4.
 */
#ifdef __BIG_ENDIAN__
#ifdef CONFIG_PPC64
#define REG_BYTE(rp, i) *((u8 *)((rp) + ((i) >> 2)) + ((i) & 3) + 4)
#else
#define REG_BYTE(rp, i) *((u8 *)(rp) + (i))
#endif
#endif

#ifdef __LITTLE_ENDIAN__
#define REG_BYTE(rp, i) (*(((u8 *)((rp) + ((i)>>2)) + ((i)&3))))
#endif

/* XOR address swizzle used to emulate "PowerPC little-endian" mode. */
#define SWIZ_PTR(p)	((unsigned char __user *)((p) ^ swiz))
238
239static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
240 unsigned int reg, unsigned int nb,
241 unsigned int flags, unsigned int instr,
242 unsigned long swiz)
243{
244 unsigned long *rptr;
245 unsigned int nb0, i, bswiz;
246 unsigned long p;
247
248
249
250
251
252
253
254 if (unlikely((nb > 4) || !user_mode(regs)))
255 return 0;
256
257
258 nb0 = 0;
259 if (flags & HARD) {
260 if (flags & SX) {
261 nb = regs->xer & 127;
262 if (nb == 0)
263 return 1;
264 } else {
265 unsigned long pc = regs->nip ^ (swiz & 4);
266
267 if (__get_user_inatomic(instr,
268 (unsigned int __user *)pc))
269 return -EFAULT;
270 if (swiz == 0 && (flags & SW))
271 instr = cpu_to_le32(instr);
272 nb = (instr >> 11) & 0x1f;
273 if (nb == 0)
274 nb = 32;
275 }
276 if (nb + reg * 4 > 128) {
277 nb0 = nb + reg * 4 - 128;
278 nb = 128 - reg * 4;
279 }
280#ifdef __LITTLE_ENDIAN__
281
282
283
284
285
286
287 flags ^= SW;
288#endif
289 } else {
290
291 nb = (32 - reg) * 4;
292 }
293
294 if (!access_ok((flags & ST ? VERIFY_WRITE: VERIFY_READ), addr, nb+nb0))
295 return -EFAULT;
296
297 rptr = ®s->gpr[reg];
298 p = (unsigned long) addr;
299 bswiz = (flags & SW)? 3: 0;
300
301 if (!(flags & ST)) {
302
303
304
305
306
307 memset(rptr, 0, ((nb + 3) / 4) * sizeof(unsigned long));
308 if (nb0 > 0)
309 memset(®s->gpr[0], 0,
310 ((nb0 + 3) / 4) * sizeof(unsigned long));
311
312 for (i = 0; i < nb; ++i, ++p)
313 if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
314 SWIZ_PTR(p)))
315 return -EFAULT;
316 if (nb0 > 0) {
317 rptr = ®s->gpr[0];
318 addr += nb;
319 for (i = 0; i < nb0; ++i, ++p)
320 if (__get_user_inatomic(REG_BYTE(rptr,
321 i ^ bswiz),
322 SWIZ_PTR(p)))
323 return -EFAULT;
324 }
325
326 } else {
327 for (i = 0; i < nb; ++i, ++p)
328 if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
329 SWIZ_PTR(p)))
330 return -EFAULT;
331 if (nb0 > 0) {
332 rptr = ®s->gpr[0];
333 addr += nb;
334 for (i = 0; i < nb0; ++i, ++p)
335 if (__put_user_inatomic(REG_BYTE(rptr,
336 i ^ bswiz),
337 SWIZ_PTR(p)))
338 return -EFAULT;
339 }
340 }
341 return 1;
342}
343
344
345
346
347
348
349static int emulate_fp_pair(unsigned char __user *addr, unsigned int reg,
350 unsigned int flags)
351{
352 char *ptr0 = (char *) ¤t->thread.TS_FPR(reg);
353 char *ptr1 = (char *) ¤t->thread.TS_FPR(reg+1);
354 int i, ret, sw = 0;
355
356 if (reg & 1)
357 return 0;
358 if (flags & SW)
359 sw = 7;
360 ret = 0;
361 for (i = 0; i < 8; ++i) {
362 if (!(flags & ST)) {
363 ret |= __get_user(ptr0[i^sw], addr + i);
364 ret |= __get_user(ptr1[i^sw], addr + i + 8);
365 } else {
366 ret |= __put_user(ptr0[i^sw], addr + i);
367 ret |= __put_user(ptr1[i^sw], addr + i + 8);
368 }
369 }
370 if (ret)
371 return -EFAULT;
372 return 1;
373}
374
375#ifdef CONFIG_PPC64
376static int emulate_lq_stq(struct pt_regs *regs, unsigned char __user *addr,
377 unsigned int reg, unsigned int flags)
378{
379 char *ptr0 = (char *)®s->gpr[reg];
380 char *ptr1 = (char *)®s->gpr[reg+1];
381 int i, ret, sw = 0;
382
383 if (reg & 1)
384 return 0;
385 if (flags & SW)
386 sw = 7;
387 ret = 0;
388 for (i = 0; i < 8; ++i) {
389 if (!(flags & ST)) {
390 ret |= __get_user(ptr0[i^sw], addr + i);
391 ret |= __get_user(ptr1[i^sw], addr + i + 8);
392 } else {
393 ret |= __put_user(ptr0[i^sw], addr + i);
394 ret |= __put_user(ptr1[i^sw], addr + i + 8);
395 }
396 }
397 if (ret)
398 return -EFAULT;
399 return 1;
400}
401#endif
402
403#ifdef CONFIG_SPE
404
/*
 * Decode table for SPE (Signal Processing Engine) load/store
 * instructions, indexed by bits 1-5 of the instruction word (see
 * emulate_spe()).  Opcode names in the comments are presumed from the
 * SPE instruction encoding — TODO confirm against the SPE PEM.
 */
static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },		/* 0 00 00: evldd[x] */
	{ 8, LD+E4 },		/* 0 00 01: evldw[x] */
	{ 8, LD },		/* 0 00 10: evldh[x] */
	INVALID,		/* 0 00 11 */
	{ 2, LD },		/* 0 01 00: evlhhesplat[x] */
	INVALID,		/* 0 01 01 */
	{ 2, LD },		/* 0 01 10: evlhhousplat[x] */
	{ 2, LD+SE },		/* 0 01 11: evlhhossplat[x] */
	{ 4, LD },		/* 0 10 00: evlwhe[x] */
	INVALID,		/* 0 10 01 */
	{ 4, LD },		/* 0 10 10: evlwhou[x] */
	{ 4, LD+SE },		/* 0 10 11: evlwhos[x] */
	{ 4, LD+E4 },		/* 0 11 00: evlwwsplat[x] */
	INVALID,		/* 0 11 01 */
	{ 4, LD },		/* 0 11 10: evlwhsplat[x] */
	INVALID,		/* 0 11 11 */

	{ 8, ST+E8 },		/* 1 00 00: evstdd[x] */
	{ 8, ST+E4 },		/* 1 00 01: evstdw[x] */
	{ 8, ST },		/* 1 00 10: evstdh[x] */
	INVALID,		/* 1 00 11 */
	INVALID,		/* 1 01 00 */
	INVALID,		/* 1 01 01 */
	INVALID,		/* 1 01 10 */
	INVALID,		/* 1 01 11 */
	{ 4, ST },		/* 1 10 00: evstwhe[x] */
	INVALID,		/* 1 10 01 */
	{ 4, ST },		/* 1 10 10: evstwho[x] */
	INVALID,		/* 1 10 11 */
	{ 4, ST+E4 },		/* 1 11 00: evstwwe[x] */
	INVALID,		/* 1 11 01 */
	{ 4, ST+E4 },		/* 1 11 10: evstwwo[x] */
	INVALID,		/* 1 11 11 */
};
440
/*
 * SPE sub-opcode values ((instr >> 1) & 0x1f), i.e. the indices into
 * spe_aligninfo[] that emulate_spe() switches on.
 */
#define EVLDD		0x00
#define EVLDW		0x01
#define EVLDH		0x02
#define EVLHHESPLAT	0x04
#define EVLHHOUSPLAT	0x06
#define EVLHHOSSPLAT	0x07
#define EVLWHE		0x08
#define EVLWHOU		0x0A
#define EVLWHOS		0x0B
#define EVLWWSPLAT	0x0C
#define EVLWHSPLAT	0x0E
#define EVSTDD		0x10
#define EVSTDW		0x11
#define EVSTDH		0x12
#define EVSTWHE		0x18
#define EVSTWHO		0x1A
#define EVSTWWE		0x1C
#define EVSTWWO		0x1E
459
460
461
462
463
464
465static int emulate_spe(struct pt_regs *regs, unsigned int reg,
466 unsigned int instr)
467{
468 int ret;
469 union {
470 u64 ll;
471 u32 w[2];
472 u16 h[4];
473 u8 v[8];
474 } data, temp;
475 unsigned char __user *p, *addr;
476 unsigned long *evr = ¤t->thread.evr[reg];
477 unsigned int nb, flags;
478
479 instr = (instr >> 1) & 0x1f;
480
481
482 addr = (unsigned char __user *)regs->dar;
483
484 nb = spe_aligninfo[instr].len;
485 flags = spe_aligninfo[instr].flags;
486
487
488 if (unlikely(user_mode(regs) &&
489 !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
490 addr, nb)))
491 return -EFAULT;
492
493
494 if (unlikely(!user_mode(regs)))
495 return 0;
496
497 flush_spe_to_thread(current);
498
499
500
501
502 if (flags & ST) {
503 data.ll = 0;
504 switch (instr) {
505 case EVSTDD:
506 case EVSTDW:
507 case EVSTDH:
508 data.w[0] = *evr;
509 data.w[1] = regs->gpr[reg];
510 break;
511 case EVSTWHE:
512 data.h[2] = *evr >> 16;
513 data.h[3] = regs->gpr[reg] >> 16;
514 break;
515 case EVSTWHO:
516 data.h[2] = *evr & 0xffff;
517 data.h[3] = regs->gpr[reg] & 0xffff;
518 break;
519 case EVSTWWE:
520 data.w[1] = *evr;
521 break;
522 case EVSTWWO:
523 data.w[1] = regs->gpr[reg];
524 break;
525 default:
526 return -EINVAL;
527 }
528 } else {
529 temp.ll = data.ll = 0;
530 ret = 0;
531 p = addr;
532
533 switch (nb) {
534 case 8:
535 ret |= __get_user_inatomic(temp.v[0], p++);
536 ret |= __get_user_inatomic(temp.v[1], p++);
537 ret |= __get_user_inatomic(temp.v[2], p++);
538 ret |= __get_user_inatomic(temp.v[3], p++);
539 case 4:
540 ret |= __get_user_inatomic(temp.v[4], p++);
541 ret |= __get_user_inatomic(temp.v[5], p++);
542 case 2:
543 ret |= __get_user_inatomic(temp.v[6], p++);
544 ret |= __get_user_inatomic(temp.v[7], p++);
545 if (unlikely(ret))
546 return -EFAULT;
547 }
548
549 switch (instr) {
550 case EVLDD:
551 case EVLDW:
552 case EVLDH:
553 data.ll = temp.ll;
554 break;
555 case EVLHHESPLAT:
556 data.h[0] = temp.h[3];
557 data.h[2] = temp.h[3];
558 break;
559 case EVLHHOUSPLAT:
560 case EVLHHOSSPLAT:
561 data.h[1] = temp.h[3];
562 data.h[3] = temp.h[3];
563 break;
564 case EVLWHE:
565 data.h[0] = temp.h[2];
566 data.h[2] = temp.h[3];
567 break;
568 case EVLWHOU:
569 case EVLWHOS:
570 data.h[1] = temp.h[2];
571 data.h[3] = temp.h[3];
572 break;
573 case EVLWWSPLAT:
574 data.w[0] = temp.w[1];
575 data.w[1] = temp.w[1];
576 break;
577 case EVLWHSPLAT:
578 data.h[0] = temp.h[2];
579 data.h[1] = temp.h[2];
580 data.h[2] = temp.h[3];
581 data.h[3] = temp.h[3];
582 break;
583 default:
584 return -EINVAL;
585 }
586 }
587
588 if (flags & SW) {
589 switch (flags & 0xf0) {
590 case E8:
591 data.ll = swab64(data.ll);
592 break;
593 case E4:
594 data.w[0] = swab32(data.w[0]);
595 data.w[1] = swab32(data.w[1]);
596 break;
597
598 default:
599 data.h[0] = swab16(data.h[0]);
600 data.h[1] = swab16(data.h[1]);
601 data.h[2] = swab16(data.h[2]);
602 data.h[3] = swab16(data.h[3]);
603 break;
604 }
605 }
606
607 if (flags & SE) {
608 data.w[0] = (s16)data.h[1];
609 data.w[1] = (s16)data.h[3];
610 }
611
612
613 if (flags & ST) {
614 ret = 0;
615 p = addr;
616 switch (nb) {
617 case 8:
618 ret |= __put_user_inatomic(data.v[0], p++);
619 ret |= __put_user_inatomic(data.v[1], p++);
620 ret |= __put_user_inatomic(data.v[2], p++);
621 ret |= __put_user_inatomic(data.v[3], p++);
622 case 4:
623 ret |= __put_user_inatomic(data.v[4], p++);
624 ret |= __put_user_inatomic(data.v[5], p++);
625 case 2:
626 ret |= __put_user_inatomic(data.v[6], p++);
627 ret |= __put_user_inatomic(data.v[7], p++);
628 }
629 if (unlikely(ret))
630 return -EFAULT;
631 } else {
632 *evr = data.w[0];
633 regs->gpr[reg] = data.w[1];
634 }
635
636 return 1;
637}
638#endif
639
640#ifdef CONFIG_VSX
641
642
643
644static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
645 unsigned int areg, struct pt_regs *regs,
646 unsigned int flags, unsigned int length,
647 unsigned int elsize)
648{
649 char *ptr;
650 unsigned long *lptr;
651 int ret = 0;
652 int sw = 0;
653 int i, j;
654
655
656 if (unlikely(!user_mode(regs)))
657 return 0;
658
659 flush_vsx_to_thread(current);
660
661 if (reg < 32)
662 ptr = (char *) ¤t->thread.fp_state.fpr[reg][0];
663 else
664 ptr = (char *) ¤t->thread.vr_state.vr[reg - 32];
665
666 lptr = (unsigned long *) ptr;
667
668#ifdef __LITTLE_ENDIAN__
669 if (flags & SW) {
670 elsize = length;
671 sw = length-1;
672 } else {
673
674
675
676
677 addr += length - elsize;
678
679
680 if (length == 8)
681 ptr += 8;
682 }
683#else
684 if (flags & SW)
685 sw = elsize-1;
686#endif
687
688 for (j = 0; j < length; j += elsize) {
689 for (i = 0; i < elsize; ++i) {
690 if (flags & ST)
691 ret |= __put_user(ptr[i^sw], addr + i);
692 else
693 ret |= __get_user(ptr[i^sw], addr + i);
694 }
695 ptr += elsize;
696#ifdef __LITTLE_ENDIAN__
697 addr -= elsize;
698#else
699 addr += elsize;
700#endif
701 }
702
703#ifdef __BIG_ENDIAN__
704#define VSX_HI 0
705#define VSX_LO 1
706#else
707#define VSX_HI 1
708#define VSX_LO 0
709#endif
710
711 if (!ret) {
712 if (flags & U)
713 regs->gpr[areg] = regs->dar;
714
715
716 if (flags & SPLT)
717 lptr[VSX_LO] = lptr[VSX_HI];
718
719 else if (!(flags & ST) && (8 == length))
720 lptr[VSX_LO] = 0;
721 } else
722 return -EFAULT;
723
724 return 1;
725}
726#endif
727
728
729
730
731
732
733
734
735
/*
 * Called on an alignment exception.  Decodes the faulting instruction
 * (from DSISR, or by re-fetching it on CPUs without a usable DSISR),
 * dispatches special cases (SPE, VSX, dcbz, multiples, 16-byte forms),
 * and otherwise performs the access one byte at a time.
 *
 * Return 1 on success (fixed up, caller advances NIP)
 * Return 0 if unable to handle the interrupt
 * Return -EFAULT if the data address is bad
 */
int fix_alignment(struct pt_regs *regs)
{
	unsigned int instr, nb, flags, instruction = 0;
	unsigned int reg, areg;
	unsigned int dsisr;
	unsigned char __user *addr;
	unsigned long p, swiz;
	int ret, i;
	/* Scratch view of the operand, overlaying the low 2/4 bytes. */
	union data {
		u64 ll;
		double dd;
		unsigned char v[8];
		struct {	/* 32-bit word at the numerically-low end */
#ifdef __LITTLE_ENDIAN__
			int low32;
			unsigned hi32;
#else
			unsigned hi32;
			int low32;
#endif
		} x32;
		struct {	/* 16-bit halfword at the numerically-low end */
#ifdef __LITTLE_ENDIAN__
			short low16;
			unsigned char hi48[6];
#else
			unsigned char hi48[6];
			short low16;
#endif
		} x16;
	} data;

	/*
	 * We require a complete register set; if not, our exception-entry
	 * assembly is broken.
	 */
	CHECK_FULL_REGS(regs);

	dsisr = regs->dsisr;

	/*
	 * Some processors don't provide us with a DSISR we can use here:
	 * make one up from the instruction image instead.
	 */
	if (cpu_has_feature(CPU_FTR_NODSISRALIGN)) {
		unsigned long pc = regs->nip;

		/* Old-style LE mode swizzles the fetch address. */
		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
			pc ^= 4;
		if (unlikely(__get_user_inatomic(instr,
						 (unsigned int __user *)pc)))
			return -EFAULT;
		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
			instr = cpu_to_le32(instr);
		dsisr = make_dsisr(instr);
		instruction = instr;
	}

	/* extract the operation and registers from the dsisr */
	reg = (dsisr >> 5) & 0x1f;	/* source/dest register */
	areg = dsisr & 0x1f;		/* register to update */

#ifdef CONFIG_SPE
	/* Primary opcode 4 is the SPE load/store family. */
	if ((instr >> 26) == 0x4) {
		PPC_WARN_ALIGNMENT(spe, regs);
		return emulate_spe(regs, reg, instr);
	}
#endif

	/* Build the aligninfo[] index from the DSISR opcode bits. */
	instr = (dsisr >> 10) & 0x7f;
	instr |= (dsisr >> 13) & 0x60;

	/* Lookup the operation in our table */
	nb = aligninfo[instr].len;
	flags = aligninfo[instr].flags;

	/*
	 * ldbrx/stdbrx alias other entries in the DSISR encoding, so
	 * detect them from the re-fetched instruction image (only set on
	 * CPU_FTR_NODSISRALIGN CPUs) and override.  X-form extended
	 * opcodes 532/660 are presumed to be ldbrx/stdbrx — TODO confirm.
	 */
	if (IS_XFORM(instruction) && ((instruction >> 1) & 0x3ff) == 532) {
		nb = 8;
		flags = LD+SW;
	} else if (IS_XFORM(instruction) &&
		   ((instruction >> 1) & 0x3ff) == 660) {
		nb = 8;
		flags = ST+SW;
	}

	/* Byteswap when the task's endianness differs from the kernel's. */
	swiz = 0;
	if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) {
		flags ^= SW;
#ifdef __BIG_ENDIAN__
		/*
		 * So-called "PowerPC little endian" mode works by address
		 * swizzling rather than actual byte swapping.  Since we
		 * load and store each byte separately, we emulate it by
		 * XORing every byte address with 7 and byte-swapping the
		 * data as well.
		 */
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			swiz = 7;
#endif
	}

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

#ifdef CONFIG_VSX
	/* VSX loads/stores: decode inline instead of via the table. */
	if ((instruction & 0xfc00003e) == 0x7c000018) {
		unsigned int elsize;

		/* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
		reg |= (instruction & 0x1) << 5;

		/* VSX has only 8- and 16-byte memory accesses */
		nb = 8;
		if (instruction & 0x200)
			nb = 16;

		/*
		 * Vector stores in little-endian mode swap individual
		 * elements, so process them element by element.
		 */
		elsize = 4;
		if (instruction & 0x80)
			elsize = 8;

		flags = 0;
		if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE))
			flags |= SW;
		if (instruction & 0x100)
			flags |= ST;
		if (instruction & 0x040)
			flags |= U;

		/* splat load needs special handling in the emulator */
		if ((instruction & 0x400) == 0){
			flags |= SPLT;
			nb = 8;
		}
		PPC_WARN_ALIGNMENT(vsx, regs);
		return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
	}
#endif

	/*
	 * A length of 0 marks an unsupported instruction, except for dcbz
	 * which is handled as a special case here.
	 */
	if (instr == DCBZ) {
		PPC_WARN_ALIGNMENT(dcbz, regs);
		return emulate_dcbz(regs, addr);
	}
	if (unlikely(nb == 0))
		return 0;

	/* Load/store multiple instructions have their own emulator. */
	if (flags & M) {
		PPC_WARN_ALIGNMENT(multiple, regs);
		return emulate_multiple(regs, addr, reg, nb,
					flags, instr, swiz);
	}

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* Force the FPRs into the save area so we can reference them. */
	if (flags & F) {
		/* userland only */
		if (unlikely(!user_mode(regs)))
			return 0;
		flush_fp_to_thread(current);
	}

	if (nb == 16) {
		if (flags & F) {
			/* Special case for 16-byte FP loads and stores */
			PPC_WARN_ALIGNMENT(fp_pair, regs);
			return emulate_fp_pair(addr, reg, flags);
		} else {
#ifdef CONFIG_PPC64
			/* Special case for 16-byte GPR loads and stores */
			PPC_WARN_ALIGNMENT(lq_stq, regs);
			return emulate_lq_stq(regs, addr, reg, flags);
#else
			return 0;
#endif
		}
	}

	PPC_WARN_ALIGNMENT(unaligned, regs);

	/*
	 * If we are loading, get the data from user space, else take it
	 * from the register / FPR value.
	 */
	if (!(flags & ST)) {
		unsigned int start = 0;

		/* Short operands land in the low part of the union. */
		switch (nb) {
		case 4:
			start = offsetof(union data, x32.low32);
			break;
		case 2:
			start = offsetof(union data, x16.low16);
			break;
		}

		data.ll = 0;
		ret = 0;
		p = (unsigned long)addr;

		for (i = 0; i < nb; i++)
			ret |= __get_user_inatomic(data.v[start + i],
						   SWIZ_PTR(p++));

		if (unlikely(ret))
			return -EFAULT;

	} else if (flags & F) {
		data.ll = current->thread.TS_FPR(reg);
		if (flags & S) {
			/* Single-precision FP store requires conversion. */
#ifdef CONFIG_PPC_FPU
			preempt_disable();
			enable_kernel_fp();
			cvt_df(&data.dd, (float *)&data.x32.low32);
			disable_kernel_fp();
			preempt_enable();
#else
			return 0;
#endif
		}
	} else
		data.ll = regs->gpr[reg];

	/* Byte-reversed forms / cross-endian accesses: swap the operand. */
	if (flags & SW) {
		switch (nb) {
		case 8:
			data.ll = swab64(data.ll);
			break;
		case 4:
			data.x32.low32 = swab32(data.x32.low32);
			break;
		case 2:
			data.x16.low16 = swab16(data.x16.low16);
			break;
		}
	}

	/*
	 * Perform other misc operations like sign extension or single
	 * precision conversion.
	 */
	switch (flags & ~(U|SW)) {
	case LD+SE:	/* sign extending integer loads */
	case LD+F+SE:	/* sign extending fp load (lfiwax) */
		if ( nb == 2 )
			data.ll = data.x16.low16;
		else	/* nb must be 4 */
			data.ll = data.x32.low32;
		break;

	/* Single-precision FP load requires conversion. */
	case LD+F+S:
#ifdef CONFIG_PPC_FPU
		preempt_disable();
		enable_kernel_fp();
		cvt_fd((float *)&data.x32.low32, &data.dd);
		disable_kernel_fp();
		preempt_enable();
#else
		return 0;
#endif
		break;
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		unsigned int start = 0;

		switch (nb) {
		case 4:
			start = offsetof(union data, x32.low32);
			break;
		case 2:
			start = offsetof(union data, x16.low16);
			break;
		}

		ret = 0;
		p = (unsigned long)addr;

		for (i = 0; i < nb; i++)
			ret |= __put_user_inatomic(data.v[start + i],
						   SWIZ_PTR(p++));

		if (unlikely(ret))
			return -EFAULT;
	} else if (flags & F)
		current->thread.TS_FPR(reg) = data.ll;
	else
		regs->gpr[reg] = data.ll;

	/* Update RA as needed (update-form instructions). */
	if (flags & U)
		regs->gpr[areg] = regs->dar;

	return 1;	/* exception handled and fixed up */
}
1045