1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include <linux/kernel.h>
21#include <linux/mm.h>
22#include <asm/processor.h>
23#include <asm/uaccess.h>
24#include <asm/system.h>
25#include <asm/cache.h>
26#include <asm/cputable.h>
27#include <asm/emulated_ops.h>
28
/*
 * One entry per emulatable instruction form: the access size in bytes
 * and a set of LD/ST/SE/F/U/... flag bits describing how to emulate it.
 * A {0, 0} entry (INVALID) marks instruction encodings we do not handle.
 */
struct aligninfo {
	unsigned char len;	/* access width in bytes (0 = invalid/special) */
	unsigned char flags;	/* combination of the flag bits defined below */
};
33
/* Primary opcode 31 = X-form (register-indexed) load/store */
#define IS_XFORM(inst)	(((inst) >> 26) == 31)
/* Primary opcodes 56..63 carry a DS/D-form displacement in the low bits */
#define IS_DSFORM(inst)	(((inst) >> 26) >= 56)

#define INVALID	{ 0, 0 }

/* Bits in the flags field of struct aligninfo */
#define LD	0	/* load */
#define ST	1	/* store */
#define SE	2	/* sign-extend the loaded value */
#define F	4	/* to/from FP register */
#define U	8	/* update the index (RA) register */
#define M	0x10	/* multiple load/store */
#define SW	0x20	/* byte swap */
#define S	0x40	/* single-precision FP conversion */
#define SX	0x40	/* byte count taken from XER (lswx/stswx) */
#define HARD	0x80	/* string op / needs special handling */
#define E4	0x40	/* SPE endianness is word */
#define E8	0x80	/* SPE endianness is double word */
#define SPLT	0x80	/* VSX SPLAT load */

/* DSISR-derived index reported for a dcbz instruction */
#define DCBZ	0x5f

/* In-place swap; relies on a local scratch variable 't' being in scope */
#define SWAP(a, b)	(t = (a), (a) = (b), (b) = t)
58
59
60
61
62
63
64
/*
 * Table of emulatable instructions, indexed by the 7-bit value that
 * fix_alignment() assembles from the DSISR "instruction identification"
 * bits.  The index comments below give those bits as "xx y zzzz";
 * the named instruction(s) are the ones the entry is believed to decode
 * to (derived from the PowerPC ISA DSISR encoding — verify against the ISA
 * manual if modifying).
 */
static struct aligninfo aligninfo[128] = {
	{ 4, LD },		/* 00 0 0000: lwz / lwarx */
	INVALID,		/* 00 0 0001 */
	{ 4, ST },		/* 00 0 0010: stw */
	INVALID,		/* 00 0 0011 */
	{ 2, LD },		/* 00 0 0100: lhz */
	{ 2, LD+SE },		/* 00 0 0101: lha */
	{ 2, ST },		/* 00 0 0110: sth */
	{ 4, LD+M },		/* 00 0 0111: lmw */
	{ 4, LD+F+S },		/* 00 0 1000: lfs */
	{ 8, LD+F },		/* 00 0 1001: lfd */
	{ 4, ST+F+S },		/* 00 0 1010: stfs */
	{ 8, ST+F },		/* 00 0 1011: stfd */
	INVALID,		/* 00 0 1100 */
	{ 8, LD },		/* 00 0 1101: ld/ldu/lwa */
	INVALID,		/* 00 0 1110 */
	{ 8, ST },		/* 00 0 1111: std/stdu */
	{ 4, LD+U },		/* 00 1 0000: lwzu */
	INVALID,		/* 00 1 0001 */
	{ 4, ST+U },		/* 00 1 0010: stwu */
	INVALID,		/* 00 1 0011 */
	{ 2, LD+U },		/* 00 1 0100: lhzu */
	{ 2, LD+SE+U },		/* 00 1 0101: lhau */
	{ 2, ST+U },		/* 00 1 0110: sthu */
	{ 4, ST+M },		/* 00 1 0111: stmw */
	{ 4, LD+F+S+U },	/* 00 1 1000: lfsu */
	{ 8, LD+F+U },		/* 00 1 1001: lfdu */
	{ 4, ST+F+S+U },	/* 00 1 1010: stfsu */
	{ 8, ST+F+U },		/* 00 1 1011: stfdu */
	{ 16, LD+F },		/* 00 1 1100: lfdp */
	INVALID,		/* 00 1 1101 */
	{ 16, ST+F },		/* 00 1 1110: stfdp */
	INVALID,		/* 00 1 1111 */
	{ 8, LD },		/* 01 0 0000: ldx */
	INVALID,		/* 01 0 0001 */
	{ 8, ST },		/* 01 0 0010: stdx */
	INVALID,		/* 01 0 0011 */
	INVALID,		/* 01 0 0100 */
	{ 4, LD+SE },		/* 01 0 0101: lwax */
	INVALID,		/* 01 0 0110 */
	INVALID,		/* 01 0 0111 */
	{ 4, LD+M+HARD+SX },	/* 01 0 1000: lswx */
	{ 4, LD+M+HARD },	/* 01 0 1001: lswi */
	{ 4, ST+M+HARD+SX },	/* 01 0 1010: stswx */
	{ 4, ST+M+HARD },	/* 01 0 1011: stswi */
	INVALID,		/* 01 0 1100 */
	{ 8, LD+U },		/* 01 0 1101: ldu */
	INVALID,		/* 01 0 1110 */
	{ 8, ST+U },		/* 01 0 1111: stdu */
	{ 8, LD+U },		/* 01 1 0000: ldux */
	INVALID,		/* 01 1 0001 */
	{ 8, ST+U },		/* 01 1 0010: stdux */
	INVALID,		/* 01 1 0011 */
	INVALID,		/* 01 1 0100 */
	{ 4, LD+SE+U },		/* 01 1 0101: lwaux */
	INVALID,		/* 01 1 0110 */
	INVALID,		/* 01 1 0111 */
	INVALID,		/* 01 1 1000 */
	INVALID,		/* 01 1 1001 */
	INVALID,		/* 01 1 1010 */
	INVALID,		/* 01 1 1011 */
	INVALID,		/* 01 1 1100 */
	INVALID,		/* 01 1 1101 */
	INVALID,		/* 01 1 1110 */
	INVALID,		/* 01 1 1111 */
	INVALID,		/* 10 0 0000 */
	INVALID,		/* 10 0 0001 */
	INVALID,		/* 10 0 0010 */
	INVALID,		/* 10 0 0011 */
	INVALID,		/* 10 0 0100 */
	INVALID,		/* 10 0 0101 */
	INVALID,		/* 10 0 0110 */
	INVALID,		/* 10 0 0111 */
	{ 4, LD+SW },		/* 10 0 1000: lwbrx */
	INVALID,		/* 10 0 1001 */
	{ 4, ST+SW },		/* 10 0 1010: stwbrx */
	INVALID,		/* 10 0 1011 */
	{ 2, LD+SW },		/* 10 0 1100: lhbrx */
	{ 4, LD+SE },		/* 10 0 1101: lwa */
	{ 2, ST+SW },		/* 10 0 1110: sthbrx */
	INVALID,		/* 10 0 1111 */
	INVALID,		/* 10 1 0000 */
	INVALID,		/* 10 1 0001 */
	INVALID,		/* 10 1 0010 */
	INVALID,		/* 10 1 0011 */
	INVALID,		/* 10 1 0100 */
	INVALID,		/* 10 1 0101 */
	INVALID,		/* 10 1 0110 */
	INVALID,		/* 10 1 0111 */
	INVALID,		/* 10 1 1000 */
	INVALID,		/* 10 1 1001 */
	INVALID,		/* 10 1 1010 */
	INVALID,		/* 10 1 1011 */
	INVALID,		/* 10 1 1100 */
	INVALID,		/* 10 1 1101 */
	INVALID,		/* 10 1 1110 */
	{ 0, ST+HARD },		/* 10 1 1111: dcbz */
	{ 4, LD },		/* 11 0 0000: lwzx */
	INVALID,		/* 11 0 0001 */
	{ 4, ST },		/* 11 0 0010: stwx */
	INVALID,		/* 11 0 0011 */
	{ 2, LD },		/* 11 0 0100: lhzx */
	{ 2, LD+SE },		/* 11 0 0101: lhax */
	{ 2, ST },		/* 11 0 0110: sthx */
	INVALID,		/* 11 0 0111 */
	{ 4, LD+F+S },		/* 11 0 1000: lfsx */
	{ 8, LD+F },		/* 11 0 1001: lfdx */
	{ 4, ST+F+S },		/* 11 0 1010: stfsx */
	{ 8, ST+F },		/* 11 0 1011: stfdx */
	{ 16, LD+F },		/* 11 0 1100: lfdpx */
	{ 4, LD+F+SE },		/* 11 0 1101: lfiwax */
	{ 16, ST+F },		/* 11 0 1110: stfdpx */
	{ 4, ST+F },		/* 11 0 1111: stfiwx */
	{ 4, LD+U },		/* 11 1 0000: lwzux */
	INVALID,		/* 11 1 0001 */
	{ 4, ST+U },		/* 11 1 0010: stwux */
	INVALID,		/* 11 1 0011 */
	{ 2, LD+U },		/* 11 1 0100: lhzux */
	{ 2, LD+SE+U },		/* 11 1 0101: lhaux */
	{ 2, ST+U },		/* 11 1 0110: sthux */
	INVALID,		/* 11 1 0111 */
	{ 4, LD+F+S+U },	/* 11 1 1000: lfsux */
	{ 8, LD+F+U },		/* 11 1 1001: lfdux */
	{ 4, ST+F+S+U },	/* 11 1 1010: stfsux */
	{ 8, ST+F+U },		/* 11 1 1011: stfdux */
	INVALID,		/* 11 1 1100 */
	{ 4, LD+F },		/* 11 1 1101: lfiwzx */
	INVALID,		/* 11 1 1110 */
	INVALID,		/* 11 1 1111 */
};
195
196
197
198
/*
 * Synthesize the DSISR an alignment interrupt would have reported for
 * @instr, for CPUs that do not supply one (CPU_FTR_NODSISRALIGN).
 *
 * Layout produced (matching the hardware DSISR encoding):
 *   bits  0..9  : instruction bits 6..15 (opcode-extension / register fields)
 *   bits 10..14 : instruction-identification bits, taken from different
 *                 instruction fields for X-form vs. D/DS-form
 *   bits 18..19 : low two displacement bits for DS-form (ld/std family)
 */
static inline unsigned make_dsisr(unsigned instr)
{
	unsigned int major = instr >> 26;		/* primary opcode */
	unsigned dsisr = (instr >> 16) & 0x3ff;		/* instr bits 6..15 */

	if (major == 31) {
		/* X-form (indexed): identification bits come from the
		 * extended-opcode field */
		dsisr |= ((instr >> 1) & 0x3) << 15;
		dsisr |= ((instr >> 6) & 0x1) << 14;
		dsisr |= ((instr >> 7) & 0xf) << 10;
	} else {
		/* D/DS-form: identification bits come from the primary
		 * opcode itself */
		dsisr |= ((instr >> 26) & 0x1) << 14;
		dsisr |= ((instr >> 27) & 0xf) << 10;
		/* DS-form also reports the low 2 bits of the displacement */
		if (major >= 56)
			dsisr |= (instr & 0x3) << 18;
	}

	return dsisr;
}
226
227
228
229
230
231
232
233
/*
 * Emulate a dcbz (data cache block zero) that raised an alignment
 * exception: zero one whole cache-block-aligned line at regs->dar.
 * Returns 1 on success, -EFAULT if any user write faults.
 * NOTE: the 'addr' argument is unused here; the target comes from DAR.
 */
static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
{
	long __user *p;
	int i, size;

#ifdef __powerpc64__
	size = ppc64_caches.dline_size;
#else
	size = L1_CACHE_BYTES;
#endif
	/* round DAR down to a cache-block boundary (size is a power of 2) */
	p = (long __user *) (regs->dar & -size);
	if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
		return -EFAULT;
	/* zero the block one long at a time */
	for (i = 0; i < size / sizeof(long); ++i)
		if (__put_user_inatomic(0, p+i))
			return -EFAULT;
	return 1;
}
252
253
254
255
256
257
258
/*
 * REG_BYTE(rp, i): lvalue for the i'th byte of the low 32-bit word of
 * successive GPRs starting at rp, 4 bytes per register.  On 64-bit the
 * "+ 4" skips the high word of each 8-byte register slot (big-endian
 * register image); on 32-bit the registers are packed 4-byte words.
 */
#ifdef CONFIG_PPC64
#define REG_BYTE(rp, i)		*((u8 *)((rp) + ((i) >> 2)) + ((i) & 3) + 4)
#else
#define REG_BYTE(rp, i)		*((u8 *)(rp) + (i))
#endif

/* XOR-swizzle a user address for little-endian byte-lane munging */
#define SWIZ_PTR(p)		((unsigned char __user *)((p) ^ swiz))
266
267static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
268 unsigned int reg, unsigned int nb,
269 unsigned int flags, unsigned int instr,
270 unsigned long swiz)
271{
272 unsigned long *rptr;
273 unsigned int nb0, i, bswiz;
274 unsigned long p;
275
276
277
278
279
280
281
282 if (unlikely((nb > 4) || !user_mode(regs)))
283 return 0;
284
285
286 nb0 = 0;
287 if (flags & HARD) {
288 if (flags & SX) {
289 nb = regs->xer & 127;
290 if (nb == 0)
291 return 1;
292 } else {
293 unsigned long pc = regs->nip ^ (swiz & 4);
294
295 if (__get_user_inatomic(instr,
296 (unsigned int __user *)pc))
297 return -EFAULT;
298 if (swiz == 0 && (flags & SW))
299 instr = cpu_to_le32(instr);
300 nb = (instr >> 11) & 0x1f;
301 if (nb == 0)
302 nb = 32;
303 }
304 if (nb + reg * 4 > 128) {
305 nb0 = nb + reg * 4 - 128;
306 nb = 128 - reg * 4;
307 }
308 } else {
309
310 nb = (32 - reg) * 4;
311 }
312
313 if (!access_ok((flags & ST ? VERIFY_WRITE: VERIFY_READ), addr, nb+nb0))
314 return -EFAULT;
315
316 rptr = ®s->gpr[reg];
317 p = (unsigned long) addr;
318 bswiz = (flags & SW)? 3: 0;
319
320 if (!(flags & ST)) {
321
322
323
324
325
326 memset(rptr, 0, ((nb + 3) / 4) * sizeof(unsigned long));
327 if (nb0 > 0)
328 memset(®s->gpr[0], 0,
329 ((nb0 + 3) / 4) * sizeof(unsigned long));
330
331 for (i = 0; i < nb; ++i, ++p)
332 if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
333 SWIZ_PTR(p)))
334 return -EFAULT;
335 if (nb0 > 0) {
336 rptr = ®s->gpr[0];
337 addr += nb;
338 for (i = 0; i < nb0; ++i, ++p)
339 if (__get_user_inatomic(REG_BYTE(rptr,
340 i ^ bswiz),
341 SWIZ_PTR(p)))
342 return -EFAULT;
343 }
344
345 } else {
346 for (i = 0; i < nb; ++i, ++p)
347 if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
348 SWIZ_PTR(p)))
349 return -EFAULT;
350 if (nb0 > 0) {
351 rptr = ®s->gpr[0];
352 addr += nb;
353 for (i = 0; i < nb0; ++i, ++p)
354 if (__put_user_inatomic(REG_BYTE(rptr,
355 i ^ bswiz),
356 SWIZ_PTR(p)))
357 return -EFAULT;
358 }
359 }
360 return 1;
361}
362
363
364
365
366
367
368static int emulate_fp_pair(unsigned char __user *addr, unsigned int reg,
369 unsigned int flags)
370{
371 char *ptr0 = (char *) ¤t->thread.TS_FPR(reg);
372 char *ptr1 = (char *) ¤t->thread.TS_FPR(reg+1);
373 int i, ret, sw = 0;
374
375 if (!(flags & F))
376 return 0;
377 if (reg & 1)
378 return 0;
379 if (flags & SW)
380 sw = 7;
381 ret = 0;
382 for (i = 0; i < 8; ++i) {
383 if (!(flags & ST)) {
384 ret |= __get_user(ptr0[i^sw], addr + i);
385 ret |= __get_user(ptr1[i^sw], addr + i + 8);
386 } else {
387 ret |= __put_user(ptr0[i^sw], addr + i);
388 ret |= __put_user(ptr1[i^sw], addr + i + 8);
389 }
390 }
391 if (ret)
392 return -EFAULT;
393 return 1;
394}
395
396#ifdef CONFIG_SPE
397
/*
 * Emulation table for SPE vector load/store instructions, indexed by
 * bits 1..5 of the instruction ((instr >> 1) & 0x1f — see emulate_spe()).
 * Index comments give those bits as "s gg hh"; names are the SPE
 * mnemonics believed to correspond (verify against the SPE PEM if
 * modifying).
 */
static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },		/* 0 00 00: evldd[x] */
	{ 8, LD+E4 },		/* 0 00 01: evldw[x] */
	{ 8, LD },		/* 0 00 10: evldh[x] */
	INVALID,		/* 0 00 11 */
	{ 2, LD },		/* 0 01 00: evlhhesplat[x] */
	INVALID,		/* 0 01 01 */
	{ 2, LD },		/* 0 01 10: evlhhousplat[x] */
	{ 2, LD+SE },		/* 0 01 11: evlhhossplat[x] */
	{ 4, LD },		/* 0 10 00: evlwhe[x] */
	INVALID,		/* 0 10 01 */
	{ 4, LD },		/* 0 10 10: evlwhou[x] */
	{ 4, LD+SE },		/* 0 10 11: evlwhos[x] */
	{ 4, LD+E4 },		/* 0 11 00: evlwwsplat[x] */
	INVALID,		/* 0 11 01 */
	{ 4, LD },		/* 0 11 10: evlwhsplat[x] */
	INVALID,		/* 0 11 11 */

	{ 8, ST+E8 },		/* 1 00 00: evstdd[x] */
	{ 8, ST+E4 },		/* 1 00 01: evstdw[x] */
	{ 8, ST },		/* 1 00 10: evstdh[x] */
	INVALID,		/* 1 00 11 */
	INVALID,		/* 1 01 00 */
	INVALID,		/* 1 01 01 */
	INVALID,		/* 1 01 10 */
	INVALID,		/* 1 01 11 */
	{ 4, ST },		/* 1 10 00: evstwhe[x] */
	INVALID,		/* 1 10 01 */
	{ 4, ST },		/* 1 10 10: evstwho[x] */
	INVALID,		/* 1 10 11 */
	{ 4, ST+E4 },		/* 1 11 00: evstwwe[x] */
	INVALID,		/* 1 11 01 */
	{ 4, ST+E4 },		/* 1 11 10: evstwwo[x] */
	INVALID,		/* 1 11 11 */
};
433
/*
 * SPE instruction selectors: values of (instr >> 1) & 0x1f, i.e. the
 * same index used for spe_aligninfo[] above.
 */
#define EVLDD		0x00	/* load double-double */
#define EVLDW		0x01	/* load double as two words */
#define EVLDH		0x02	/* load double as four halves */
#define EVLHHESPLAT	0x04	/* load half, splat to even halves */
#define EVLHHOUSPLAT	0x06	/* load half unsigned, splat to odd halves */
#define EVLHHOSSPLAT	0x07	/* load half signed, splat to odd halves */
#define EVLWHE		0x08	/* load word into even halves */
#define EVLWHOU		0x0A	/* load word unsigned into odd halves */
#define EVLWHOS		0x0B	/* load word signed into odd halves */
#define EVLWWSPLAT	0x0C	/* load word, splat to both words */
#define EVLWHSPLAT	0x0E	/* load word, splat halves */
#define EVSTDD		0x10	/* store double-double */
#define EVSTDW		0x11	/* store double as two words */
#define EVSTDH		0x12	/* store double as four halves */
#define EVSTWHE		0x18	/* store even halves as word */
#define EVSTWHO		0x1A	/* store odd halves as word */
#define EVSTWWE		0x1C	/* store even word */
#define EVSTWWO		0x1E	/* store odd word */
452
453
454
455
456
457
458static int emulate_spe(struct pt_regs *regs, unsigned int reg,
459 unsigned int instr)
460{
461 int t, ret;
462 union {
463 u64 ll;
464 u32 w[2];
465 u16 h[4];
466 u8 v[8];
467 } data, temp;
468 unsigned char __user *p, *addr;
469 unsigned long *evr = ¤t->thread.evr[reg];
470 unsigned int nb, flags;
471
472 instr = (instr >> 1) & 0x1f;
473
474
475 addr = (unsigned char __user *)regs->dar;
476
477 nb = spe_aligninfo[instr].len;
478 flags = spe_aligninfo[instr].flags;
479
480
481 if (unlikely(user_mode(regs) &&
482 !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
483 addr, nb)))
484 return -EFAULT;
485
486
487 if (unlikely(!user_mode(regs)))
488 return 0;
489
490 flush_spe_to_thread(current);
491
492
493
494
495 if (flags & ST) {
496 data.ll = 0;
497 switch (instr) {
498 case EVSTDD:
499 case EVSTDW:
500 case EVSTDH:
501 data.w[0] = *evr;
502 data.w[1] = regs->gpr[reg];
503 break;
504 case EVSTWHE:
505 data.h[2] = *evr >> 16;
506 data.h[3] = regs->gpr[reg] >> 16;
507 break;
508 case EVSTWHO:
509 data.h[2] = *evr & 0xffff;
510 data.h[3] = regs->gpr[reg] & 0xffff;
511 break;
512 case EVSTWWE:
513 data.w[1] = *evr;
514 break;
515 case EVSTWWO:
516 data.w[1] = regs->gpr[reg];
517 break;
518 default:
519 return -EINVAL;
520 }
521 } else {
522 temp.ll = data.ll = 0;
523 ret = 0;
524 p = addr;
525
526 switch (nb) {
527 case 8:
528 ret |= __get_user_inatomic(temp.v[0], p++);
529 ret |= __get_user_inatomic(temp.v[1], p++);
530 ret |= __get_user_inatomic(temp.v[2], p++);
531 ret |= __get_user_inatomic(temp.v[3], p++);
532 case 4:
533 ret |= __get_user_inatomic(temp.v[4], p++);
534 ret |= __get_user_inatomic(temp.v[5], p++);
535 case 2:
536 ret |= __get_user_inatomic(temp.v[6], p++);
537 ret |= __get_user_inatomic(temp.v[7], p++);
538 if (unlikely(ret))
539 return -EFAULT;
540 }
541
542 switch (instr) {
543 case EVLDD:
544 case EVLDW:
545 case EVLDH:
546 data.ll = temp.ll;
547 break;
548 case EVLHHESPLAT:
549 data.h[0] = temp.h[3];
550 data.h[2] = temp.h[3];
551 break;
552 case EVLHHOUSPLAT:
553 case EVLHHOSSPLAT:
554 data.h[1] = temp.h[3];
555 data.h[3] = temp.h[3];
556 break;
557 case EVLWHE:
558 data.h[0] = temp.h[2];
559 data.h[2] = temp.h[3];
560 break;
561 case EVLWHOU:
562 case EVLWHOS:
563 data.h[1] = temp.h[2];
564 data.h[3] = temp.h[3];
565 break;
566 case EVLWWSPLAT:
567 data.w[0] = temp.w[1];
568 data.w[1] = temp.w[1];
569 break;
570 case EVLWHSPLAT:
571 data.h[0] = temp.h[2];
572 data.h[1] = temp.h[2];
573 data.h[2] = temp.h[3];
574 data.h[3] = temp.h[3];
575 break;
576 default:
577 return -EINVAL;
578 }
579 }
580
581 if (flags & SW) {
582 switch (flags & 0xf0) {
583 case E8:
584 SWAP(data.v[0], data.v[7]);
585 SWAP(data.v[1], data.v[6]);
586 SWAP(data.v[2], data.v[5]);
587 SWAP(data.v[3], data.v[4]);
588 break;
589 case E4:
590
591 SWAP(data.v[0], data.v[3]);
592 SWAP(data.v[1], data.v[2]);
593 SWAP(data.v[4], data.v[7]);
594 SWAP(data.v[5], data.v[6]);
595 break;
596
597 default:
598 SWAP(data.v[0], data.v[1]);
599 SWAP(data.v[2], data.v[3]);
600 SWAP(data.v[4], data.v[5]);
601 SWAP(data.v[6], data.v[7]);
602 break;
603 }
604 }
605
606 if (flags & SE) {
607 data.w[0] = (s16)data.h[1];
608 data.w[1] = (s16)data.h[3];
609 }
610
611
612 if (flags & ST) {
613 ret = 0;
614 p = addr;
615 switch (nb) {
616 case 8:
617 ret |= __put_user_inatomic(data.v[0], p++);
618 ret |= __put_user_inatomic(data.v[1], p++);
619 ret |= __put_user_inatomic(data.v[2], p++);
620 ret |= __put_user_inatomic(data.v[3], p++);
621 case 4:
622 ret |= __put_user_inatomic(data.v[4], p++);
623 ret |= __put_user_inatomic(data.v[5], p++);
624 case 2:
625 ret |= __put_user_inatomic(data.v[6], p++);
626 ret |= __put_user_inatomic(data.v[7], p++);
627 }
628 if (unlikely(ret))
629 return -EFAULT;
630 } else {
631 *evr = data.w[0];
632 regs->gpr[reg] = data.w[1];
633 }
634
635 return 1;
636}
637#endif
638
639#ifdef CONFIG_VSX
640
641
642
643static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
644 unsigned int areg, struct pt_regs *regs,
645 unsigned int flags, unsigned int length,
646 unsigned int elsize)
647{
648 char *ptr;
649 unsigned long *lptr;
650 int ret = 0;
651 int sw = 0;
652 int i, j;
653
654 flush_vsx_to_thread(current);
655
656 if (reg < 32)
657 ptr = (char *) ¤t->thread.TS_FPR(reg);
658 else
659 ptr = (char *) ¤t->thread.vr[reg - 32];
660
661 lptr = (unsigned long *) ptr;
662
663 if (flags & SW)
664 sw = elsize-1;
665
666 for (j = 0; j < length; j += elsize) {
667 for (i = 0; i < elsize; ++i) {
668 if (flags & ST)
669 ret |= __put_user(ptr[i^sw], addr + i);
670 else
671 ret |= __get_user(ptr[i^sw], addr + i);
672 }
673 ptr += elsize;
674 addr += elsize;
675 }
676
677 if (!ret) {
678 if (flags & U)
679 regs->gpr[areg] = regs->dar;
680
681
682 if (flags & SPLT)
683 lptr[1] = lptr[0];
684
685 else if (!(flags & ST) && (8 == length))
686 lptr[1] = 0;
687 } else
688 return -EFAULT;
689
690 return 1;
691}
692#endif
693
694
695
696
697
698
699
700
701
/*
 * Main entry point for the alignment exception handler: decode the
 * faulting instruction (from the DSISR, or by re-reading the instruction
 * when the CPU provides no usable DSISR) and emulate it.
 *
 * Returns:
 *   1       instruction emulated, exception handled
 *   0       unable to handle/emulate (caller is expected to signal)
 *  -EFAULT  a user access faulted during emulation
 */
int fix_alignment(struct pt_regs *regs)
{
	unsigned int instr, nb, flags, instruction = 0;
	unsigned int reg, areg;
	unsigned int dsisr;
	unsigned char __user *addr;
	unsigned long p, swiz;
	int ret, t;		/* t is scratch for SWAP() */
	union {
		u64 ll;
		double dd;
		unsigned char v[8];
		struct {
			unsigned hi32;
			int low32;	/* signed: used for lwa sign-extension */
		} x32;
		struct {
			unsigned char hi48[6];
			short low16;	/* signed: used for lha sign-extension */
		} x16;
	} data;

	/* we need the full register image to emulate against */
	CHECK_FULL_REGS(regs);

	dsisr = regs->dsisr;

	/*
	 * Some processors don't provide a usable DSISR on alignment
	 * faults; synthesize one from the instruction itself.
	 */
	if (cpu_has_feature(CPU_FTR_NODSISRALIGN)) {
		unsigned long pc = regs->nip;

		/* old-style little-endian mode swizzles instruction fetch */
		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
			pc ^= 4;
		if (unlikely(__get_user_inatomic(instr,
						 (unsigned int __user *)pc)))
			return -EFAULT;
		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
			instr = cpu_to_le32(instr);
		dsisr = make_dsisr(instr);
		instruction = instr;	/* keep raw copy for VSX decode */
	}

	/* extract the operand registers from the dsisr */
	reg = (dsisr >> 5) & 0x1f;	/* source/dest register */
	areg = dsisr & 0x1f;		/* register to update (RA) */

#ifdef CONFIG_SPE
	/* primary opcode 4 is the SPE vector load/store family */
	if ((instr >> 26) == 0x4) {
		PPC_WARN_ALIGNMENT(spe, regs);
		return emulate_spe(regs, reg, instr);
	}
#endif

	/* assemble the 7-bit aligninfo[] index from the dsisr */
	instr = (dsisr >> 10) & 0x7f;
	instr |= (dsisr >> 13) & 0x60;

	/* lookup the operation in our table */
	nb = aligninfo[instr].len;
	flags = aligninfo[instr].flags;

	/*
	 * Little-endian mode: invert the byte-swap flag, and on PPC_LE
	 * CPUs also address-swizzle every byte access (XOR with 7).
	 */
	swiz = 0;
	if (regs->msr & MSR_LE) {
		flags ^= SW;
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			swiz = 7;
	}

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

#ifdef CONFIG_VSX
	/* match VSX load/store forms (opcode 31, xo pattern 0x18) */
	if ((instruction & 0xfc00003e) == 0x7c000018) {
		unsigned int elsize;

		/* bit 0 extends the target register number to 0..63 */
		reg |= (instruction & 0x1) << 5;

		/* total access length: 8 or 16 bytes */
		nb = 8;
		if (instruction & 0x200)
			nb = 16;

		/* element size: word or doubleword */
		elsize = 4;
		if (instruction & 0x80)
			elsize = 8;

		flags = 0;
		if (regs->msr & MSR_LE)
			flags |= SW;
		if (instruction & 0x100)
			flags |= ST;
		if (instruction & 0x040)
			flags |= U;

		/* splat-load forms move 8 bytes and duplicate them */
		if ((instruction & 0x400) == 0){
			flags |= SPLT;
			nb = 8;
		}
		PPC_WARN_ALIGNMENT(vsx, regs);
		return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
	}
#endif

	/* dcbz gets its own handler (zeroes a whole cache block) */
	if (instr == DCBZ) {
		PPC_WARN_ALIGNMENT(dcbz, regs);
		return emulate_dcbz(regs, addr);
	}
	if (unlikely(nb == 0))
		return 0;	/* not an instruction we emulate */

	/* multiples and string ops are handled separately */
	if (flags & M) {
		PPC_WARN_ALIGNMENT(multiple, regs);
		return emulate_multiple(regs, addr, reg, nb,
					flags, instr, swiz);
	}

	/* verify the operand address before touching it */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* FP ops: only for user mode, and sync the live FP state first */
	if (flags & F) {
		if (unlikely(!user_mode(regs)))
			return 0;
		flush_fp_to_thread(current);
	}

	/* 16-byte accesses are the FP-pair instructions */
	if (nb == 16) {
		PPC_WARN_ALIGNMENT(fp_pair, regs);
		return emulate_fp_pair(addr, reg, flags);
	}

	PPC_WARN_ALIGNMENT(unaligned, regs);

	/*
	 * Loads: read the bytes right-justified into data.v[], one at a
	 * time through the (possibly swizzled) user pointer.
	 */
	if (!(flags & ST)) {
		data.ll = 0;
		ret = 0;
		p = (unsigned long) addr;
		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(data.v[0], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[1], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[2], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[3], SWIZ_PTR(p++));
			/* fall through */
		case 4:
			ret |= __get_user_inatomic(data.v[4], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[5], SWIZ_PTR(p++));
			/* fall through */
		case 2:
			ret |= __get_user_inatomic(data.v[6], SWIZ_PTR(p++));
			ret |= __get_user_inatomic(data.v[7], SWIZ_PTR(p++));
			if (unlikely(ret))
				return -EFAULT;
		}
	} else if (flags & F) {
		/* FP store: fetch the value, converting to single if needed */
		data.dd = current->thread.TS_FPR(reg);
		if (flags & S) {
			/* single-precision conversion needs the FPU */
#ifdef CONFIG_PPC_FPU
			preempt_disable();
			enable_kernel_fp();
			cvt_df(&data.dd, (float *)&data.v[4]);
			preempt_enable();
#else
			return 0;
#endif
		}
	} else
		data.ll = regs->gpr[reg];

	/* apply byte reversal for the byte-reversed forms / LE mode */
	if (flags & SW) {
		switch (nb) {
		case 8:
			SWAP(data.v[0], data.v[7]);
			SWAP(data.v[1], data.v[6]);
			SWAP(data.v[2], data.v[5]);
			SWAP(data.v[3], data.v[4]);
			break;
		case 4:
			SWAP(data.v[4], data.v[7]);
			SWAP(data.v[5], data.v[6]);
			break;
		case 2:
			SWAP(data.v[6], data.v[7]);
			break;
		}
	}

	/* sign-extension and FP single->double conversion for loads */
	switch (flags & ~(U|SW)) {
	case LD+SE:
	case LD+F+SE:	/* lfiwax */
		if ( nb == 2 )
			data.ll = data.x16.low16;
		else	/* nb must be 4 */
			data.ll = data.x32.low32;
		break;

	/* single-precision FP load: convert to double */
	case LD+F+S:
#ifdef CONFIG_PPC_FPU
		preempt_disable();
		enable_kernel_fp();
		cvt_fd((float *)&data.v[4], &data.dd);
		preempt_enable();
#else
		return 0;
#endif
		break;
	}

	/* store the bytes out, or commit the loaded value to a register */
	if (flags & ST) {
		ret = 0;
		p = (unsigned long) addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[1], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[2], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[3], SWIZ_PTR(p++));
			/* fall through */
		case 4:
			ret |= __put_user_inatomic(data.v[4], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[5], SWIZ_PTR(p++));
			/* fall through */
		case 2:
			ret |= __put_user_inatomic(data.v[6], SWIZ_PTR(p++));
			ret |= __put_user_inatomic(data.v[7], SWIZ_PTR(p++));
		}
		if (unlikely(ret))
			return -EFAULT;
	} else if (flags & F)
		current->thread.TS_FPR(reg) = data.dd;
	else
		regs->gpr[reg] = data.ll;

	/* update form: write the effective address back to RA */
	if (flags & U)
		regs->gpr[areg] = regs->dar;

	return 1;
}
974