/*
 * align.c - handle alignment exceptions for the PowerPC.
 */
#include <linux/kernel.h>
#include <linux/mm.h>
#include <asm/processor.h>
#include <linux/uaccess.h>
#include <asm/cache.h>
#include <asm/cputable.h>
#include <asm/emulated_ops.h>
#include <asm/switch_to.h>
#include <asm/disassemble.h>
#include <asm/cpu_has_feature.h>

struct aligninfo {
	unsigned char len;
	unsigned char flags;
};

#define INVALID	{ 0, 0 }

/* Bits in the flags field of struct aligninfo */
#define LD	0	/* load */
#define ST	1	/* store */
#define SE	2	/* sign-extend value, or FP ld/st as word */
#define F	4	/* to/from fp regs */
#define U	8	/* update index register */
#define M	0x10	/* multiple load/store */
#define SW	0x20	/* byte swap */
#define S	0x40	/* single-precision fp, or ... */
#define SX	0x40	/* ... byte count comes from XER (string ops) */
#define HARD	0x80	/* string op, needs special handling */
#define E4	0x40	/* SPE endianness is word */
#define E8	0x80	/* SPE endianness is double word */
#define SPLT	0x80	/* VSX SPLAT load */

/* DSISR bits reported for a DCBZ instruction: */
#define DCBZ	0x5f	/* 8xx/82xx dcbz faults when cache not enabled */
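
/*
 * Illustrative note on how the flag bits combine in the table below:
 * an entry such as { 4, LD+F+S+U } describes a 4-byte load into an FP
 * register (F), converted from single precision (S), that also updates
 * the index register afterwards (U), i.e. an lfsu/lfsux-style access.
 */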

/*
 * The PowerPC stores certain bits of the instruction that caused the
 * alignment exception in the DSISR register.  This array maps those
 * bits to information about the operand length and what the
 * instruction would do.
 */
static struct aligninfo aligninfo[128] = {
	{ 4, LD },
	INVALID,
	{ 4, ST },
	INVALID,
	{ 2, LD },
	{ 2, LD+SE },
	{ 2, ST },
	{ 4, LD+M },
	{ 4, LD+F+S },
	{ 8, LD+F },
	{ 4, ST+F+S },
	{ 8, ST+F },
	{ 16, LD },
	{ 8, LD },
	INVALID,
	{ 8, ST },
	{ 4, LD+U },
	INVALID,
	{ 4, ST+U },
	INVALID,
	{ 2, LD+U },
	{ 2, LD+SE+U },
	{ 2, ST+U },
	{ 4, ST+M },
	{ 4, LD+F+S+U },
	{ 8, LD+F+U },
	{ 4, ST+F+S+U },
	{ 8, ST+F+U },
	{ 16, LD+F },
	INVALID,
	{ 16, ST+F },
	INVALID,
	{ 8, LD },
	INVALID,
	{ 8, ST },
	INVALID,
	INVALID,
	{ 4, LD+SE },
	INVALID,
	INVALID,
	{ 4, LD+M+HARD+SX },
	{ 4, LD+M+HARD },
	{ 4, ST+M+HARD+SX },
	{ 4, ST+M+HARD },
	INVALID,
	{ 8, LD+U },
	INVALID,
	{ 8, ST+U },
	{ 8, LD+U },
	INVALID,
	{ 8, ST+U },
	INVALID,
	INVALID,
	{ 4, LD+SE+U },
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	{ 4, LD+SW },
	INVALID,
	{ 4, ST+SW },
	INVALID,
	{ 2, LD+SW },
	{ 4, LD+SE },
	{ 2, ST+SW },
	{ 16, ST },
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	{ 0, ST+HARD },
	{ 4, LD },
	INVALID,
	{ 4, ST },
	INVALID,
	{ 2, LD },
	{ 2, LD+SE },
	{ 2, ST },
	INVALID,
	{ 4, LD+F+S },
	{ 8, LD+F },
	{ 4, ST+F+S },
	{ 8, ST+F },
	{ 16, LD+F },
	{ 4, LD+F+SE },
	{ 16, ST+F },
	{ 4, ST+F },
	{ 4, LD+U },
	INVALID,
	{ 4, ST+U },
	INVALID,
	{ 2, LD+U },
	{ 2, LD+SE+U },
	{ 2, ST+U },
	INVALID,
	{ 4, LD+F+S+U },
	{ 8, LD+F+U },
	{ 4, ST+F+S+U },
	{ 8, ST+F+U },
	INVALID,
	{ 4, LD+F },
	INVALID,
	INVALID,
};
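
/*
 * The index into this table is computed from the DSISR in fix_alignment()
 * below as ((dsisr >> 10) & 0x7f) | ((dsisr >> 13) & 0x60).  Entries with
 * a length of 0 (INVALID) mark encodings this code does not emulate.
 */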

/*
 * The dcbz (data cache block zero) instruction
 * gives an alignment fault if used on non-cacheable
 * memory.  We handle the fault mainly for the
 * case when we are running with the cache disabled
 * for debugging.
 */
static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
{
	long __user *p;
	int i, size;

#ifdef __powerpc64__
	size = ppc64_caches.dline_size;
#else
	size = L1_CACHE_BYTES;
#endif
	p = (long __user *) (regs->dar & -size);
	if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
		return -EFAULT;
	for (i = 0; i < size / sizeof(long); ++i)
		if (__put_user_inatomic(0, p+i))
			return -EFAULT;
	return 1;
}
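
/*
 * Worked example of the rounding above (the 32-byte line size is just
 * for illustration): with dar = 0x1234, regs->dar & -32 = 0x1220, the
 * start of the containing cache block, and 32 / sizeof(long) stores of
 * zero then clear the whole block.
 */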

/*
 * Emulate load & store multiple instructions.
 * These always operate on 32-bit words.  On big-endian 64-bit kernels
 * the 32-bit image of each GPR sits at byte offsets 4-7 of the 64-bit
 * register; REG_BYTE() addresses byte i of that stream of 32-bit words
 * so the loops below can copy one byte at a time.
 */
#ifdef __BIG_ENDIAN__
#ifdef CONFIG_PPC64
#define REG_BYTE(rp, i)		*((u8 *)((rp) + ((i) >> 2)) + ((i) & 3) + 4)
#else
#define REG_BYTE(rp, i)		*((u8 *)(rp) + (i))
#endif
#else
#define REG_BYTE(rp, i)		(*(((u8 *)((rp) + ((i)>>2)) + ((i)&3))))
#endif

/*
 * In PPC "little-endian" (address-swizzled) mode each byte address is
 * XORed with 7; SWIZ_PTR applies that swizzle (swiz is 0 otherwise).
 */
#define SWIZ_PTR(p)		((unsigned char __user *)((p) ^ swiz))
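
/*
 * Illustrative expansion of the big-endian 64-bit case: REG_BYTE(rptr, 5)
 * becomes *((u8 *)(rptr + 1) + 1 + 4), i.e. byte offset 5 of rptr[1],
 * which is the second-most-significant byte of the low 32-bit word of
 * that register.
 */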

static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
			    unsigned int reg, unsigned int nb,
			    unsigned int flags, unsigned int instr,
			    unsigned long swiz)
{
	unsigned long *rptr;
	unsigned int nb0, i, bswiz;
	unsigned long p;

	/*
	 * We do not try to emulate 8 byte multiples as they aren't really
	 * available in our operating environments and we don't try to
	 * emulate multiple operations in kernel land as they should never
	 * be used/generated there, at least not on unaligned boundaries.
	 */
	if (unlikely((nb > 4) || !user_mode(regs)))
		return 0;

	/* lmw, stmw, lswi/x, stswi/x */
	nb0 = 0;
	if (flags & HARD) {
		if (flags & SX) {
			nb = regs->xer & 127;
			if (nb == 0)
				return 1;
		} else {
			unsigned long pc = regs->nip ^ (swiz & 4);

			if (__get_user_inatomic(instr,
						(unsigned int __user *)pc))
				return -EFAULT;
			if (swiz == 0 && (flags & SW))
				instr = cpu_to_le32(instr);
			nb = (instr >> 11) & 0x1f;
			if (nb == 0)
				nb = 32;
		}
		if (nb + reg * 4 > 128) {
			nb0 = nb + reg * 4 - 128;
			nb = 128 - reg * 4;
		}
#ifdef __LITTLE_ENDIAN__
		/*
		 * String instructions are endian neutral but the code
		 * below is not.  Force byteswap on LE platforms.
		 */
		flags ^= SW;
#endif
	} else {
		/* lmw, stmw */
		nb = (32 - reg) * 4;
	}

	if (!access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ), addr, nb+nb0))
		return -EFAULT;

	rptr = &regs->gpr[reg];
	p = (unsigned long) addr;
	bswiz = (flags & SW) ? 3 : 0;

	if (!(flags & ST)) {
		/*
		 * This zeroes the top 4 bytes of the affected registers
		 * in 64-bit mode, and also zeroes out any remaining
		 * bytes of the last register for lsw*.
		 */
		memset(rptr, 0, ((nb + 3) / 4) * sizeof(unsigned long));
		if (nb0 > 0)
			memset(&regs->gpr[0], 0,
			       ((nb0 + 3) / 4) * sizeof(unsigned long));

		for (i = 0; i < nb; ++i, ++p)
			if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
						SWIZ_PTR(p)))
				return -EFAULT;
		if (nb0 > 0) {
			rptr = &regs->gpr[0];
			addr += nb;
			for (i = 0; i < nb0; ++i, ++p)
				if (__get_user_inatomic(REG_BYTE(rptr,
								 i ^ bswiz),
							SWIZ_PTR(p)))
					return -EFAULT;
		}

	} else {
		for (i = 0; i < nb; ++i, ++p)
			if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
						SWIZ_PTR(p)))
				return -EFAULT;
		if (nb0 > 0) {
			rptr = &regs->gpr[0];
			addr += nb;
			for (i = 0; i < nb0; ++i, ++p)
				if (__put_user_inatomic(REG_BYTE(rptr,
								 i ^ bswiz),
							SWIZ_PTR(p)))
					return -EFAULT;
		}
	}
	return 1;
}
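
/*
 * Note on string instructions (derived from the decoding above): for
 * lswi/stswi the byte count is the 5-bit field extracted as
 * (instr >> 11) & 0x1f, with 0 meaning 32; for lswx/stswx it comes from
 * the low 7 bits of XER.  Registers wrap from r31 back to r0, which is
 * what the nb0 handling above implements.
 */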

/*
 * Emulate floating-point pair loads and stores.
 * Only POWER6 has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_fp_pair(unsigned char __user *addr, unsigned int reg,
			   unsigned int flags)
{
	char *ptr0 = (char *) &current->thread.TS_FPR(reg);
	char *ptr1 = (char *) &current->thread.TS_FPR(reg+1);
	int i, ret, sw = 0;

	if (reg & 1)
		return 0;	/* invalid form: FR must be even */
	if (flags & SW)
		sw = 7;
	ret = 0;
	for (i = 0; i < 8; ++i) {
		if (!(flags & ST)) {
			ret |= __get_user(ptr0[i^sw], addr + i);
			ret |= __get_user(ptr1[i^sw], addr + i + 8);
		} else {
			ret |= __put_user(ptr0[i^sw], addr + i);
			ret |= __put_user(ptr1[i^sw], addr + i + 8);
		}
	}
	if (ret)
		return -EFAULT;
	return 1;
}

#ifdef CONFIG_PPC64
/*
 * Emulate quadword (lq/stq) loads and stores, which use an even/odd
 * GPR pair, in the same byte-at-a-time fashion as the FP pair case.
 */
static int emulate_lq_stq(struct pt_regs *regs, unsigned char __user *addr,
			  unsigned int reg, unsigned int flags)
{
	char *ptr0 = (char *)&regs->gpr[reg];
	char *ptr1 = (char *)&regs->gpr[reg+1];
	int i, ret, sw = 0;

	if (reg & 1)
		return 0;	/* invalid form: GPR must be even */
	if (flags & SW)
		sw = 7;
	ret = 0;
	for (i = 0; i < 8; ++i) {
		if (!(flags & ST)) {
			ret |= __get_user(ptr0[i^sw], addr + i);
			ret |= __get_user(ptr1[i^sw], addr + i + 8);
		} else {
			ret |= __put_user(ptr0[i^sw], addr + i);
			ret |= __put_user(ptr1[i^sw], addr + i + 8);
		}
	}
	if (ret)
		return -EFAULT;
	return 1;
}
#endif /* CONFIG_PPC64 */

#ifdef CONFIG_SPE

/*
 * Alignment information for SPE load/store instructions, indexed by
 * ((instr >> 1) & 0x1f); see emulate_spe() below.
 */
static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },
	{ 8, LD+E4 },
	{ 8, LD },
	INVALID,
	{ 2, LD },
	INVALID,
	{ 2, LD },
	{ 2, LD+SE },
	{ 4, LD },
	INVALID,
	{ 4, LD },
	{ 4, LD+SE },
	{ 4, LD+E4 },
	INVALID,
	{ 4, LD },
	INVALID,

	{ 8, ST+E8 },
	{ 8, ST+E4 },
	{ 8, ST },
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	{ 4, ST },
	INVALID,
	{ 4, ST },
	INVALID,
	{ 4, ST+E4 },
	INVALID,
	{ 4, ST+E4 },
	INVALID,
};
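
/*
 * The EVLDD ... EVSTWWO values below are exactly the ((instr >> 1) & 0x1f)
 * indexes used for the table above, so e.g. spe_aligninfo[EVLHHOSSPLAT]
 * is the { 2, LD+SE } entry (a 2-byte sign-extended load).
 */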

#define EVLDD		0x00
#define EVLDW		0x01
#define EVLDH		0x02
#define EVLHHESPLAT	0x04
#define EVLHHOUSPLAT	0x06
#define EVLHHOSSPLAT	0x07
#define EVLWHE		0x08
#define EVLWHOU		0x0A
#define EVLWHOS		0x0B
#define EVLWWSPLAT	0x0C
#define EVLWHSPLAT	0x0E
#define EVSTDD		0x10
#define EVSTDW		0x11
#define EVSTDH		0x12
#define EVSTWHE		0x18
#define EVSTWHO		0x1A
#define EVSTWWE		0x1C
#define EVSTWWO		0x1E

/*
 * Emulate SPE loads and stores.
 * Only Book-E has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */
static int emulate_spe(struct pt_regs *regs, unsigned int reg,
		       unsigned int instr)
{
	int ret;
	union {
		u64 ll;
		u32 w[2];
		u16 h[4];
		u8 v[8];
	} data, temp;
	unsigned char __user *p, *addr;
	unsigned long *evr = &current->thread.evr[reg];
	unsigned int nb, flags;

	instr = (instr >> 1) & 0x1f;

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

	nb = spe_aligninfo[instr].len;
	flags = spe_aligninfo[instr].flags;

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* userland only */
	if (unlikely(!user_mode(regs)))
		return 0;

	flush_spe_to_thread(current);

	/*
	 * For a store, gather the value from the register image;
	 * for a load, fetch it from user memory below.
	 */
	if (flags & ST) {
		data.ll = 0;
		switch (instr) {
		case EVSTDD:
		case EVSTDW:
		case EVSTDH:
			data.w[0] = *evr;
			data.w[1] = regs->gpr[reg];
			break;
		case EVSTWHE:
			data.h[2] = *evr >> 16;
			data.h[3] = regs->gpr[reg] >> 16;
			break;
		case EVSTWHO:
			data.h[2] = *evr & 0xffff;
			data.h[3] = regs->gpr[reg] & 0xffff;
			break;
		case EVSTWWE:
			data.w[1] = *evr;
			break;
		case EVSTWWO:
			data.w[1] = regs->gpr[reg];
			break;
		default:
			return -EINVAL;
		}
	} else {
		temp.ll = data.ll = 0;
		ret = 0;
		p = addr;

		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(temp.v[0], p++);
			ret |= __get_user_inatomic(temp.v[1], p++);
			ret |= __get_user_inatomic(temp.v[2], p++);
			ret |= __get_user_inatomic(temp.v[3], p++);
			/* fall through */
		case 4:
			ret |= __get_user_inatomic(temp.v[4], p++);
			ret |= __get_user_inatomic(temp.v[5], p++);
			/* fall through */
		case 2:
			ret |= __get_user_inatomic(temp.v[6], p++);
			ret |= __get_user_inatomic(temp.v[7], p++);
			if (unlikely(ret))
				return -EFAULT;
		}

		switch (instr) {
		case EVLDD:
		case EVLDW:
		case EVLDH:
			data.ll = temp.ll;
			break;
		case EVLHHESPLAT:
			data.h[0] = temp.h[3];
			data.h[2] = temp.h[3];
			break;
		case EVLHHOUSPLAT:
		case EVLHHOSSPLAT:
			data.h[1] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		case EVLWHE:
			data.h[0] = temp.h[2];
			data.h[2] = temp.h[3];
			break;
		case EVLWHOU:
		case EVLWHOS:
			data.h[1] = temp.h[2];
			data.h[3] = temp.h[3];
			break;
		case EVLWWSPLAT:
			data.w[0] = temp.w[1];
			data.w[1] = temp.w[1];
			break;
		case EVLWHSPLAT:
			data.h[0] = temp.h[2];
			data.h[1] = temp.h[2];
			data.h[2] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		default:
			return -EINVAL;
		}
	}

	if (flags & SW) {
		switch (flags & 0xf0) {
		case E8:
			data.ll = swab64(data.ll);
			break;
		case E4:
			data.w[0] = swab32(data.w[0]);
			data.w[1] = swab32(data.w[1]);
			break;
		/* half-word element swap */
		default:
			data.h[0] = swab16(data.h[0]);
			data.h[1] = swab16(data.h[1]);
			data.h[2] = swab16(data.h[2]);
			data.h[3] = swab16(data.h[3]);
			break;
		}
	}

	if (flags & SE) {
		data.w[0] = (s16)data.h[1];
		data.w[1] = (s16)data.h[3];
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		ret = 0;
		p = addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], p++);
			ret |= __put_user_inatomic(data.v[1], p++);
			ret |= __put_user_inatomic(data.v[2], p++);
			ret |= __put_user_inatomic(data.v[3], p++);
			/* fall through */
		case 4:
			ret |= __put_user_inatomic(data.v[4], p++);
			ret |= __put_user_inatomic(data.v[5], p++);
			/* fall through */
		case 2:
			ret |= __put_user_inatomic(data.v[6], p++);
			ret |= __put_user_inatomic(data.v[7], p++);
		}
		if (unlikely(ret))
			return -EFAULT;
	} else {
		*evr = data.w[0];
		regs->gpr[reg] = data.w[1];
	}

	return 1;
}
#endif /* CONFIG_SPE */

#ifdef CONFIG_VSX
/*
 * Emulate VSX loads and stores that take alignment faults, copying one
 * element at a time between user memory and the FP/VMX register image
 * in the thread struct.
 */
static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
		       unsigned int areg, struct pt_regs *regs,
		       unsigned int flags, unsigned int length,
		       unsigned int elsize)
{
	char *ptr;
	unsigned long *lptr;
	int ret = 0;
	int sw = 0;
	int i, j;

	/* userland only */
	if (unlikely(!user_mode(regs)))
		return 0;

	flush_vsx_to_thread(current);

	if (reg < 32)
		ptr = (char *) &current->thread.fp_state.fpr[reg][0];
	else
		ptr = (char *) &current->thread.vr_state.vr[reg - 32];

	lptr = (unsigned long *) ptr;

#ifdef __LITTLE_ENDIAN__
	if (flags & SW) {
		elsize = length;
		sw = length-1;
	} else {
		/*
		 * The elements are BE ordered, even in LE mode, so process
		 * them in reverse order.
		 */
		addr += length - elsize;

		/* 8 byte memory accesses go in the top 8 bytes of the VSR */
		if (length == 8)
			ptr += 8;
	}
#else
	if (flags & SW)
		sw = elsize-1;
#endif

	for (j = 0; j < length; j += elsize) {
		for (i = 0; i < elsize; ++i) {
			if (flags & ST)
				ret |= __put_user(ptr[i^sw], addr + i);
			else
				ret |= __get_user(ptr[i^sw], addr + i);
		}
		ptr += elsize;
#ifdef __LITTLE_ENDIAN__
		addr -= elsize;
#else
		addr += elsize;
#endif
	}

#ifdef __BIG_ENDIAN__
#define VSX_HI 0
#define VSX_LO 1
#else
#define VSX_HI 1
#define VSX_LO 0
#endif

	if (!ret) {
		if (flags & U)
			regs->gpr[areg] = regs->dar;

		/*
		 * Splat load copies the same data to top and bottom 8 bytes
		 * of the loaded 128 bit VSX register.
		 */
		if (flags & SPLT)
			lptr[VSX_LO] = lptr[VSX_HI];
		/* For 8 byte loads, zero the low 8 bytes of the VSR */
		else if (!(flags & ST) && (8 == length))
			lptr[VSX_LO] = 0;
	} else
		return -EFAULT;

	return 1;
}
#endif /* CONFIG_VSX */

/*
 * Called on alignment exception. Attempts to fixup.
 *
 * Return 1 on success
 * Return 0 if unable to handle the interrupt
 * Return -EFAULT if data address is bad
 * Other negative return values indicate that the instruction can't
 * be emulated, and the process should be given a SIGBUS.
 */
int fix_alignment(struct pt_regs *regs)
{
	unsigned int instr, nb, flags, instruction = 0;
	unsigned int reg, areg;
	unsigned int dsisr;
	unsigned char __user *addr;
	unsigned long p, swiz;
	int ret, i;
	union data {
		u64 ll;
		double dd;
		unsigned char v[8];
		struct {
#ifdef __LITTLE_ENDIAN__
			int	 low32;
			unsigned hi32;
#else
			unsigned hi32;
			int	 low32;
#endif
		} x32;
		struct {
#ifdef __LITTLE_ENDIAN__
			short	      low16;
			unsigned char hi48[6];
#else
			unsigned char hi48[6];
			short	      low16;
#endif
		} x16;
	} data;

	/*
	 * We require a complete register set; if not, then our assembly
	 * is broken.
	 */
	CHECK_FULL_REGS(regs);

	dsisr = regs->dsisr;

	/*
	 * Some processors don't provide us with a DSISR we can use here,
	 * let's make one up from the instruction.
	 */
	if (cpu_has_feature(CPU_FTR_NODSISRALIGN)) {
		unsigned long pc = regs->nip;

		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
			pc ^= 4;
		if (unlikely(__get_user_inatomic(instr,
						 (unsigned int __user *)pc)))
			return -EFAULT;
		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
			instr = cpu_to_le32(instr);
		dsisr = make_dsisr(instr);
		instruction = instr;
	}

	/* extract the operation and registers from the dsisr */
	reg = (dsisr >> 5) & 0x1f;	/* source/dest register */
	areg = dsisr & 0x1f;		/* register to update */

#ifdef CONFIG_SPE
	if ((instr >> 26) == 0x4) {
		PPC_WARN_ALIGNMENT(spe, regs);
		return emulate_spe(regs, reg, instr);
	}
#endif

	instr = (dsisr >> 10) & 0x7f;
	instr |= (dsisr >> 13) & 0x60;

	/* Lookup the operation in our table */
	nb = aligninfo[instr].len;
	flags = aligninfo[instr].flags;

	/* ldbrx/stdbrx overlap lfs/stfs in the DSISR-derived index */
	if (IS_XFORM(instruction) && ((instruction >> 1) & 0x3ff) == 532) {
		nb = 8;
		flags = LD+SW;
	} else if (IS_XFORM(instruction) &&
		   ((instruction >> 1) & 0x3ff) == 660) {
		nb = 8;
		flags = ST+SW;
	}

	/* Byteswap little endian loads and stores */
	swiz = 0;
	if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) {
		flags ^= SW;
#ifdef __BIG_ENDIAN__
		/*
		 * So-called "PowerPC little endian" mode works by
		 * swizzling addresses rather than by actually doing
		 * any byte-swapping.  To emulate this, we XOR each
		 * byte address with 7.  We also byte-swap, because
		 * the processor's address swizzling depends on the
		 * operand size (it xors the address with 7 for bytes,
		 * 6 for halfwords, 4 for words, 0 for doublewords) but
		 * we will xor with 7 and load/store each byte separately.
		 */
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			swiz = 7;
#endif
	}

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

#ifdef CONFIG_VSX
	if ((instruction & 0xfc00003e) == 0x7c000018) {
		unsigned int elsize;

		/* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
		reg |= (instruction & 0x1) << 5;

		/* Simple inline decoder instead of a table */
		/* VSX has only 8 and 16 byte memory accesses */
		nb = 8;
		if (instruction & 0x200)
			nb = 16;

		/*
		 * Vector stores in little-endian mode swap individual
		 * elements, so process them separately.
		 */
		elsize = 4;
		if (instruction & 0x80)
			elsize = 8;

		flags = 0;
		if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE))
			flags |= SW;
		if (instruction & 0x100)
			flags |= ST;
		if (instruction & 0x040)
			flags |= U;

		/* splat form: load one doubleword and replicate it */
		if ((instruction & 0x400) == 0) {
			flags |= SPLT;
			nb = 8;
		}
		PPC_WARN_ALIGNMENT(vsx, regs);
		return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
	}
#endif

	/*
	 * ISA 3.0 (such as P9) copy, copy_first, paste and paste_last
	 * alignment check.
	 *
	 * Send a SIGBUS to the process that caused the fault.
	 *
	 * We do not emulate these because paste may contain additional
	 * metadata when pasting to a co-processor, and paste_last is the
	 * synchronisation point for preceding copy/paste sequences.
	 */
	if ((instruction & 0xfc0006fe) == PPC_INST_COPY)
		return -EIO;

	/*
	 * A size of 0 indicates an instruction we don't support, with
	 * the exception of DCBZ, which is handled as a special case here.
	 */
	if (instr == DCBZ) {
		PPC_WARN_ALIGNMENT(dcbz, regs);
		return emulate_dcbz(regs, addr);
	}
	if (unlikely(nb == 0))
		return 0;

	/*
	 * Load/Store Multiple instructions are handled in their own
	 * function.
	 */
	if (flags & M) {
		PPC_WARN_ALIGNMENT(multiple, regs);
		return emulate_multiple(regs, addr, reg, nb,
					flags, instr, swiz);
	}

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* Force the fprs into the save area so we can reference them */
	if (flags & F) {
		/* userland only */
		if (unlikely(!user_mode(regs)))
			return 0;
		flush_fp_to_thread(current);
	}

	if (nb == 16) {
		if (flags & F) {
			/* Special case for 16-byte FP loads and stores */
			PPC_WARN_ALIGNMENT(fp_pair, regs);
			return emulate_fp_pair(addr, reg, flags);
		} else {
#ifdef CONFIG_PPC64
			/* Special case for 16-byte loads and stores */
			PPC_WARN_ALIGNMENT(lq_stq, regs);
			return emulate_lq_stq(regs, addr, reg, flags);
#else
			return 0;
#endif
		}
	}

	PPC_WARN_ALIGNMENT(unaligned, regs);

	/*
	 * If we are loading, get the data from user space, else
	 * get it from register values.
	 */
	if (!(flags & ST)) {
		unsigned int start = 0;

		switch (nb) {
		case 4:
			start = offsetof(union data, x32.low32);
			break;
		case 2:
			start = offsetof(union data, x16.low16);
			break;
		}

		data.ll = 0;
		ret = 0;
		p = (unsigned long)addr;

		for (i = 0; i < nb; i++)
			ret |= __get_user_inatomic(data.v[start + i],
						   SWIZ_PTR(p++));

		if (unlikely(ret))
			return -EFAULT;

	} else if (flags & F) {
		data.ll = current->thread.TS_FPR(reg);
		if (flags & S) {
			/* Single-precision FP store requires conversion */
#ifdef CONFIG_PPC_FPU
			preempt_disable();
			enable_kernel_fp();
			cvt_df(&data.dd, (float *)&data.x32.low32);
			disable_kernel_fp();
			preempt_enable();
#else
			return 0;
#endif
		}
	} else
		data.ll = regs->gpr[reg];

	if (flags & SW) {
		switch (nb) {
		case 8:
			data.ll = swab64(data.ll);
			break;
		case 4:
			data.x32.low32 = swab32(data.x32.low32);
			break;
		case 2:
			data.x16.low16 = swab16(data.x16.low16);
			break;
		}
	}

	/*
	 * Perform other misc operations like sign extension
	 * or floating point single precision conversion.
	 */
	switch (flags & ~(U|SW)) {
	case LD+SE:	/* sign extending integer loads */
	case LD+F+SE:	/* sign extend for lfiwax */
		if (nb == 2)
			data.ll = data.x16.low16;
		else	/* nb must be 4 */
			data.ll = data.x32.low32;
		break;

	/* Single-precision FP load requires conversion */
	case LD+F+S:
#ifdef CONFIG_PPC_FPU
		preempt_disable();
		enable_kernel_fp();
		cvt_fd((float *)&data.x32.low32, &data.dd);
		disable_kernel_fp();
		preempt_enable();
#else
		return 0;
#endif
		break;
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		unsigned int start = 0;

		switch (nb) {
		case 4:
			start = offsetof(union data, x32.low32);
			break;
		case 2:
			start = offsetof(union data, x16.low16);
			break;
		}

		ret = 0;
		p = (unsigned long)addr;

		for (i = 0; i < nb; i++)
			ret |= __put_user_inatomic(data.v[start + i],
						   SWIZ_PTR(p++));

		if (unlikely(ret))
			return -EFAULT;
	} else if (flags & F)
		current->thread.TS_FPR(reg) = data.ll;
	else
		regs->gpr[reg] = data.ll;

	/* Update RA as needed */
	if (flags & U)
		regs->gpr[areg] = regs->dar;

	return 1;
}