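/*
 * align.c - handle alignment exceptions for the Power PC.
 *
 * Emulates the load or store that caused an alignment exception so that
 * unaligned accesses still complete instead of delivering a signal.
 */
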
#include <linux/kernel.h>
#include <linux/mm.h>
#include <asm/processor.h>
#include <asm/uaccess.h>
#include <asm/cache.h>
#include <asm/cputable.h>
#include <asm/emulated_ops.h>
#include <asm/switch_to.h>

struct aligninfo {
	unsigned char len;
	unsigned char flags;
};

#define IS_XFORM(inst)	(((inst) >> 26) == 31)
#define IS_DSFORM(inst)	(((inst) >> 26) >= 56)

#define INVALID	{ 0, 0 }

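/*
 * Bits in the flags field of struct aligninfo.  The short descriptions
 * are inferred from how the emulation code below tests each flag:
 *
 *	LD/ST	load vs. store
 *	SE	sign-extend the loaded value
 *	F	transfer to/from FP registers
 *	U	update the index (RA) register afterwards
 *	M	load/store multiple
 *	SW	byte-swap the data
 *	S	single-precision FP conversion
 *	SX	byte count comes from XER (string ops)
 *	HARD	needs special handling (string ops, dcbz)
 *	E4/E8	SPE element size (word / doubleword) for byte swapping
 *	SPLT	VSX splat load
 */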
#define LD	0
#define ST	1
#define SE	2
#define F	4
#define U	8
#define M	0x10
#define SW	0x20
#define S	0x40
#define SX	0x40
#define HARD	0x80
#define E4	0x40
#define E8	0x80
#define SPLT	0x80

#define DCBZ	0x5f

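/*
 * The PowerPC stores certain bits of the instruction that caused the
 * alignment exception in the DSISR register.  This array maps those
 * bits to information about the operand length and what the
 * instruction would do.
 */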
static struct aligninfo aligninfo[128] = {
	{ 4, LD },
	INVALID,
	{ 4, ST },
	INVALID,
	{ 2, LD },
	{ 2, LD+SE },
	{ 2, ST },
	{ 4, LD+M },
	{ 4, LD+F+S },
	{ 8, LD+F },
	{ 4, ST+F+S },
	{ 8, ST+F },
	{ 16, LD },
	{ 8, LD },
	INVALID,
	{ 8, ST },
	{ 4, LD+U },
	INVALID,
	{ 4, ST+U },
	INVALID,
	{ 2, LD+U },
	{ 2, LD+SE+U },
	{ 2, ST+U },
	{ 4, ST+M },
	{ 4, LD+F+S+U },
	{ 8, LD+F+U },
	{ 4, ST+F+S+U },
	{ 8, ST+F+U },
	{ 16, LD+F },
	INVALID,
	{ 16, ST+F },
	INVALID,
	{ 8, LD },
	INVALID,
	{ 8, ST },
	INVALID,
	INVALID,
	{ 4, LD+SE },
	INVALID,
	INVALID,
	{ 4, LD+M+HARD+SX },
	{ 4, LD+M+HARD },
	{ 4, ST+M+HARD+SX },
	{ 4, ST+M+HARD },
	INVALID,
	{ 8, LD+U },
	INVALID,
	{ 8, ST+U },
	{ 8, LD+U },
	INVALID,
	{ 8, ST+U },
	INVALID,
	INVALID,
	{ 4, LD+SE+U },
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	{ 4, LD+SW },
	INVALID,
	{ 4, ST+SW },
	INVALID,
	{ 2, LD+SW },
	{ 4, LD+SE },
	{ 2, ST+SW },
	{ 16, ST },
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	{ 0, ST+HARD },
	{ 4, LD },
	INVALID,
	{ 4, ST },
	INVALID,
	{ 2, LD },
	{ 2, LD+SE },
	{ 2, ST },
	INVALID,
	{ 4, LD+F+S },
	{ 8, LD+F },
	{ 4, ST+F+S },
	{ 8, ST+F },
	{ 16, LD+F },
	{ 4, LD+F+SE },
	{ 16, ST+F },
	{ 4, ST+F },
	{ 4, LD+U },
	INVALID,
	{ 4, ST+U },
	INVALID,
	{ 2, LD+U },
	{ 2, LD+SE+U },
	{ 2, ST+U },
	INVALID,
	{ 4, LD+F+S+U },
	{ 8, LD+F+U },
	{ 4, ST+F+S+U },
	{ 8, ST+F+U },
	INVALID,
	{ 4, LD+F },
	INVALID,
	INVALID,
};

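/*
 * Create a DSISR value from the instruction
 */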
static inline unsigned make_dsisr(unsigned instr)
{
	unsigned dsisr;

	/* bits  6:15 --> 22:31 */
	dsisr = (instr & 0x03ff0000) >> 16;

	if (IS_XFORM(instr)) {
		/* bits 29:30 --> 15:16 */
		dsisr |= (instr & 0x00000006) << 14;
		/* bit     25 -->    17 */
		dsisr |= (instr & 0x00000040) << 8;
		/* bits 21:24 --> 18:21 */
		dsisr |= (instr & 0x00000780) << 3;
	} else {
		/* bit      5 -->    17 */
		dsisr |= (instr & 0x04000000) >> 12;
		/* bits  1: 4 --> 18:21 */
		dsisr |= (instr & 0x78000000) >> 17;
		/* bits 30:31 --> 12:13 */
		if (IS_DSFORM(instr))
			dsisr |= (instr & 0x00000003) << 18;
	}

	return dsisr;
}

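/*
 * Emulate the dcbz instruction: clear the whole cache block containing
 * the faulting address.  (Some processors, e.g. 8xx, take an alignment
 * fault for dcbz when the data cache is not enabled.)
 */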
static int emulate_dcbz(struct pt_regs *regs, unsigned char __user *addr)
{
	long __user *p;
	int i, size;

#ifdef __powerpc64__
	size = ppc64_caches.dline_size;
#else
	size = L1_CACHE_BYTES;
#endif
	p = (long __user *) (regs->dar & -size);
	if (user_mode(regs) && !access_ok(VERIFY_WRITE, p, size))
		return -EFAULT;
	for (i = 0; i < size / sizeof(long); ++i)
		if (__put_user_inatomic(0, p+i))
			return -EFAULT;
	return 1;
}

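/*
 * REG_BYTE(rp, i) addresses byte (i & 3), in memory order, of the low
 * 32-bit word of the GPR at index (i >> 2) from rp.  SWIZ_PTR applies
 * the address swizzle used when emulating PPC little-endian mode.
 */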
#ifdef __BIG_ENDIAN__
#ifdef CONFIG_PPC64
#define REG_BYTE(rp, i)		*((u8 *)((rp) + ((i) >> 2)) + ((i) & 3) + 4)
#else
#define REG_BYTE(rp, i)		*((u8 *)(rp) + (i))
#endif
#endif

#ifdef __LITTLE_ENDIAN__
#define REG_BYTE(rp, i)		(*(((u8 *)((rp) + ((i)>>2)) + ((i)&3))))
#endif

#define SWIZ_PTR(p)		((unsigned char __user *)((p) ^ swiz))

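/*
 * Emulate load & store multiple instructions.
 * On 64-bit machines these instructions only affect/use the
 * bottom 4 bytes of each register, and the loads clear the
 * top 4 bytes of the affected register.
 */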
static int emulate_multiple(struct pt_regs *regs, unsigned char __user *addr,
			    unsigned int reg, unsigned int nb,
			    unsigned int flags, unsigned int instr,
			    unsigned long swiz)
{
	unsigned long *rptr;
	unsigned int nb0, i, bswiz;
	unsigned long p;

	/*
	 * We only emulate element sizes of up to 4 bytes, and only
	 * for faults taken in user mode.
	 */
	if (unlikely((nb > 4) || !user_mode(regs)))
		return 0;

	/* lmw, stmw, lswi/x, stswi/x */
	nb0 = 0;
	if (flags & HARD) {
		if (flags & SX) {
			nb = regs->xer & 127;
			if (nb == 0)
				return 1;
		} else {
			unsigned long pc = regs->nip ^ (swiz & 4);

			if (__get_user_inatomic(instr,
						(unsigned int __user *)pc))
				return -EFAULT;
			if (swiz == 0 && (flags & SW))
				instr = cpu_to_le32(instr);
			nb = (instr >> 11) & 0x1f;
			if (nb == 0)
				nb = 32;
		}
		if (nb + reg * 4 > 128) {
			nb0 = nb + reg * 4 - 128;
			nb = 128 - reg * 4;
		}
#ifdef __LITTLE_ENDIAN__
		/*
		 *  String instructions are endian neutral but the code
		 *  below is not.  Force byteswap on LE systems.
		 */
		flags ^= SW;
#endif
	} else {
		/* lmw, stmw */
		nb = (32 - reg) * 4;
	}

	if (!access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ), addr, nb+nb0))
		return -EFAULT;

	rptr = &regs->gpr[reg];
	p = (unsigned long) addr;
	bswiz = (flags & SW)? 3: 0;

	if (!(flags & ST)) {
		/*
		 * This zeroes the top 4 bytes of the affected
		 * registers in 64-bit mode, and also zeroes out any
		 * remaining bytes of the last register for lsw*.
		 */
		memset(rptr, 0, ((nb + 3) / 4) * sizeof(unsigned long));
		if (nb0 > 0)
			memset(&regs->gpr[0], 0,
			       ((nb0 + 3) / 4) * sizeof(unsigned long));

		for (i = 0; i < nb; ++i, ++p)
			if (__get_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
						SWIZ_PTR(p)))
				return -EFAULT;
		if (nb0 > 0) {
			rptr = &regs->gpr[0];
			addr += nb;
			for (i = 0; i < nb0; ++i, ++p)
				if (__get_user_inatomic(REG_BYTE(rptr,
								 i ^ bswiz),
							SWIZ_PTR(p)))
					return -EFAULT;
		}
	} else {
		for (i = 0; i < nb; ++i, ++p)
			if (__put_user_inatomic(REG_BYTE(rptr, i ^ bswiz),
						SWIZ_PTR(p)))
				return -EFAULT;
		if (nb0 > 0) {
			rptr = &regs->gpr[0];
			addr += nb;
			for (i = 0; i < nb0; ++i, ++p)
				if (__put_user_inatomic(REG_BYTE(rptr,
								 i ^ bswiz),
							SWIZ_PTR(p)))
					return -EFAULT;
		}
	}
	return 1;
}

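/*
 * Emulate floating-point pair loads and stores (lfdp/stfdp and friends),
 * which transfer 16 bytes to/from an even/odd FP register pair.
 */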
static int emulate_fp_pair(unsigned char __user *addr, unsigned int reg,
			   unsigned int flags)
{
	char *ptr0 = (char *) &current->thread.TS_FPR(reg);
	char *ptr1 = (char *) &current->thread.TS_FPR(reg+1);
	int i, ret, sw = 0;

	if (reg & 1)
		return 0;	/* invalid form: FR must be even */
	if (flags & SW)
		sw = 7;
	ret = 0;
	for (i = 0; i < 8; ++i) {
		if (!(flags & ST)) {
			ret |= __get_user(ptr0[i^sw], addr + i);
			ret |= __get_user(ptr1[i^sw], addr + i + 8);
		} else {
			ret |= __put_user(ptr0[i^sw], addr + i);
			ret |= __put_user(ptr1[i^sw], addr + i + 8);
		}
	}
	if (ret)
		return -EFAULT;
	return 1;
}

#ifdef CONFIG_PPC64
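/* Emulate quadword (lq/stq) loads and stores to/from an even/odd GPR pair. */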
static int emulate_lq_stq(struct pt_regs *regs, unsigned char __user *addr,
			  unsigned int reg, unsigned int flags)
{
	char *ptr0 = (char *)&regs->gpr[reg];
	char *ptr1 = (char *)&regs->gpr[reg+1];
	int i, ret, sw = 0;

	if (reg & 1)
		return 0;	/* invalid form: GPR must be even */
	if (flags & SW)
		sw = 7;
	ret = 0;
	for (i = 0; i < 8; ++i) {
		if (!(flags & ST)) {
			ret |= __get_user(ptr0[i^sw], addr + i);
			ret |= __get_user(ptr1[i^sw], addr + i + 8);
		} else {
			ret |= __put_user(ptr0[i^sw], addr + i);
			ret |= __put_user(ptr1[i^sw], addr + i + 8);
		}
	}
	if (ret)
		return -EFAULT;
	return 1;
}
#endif /* CONFIG_PPC64 */

#ifdef CONFIG_SPE

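/*
 * Alignment/operand info for SPE load/store instructions, indexed by the
 * low opcode bits extracted in emulate_spe() below.
 */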
static struct aligninfo spe_aligninfo[32] = {
	{ 8, LD+E8 },
	{ 8, LD+E4 },
	{ 8, LD },
	INVALID,
	{ 2, LD },
	INVALID,
	{ 2, LD },
	{ 2, LD+SE },
	{ 4, LD },
	INVALID,
	{ 4, LD },
	{ 4, LD+SE },
	{ 4, LD+E4 },
	INVALID,
	{ 4, LD },
	INVALID,

	{ 8, ST+E8 },
	{ 8, ST+E4 },
	{ 8, ST },
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	INVALID,
	{ 4, ST },
	INVALID,
	{ 4, ST },
	INVALID,
	{ 4, ST+E4 },
	INVALID,
	{ 4, ST+E4 },
	INVALID,
};

#define EVLDD		0x00
#define EVLDW		0x01
#define EVLDH		0x02
#define EVLHHESPLAT	0x04
#define EVLHHOUSPLAT	0x06
#define EVLHHOSSPLAT	0x07
#define EVLWHE		0x08
#define EVLWHOU		0x0A
#define EVLWHOS		0x0B
#define EVLWWSPLAT	0x0C
#define EVLWHSPLAT	0x0E
#define EVSTDD		0x10
#define EVSTDW		0x11
#define EVSTDH		0x12
#define EVSTWHE		0x18
#define EVSTWHO		0x1A
#define EVSTWWE		0x1C
#define EVSTWWO		0x1E

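/*
 * Emulate SPE loads and stores.
 * Only Book-E has these instructions, and it does true little-endian,
 * so we don't need the address swizzling.
 */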
static int emulate_spe(struct pt_regs *regs, unsigned int reg,
		       unsigned int instr)
{
	int ret;
	union {
		u64 ll;
		u32 w[2];
		u16 h[4];
		u8 v[8];
	} data, temp;
	unsigned char __user *p, *addr;
	unsigned long *evr = &current->thread.evr[reg];
	unsigned int nb, flags;

	instr = (instr >> 1) & 0x1f;

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

	nb = spe_aligninfo[instr].len;
	flags = spe_aligninfo[instr].flags;

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* userland only */
	if (unlikely(!user_mode(regs)))
		return 0;

	flush_spe_to_thread(current);

	/* If we are storing, gather the data from the register pair,
	 * else fetch it from user space
	 */
	if (flags & ST) {
		data.ll = 0;
		switch (instr) {
		case EVSTDD:
		case EVSTDW:
		case EVSTDH:
			data.w[0] = *evr;
			data.w[1] = regs->gpr[reg];
			break;
		case EVSTWHE:
			data.h[2] = *evr >> 16;
			data.h[3] = regs->gpr[reg] >> 16;
			break;
		case EVSTWHO:
			data.h[2] = *evr & 0xffff;
			data.h[3] = regs->gpr[reg] & 0xffff;
			break;
		case EVSTWWE:
			data.w[1] = *evr;
			break;
		case EVSTWWO:
			data.w[1] = regs->gpr[reg];
			break;
		default:
			return -EINVAL;
		}
	} else {
		temp.ll = data.ll = 0;
		ret = 0;
		p = addr;

		switch (nb) {
		case 8:
			ret |= __get_user_inatomic(temp.v[0], p++);
			ret |= __get_user_inatomic(temp.v[1], p++);
			ret |= __get_user_inatomic(temp.v[2], p++);
			ret |= __get_user_inatomic(temp.v[3], p++);
		case 4:
			ret |= __get_user_inatomic(temp.v[4], p++);
			ret |= __get_user_inatomic(temp.v[5], p++);
		case 2:
			ret |= __get_user_inatomic(temp.v[6], p++);
			ret |= __get_user_inatomic(temp.v[7], p++);
			if (unlikely(ret))
				return -EFAULT;
		}

		switch (instr) {
		case EVLDD:
		case EVLDW:
		case EVLDH:
			data.ll = temp.ll;
			break;
		case EVLHHESPLAT:
			data.h[0] = temp.h[3];
			data.h[2] = temp.h[3];
			break;
		case EVLHHOUSPLAT:
		case EVLHHOSSPLAT:
			data.h[1] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		case EVLWHE:
			data.h[0] = temp.h[2];
			data.h[2] = temp.h[3];
			break;
		case EVLWHOU:
		case EVLWHOS:
			data.h[1] = temp.h[2];
			data.h[3] = temp.h[3];
			break;
		case EVLWWSPLAT:
			data.w[0] = temp.w[1];
			data.w[1] = temp.w[1];
			break;
		case EVLWHSPLAT:
			data.h[0] = temp.h[2];
			data.h[1] = temp.h[2];
			data.h[2] = temp.h[3];
			data.h[3] = temp.h[3];
			break;
		default:
			return -EINVAL;
		}
	}

	if (flags & SW) {
		switch (flags & 0xf0) {
		case E8:
			data.ll = swab64(data.ll);
			break;
		case E4:
			data.w[0] = swab32(data.w[0]);
			data.w[1] = swab32(data.w[1]);
			break;
		/* it's half-word endian */
		default:
			data.h[0] = swab16(data.h[0]);
			data.h[1] = swab16(data.h[1]);
			data.h[2] = swab16(data.h[2]);
			data.h[3] = swab16(data.h[3]);
			break;
		}
	}

	if (flags & SE) {
		data.w[0] = (s16)data.h[1];
		data.w[1] = (s16)data.h[3];
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		ret = 0;
		p = addr;
		switch (nb) {
		case 8:
			ret |= __put_user_inatomic(data.v[0], p++);
			ret |= __put_user_inatomic(data.v[1], p++);
			ret |= __put_user_inatomic(data.v[2], p++);
			ret |= __put_user_inatomic(data.v[3], p++);
		case 4:
			ret |= __put_user_inatomic(data.v[4], p++);
			ret |= __put_user_inatomic(data.v[5], p++);
		case 2:
			ret |= __put_user_inatomic(data.v[6], p++);
			ret |= __put_user_inatomic(data.v[7], p++);
		}
		if (unlikely(ret))
			return -EFAULT;
	} else {
		*evr = data.w[0];
		regs->gpr[reg] = data.w[1];
	}

	return 1;
}
#endif

#ifdef CONFIG_VSX

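/*
 * Emulate VSX loads and stores that take alignment faults.
 * 'reg' is the VSR number, 'length' the access size in bytes and
 * 'elsize' the element size used for byte swapping.
 */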
static int emulate_vsx(unsigned char __user *addr, unsigned int reg,
		       unsigned int areg, struct pt_regs *regs,
		       unsigned int flags, unsigned int length,
		       unsigned int elsize)
{
	char *ptr;
	unsigned long *lptr;
	int ret = 0;
	int sw = 0;
	int i, j;

	/* userland only for now */
	if (unlikely(!user_mode(regs)))
		return 0;

	flush_vsx_to_thread(current);

	if (reg < 32)
		ptr = (char *) &current->thread.fp_state.fpr[reg][0];
	else
		ptr = (char *) &current->thread.vr_state.vr[reg - 32];

	lptr = (unsigned long *) ptr;

#ifdef __LITTLE_ENDIAN__
	if (flags & SW) {
		elsize = length;
		sw = length-1;
	} else {
		/*
		 * The elements are BE ordered, even in LE mode, so process
		 * them in reverse order.
		 */
		addr += length - elsize;

		/* 8 byte memory accesses go in the top 8 bytes of the VSR */
		if (length == 8)
			ptr += 8;
	}
#else
	if (flags & SW)
		sw = elsize-1;
#endif

	for (j = 0; j < length; j += elsize) {
		for (i = 0; i < elsize; ++i) {
			if (flags & ST)
				ret |= __put_user(ptr[i^sw], addr + i);
			else
				ret |= __get_user(ptr[i^sw], addr + i);
		}
		ptr += elsize;
#ifdef __LITTLE_ENDIAN__
		addr -= elsize;
#else
		addr += elsize;
#endif
	}

#ifdef __BIG_ENDIAN__
#define VSX_HI 0
#define VSX_LO 1
#else
#define VSX_HI 1
#define VSX_LO 0
#endif

	if (!ret) {
		if (flags & U)
			regs->gpr[areg] = regs->dar;

		/* Splat load copies the same data to top and bottom 8 bytes */
		if (flags & SPLT)
			lptr[VSX_LO] = lptr[VSX_HI];
		/* For 8 byte loads, zero the low 8 bytes */
		else if (!(flags & ST) && (8 == length))
			lptr[VSX_LO] = 0;
	} else
		return -EFAULT;

	return 1;
}
#endif /* CONFIG_VSX */

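/*
 * Called on alignment exception.  Attempts to fixup.
 *
 * Return 1 on success
 * Return 0 if unable to handle the interrupt
 * Return -EFAULT if data address is bad
 */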
int fix_alignment(struct pt_regs *regs)
{
	unsigned int instr, nb, flags, instruction = 0;
	unsigned int reg, areg;
	unsigned int dsisr;
	unsigned char __user *addr;
	unsigned long p, swiz;
	int ret, i;
	union data {
		u64 ll;
		double dd;
		unsigned char v[8];
		struct {
#ifdef __LITTLE_ENDIAN__
			int low32;
			unsigned hi32;
#else
			unsigned hi32;
			int low32;
#endif
		} x32;
		struct {
#ifdef __LITTLE_ENDIAN__
			short low16;
			unsigned char hi48[6];
#else
			unsigned char hi48[6];
			short low16;
#endif
		} x16;
	} data;

	/*
	 * We require a complete register set, if not, then our assembly
	 * is broken
	 */
	CHECK_FULL_REGS(regs);

	dsisr = regs->dsisr;

	/* Some processors don't provide us with a DSISR we can use here,
	 * let's make one up from the instruction
	 */
	if (cpu_has_feature(CPU_FTR_NODSISRALIGN)) {
		unsigned long pc = regs->nip;

		if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE))
			pc ^= 4;
		if (unlikely(__get_user_inatomic(instr,
						 (unsigned int __user *)pc)))
			return -EFAULT;
		if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE))
			instr = cpu_to_le32(instr);
		dsisr = make_dsisr(instr);
		instruction = instr;
	}

	/* extract the operation and registers from the dsisr */
	reg = (dsisr >> 5) & 0x1f;	/* source/dest register */
	areg = dsisr & 0x1f;		/* register to update */

#ifdef CONFIG_SPE
	if ((instr >> 26) == 0x4) {
		PPC_WARN_ALIGNMENT(spe, regs);
		return emulate_spe(regs, reg, instr);
	}
#endif

	instr = (dsisr >> 10) & 0x7f;
	instr |= (dsisr >> 13) & 0x60;

	/* Lookup the operation in our table */
	nb = aligninfo[instr].len;
	flags = aligninfo[instr].flags;

	/* ldbrx (532) and stdbrx (660) are not covered by the table above */
	if (IS_XFORM(instruction) && ((instruction >> 1) & 0x3ff) == 532) {
		nb = 8;
		flags = LD+SW;
	} else if (IS_XFORM(instruction) &&
		   ((instruction >> 1) & 0x3ff) == 660) {
		nb = 8;
		flags = ST+SW;
	}

	/* Byteswap little endian loads and stores */
	swiz = 0;
	if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) {
		flags ^= SW;
#ifdef __BIG_ENDIAN__
		/*
		 * So-called "PowerPC little endian" mode works by
		 * swizzling addresses rather than by actually doing
		 * any byte-swapping.  To emulate this, we XOR each
		 * byte address with 7.  We also byte-swap, because
		 * the processor's address swizzling depends on the
		 * operand size (it xors the address with 7 for bytes,
		 * 6 for halfwords, 4 for words, 0 for doublewords) but
		 * we will xor with 7 and load/store each byte separately.
		 */
		if (cpu_has_feature(CPU_FTR_PPC_LE))
			swiz = 7;
#endif
	}

	/* DAR has the operand effective address */
	addr = (unsigned char __user *)regs->dar;

#ifdef CONFIG_VSX
	if ((instruction & 0xfc00003e) == 0x7c000018) {
		unsigned int elsize;

		/* Additional register addressing bit (64 VSX vs 32 FPR/GPR) */
		reg |= (instruction & 0x1) << 5;

		/* Simple inline decoder instead of a table:
		 * VSX has only 8 and 16 byte memory accesses */
		nb = 8;
		if (instruction & 0x200)
			nb = 16;

		/* Vector stores in little-endian mode swap individual
		   elements, so process them separately */
		elsize = 4;
		if (instruction & 0x80)
			elsize = 8;

		flags = 0;
		if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE))
			flags |= SW;
		if (instruction & 0x100)
			flags |= ST;
		if (instruction & 0x040)
			flags |= U;
		/* Splat forms: a single 8-byte element, duplicated on load */
		if ((instruction & 0x400) == 0) {
			flags |= SPLT;
			nb = 8;
		}
		PPC_WARN_ALIGNMENT(vsx, regs);
		return emulate_vsx(addr, reg, areg, regs, flags, nb, elsize);
	}
#endif

	/* A size of 0 indicates an instruction we don't support, with
	 * the exception of DCBZ which is handled as a special case here
	 */
	if (instr == DCBZ) {
		PPC_WARN_ALIGNMENT(dcbz, regs);
		return emulate_dcbz(regs, addr);
	}
	if (unlikely(nb == 0))
		return 0;

	/* Load/Store Multiple instructions are handled in their own
	 * function
	 */
	if (flags & M) {
		PPC_WARN_ALIGNMENT(multiple, regs);
		return emulate_multiple(regs, addr, reg, nb,
					flags, instr, swiz);
	}

	/* Verify the address of the operand */
	if (unlikely(user_mode(regs) &&
		     !access_ok((flags & ST ? VERIFY_WRITE : VERIFY_READ),
				addr, nb)))
		return -EFAULT;

	/* Force the fprs into the save area so we can reference them */
	if (flags & F) {
		/* userland only for now */
		if (unlikely(!user_mode(regs)))
			return 0;
		flush_fp_to_thread(current);
	}

	if (nb == 16) {
		if (flags & F) {
			/* Special case for 16-byte FP loads and stores */
			PPC_WARN_ALIGNMENT(fp_pair, regs);
			return emulate_fp_pair(addr, reg, flags);
		} else {
#ifdef CONFIG_PPC64
			/* Special case for 16-byte loads and stores */
			PPC_WARN_ALIGNMENT(lq_stq, regs);
			return emulate_lq_stq(regs, addr, reg, flags);
#else
			return 0;
#endif
		}
	}

	PPC_WARN_ALIGNMENT(unaligned, regs);

	/* If we are loading, get the data from user space, else
	 * get it from register values
	 */
	if (!(flags & ST)) {
		unsigned int start = 0;

		switch (nb) {
		case 4:
			start = offsetof(union data, x32.low32);
			break;
		case 2:
			start = offsetof(union data, x16.low16);
			break;
		}

		data.ll = 0;
		ret = 0;
		p = (unsigned long)addr;

		for (i = 0; i < nb; i++)
			ret |= __get_user_inatomic(data.v[start + i],
						   SWIZ_PTR(p++));

		if (unlikely(ret))
			return -EFAULT;

	} else if (flags & F) {
		data.ll = current->thread.TS_FPR(reg);
		if (flags & S) {
			/* Single-precision FP store requires conversion */
#ifdef CONFIG_PPC_FPU
			preempt_disable();
			enable_kernel_fp();
			cvt_df(&data.dd, (float *)&data.x32.low32);
			preempt_enable();
#else
			return 0;
#endif
		}
	} else
		data.ll = regs->gpr[reg];

	if (flags & SW) {
		switch (nb) {
		case 8:
			data.ll = swab64(data.ll);
			break;
		case 4:
			data.x32.low32 = swab32(data.x32.low32);
			break;
		case 2:
			data.x16.low16 = swab16(data.x16.low16);
			break;
		}
	}

	/* Perform other misc operations like sign extension
	 * or floating point single precision conversion
	 */
	switch (flags & ~(U|SW)) {
	case LD+SE:	/* sign extending integer loads */
	case LD+F+SE:	/* sign extend for lfiwax */
		if (nb == 2)
			data.ll = data.x16.low16;
		else	/* nb must be 4 */
			data.ll = data.x32.low32;
		break;

	/* Single-precision FP load requires conversion */
	case LD+F+S:
#ifdef CONFIG_PPC_FPU
		preempt_disable();
		enable_kernel_fp();
		cvt_fd((float *)&data.x32.low32, &data.dd);
		preempt_enable();
#else
		return 0;
#endif
		break;
	}

	/* Store result to memory or update registers */
	if (flags & ST) {
		unsigned int start = 0;

		switch (nb) {
		case 4:
			start = offsetof(union data, x32.low32);
			break;
		case 2:
			start = offsetof(union data, x16.low16);
			break;
		}

		ret = 0;
		p = (unsigned long)addr;

		for (i = 0; i < nb; i++)
			ret |= __put_user_inatomic(data.v[start + i],
						   SWIZ_PTR(p++));

		if (unlikely(ret))
			return -EFAULT;
	} else if (flags & F)
		current->thread.TS_FPR(reg) = data.ll;
	else
		regs->gpr[reg] = data.ll;

	/* Update RA as needed */
	if (flags & U)
		regs->gpr[areg] = regs->dar;

	return 1;
}