1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
/*
 * Build-time configuration for the MIPS memcpy/copy_user routines.
 *
 * Prefetching is disabled on non-coherent-DMA and Malta configurations,
 * where it is evidently known to cause trouble (the PREFS/PREFD macros
 * become no-ops when CONFIG_CPU_HAS_PREFETCH is undefined).
 */
#ifdef CONFIG_DMA_NONCOHERENT
#undef CONFIG_CPU_HAS_PREFETCH
#endif
#ifdef CONFIG_MIPS_MALTA
#undef CONFIG_CPU_HAS_PREFETCH
#endif

#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/* Symbolic names for the argument registers used throughout this file. */
#define dst a0
#define src a1
#define len a2
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
/* Instruction-class selectors for the EXC() wrapper macro. */
#define LD_INSN 1
#define ST_INSN 2

/* Prefetch-direction selectors for the _PREF() macro. */
#define SRC_PREFETCH 1
#define DST_PREFETCH 2
/* Addressing modes: classic kernel addressing vs. Enhanced Virtual Addressing. */
#define LEGACY_MODE 1
#define EVA_MODE 2
/* Operand address spaces: user memory vs. kernel memory. */
#define USEROP 1
#define KERNELOP 2
99
100
101
102
103
104
105
106
107
108
109
110
/*
 * EXC(insn, type, reg, addr, handler)
 *
 * Emit a single load/store with an exception-table entry so that a
 * fault on the access is redirected to `handler'.
 *
 * In LEGACY_MODE the plain instruction is used.  In EVA mode, an access
 * that touches user memory (\from == USEROP for loads, \to == USEROP
 * for stores) must use the EVA form of the instruction (insn##e, built
 * by __BUILD_EVA_INSN); accesses to kernel memory keep the plain
 * instruction and need no exception handling.
 *
 * Relies on \mode, \from and \to being arguments of the enclosing
 * __BUILD_COPY_USER assembler macro.
 */
#define EXC(insn, type, reg, addr, handler)			\
	.if \mode == LEGACY_MODE;				\
9:		insn reg, addr;					\
		.section __ex_table,"a";			\
		PTR	9b, handler;				\
		.previous;					\
	/* This is assembled in EVA mode */			\
	.else;							\
		/* If loading from user or storing to user */	\
		.if ((\from == USEROP) && (type == LD_INSN)) || \
		    ((\to == USEROP) && (type == ST_INSN));	\
9:			__BUILD_EVA_INSN(insn##e, reg, addr);	\
			.section __ex_table,"a";		\
			PTR	9b, handler;			\
			.previous;				\
		.else;						\
			/*					\
			 * Still in EVA, but no need for	\
			 * exception handler or EVA insn	\
			 */					\
			insn reg, addr;				\
		.endif;						\
	.endif
134
135
136
137
#ifdef CONFIG_64BIT
#define USE_DOUBLE
#endif

#ifdef USE_DOUBLE

/* 64-bit build: one copy unit is NBYTES == 8, moved with doubleword ops. */
#define LOADK ld
#define LOAD(reg, addr, handler) EXC(ld, LD_INSN, reg, addr, handler)
#define LOADL(reg, addr, handler) EXC(ldl, LD_INSN, reg, addr, handler)
#define LOADR(reg, addr, handler) EXC(ldr, LD_INSN, reg, addr, handler)
#define STOREL(reg, addr, handler) EXC(sdl, ST_INSN, reg, addr, handler)
#define STORER(reg, addr, handler) EXC(sdr, ST_INSN, reg, addr, handler)
#define STORE(reg, addr, handler) EXC(sd, ST_INSN, reg, addr, handler)
#define ADD daddu
#define SUB dsubu
#define SRL dsrl
#define SRA dsra
#define SLL dsll
#define SLLV dsllv
#define SRLV dsrlv
#define NBYTES 8
#define LOG_NBYTES 3

/*
 * The inner loops need eight temporaries.  On 64-bit builds regdef.h
 * does not provide t4-t7 (those registers are argument registers in the
 * 64-bit ABI), so map t0-t7 directly onto $8-$15 here.
 */
#undef t0
#undef t1
#undef t2
#undef t3
#define t0 $8
#define t1 $9
#define t2 $10
#define t3 $11
#define t4 $12
#define t5 $13
#define t6 $14
#define t7 $15

#else

/* 32-bit build: one copy unit is NBYTES == 4, moved with word ops. */
#define LOADK lw
#define LOAD(reg, addr, handler) EXC(lw, LD_INSN, reg, addr, handler)
#define LOADL(reg, addr, handler) EXC(lwl, LD_INSN, reg, addr, handler)
#define LOADR(reg, addr, handler) EXC(lwr, LD_INSN, reg, addr, handler)
#define STOREL(reg, addr, handler) EXC(swl, ST_INSN, reg, addr, handler)
#define STORER(reg, addr, handler) EXC(swr, ST_INSN, reg, addr, handler)
#define STORE(reg, addr, handler) EXC(sw, ST_INSN, reg, addr, handler)
#define ADD addu
#define SUB subu
#define SRL srl
#define SLL sll
#define SRA sra
#define SLLV sllv
#define SRLV srlv
#define NBYTES 4
#define LOG_NBYTES 2

#endif

/* Single-byte load/store, also routed through the exception table. */
#define LOADB(reg, addr, handler) EXC(lb, LD_INSN, reg, addr, handler)
#define STOREB(reg, addr, handler) EXC(sb, ST_INSN, reg, addr, handler)
202
/*
 * _PREF(hint, addr, type)
 *
 * Emit a prefetch appropriate for the current mode.  In LEGACY_MODE a
 * plain PREF is used.  In EVA mode a prefetch that targets user memory
 * (\from == USEROP for SRC_PREFETCH, \to == USEROP for DST_PREFETCH)
 * must use PREFE instead.
 *
 * PREFE has only 9 bits for the offset compared to PREF which has 16,
 * so the macro may need the assembler temporary to build the address;
 * AT is temporarily redirected to v1 around it.
 */
#define _PREF(hint, addr, type)					\
	.if \mode == LEGACY_MODE;				\
		PREF(hint, addr);				\
	.else;							\
		.if ((\from == USEROP) && (type == SRC_PREFETCH)) || \
		    ((\to == USEROP) && (type == DST_PREFETCH)); \
			/*					\
			 * PREFE has only 9 bits for the offset	\
			 * compared to PREF which has 16, so it	\
			 * might not fit in all cases.		\
			 */					\
			.set at=v1;				\
			PREFE(hint, addr);			\
			.set noat;				\
		.else;						\
			PREF(hint, addr);			\
		.endif;						\
	.endif

#define PREFS(hint, addr) _PREF(hint, addr, SRC_PREFETCH)
#define PREFD(hint, addr) _PREF(hint, addr, DST_PREFETCH)
226
/*
 * Endian-dependent selection of the unaligned load/store pair.
 * "FIRST" touches the partial word at the lower address, "REST" the
 * remainder, so the copy loops below are byte-order independent.
 * SHIFT_DISCARD shifts away the bytes beyond `len' when storing a
 * partial final word.
 */
#ifdef CONFIG_CPU_LITTLE_ENDIAN
#define LDFIRST LOADR
#define LDREST LOADL
#define STFIRST STORER
#define STREST STOREL
#define SHIFT_DISCARD SLLV
#else
#define LDFIRST LOADL
#define LDREST LOADR
#define STFIRST STOREL
#define STREST STORER
#define SHIFT_DISCARD SRLV
#endif

/* Byte offset of copy unit `unit' (each unit is NBYTES wide). */
#define FIRST(unit) ((unit)*NBYTES)
#define REST(unit) (FIRST(unit)+NBYTES-1)
#define UNIT(unit) FIRST(unit)

/* Low address bits that determine NBYTES alignment. */
#define ADDRMASK (NBYTES-1)
246
	.text
	.set	noreorder	/* branch delay slots are scheduled by hand */
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	.set	noat
#else
	.set	at=v1	/* DADDI workaround macros need AT; use v1 for it */
#endif

	.align	5
256
257
258
259
260
261
262
263
	/*
	 * __BUILD_COPY_USER mode, from, to
	 *
	 * Expands to one complete copy routine: copies `len' bytes from
	 * `src' to `dst', faulting accesses vectored through the
	 * .Ll_exc*/.Ls_exc* handlers below.  \mode selects legacy vs. EVA
	 * instruction encodings; \from/\to say which side is user memory
	 * (consumed by the EXC()/_PREF() macros).
	 */
	.macro __BUILD_COPY_USER mode, from, to

	/* Initialize __memcpy if this is the first expansion of the macro. */
	.ifnotdef __memcpy
	.set __memcpy, 1
	.hidden __memcpy	/* make sure it does not leak */
	.endif

	/* rem holds len modulo the current loop's block size. */
#define rem t8

	R10KCBARRIER(0(ra))
	/*
	 * Entry: decide which copy strategy applies.
	 *   t1 = dst & ADDRMASK  (dst misalignment)
	 *   t0 = src & ADDRMASK  (src misalignment)
	 * Prefetches are interleaved with the checks.
	 */
	PREFS( 0, 0(src) )
	PREFD( 1, 0(dst) )
	sltu	t2, len, NBYTES	# t2 = 1 if fewer than one whole unit to copy
	and	t1, dst, ADDRMASK
	PREFS( 0, 1*32(src) )
	PREFD( 1, 1*32(dst) )
	bnez	t2, .Lcopy_bytes_checklen\@
	 and	t0, src, ADDRMASK
	PREFS( 0, 2*32(src) )
	PREFD( 1, 2*32(dst) )
#ifndef CONFIG_CPU_MIPSR6
	bnez	t1, .Ldst_unaligned\@
	 nop
	bnez	t0, .Lsrc_unaligned_dst_aligned\@
#else
	/* R6 has no lwl/lwr: any misalignment falls back to byte copies. */
	or	t0, t0, t1
	bnez	t0, .Lcopy_unaligned_bytes\@
#endif
	/*
	 * Both src and dst are aligned: fall through to the unrolled loop.
	 */
307
.Lboth_aligned\@:
	/*
	 * src and dst are both NBYTES-aligned.
	 * Copy 8 units (8*NBYTES bytes) per iteration until len == rem,
	 * where rem = len % (8*NBYTES).  Loads are issued well ahead of
	 * the corresponding stores to hide load latency.
	 */
	SRL	t0, len, LOG_NBYTES+3	# t0 = number of 8-unit blocks
	beqz	t0, .Lcleanup_both_aligned\@
	 and	rem, len, (8*NBYTES-1)	# rem = len % (8*NBYTES)
	PREFS( 0, 3*32(src) )
	PREFD( 1, 3*32(dst) )
	.align	4
1:
	R10KCBARRIER(0(ra))
	LOAD(t0, UNIT(0)(src), .Ll_exc\@)
	LOAD(t1, UNIT(1)(src), .Ll_exc_copy\@)
	LOAD(t2, UNIT(2)(src), .Ll_exc_copy\@)
	LOAD(t3, UNIT(3)(src), .Ll_exc_copy\@)
	SUB	len, len, 8*NBYTES
	LOAD(t4, UNIT(4)(src), .Ll_exc_copy\@)
	LOAD(t7, UNIT(5)(src), .Ll_exc_copy\@)
	STORE(t0, UNIT(0)(dst), .Ls_exc_p8u\@)
	STORE(t1, UNIT(1)(dst), .Ls_exc_p7u\@)
	LOAD(t0, UNIT(6)(src), .Ll_exc_copy\@)	# t0/t1 reused for units 6-7
	LOAD(t1, UNIT(7)(src), .Ll_exc_copy\@)
	ADD	src, src, 8*NBYTES
	ADD	dst, dst, 8*NBYTES
	/* dst already advanced: remaining stores use negative unit offsets */
	STORE(t2, UNIT(-6)(dst), .Ls_exc_p6u\@)
	STORE(t3, UNIT(-5)(dst), .Ls_exc_p5u\@)
	STORE(t4, UNIT(-4)(dst), .Ls_exc_p4u\@)
	STORE(t7, UNIT(-3)(dst), .Ls_exc_p3u\@)
	STORE(t0, UNIT(-2)(dst), .Ls_exc_p2u\@)
	STORE(t1, UNIT(-1)(dst), .Ls_exc_p1u\@)
	PREFS( 0, 8*32(src) )
	PREFD( 1, 8*32(dst) )
	bne	len, rem, 1b
	 nop
.Lcleanup_both_aligned\@:
	/* len < 8*NBYTES remains; peel off 4 units if possible. */
	beqz	len, .Ldone\@
	 sltu	t0, len, 4*NBYTES
	bnez	t0, .Lless_than_4units\@
	 and	rem, len, (NBYTES-1)	# rem = len % NBYTES
	/*
	 * Copy exactly 4 units.
	 */
	LOAD( t0, UNIT(0)(src), .Ll_exc\@)
	LOAD( t1, UNIT(1)(src), .Ll_exc_copy\@)
	LOAD( t2, UNIT(2)(src), .Ll_exc_copy\@)
	LOAD( t3, UNIT(3)(src), .Ll_exc_copy\@)
	SUB	len, len, 4*NBYTES
	ADD	src, src, 4*NBYTES
	R10KCBARRIER(0(ra))
	STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
	STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
	STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
	STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, 4*NBYTES
	beqz	len, .Ldone\@
	.set	noreorder
.Lless_than_4units\@:
	/*
	 * One unit at a time until only the sub-unit tail (rem) is left.
	 */
	beq	rem, len, .Lcopy_bytes\@
	 nop
1:
	R10KCBARRIER(0(ra))
	LOAD(t0, 0(src), .Ll_exc\@)
	ADD	src, src, NBYTES
	SUB	len, len, NBYTES
	STORE(t0, 0(dst), .Ls_exc_p1u\@)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, NBYTES
	bne	rem, len, 1b
	.set	noreorder

#ifndef CONFIG_CPU_MIPSR6
	/*
	 * 0 < len < NBYTES: store a partial word.
	 * Load one whole unit, shift away the 8*NBYTES - 8*len bits that
	 * must not be stored, then use STREST to store only the `len'
	 * bytes ending at dst+len-1.  Both src and dst are word-aligned
	 * here, but src may have fewer than NBYTES readable bytes left,
	 * which is why the load is the plain (aligned) LOAD.
	 */
#define bits t2
	beqz	len, .Ldone\@
	 ADD	t1, dst, len	# t1 = just past the last byte of dst
	li	bits, 8*NBYTES
	SLL	rem, len, 3	# rem = number of bits to keep
	LOAD(t0, 0(src), .Ll_exc\@)
	SUB	bits, bits, rem	# bits = number of bits to discard
	SHIFT_DISCARD t0, t0, bits
	STREST(t0, -1(t1), .Ls_exc\@)
	jr	ra
	 move	len, zero
.Ldst_unaligned\@:
	/*
	 * dst is unaligned (t1 = dst & ADDRMASK, computed at entry; t0 is
	 * src & ADDRMASK).  Copy t2 = NBYTES - t1 bytes with an unaligned
	 * load/store pair to bring dst up to alignment, then dispatch:
	 * if src and dst had the same misalignment (match == 0) both are
	 * now aligned; otherwise src is still unaligned.
	 * Precondition from entry: len >= NBYTES, so the full first unit
	 * of src is readable.
	 */
#define match rem
	LDFIRST(t3, FIRST(0)(src), .Ll_exc\@)
	ADD	t2, zero, NBYTES
	LDREST(t3, REST(0)(src), .Ll_exc_copy\@)
	SUB	t2, t2, t1	# t2 = number of bytes copied
	xor	match, t0, t1	# zero iff src and dst share the same alignment
	R10KCBARRIER(0(ra))
	STFIRST(t3, FIRST(0)(dst), .Ls_exc\@)
	beq	len, t2, .Ldone\@
	 SUB	len, len, t2
	ADD	dst, dst, t2
	beqz	match, .Lboth_aligned\@
	 ADD	src, src, t2
.Lsrc_unaligned_dst_aligned\@:
	/*
	 * dst is aligned but src is not: use LDFIRST/LDREST pairs to
	 * assemble whole units from unaligned src, 4 units per iteration,
	 * until len == rem = len % (4*NBYTES).
	 */
	SRL	t0, len, LOG_NBYTES+2	# t0 = number of 4-unit blocks
	PREFS( 0, 3*32(src) )
	beqz	t0, .Lcleanup_src_unaligned\@
	 and	rem, len, (4*NBYTES-1)	# rem = len % (4*NBYTES)
	PREFD( 1, 3*32(dst) )
1:
	/*
	 * Note: each LDFIRST is followed (eventually) by a matching
	 * LDREST completing the same register before it is stored.
	 */
	R10KCBARRIER(0(ra))
	LDFIRST(t0, FIRST(0)(src), .Ll_exc\@)
	LDFIRST(t1, FIRST(1)(src), .Ll_exc_copy\@)
	SUB	len, len, 4*NBYTES
	LDREST(t0, REST(0)(src), .Ll_exc_copy\@)
	LDREST(t1, REST(1)(src), .Ll_exc_copy\@)
	LDFIRST(t2, FIRST(2)(src), .Ll_exc_copy\@)
	LDFIRST(t3, FIRST(3)(src), .Ll_exc_copy\@)
	LDREST(t2, REST(2)(src), .Ll_exc_copy\@)
	LDREST(t3, REST(3)(src), .Ll_exc_copy\@)
	PREFS( 0, 9*32(src) )
	ADD	src, src, 4*NBYTES
#ifdef CONFIG_CPU_SB1
	nop				# SB1 scheduling aid (presumably; keep)
#endif
	STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
	STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
	STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
	STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
	PREFD( 1, 9*32(dst) )
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, 4*NBYTES
	bne	len, rem, 1b
	.set	noreorder

.Lcleanup_src_unaligned\@:
	/* Fewer than 4 units left: one unit per iteration, then bytes. */
	beqz	len, .Ldone\@
	 and	rem, len, NBYTES-1	# rem = len % NBYTES
	beq	rem, len, .Lcopy_bytes\@
	 nop
1:
	R10KCBARRIER(0(ra))
	LDFIRST(t0, FIRST(0)(src), .Ll_exc\@)
	LDREST(t0, REST(0)(src), .Ll_exc_copy\@)
	ADD	src, src, NBYTES
	SUB	len, len, NBYTES
	STORE(t0, 0(dst), .Ls_exc_p1u\@)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, NBYTES
	bne	len, rem, 1b
	.set	noreorder

#endif /* !CONFIG_CPU_MIPSR6 */
.Lcopy_bytes_checklen\@:
	beqz	len, .Ldone\@
	 nop
.Lcopy_bytes\@:
	/* 0 < len < NBYTES: finish the copy one byte at a time. */
	R10KCBARRIER(0(ra))
/*
 * Copy byte N and return to the caller (via .Ldone) as soon as len
 * reaches zero; the store sits in the branch delay slot so it executes
 * either way.
 */
#define COPY_BYTE(N)			\
	LOADB(t0, N(src), .Ll_exc\@);	\
	SUB	len, len, 1;		\
	beqz	len, .Ldone\@;		\
	 STOREB(t0, N(dst), .Ls_exc_p1\@)

	COPY_BYTE(0)
	COPY_BYTE(1)
#ifdef USE_DOUBLE
	COPY_BYTE(2)
	COPY_BYTE(3)
	COPY_BYTE(4)
	COPY_BYTE(5)
#endif
	/* Last possible byte: offset NBYTES-2, since len < NBYTES here. */
	LOADB(t0, NBYTES-2(src), .Ll_exc\@)
	SUB	len, len, 1
	jr	ra
	 STOREB(t0, NBYTES-2(dst), .Ls_exc_p1\@)
.Ldone\@:
	jr	ra
	 nop

#ifdef CONFIG_CPU_MIPSR6
.Lcopy_unaligned_bytes\@:
	/*
	 * R6 misaligned path: pure byte copy, 8 bytes per loop trip.
	 * COPY_BYTE exits through .Ldone when len reaches zero.
	 */
1:
	COPY_BYTE(0)
	COPY_BYTE(1)
	COPY_BYTE(2)
	COPY_BYTE(3)
	COPY_BYTE(4)
	COPY_BYTE(5)
	COPY_BYTE(6)
	COPY_BYTE(7)
	ADD	src, src, 8
	b	1b
	 ADD	dst, dst, 8
#endif /* CONFIG_CPU_MIPSR6 */
	/* Close the memcpy symbol only for the first expansion. */
	.if __memcpy == 1
	END(memcpy)
	.set __memcpy, 0
	.hidden __memcpy
	.endif
535
.Ll_exc_copy\@:
	/*
	 * Faulted on a load after some earlier loads already succeeded.
	 * Bytes before the faulting address were read but may not have
	 * been stored yet, so re-copy byte-by-byte from src up to the
	 * reported bad address (THREAD_BUADDR), then fall into .Ll_exc.
	 */
	LOADK	t0, TI_TASK($28)	# $28 holds thread_info; t0 = ->task
	 nop
	LOADK	t0, THREAD_BUADDR(t0)	# t0 = faulting (bad) address
1:
	LOADB(t1, 0(src), .Ll_exc\@)
	ADD	src, src, 1
	sb	t1, 0(dst)	# this store cannot fault on this path
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, 1
	bne	src, t0, 1b
	.set	noreorder
.Ll_exc\@:
	LOADK	t0, TI_TASK($28)
	 nop
	LOADK	t0, THREAD_BUADDR(t0)	# t0 is just past the last good address
	 nop
	/* NOTE(review): AT is expected to hold the end address of the
	 * copy here (set up by the fault-handling path outside this
	 * view) — confirm against the exception entry code. */
	SUB	len, AT, t0		# len = number of uncopied bytes
	bnez	t6, .Ldone\@	/* t6 != 0: inatomic caller, skip zeroing */
	/*
	 * Zero the remainder of dst so a partially-copied buffer never
	 * leaks stale kernel data to the caller.
	 * dst is rewound to where the failed copy stopped:
	 */
	ADD	dst, t0		# compute start address from bad address
	SUB	dst, src
	/*
	 * src is reused below as the count of bytes still to zero.
	 */
	.set	reorder				/* DADDI_WAR */
	SUB	src, len, 1
	beqz	len, .Ldone\@
	.set	noreorder
1:	sb	zero, 0(dst)
	ADD	dst, dst, 1
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	bnez	src, 1b
	 SUB	src, src, 1
#else
	/* DADDI workaround: subtract via a register, with AT disabled. */
	.set	push
	.set	noat
	li	v1, 1
	bnez	src, 1b
	 SUB	src, src, v1
	.set	pop
#endif
	jr	ra
	 nop
596
/*
 * SEXC(n) - store-fault fixup stub.
 *
 * Expands to the .Ls_exc_p<n>u label used as the EXC() handler for a
 * store that happens n*NBYTES before the running `len' was accounted:
 * add those bytes back so len reports the true number of uncopied
 * bytes, then return.
 */
#define SEXC(n)							\
	.set	reorder;			/* DADDI_WAR */	\
.Ls_exc_p ## n ## u\@:						\
	ADD	len, len, n*NBYTES;				\
	jr	ra;						\
	.set	noreorder

SEXC(8)
SEXC(7)
SEXC(6)
SEXC(5)
SEXC(4)
SEXC(3)
SEXC(2)
SEXC(1)
612
.Ls_exc_p1\@:
	/* Store fault one byte past the accounting point: len += 1. */
	.set	reorder				/* DADDI_WAR */
	ADD	len, len, 1
	jr	ra
	.set	noreorder
.Ls_exc\@:
	/* Store fault with len already correct: just return. */
	jr	ra
	 nop
	.endm
622
	.align	5
LEAF(memmove)
	/*
	 * void *memmove(void *dst /* a0 */, const void *src /* a1 */,
	 *               size_t len /* a2 */)
	 * Overlap-safe copy: if [dst,dst+len) and [src,src+len) do not
	 * overlap, tail-jump into memcpy; otherwise fall through to the
	 * backwards byte copier __rmemcpy.  Returns dst in v0.
	 */
	ADD	t0, a0, a2
	ADD	t1, a1, a2
	sltu	t0, a1, t0		# t0 = (src < dst + len)
	sltu	t1, a0, t1		# t1 = (dst < src + len)
	and	t0, t1			# both true <=> the regions overlap
	beqz	t0, .L__memcpy
	 move	v0, a0			/* return value */
	beqz	a2, .Lr_out
	END(memmove)
634
635
/* fall through from memmove: overlapping copy, one byte at a time */
LEAF(__rmemcpy)					/* a0=dst a1=src a2=len */
	sltu	t0, a1, a0
	beqz	t0, .Lr_end_bytes_up	# src >= dst: copy forwards
	 nop
	/* src < dst: copy backwards, starting from the last byte. */
	ADD	a0, a2			# dst = dst + len
	ADD	a1, a2			# src = src + len

.Lr_end_bytes:
	R10KCBARRIER(0(ra))
	lb	t0, -1(a1)
	SUB	a2, a2, 0x1
	sb	t0, -1(a0)
	SUB	a1, a1, 0x1
	.set	reorder				/* DADDI_WAR */
	SUB	a0, a0, 0x1
	bnez	a2, .Lr_end_bytes
	.set	noreorder

.Lr_out:
	jr	ra
	 move	a2, zero

.Lr_end_bytes_up:
	/* Forward byte copy (safe because src >= dst). */
	R10KCBARRIER(0(ra))
	lb	t0, (a1)
	SUB	a2, a2, 0x1
	sb	t0, (a0)
	ADD	a1, a1, 0x1
	.set	reorder				/* DADDI_WAR */
	ADD	a0, a0, 0x1
	bnez	a2, .Lr_end_bytes_up
	.set	noreorder

	jr	ra
	 move	a2, zero
	END(__rmemcpy)
672
673
674
675
/*
 * __copy_user_inatomic: same as __copy_user, but t6=1 tells the fault
 * handler (.Ll_exc) to skip zeroing the remainder of dst on a fault.
 */
LEAF(__copy_user_inatomic)
	b	__copy_user_common
	 li	t6, 1	/* inatomic flag, tested at .Ll_exc */
	END(__copy_user_inatomic)
680
681
682
683
684
685
686
	.align	5
LEAF(memcpy)					/* a0=dst a1=src a2=len */
	move	v0, dst				/* return value is dst */
.L__memcpy:
FEXPORT(__copy_user)
	li	t6, 0	/* not inatomic: zero the remainder of dst on fault */
__copy_user_common:
	/* Legacy mode, user <-> user */
	__BUILD_COPY_USER LEGACY_MODE USEROP USEROP
696
#ifdef CONFIG_EVA
/*
 * EVA (Enhanced Virtual Addressing) variants: user-side accesses must
 * use the EVA instruction forms, selected per expansion by the
 * \from/\to arguments to __BUILD_COPY_USER.
 */

/*
 * __copy_user_inatomic_eva: copy-from-user, t6=1 so the fault handler
 * skips zeroing the remainder of dst.
 */
LEAF(__copy_user_inatomic_eva)
	b	__copy_from_user_common
	 li	t6, 1	/* inatomic flag, tested at .Ll_exc */
	END(__copy_user_inatomic_eva)

/*
 * __copy_from_user: kernel <- user (src is user memory)
 */
LEAF(__copy_from_user_eva)
	li	t6, 0	/* not inatomic */
__copy_from_user_common:
	__BUILD_COPY_USER EVA_MODE USEROP KERNELOP
END(__copy_from_user_eva)

/*
 * __copy_to_user: user <- kernel (dst is user memory)
 */
LEAF(__copy_to_user_eva)
__BUILD_COPY_USER EVA_MODE KERNELOP USEROP
END(__copy_to_user_eva)

/*
 * __copy_in_user: user <- user (both sides are user memory)
 */
LEAF(__copy_in_user_eva)
__BUILD_COPY_USER EVA_MODE USEROP USEROP
END(__copy_in_user_eva)

#endif
740