#ifdef CONFIG_DMA_NONCOHERENT
#undef CONFIG_CPU_HAS_PREFETCH
#endif
#ifdef CONFIG_MIPS_MALTA
#undef CONFIG_CPU_HAS_PREFETCH
#endif
#ifdef CONFIG_CPU_MIPSR6
#undef CONFIG_CPU_HAS_PREFETCH
#endif

#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#define dst a0
#define src a1
#define len a2
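
/*
 * dst, src and len above are mnemonic names for the a0/a1/a2
 * arguments shared by memcpy and __copy_user.
 *
 * Rough contract, inferred from the fault handlers below: memcpy
 * returns dst in v0 and expects non-overlapping, accessible buffers;
 * __copy_user copies up to len bytes and, if a load or store faults,
 * returns with len set to the number of bytes that were not copied.
 */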
#define LD_INSN 1
#define ST_INSN 2

#define SRC_PREFETCH 1
#define DST_PREFETCH 2
#define LEGACY_MODE 1
#define EVA_MODE 2
#define USEROP 1
#define KERNELOP 2
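
/*
 * EXC wraps a load/store in an __ex_table fixup entry so that a fault
 * branches to the given handler.  In EVA mode, accesses that target
 * user space are emitted through __BUILD_EVA_INSN as the "e" variant
 * of the instruction; kernel-side accesses keep the plain instruction
 * and need no fixup entry.
 */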
#define EXC(insn, type, reg, addr, handler)				\
	.if \mode == LEGACY_MODE;					\
9:		insn reg, addr;						\
		.section __ex_table,"a";				\
		PTR 9b, handler;					\
		.previous;						\
	.else;								\
		/* EVA mode: user accesses need the EVA instruction */	\
		.if ((\from == USEROP) && (type == LD_INSN)) ||		\
		    ((\to == USEROP) && (type == ST_INSN));		\
9:			__BUILD_EVA_INSN(insn##e, reg, addr);		\
			.section __ex_table,"a";			\
			PTR 9b, handler;				\
			.previous;					\
		.else;							\
			/* kernel access: no fixup or EVA insn needed */ \
			insn reg, addr;					\
		.endif;							\
	.endif

#ifdef CONFIG_64BIT
#define USE_DOUBLE
#endif

#ifdef USE_DOUBLE

#define LOADK ld
#define LOAD(reg, addr, handler) EXC(ld, LD_INSN, reg, addr, handler)
#define LOADL(reg, addr, handler) EXC(ldl, LD_INSN, reg, addr, handler)
#define LOADR(reg, addr, handler) EXC(ldr, LD_INSN, reg, addr, handler)
#define STOREL(reg, addr, handler) EXC(sdl, ST_INSN, reg, addr, handler)
#define STORER(reg, addr, handler) EXC(sdr, ST_INSN, reg, addr, handler)
#define STORE(reg, addr, handler) EXC(sd, ST_INSN, reg, addr, handler)
#define ADD daddu
#define SUB dsubu
#define SRL dsrl
#define SRA dsra
#define SLL dsll
#define SLLV dsllv
#define SRLV dsrlv
#define NBYTES 8
#define LOG_NBYTES 3
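
/*
 * The 64-bit build uses the n64 register names, so the o32-style
 * t4..t7 temporaries used below are mapped onto $12..$15 (and t0..t3
 * redefined) to keep a single code base for both ABIs.
 */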
#undef t0
#undef t1
#undef t2
#undef t3
#define t0 $8
#define t1 $9
#define t2 $10
#define t3 $11
#define t4 $12
#define t5 $13
#define t6 $14
#define t7 $15

#else

#define LOADK lw
#define LOAD(reg, addr, handler) EXC(lw, LD_INSN, reg, addr, handler)
#define LOADL(reg, addr, handler) EXC(lwl, LD_INSN, reg, addr, handler)
#define LOADR(reg, addr, handler) EXC(lwr, LD_INSN, reg, addr, handler)
#define STOREL(reg, addr, handler) EXC(swl, ST_INSN, reg, addr, handler)
#define STORER(reg, addr, handler) EXC(swr, ST_INSN, reg, addr, handler)
#define STORE(reg, addr, handler) EXC(sw, ST_INSN, reg, addr, handler)
#define ADD addu
#define SUB subu
#define SRL srl
#define SLL sll
#define SRA sra
#define SLLV sllv
#define SRLV srlv
#define NBYTES 4
#define LOG_NBYTES 2

#endif

#define LOADB(reg, addr, handler) EXC(lb, LD_INSN, reg, addr, handler)
#define STOREB(reg, addr, handler) EXC(sb, ST_INSN, reg, addr, handler)
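
/*
 * _PREF issues a plain PREF in legacy mode.  In EVA mode, prefetches
 * that touch user memory use PREFE instead; PREFE has a smaller
 * offset field than PREF, so the assembler may need a scratch
 * register, and $v1 is used for that because $at must stay intact.
 */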
#define _PREF(hint, addr, type)						\
	.if \mode == LEGACY_MODE;					\
		PREF(hint, addr);					\
	.else;								\
		.if ((\from == USEROP) && (type == SRC_PREFETCH)) ||	\
		    ((\to == USEROP) && (type == DST_PREFETCH));	\
			.set at=v1;					\
			PREFE(hint, addr);				\
			.set noat;					\
		.else;							\
			PREF(hint, addr);				\
		.endif;							\
	.endif

#define PREFS(hint, addr) _PREF(hint, addr, SRC_PREFETCH)
#define PREFD(hint, addr) _PREF(hint, addr, DST_PREFETCH)
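
/*
 * Unaligned access helpers: LDFIRST/STFIRST access the lower-addressed
 * bytes of an unaligned word and LDREST/STREST the remaining ones, so
 * which of lwl/lwr (or ldl/ldr) plays each role depends on endianness.
 */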
#ifdef CONFIG_CPU_LITTLE_ENDIAN
#define LDFIRST LOADR
#define LDREST LOADL
#define STFIRST STORER
#define STREST STOREL
#define SHIFT_DISCARD SLLV
#else
#define LDFIRST LOADL
#define LDREST LOADR
#define STFIRST STOREL
#define STREST STORER
#define SHIFT_DISCARD SRLV
#endif

#define FIRST(unit) ((unit)*NBYTES)
#define REST(unit) (FIRST(unit)+NBYTES-1)
#define UNIT(unit) FIRST(unit)

#define ADDRMASK (NBYTES-1)

	.text
	.set	noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	.set	noat
#else
	.set	at=v1
#endif

	.align	5
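
/*
 * Build one copy routine.
 *   mode: LEGACY_MODE or EVA_MODE
 *   from: source operand type, USEROP or KERNELOP
 *   to:   destination operand type, USEROP or KERNELOP
 * \@ makes the local labels unique for each expansion of the macro.
 */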
	.macro __BUILD_COPY_USER mode, from, to

	/* initialize __memcpy the first time the macro is expanded */
	.ifnotdef __memcpy
	.set __memcpy, 1
	.hidden __memcpy			/* make sure it does not leak */
	.endif

#define rem t8

	R10KCBARRIER(0(ra))
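
	/*
	 * Note: dst and src may be unaligned and len may be zero.
	 * If len < NBYTES, use byte-at-a-time copying.
	 */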
	PREFS( 0, 0(src) )
	PREFD( 1, 0(dst) )
	sltu	t2, len, NBYTES
	and	t1, dst, ADDRMASK
	PREFS( 0, 1*32(src) )
	PREFD( 1, 1*32(dst) )
	bnez	t2, .Lcopy_bytes_checklen\@
	and	t0, src, ADDRMASK
	PREFS( 0, 2*32(src) )
	PREFD( 1, 2*32(dst) )
#ifndef CONFIG_CPU_MIPSR6
	bnez	t1, .Ldst_unaligned\@
	nop
	bnez	t0, .Lsrc_unaligned_dst_aligned\@
#else
	or	t0, t0, t1
	bnez	t0, .Lcopy_unaligned_bytes\@
#endif
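
	/*
	 * Both src and dst are aligned; the loop below moves 8*NBYTES
	 * per iteration, with rem holding the leftover byte count.
	 */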
.Lboth_aligned\@:
	SRL	t0, len, LOG_NBYTES+3		# +3 for 8 units per iteration
	beqz	t0, .Lcleanup_both_aligned\@
	and	rem, len, (8*NBYTES-1)		# rem = len % (8*NBYTES)
	PREFS( 0, 3*32(src) )
	PREFD( 1, 3*32(dst) )
	.align	4
1:
	R10KCBARRIER(0(ra))
	LOAD(t0, UNIT(0)(src), .Ll_exc\@)
	LOAD(t1, UNIT(1)(src), .Ll_exc_copy\@)
	LOAD(t2, UNIT(2)(src), .Ll_exc_copy\@)
	LOAD(t3, UNIT(3)(src), .Ll_exc_copy\@)
	SUB	len, len, 8*NBYTES
	LOAD(t4, UNIT(4)(src), .Ll_exc_copy\@)
	LOAD(t7, UNIT(5)(src), .Ll_exc_copy\@)
	STORE(t0, UNIT(0)(dst), .Ls_exc_p8u\@)
	STORE(t1, UNIT(1)(dst), .Ls_exc_p7u\@)
	LOAD(t0, UNIT(6)(src), .Ll_exc_copy\@)
	LOAD(t1, UNIT(7)(src), .Ll_exc_copy\@)
	ADD	src, src, 8*NBYTES
	ADD	dst, dst, 8*NBYTES
	STORE(t2, UNIT(-6)(dst), .Ls_exc_p6u\@)
	STORE(t3, UNIT(-5)(dst), .Ls_exc_p5u\@)
	STORE(t4, UNIT(-4)(dst), .Ls_exc_p4u\@)
	STORE(t7, UNIT(-3)(dst), .Ls_exc_p3u\@)
	STORE(t0, UNIT(-2)(dst), .Ls_exc_p2u\@)
	STORE(t1, UNIT(-1)(dst), .Ls_exc_p1u\@)
	PREFS( 0, 8*32(src) )
	PREFD( 1, 8*32(dst) )
	bne	len, rem, 1b
	nop
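
	/*
	 * Here len == rem: fewer than 8*NBYTES bytes remain and both
	 * pointers are still aligned.
	 */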
.Lcleanup_both_aligned\@:
	beqz	len, .Ldone\@
	sltu	t0, len, 4*NBYTES
	bnez	t0, .Lless_than_4units\@
	and	rem, len, (NBYTES-1)		# rem = len % NBYTES

	/* len >= 4*NBYTES */
	LOAD( t0, UNIT(0)(src), .Ll_exc\@)
	LOAD( t1, UNIT(1)(src), .Ll_exc_copy\@)
	LOAD( t2, UNIT(2)(src), .Ll_exc_copy\@)
	LOAD( t3, UNIT(3)(src), .Ll_exc_copy\@)
	SUB	len, len, 4*NBYTES
	ADD	src, src, 4*NBYTES
	R10KCBARRIER(0(ra))
	STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
	STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
	STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
	STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
	.set	reorder
	ADD	dst, dst, 4*NBYTES
	beqz	len, .Ldone\@
	.set	noreorder
.Lless_than_4units\@:
	beq	rem, len, .Lcopy_bytes\@
	nop
1:
	R10KCBARRIER(0(ra))
	LOAD(t0, 0(src), .Ll_exc\@)
	ADD	src, src, NBYTES
	SUB	len, len, NBYTES
	STORE(t0, 0(dst), .Ls_exc_p1u\@)
	.set	reorder
	ADD	dst, dst, NBYTES
	bne	rem, len, 1b
	.set	noreorder

#ifndef CONFIG_CPU_MIPSR6
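	/*
	 * src and dst are aligned and fewer than NBYTES bytes remain.
	 * Instead of a byte loop, load one whole unit from src, shift
	 * away the bytes beyond len and store only the wanted part with
	 * STREST, which never needs read access to dst.
	 */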
#define bits t2
	beqz	len, .Ldone\@
	ADD	t1, dst, len			# t1 is just past last byte of dst
	li	bits, 8*NBYTES
	SLL	rem, len, 3			# rem = number of bits to keep
	LOAD(t0, 0(src), .Ll_exc\@)
	SUB	bits, bits, rem			# bits = number of bits to discard
	SHIFT_DISCARD t0, t0, bits
	STREST(t0, -1(t1), .Ls_exc\@)
	jr	ra
	move	len, zero
.Ldst_unaligned\@:
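	/*
	 * dst is unaligned (t1 = dst & ADDRMASK != 0) and len >= NBYTES.
	 * Copy just enough bytes (NBYTES - t1) to align dst, then check
	 * whether src now has the same alignment (match == 0) and, if
	 * so, rejoin the fully aligned path.
	 */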
#define match rem
	LDFIRST(t3, FIRST(0)(src), .Ll_exc\@)
	ADD	t2, zero, NBYTES
	LDREST(t3, REST(0)(src), .Ll_exc_copy\@)
	SUB	t2, t2, t1			# t2 = number of bytes copied
	xor	match, t0, t1			# match == 0 if src, dst share alignment
	R10KCBARRIER(0(ra))
	STFIRST(t3, FIRST(0)(dst), .Ls_exc\@)
	beq	len, t2, .Ldone\@
	SUB	len, len, t2
	ADD	dst, dst, t2
	beqz	match, .Lboth_aligned\@
	ADD	src, src, t2

.Lsrc_unaligned_dst_aligned\@:
	SRL	t0, len, LOG_NBYTES+2		# +2 for 4 units per iteration
	PREFS( 0, 3*32(src) )
	beqz	t0, .Lcleanup_src_unaligned\@
	and	rem, len, (4*NBYTES-1)		# rem = len % (4*NBYTES)
	PREFD( 1, 3*32(dst) )
1:
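	/*
	 * Interleave the FIRST/REST halves of different registers:
	 * back-to-back partial loads into the same register reportedly
	 * stall on some implementations, and FIRST(N+1) never overlaps
	 * REST(N), so the reordering is safe.
	 */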
	R10KCBARRIER(0(ra))
	LDFIRST(t0, FIRST(0)(src), .Ll_exc\@)
	LDFIRST(t1, FIRST(1)(src), .Ll_exc_copy\@)
	SUB	len, len, 4*NBYTES
	LDREST(t0, REST(0)(src), .Ll_exc_copy\@)
	LDREST(t1, REST(1)(src), .Ll_exc_copy\@)
	LDFIRST(t2, FIRST(2)(src), .Ll_exc_copy\@)
	LDFIRST(t3, FIRST(3)(src), .Ll_exc_copy\@)
	LDREST(t2, REST(2)(src), .Ll_exc_copy\@)
	LDREST(t3, REST(3)(src), .Ll_exc_copy\@)
	PREFS( 0, 9*32(src) )
	ADD	src, src, 4*NBYTES
#ifdef CONFIG_CPU_SB1
	nop					# improves slotting
#endif
	STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
	STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
	STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
	STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
	PREFD( 1, 9*32(dst) )
	.set	reorder
	ADD	dst, dst, 4*NBYTES
	bne	len, rem, 1b
	.set	noreorder

.Lcleanup_src_unaligned\@:
	beqz	len, .Ldone\@
	and	rem, len, NBYTES-1		# rem = len % NBYTES
	beq	rem, len, .Lcopy_bytes\@
	nop
1:
	R10KCBARRIER(0(ra))
	LDFIRST(t0, FIRST(0)(src), .Ll_exc\@)
	LDREST(t0, REST(0)(src), .Ll_exc_copy\@)
	ADD	src, src, NBYTES
	SUB	len, len, NBYTES
	STORE(t0, 0(dst), .Ls_exc_p1u\@)
	.set	reorder
	ADD	dst, dst, NBYTES
	bne	len, rem, 1b
	.set	noreorder

#endif /* !CONFIG_CPU_MIPSR6 */
.Lcopy_bytes_checklen\@:
	beqz	len, .Ldone\@
	nop
.Lcopy_bytes\@:
	/* 0 < len < NBYTES */
	R10KCBARRIER(0(ra))
#define COPY_BYTE(N)			\
	LOADB(t0, N(src), .Ll_exc\@);	\
	SUB	len, len, 1;		\
	beqz	len, .Ldone\@;		\
	STOREB(t0, N(dst), .Ls_exc_p1\@)

	COPY_BYTE(0)
	COPY_BYTE(1)
#ifdef USE_DOUBLE
	COPY_BYTE(2)
	COPY_BYTE(3)
	COPY_BYTE(4)
	COPY_BYTE(5)
#endif
	LOADB(t0, NBYTES-2(src), .Ll_exc\@)
	SUB	len, len, 1
	jr	ra
	STOREB(t0, NBYTES-2(dst), .Ls_exc_p1\@)
.Ldone\@:
	jr	ra
	nop

#ifdef CONFIG_CPU_MIPSR6
.Lcopy_unaligned_bytes\@:
1:
	COPY_BYTE(0)
	COPY_BYTE(1)
	COPY_BYTE(2)
	COPY_BYTE(3)
	COPY_BYTE(4)
	COPY_BYTE(5)
	COPY_BYTE(6)
	COPY_BYTE(7)
	ADD	src, src, 8
	b	1b
	ADD	dst, dst, 8
#endif /* CONFIG_CPU_MIPSR6 */
	.if __memcpy == 1
	END(memcpy)
	.set	__memcpy, 0
	.hidden __memcpy
	.endif

.Ll_exc_copy\@:
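	/*
	 * A load faulted mid-copy.  Copy the remaining readable bytes
	 * from src to dst one at a time, up to the faulting address
	 * recorded in THREAD_BUADDR($28); if the lb itself faults it
	 * simply re-enters the handler below, which computes the
	 * residue.
	 */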
	LOADK	t0, TI_TASK($28)
	nop
	LOADK	t0, THREAD_BUADDR(t0)	# t0 = bad address from the fault
1:
	LOADB(t1, 0(src), .Ll_exc\@)
	ADD	src, src, 1
	sb	t1, 0(dst)		# can't fault -- we're copy_from_user
	.set	reorder
	ADD	dst, dst, 1
	bne	src, t0, 1b
	.set	noreorder
.Ll_exc\@:
	LOADK	t0, TI_TASK($28)
	nop
	LOADK	t0, THREAD_BUADDR(t0)	# t0 is just past the last good address
	nop
	SUB	len, AT, t0		# len = number of uncopied bytes
	jr	ra
	nop
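
/*
 * Store-fault handlers: .Ls_exc_pNu is used when a store faults with
 * N units (including the faulting one) still unwritten in the current
 * group.  len was already decremented for the whole group, so add
 * N*NBYTES back before returning.
 */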
#define SEXC(n)					\
	.set	reorder;			\
.Ls_exc_p ## n ## u\@:				\
	ADD	len, len, n*NBYTES;		\
	jr	ra;				\
	.set	noreorder

SEXC(8)
SEXC(7)
SEXC(6)
SEXC(5)
SEXC(4)
SEXC(3)
SEXC(2)
SEXC(1)

.Ls_exc_p1\@:
	.set	reorder
	ADD	len, len, 1
	jr	ra
	.set	noreorder
.Ls_exc\@:
	jr	ra
	nop
	.endm
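
/*
 * memmove: if the regions do not overlap, fall through to the plain
 * memcpy path; otherwise hand off to __rmemcpy, which copies byte by
 * byte in whichever direction is safe.
 */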
	.align	5
LEAF(memmove)
EXPORT_SYMBOL(memmove)
	ADD	t0, a0, a2
	ADD	t1, a1, a2
	sltu	t0, a1, t0			# t0 = (src < dst + len)
	sltu	t1, a0, t1			# t1 = (dst < src + len)
	and	t0, t1				# both set -> regions overlap
	beqz	t0, .L__memcpy			# no overlap: plain memcpy
	move	v0, a0				# return value
	beqz	a2, .Lr_out
	END(memmove)

	/* memmove falls through to here when the buffers overlap */
LEAF(__rmemcpy)					/* a0=dst a1=src a2=len */
	sltu	t0, a1, a0
	beqz	t0, .Lr_end_bytes_up		# src >= dst: copy forwards
	nop
	ADD	a0, a2				# dst = dst + len
	ADD	a1, a2				# src = src + len

.Lr_end_bytes:
	R10KCBARRIER(0(ra))
	lb	t0, -1(a1)
	SUB	a2, a2, 0x1
	sb	t0, -1(a0)
	SUB	a1, a1, 0x1
	.set	reorder
	SUB	a0, a0, 0x1
	bnez	a2, .Lr_end_bytes
	.set	noreorder

.Lr_out:
	jr	ra
	move	a2, zero

.Lr_end_bytes_up:
	R10KCBARRIER(0(ra))
	lb	t0, (a1)
	SUB	a2, a2, 0x1
	sb	t0, (a0)
	ADD	a1, a1, 0x1
	.set	reorder
	ADD	a0, a0, 0x1
	bnez	a2, .Lr_end_bytes_up
	.set	noreorder

	jr	ra
	move	a2, zero
	END(__rmemcpy)

	.align	5
LEAF(memcpy)					/* a0=dst a1=src a2=len */
EXPORT_SYMBOL(memcpy)
	move	v0, dst				/* return value */
.L__memcpy:
FEXPORT(__copy_user)
EXPORT_SYMBOL(__copy_user)

	/* Legacy mode, user <-> user */
	__BUILD_COPY_USER LEGACY_MODE USEROP USEROP

#ifdef CONFIG_EVA
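
/*
 * EVA instantiations of the same macro: the load/store/prefetch
 * variants for the user-space side are selected by the USEROP
 * arguments.
 *
 * __copy_from_user_eva: the source is a user-space buffer.
 */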
LEAF(__copy_from_user_eva)
EXPORT_SYMBOL(__copy_from_user_eva)
	__BUILD_COPY_USER EVA_MODE USEROP KERNELOP
END(__copy_from_user_eva)
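
/*
 * __copy_to_user_eva: the destination is a user-space buffer.
 */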
LEAF(__copy_to_user_eva)
EXPORT_SYMBOL(__copy_to_user_eva)
	__BUILD_COPY_USER EVA_MODE KERNELOP USEROP
END(__copy_to_user_eva)
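
/*
 * __copy_in_user_eva: both source and destination are in user space.
 */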
LEAF(__copy_in_user_eva)
EXPORT_SYMBOL(__copy_in_user_eva)
	__BUILD_COPY_USER EVA_MODE USEROP USEROP
END(__copy_in_user_eva)

#endif