1
2
3
4
5
6
7
8
9
10
11#include <linux/linkage.h>
12#include <asm/assembler.h>
13
14
15
16
17
18
19
20
#ifdef DEBUG

#if defined(CONFIG_DEBUG_ICEDCC)

/*
 * Low-level debug output via the ICE Debug Comms Channel (DCC).
 * loadsp is a no-op here (no UART base needed); writeb sends one
 * character through the per-architecture CP14 DCC write register.
 */
#if defined(CONFIG_CPU_V6) || defined(CONFIG_CPU_V6K) || defined(CONFIG_CPU_V7)
		.macro	loadsp, rb, tmp
		.endm
		.macro	writeb, ch, rb
		mcr	p14, 0, \ch, c0, c5, 0	@ v6/v7 DCC: DBGDTRTX
		.endm
#elif defined(CONFIG_CPU_XSCALE)
		.macro	loadsp, rb, tmp
		.endm
		.macro	writeb, ch, rb
		mcr	p14, 0, \ch, c8, c0, 0	@ XScale DCC transmit
		.endm
#else
		.macro	loadsp, rb, tmp
		.endm
		.macro	writeb, ch, rb
		mcr	p14, 0, \ch, c1, c0, 0	@ legacy (v4/v5) DCC transmit
		.endm
#endif

#else

#include CONFIG_DEBUG_LL_INCLUDE

		.macro	writeb,	ch, rb
		senduart \ch, \rb		@ platform senduart from DEBUG_LL
		.endm

#if defined(CONFIG_ARCH_SA1100)
		.macro	loadsp, rb, tmp
		mov	\rb, #0x80000000	@ physical base address
#ifdef CONFIG_DEBUG_LL_SER3
		add	\rb, \rb, #0x00050000	@ Ser3
#else
		add	\rb, \rb, #0x00010000	@ Ser1
#endif
		.endm
#elif defined(CONFIG_ARCH_S3C24XX)
		.macro	loadsp, rb, tmp
		mov	\rb, #0x50000000	@ S3C24xx UART block base
		add	\rb, \rb, #0x4000 * CONFIG_S3C_LOWLEVEL_UART_PORT
		.endm
#else
		.macro	loadsp,	rb, tmp
		addruart \rb, \tmp		@ generic DEBUG_LL UART address
		.endm
#endif
#endif
74
75 .macro kputc,val
76 mov r0, \val
77 bl putc
78 .endm
79
80 .macro kphex,val,len
81 mov r0, \val
82 mov r1,
83 bl phex
84 .endm
85
86 .macro debug_reloc_start
87#ifdef DEBUG
88 kputc
89 kphex r6, 8
90 kputc
91 kphex r7, 8
92#ifdef CONFIG_CPU_CP15
93 kputc
94 mrc p15, 0, r0, c1, c0
95 kphex r0, 8
96#endif
97 kputc
98 kphex r5, 8
99 kputc
100 kphex r9, 8
101 kputc
102 kphex r4, 8
103 kputc
104#endif
105 .endm
106
107 .macro debug_reloc_end
108#ifdef DEBUG
109 kphex r5, 8
110 kputc
111 mov r0, r4
112 bl memdump
113#endif
114 .endm
115
116 .section ".start",
117
118
119
120 .align
121 .arm @ Always enter in ARM state
122start:
123 .type start,
124 .rept 7
125 mov r0, r0
126 .endr
127 ARM( mov r0, r0 )
128 ARM( b 1f )
129 THUMB( adr r12, BSYM(1f) )
130 THUMB( bx r12 )
131
132 .word 0x016f2818 @ Magic numbers to help the loader
133 .word start @ absolute load/run zImage address
134 .word _edata @ zImage end address
135 THUMB( .thumb )
1361:
137 mrs r9, cpsr
138#ifdef CONFIG_ARM_VIRT_EXT
139 bl __hyp_stub_install @ get into SVC mode, reversibly
140#endif
141 mov r7, r1 @ save architecture ID
142 mov r8, r2 @ save atags pointer
143
144#ifndef __ARM_ARCH_2__
145
146
147
148
149
150 mrs r2, cpsr @ get current mode
151 tst r2,
152 bne not_angel
153 mov r0,
154 ARM( swi 0x123456 ) @ angel_SWI_ARM
155 THUMB( svc 0xab ) @ angel_SWI_THUMB
156not_angel:
157 safe_svcmode_maskall r0
158 msr spsr_cxsf, r9 @ Save the CPU boot mode in
159 @ SPSR
160#else
161 teqp pc,
162#endif
163
164
165
166
167
168
169
170
171
172
173
174 .text
175
176#ifdef CONFIG_AUTO_ZRELADDR
177 @ determine final kernel image address
178 mov r4, pc
179 and r4, r4,
180 add r4, r4,
181#else
182 ldr r4, =zreladdr
183#endif
184
185 bl cache_on
186
187restart: adr r0, LC0
188 ldmia r0, {r1, r2, r3, r6, r10, r11, r12}
189 ldr sp, [r0,
190
191
192
193
194
195 sub r0, r0, r1 @ calculate the delta offset
196 add r6, r6, r0 @ _edata
197 add r10, r10, r0 @ inflated kernel size location
198
199
200
201
202
203
204 ldrb r9, [r10,
205 ldrb lr, [r10,
206 orr r9, r9, lr, lsl
207 ldrb lr, [r10,
208 ldrb r10, [r10,
209 orr r9, r9, lr, lsl
210 orr r9, r9, r10, lsl
211
212#ifndef CONFIG_ZBOOT_ROM
213
214 add sp, sp, r0
215 add r10, sp,
216#else
217
218
219
220
221
222 mov r10, r6
223#endif
224
225 mov r5,
#ifdef CONFIG_ARM_APPENDED_DTB
/*
 *   r0  = delta
 *   r2  = BSS start
 *   r3  = BSS end
 *   r4  = final kernel address
 *   r5  = appended dtb size (still unknown)
 *   r6  = _edata
 *   r7  = architecture ID
 *   r8  = atags/device tree pointer
 *   r9  = size of decompressed image
 *   r10 = end of this image, including bss/stack/malloc space if non XIP
 *   r11 = GOT start
 *   r12 = GOT end
 *   sp  = stack pointer
 *
 * if there are device trees (dtb) appended to zImage, advance r10 so that the
 * dtb data will get relocated along with the kernel if necessary.
 */

		ldr	lr, [r6, #0]		@ word at _edata: FDT magic?
#ifndef __ARMEB__
		ldr	r1, =0xedfe0dd0		@ sig is 0xd00dfeed big endian
#else
		ldr	r1, =0xd00dfeed
#endif
		cmp	lr, r1
		bne	dtb_check_done		@ not found

#ifdef CONFIG_ARM_ATAG_DTB_COMPAT
		/*
		 * If we have a DTB appended to zImage and an ATAG list
		 * around, translate and fold the latter into the former.
		 * Temporarily move the stack into the malloc area; no GOT
		 * fixup has occurred yet, but atags_to_fdt uses no globals.
		 */
		add	sp, sp, #0x10000
		stmfd	sp!, {r0-r3, ip, lr}
		mov	r0, r8			@ atags pointer
		mov	r1, r6			@ FDT blob
		sub	r2, sp, r6		@ available space for the FDT
		bl	atags_to_fdt

		/*
		 * If returned value is 1, there is no ATAG at the location
		 * pointed by r8.  Try the typical 0x100 offset from start
		 * of RAM and hope for the best.
		 */
		cmp	r0, #1
		sub	r0, r4, #TEXT_OFFSET	@ start of RAM
		add	r0, r0, #0x100		@ ... + typical ATAG offset
		mov	r1, r6
		sub	r2, sp, r6
		bleq	atags_to_fdt

		ldmfd	sp!, {r0-r3, ip, lr}
		sub	sp, sp, #0x10000	@ restore the real stack
#endif

		mov	r8, r6			@ use the appended device tree

		/*
		 * Make sure that the DTB doesn't end up in the final
		 * kernel's .bss area. To do so, we adjust the decompressed
		 * kernel size to compensate if that .bss size is larger
		 * than the relocated code.
		 */
		ldr	r5, =_kernel_bss_size
		adr	r1, wont_overwrite
		sub	r1, r6, r1
		subs	r1, r5, r1
		addhi	r9, r9, r1

		/* Get the dtb's size (big-endian word at offset 4) */
		ldr	r5, [r6, #4]
#ifndef __ARMEB__
		/* convert r5 (dtb size) to little endian */
		eor	r1, r5, r5, ror #16
		bic	r1, r1, #0x00ff0000
		mov	r5, r5, ror #8
		eor	r5, r5, r1, lsr #8
#endif

		/* preserve 64-bit alignment */
		add	r5, r5, #7
		bic	r5, r5, #7

		/* relocate some pointers past the appended dtb */
		add	r6, r6, r5
		add	r10, r10, r5
		add	sp, sp, r5
dtb_check_done:
#endif
322
/*
 * Check to see if we will overwrite ourselves.
 *   r4  = final kernel address
 *   r9  = size of decompressed image
 *   r10 = end of this image, including bss/stack/malloc space if non XIP
 * We basically want:
 *   r4 - 16k page directory >= r10 -> OK
 *   r4 + image length <= address of wont_overwrite -> OK
 */
		add	r10, r10, #16384	@ reserve the page directory
		cmp	r4, r10
		bhs	wont_overwrite
		add	r10, r4, r9
		adr	r9, wont_overwrite
		cmp	r10, r9
		bls	wont_overwrite

/*
 * Relocate ourselves past the end of the decompressed kernel.
 *   r6  = _edata
 *   r10 = end of the decompressed kernel
 * Because we always copy ahead, we need to do it from the end and go
 * backward in case the source and destination overlap.
 */
		/*
		 * Bump to the next 256-byte boundary with the size of
		 * the relocation code added. This avoids overwriting
		 * ourself when the offset is small.
		 */
		add	r10, r10, #((reloc_code_end - restart + 256) & ~255)
		bic	r10, r10, #255

		/* Get start of code we want to copy and align it down. */
		adr	r5, restart
		bic	r5, r5, #31

/* Relocate the hyp vector base if necessary */
#ifdef CONFIG_ARM_VIRT_EXT
		mrs	r0, spsr
		and	r0, r0, #MODE_MASK
		cmp	r0, #HYP_MODE
		bne	1f

		bl	__hyp_get_vectors
		sub	r0, r0, r5		@ rebase vectors by the same
		add	r0, r0, r10		@ delta as the code itself
		bl	__hyp_set_vectors
1:
#endif

		sub	r9, r6, r5		@ size to copy
		add	r9, r9, #31		@ rounded up to a multiple
		bic	r9, r9, #31		@ ... of 32 bytes
		add	r6, r9, r5		@ copy source end
		add	r9, r9, r10		@ copy destination end

		@ copy 32 bytes per iteration, end to start (regions overlap)
1:		ldmdb	r6!, {r0 - r3, r10 - r12, lr}
		cmp	r6, r5
		stmdb	r9!, {r0 - r3, r10 - r12, lr}
		bhi	1b

		/* Preserve offset to relocated code. */
		sub	r6, r9, r6

#ifndef CONFIG_ZBOOT_ROM
		/* cache_clean_flush may use the stack, so relocate it */
		add	sp, sp, r6
#endif

		bl	cache_clean_flush

		adr	r0, BSYM(restart)
		add	r0, r0, r6		@ jump to the relocated copy
		mov	pc, r0
397
wont_overwrite:
/*
 * If delta is zero, we are running at the address we were linked at.
 *   r0  = delta
 *   r2  = BSS start
 *   r3  = BSS end
 *   r4  = kernel execution address
 *   r5  = appended dtb size (0 if not present)
 *   r7  = architecture ID
 *   r8  = atags pointer
 *   r11 = GOT start
 *   r12 = GOT end
 *   sp  = stack pointer
 */
		orrs	r1, r0, r5		@ nothing to fix up?
		beq	not_relocated

		add	r11, r11, r0		@ relocate GOT bounds
		add	r12, r12, r0

#ifndef CONFIG_ZBOOT_ROM
		/*
		 * If we're running fully PIC === CONFIG_ZBOOT_ROM = n,
		 * we need to fix up pointers into the BSS region.
		 * Note that the stack pointer has already been fixed up.
		 */
		add	r2, r2, r0
		add	r3, r3, r0

		/*
		 * Relocate all entries in the GOT table.
		 * Bump bss entries to _edata + dtb size
		 */
1:		ldr	r1, [r11, #0]		@ relocate entries in the GOT
		add	r1, r1, r0		@ This fixes up C references
		cmp	r1, r2			@ if entry >= bss_start &&
		cmphs	r3, r1			@       bss_end > entry
		addhi	r1, r1, r5		@    entry += dtb size
		str	r1, [r11], #4		@ next entry
		cmp	r11, r12
		blo	1b

		/* bump our bss pointers too */
		add	r2, r2, r5
		add	r3, r3, r5

#else

		/*
		 * Relocate entries in the GOT table.  We only relocate
		 * the entries that are outside the (relocated) BSS region.
		 */
1:		ldr	r1, [r11, #0]		@ relocate entries in the GOT
		cmp	r1, r2			@ entry < bss_start ||
		cmphs	r3, r1			@ _end < entry
		addlo	r1, r1, r0		@ table.  This fixes up the
		str	r1, [r11], #4		@ C references.
		cmp	r11, r12
		blo	1b
#endif

not_relocated:	mov	r0, #0
1:		str	r0, [r2], #4		@ clear bss
		str	r0, [r2], #4
		str	r0, [r2], #4
		str	r0, [r2], #4
		cmp	r2, r3
		blo	1b
466
/*
 * The C runtime environment should now be setup sufficiently.
 * Set up some pointers, and start decompressing.
 *   r4  = kernel execution address
 *   r7  = architecture ID
 *   r8  = atags pointer
 */
		mov	r0, r4			@ output address
		mov	r1, sp			@ malloc space above stack
		add	r2, sp, #0x10000	@ 64k max
		mov	r3, r7			@ architecture ID
		bl	decompress_kernel
		bl	cache_clean_flush
		bl	cache_off
		mov	r1, r7			@ restore architecture number
		mov	r2, r8			@ restore atags pointer

#ifdef CONFIG_ARM_VIRT_EXT
		mrs	r0, spsr		@ Get saved CPU boot mode
		and	r0, r0, #MODE_MASK
		cmp	r0, #HYP_MODE		@ if not booted in HYP mode...
		bne	__enter_kernel		@ boot kernel directly

		adr	r12, .L__hyp_reentry_vectors_offset
		ldr	r0, [r12]
		add	r0, r0, r12		@ absolute vectors address

		bl	__hyp_set_vectors
		__HVC(0)			@ otherwise bounce to hyp mode

		b	.			@ should never be reached

		.align	2
.L__hyp_reentry_vectors_offset:	.long __hyp_reentry_vectors - .
#else
		b	__enter_kernel
#endif
504
505 .align 2
506 .type LC0,
507LC0: .word LC0 @ r1
508 .word __bss_start @ r2
509 .word _end @ r3
510 .word _edata @ r6
511 .word input_data_end - 4 @ r10 (inflated size location)
512 .word _got_start @ r11
513 .word _got_end @ ip
514 .word .L_user_stack_end @ sp
515 .size LC0, . - LC0
516
517#ifdef CONFIG_ARCH_RPC
518 .globl params
519params: ldr r0, =0x10000100 @ params_phys for RPC
520 mov pc, lr
521 .ltorg
522 .align
523#endif
524
/*
 * Turn on the cache.  We need to setup some page tables so that we
 * can have both the I and D caches on.
 *
 * We place the page tables 16k down from the kernel execution address,
 * and we hope that nothing else is using it.  If we're using it, we
 * will go pop!
 *
 * On entry,
 *  r4 = kernel execution address
 *  r7 = architecture number
 *  r8 = atags pointer
 * On exit,
 *  r0, r1, r2, r3, r9, r10, r12 corrupted
 * This routine must preserve:
 *  r4, r7, r8
 */
		.align	5
cache_on:	mov	r3, #8			@ cache_on function offset
		b	call_cache_fn
545
/*
 * Initialize the highest priority protection region, PR7
 * to cover all 32bit address and cacheable and bufferable.
 */
__armv4_mpu_cache_on:
		mov	r0, #0x3f		@ 4G, the whole
		mcr	p15, 0, r0, c6, c7, 0	@ PR7 Area Setting
		mcr	p15, 0, r0, c6, c7, 1

		mov	r0, #0x80		@ PR7
		mcr	p15, 0, r0, c2, c0, 0	@ D-cache on
		mcr	p15, 0, r0, c2, c0, 1	@ I-cache on
		mcr	p15, 0, r0, c3, c0, 0	@ write-buffer on

		mov	r0, #0xc000
		mcr	p15, 0, r0, c5, c0, 1	@ I-access permission
		mcr	p15, 0, r0, c5, c0, 0	@ D-access permission

		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c7, c5, 0	@ flush(inval) I-Cache
		mcr	p15, 0, r0, c7, c6, 0	@ flush(inval) D-Cache
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
						@ ...I .... ..D. WC.M
		orr	r0, r0, #0x002d		@ .... .... ..1. 11.1
		orr	r0, r0, #0x1000		@ ...1 .... .... ....

		mcr	p15, 0, r0, c1, c0, 0	@ write control reg

		mov	r0, #0
		mcr	p15, 0, r0, c7, c5, 0	@ flush(inval) I-Cache
		mcr	p15, 0, r0, c7, c6, 0	@ flush(inval) D-Cache
		mov	pc, lr
579
__armv3_mpu_cache_on:
		mov	r0, #0x3f		@ 4G, the whole
		mcr	p15, 0, r0, c6, c7, 0	@ PR7 Area Setting

		mov	r0, #0x80		@ PR7
		mcr	p15, 0, r0, c2, c0, 0	@ cache on
		mcr	p15, 0, r0, c3, c0, 0	@ write-buffer on

		mov	r0, #0xc000
		mcr	p15, 0, r0, c5, c0, 0	@ access permission

		mov	r0, #0
		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
		/*
		 * ?? ARMv3 MMU does not allow reading the control register,
		 * does this really work on ARMv3 MPU?
		 */
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
						@ .... .... .... WC.M
		orr	r0, r0, #0x000d		@ .... .... .... 11.1
		/* ?? this overwrites the value constructed above? */
		mov	r0, #0
		mcr	p15, 0, r0, c1, c0, 0	@ write control reg

		/* ?? invalidate for the second time? */
		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr
607
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
#define CB_BITS 0x08
#else
#define CB_BITS 0x0c
#endif

__setup_mmu:	sub	r3, r4, #16384		@ Page directory size
		bic	r3, r3, #0xff		@ Align the pointer
		bic	r3, r3, #0x3f00
/*
 * Initialise the page tables, turning on the cacheable and bufferable
 * bits for the RAM area only.
 */
		mov	r0, r3
		mov	r9, r0, lsr #18
		mov	r9, r9, lsl #18		@ start of RAM
		add	r10, r9, #0x10000000	@ a reasonable RAM size
		mov	r1, #0x12		@ XN|U + section mapping
		orr	r1, r1, #3 << 10	@ AP=11
		add	r2, r3, #16384
1:		cmp	r1, r9			@ if virt > start of RAM
		cmphs	r10, r1			@   && end of RAM > virt
		bic	r1, r1, #0x1c		@ clear XN|U + C + B
		orrlo	r1, r1, #0x10		@ Set XN|U for non-RAM
		orrhs	r1, r1, r6		@ set RAM section settings
		str	r1, [r0], #4		@ 1:1 mapping
		add	r1, r1, #1048576
		teq	r0, r2
		bne	1b
/*
 * If ever we are running from Flash, then we surely want the cache
 * to be enabled also for our execution instance...  We map 2MB of it
 * so there is no map overlap problem for up to 1 MB compressed kernel.
 * If the execution is in RAM then we would only be duplicating the above.
 */
		orr	r1, r6, #0x04		@ ensure B is set for this
		orr	r1, r1, #3 << 10
		mov	r2, pc
		mov	r2, r2, lsr #20
		orr	r1, r1, r2, lsl #20
		add	r0, r3, r2, lsl #20
		str	r1, [r0], #4
		add	r1, r1, #1048576
		str	r1, [r0]
		mov	pc, lr
ENDPROC(__setup_mmu)
654
@ Enable unaligned access on v6, to allow better code generation
@ for the decompressor C code:
__armv6_mmu_cache_on:
		mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
		bic	r0, r0, #2		@ A (no unaligned access fault)
		orr	r0, r0, #1 << 22	@ U (v6 unaligned access model)
		mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
		b	__armv4_mmu_cache_on
663
__arm926ejs_mmu_cache_on:
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
		mov	r0, #4			@ put dcache in WT mode
		mcr	p15, 7, r0, c15, c0, 0
#endif
		@ falls through to the generic ARMv4 MMU enable

__armv4_mmu_cache_on:
		mov	r12, lr
#ifdef CONFIG_MMU
		mov	r6, #CB_BITS | 0x12	@ U
		bl	__setup_mmu
		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
		orr	r0, r0, #0x5000		@ I-cache enable, RR cache replacement
		orr	r0, r0, #0x0030		@ write buffer, D-cache
#ifdef CONFIG_CPU_ENDIAN_BE8
		orr	r0, r0, #1 << 25	@ big-endian page tables
#endif
		bl	__common_mmu_cache_on
		mov	r0, #0
		mcr	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
#endif
		mov	pc, r12
689
__armv7_mmu_cache_on:
		mov	r12, lr
#ifdef CONFIG_MMU
		mrc	p15, 0, r11, c0, c1, 4	@ read ID_MMFR0
		tst	r11, #0xf		@ VMSA
		movne	r6, #CB_BITS | 0x02	@ !XN
		blne	__setup_mmu
		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		tst	r11, #0xf		@ VMSA
		mcrne	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
#endif
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
		bic	r0, r0, #1 << 28	@ clear SCTLR.TRE
		orr	r0, r0, #0x5000		@ I-cache enable, RR cache replacement
		orr	r0, r0, #0x003c		@ write buffer
		bic	r0, r0, #2		@ A (no unaligned access fault)
		orr	r0, r0, #1 << 22	@ U (v6 unaligned access model)
						@ (needed for ARM1176)
#ifdef CONFIG_MMU
#ifdef CONFIG_CPU_ENDIAN_BE8
		orr	r0, r0, #1 << 25	@ big-endian page tables
#endif
		mrcne	p15, 0, r6, c2, c0, 2	@ read ttb control reg
		orrne	r0, r0, #1		@ MMU enabled
		movne	r1, #0xfffffffd		@ domain 0 = client
		bic	r6, r6, #1 << 31	@ 32-bit translation system
		bic	r6, r6, #3 << 0		@ use only ttbr0
		mcrne	p15, 0, r3, c2, c0, 0	@ load page table pointer
		mcrne	p15, 0, r1, c3, c0, 0	@ load domain access control
		mcrne	p15, 0, r6, c2, c0, 2	@ load ttb control
#endif
		mcr	p15, 0, r0, c7, c5, 4	@ ISB
		mcr	p15, 0, r0, c1, c0, 0	@ load control register
		mrc	p15, 0, r0, c1, c0, 0	@ and read it back
		mov	r0, #0
		mcr	p15, 0, r0, c7, c5, 4	@ ISB
		mov	pc, r12
728
__fa526_cache_on:
		mov	r12, lr
		mov	r6, #CB_BITS | 0x12
		bl	__setup_mmu
		mov	r0, #0
		mcr	p15, 0, r0, c7, c7, 0	@ Invalidate whole cache
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c8, c7, 0	@ flush UTLB
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
		orr	r0, r0, #0x1000		@ I-cache enable
		bl	__common_mmu_cache_on
		mov	r0, #0
		mcr	p15, 0, r0, c8, c7, 0	@ flush UTLB
		mov	pc, r12
743
__common_mmu_cache_on:
#ifndef CONFIG_THUMB2_KERNEL
#ifndef DEBUG
		orr	r0, r0, #0x000d		@ Write buffer, mmu
#endif
		mov	r1, #-1			@ all domains = manager
		mcr	p15, 0, r3, c2, c0, 0	@ load page table pointer
		mcr	p15, 0, r1, c3, c0, 0	@ load domain access control
		b	1f
		.align	5			@ cache line aligned
1:		mcr	p15, 0, r0, c1, c0, 0	@ load control register
		mrc	p15, 0, r0, c1, c0, 0	@ and read it back to
		sub	pc, lr, r0, lsr #32	@ properly flush pipeline
#endif
758
#define PROC_ENTRY_SIZE (4*5)

/*
 * Here follow the relocatable cache support functions for the
 * various processors.  This is a generic hook for locating an
 * entry and jumping to an instruction at the specified offset
 * from the start of the block.  Please note this is all position
 * independent code.
 *
 *  r1  = corrupted
 *  r2  = corrupted
 *  r3  = block offset or delta
 *  r9  = corrupted (CPU ID)
 *  r12 = corrupted (function table pointer)
 */
call_cache_fn:	adr	r12, proc_types
#ifdef CONFIG_CPU_CP15
		mrc	p15, 0, r9, c0, c0	@ get processor ID
#else
		ldr	r9, =CONFIG_PROCESSOR_ID
#endif
1:		ldr	r1, [r12, #0]		@ get value
		ldr	r2, [r12, #4]		@ get mask
		eor	r1, r1, r9		@ (real ^ match)
		tst	r1, r2			@       & mask
 ARM(		addeq	pc, r12, r3		) @ call cache function
 THUMB(		addeq	r12, r3			)
 THUMB(		moveq	pc, r12			) @ call cache function
		add	r12, r12, #PROC_ENTRY_SIZE
		b	1b
790
/*
 * Table for cache operations.  This is basically:
 *   - CPU ID match
 *   - CPU ID mask
 *   - 'cache on' method instruction
 *   - 'cache off' method instruction
 *   - 'flush & invalidate cache' instruction
 *
 * We match an entry using: ((real_id ^ match) & mask) == 0
 *
 * Writing a 1 to the high bit of the mask only matches the
 * architecture version rather than the exact CPU ID.
 */
		.align	2
		.type	proc_types,#object
proc_types:
		.word	0x00000000		@ old ARM ID
		.word	0x0000f000
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		.word	0x41007000		@ ARM7/710
		.word	0xfff8fe00
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		.word	0x41807200		@ ARM720T (writethrough)
		.word	0xffffff00
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		mov	pc, lr
 THUMB(		nop				)

		.word	0x41007400		@ ARM74x
		.word	0xff00ff00
		W(b)	__armv3_mpu_cache_on
		W(b)	__armv3_mpu_cache_off
		W(b)	__armv3_mpu_cache_flush

		.word	0x41009400		@ ARM94x
		.word	0xff00ff00
		W(b)	__armv4_mpu_cache_on
		W(b)	__armv4_mpu_cache_off
		W(b)	__armv4_mpu_cache_flush

		.word	0x41069260		@ ARM926EJ-S (v5TEJ)
		.word	0xff0ffff0
		W(b)	__arm926ejs_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x00007000		@ ARM7 IDs
		.word	0x0000f000
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		@ Everything from here on will be the new ID system.

		.word	0x4401a100		@ sa110 / sa1100
		.word	0xffffffe0
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x6901b110		@ sa1110
		.word	0xfffffff0
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x56056900
		.word	0xffffff00		@ PXA9xx
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x56158000		@ PXA168
		.word	0xfffff000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x56050000		@ Feroceon
		.word	0xff0f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

#ifdef CONFIG_CPU_FEROCEON_OLD_ID
		/* this conflicts with the standard ARMv5TE entry */
		.long	0x41009260		@ Old Feroceon
		.long	0xff00fff0
		b	__armv4_mmu_cache_on
		b	__armv4_mmu_cache_off
		b	__armv5tej_mmu_cache_flush
#endif

		.word	0x66015261		@ FA526
		.word	0xff01fff1
		W(b)	__fa526_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__fa526_cache_flush

		@ These match on the architecture ID

		.word	0x00020000		@ ARMv4T
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x00050000		@ ARMv5TE
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x00060000		@ ARMv5TEJ
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x0007b000		@ ARMv6
		.word	0x000ff000
		W(b)	__armv6_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv6_mmu_cache_flush

		.word	0x000f0000		@ new CPU Id
		.word	0x000f0000
		W(b)	__armv7_mmu_cache_on
		W(b)	__armv7_mmu_cache_off
		W(b)	__armv7_mmu_cache_flush

		.word	0			@ unrecognised type
		.word	0
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		.size	proc_types, . - proc_types

		/*
		 * If you get a "non-constant expression in ".if" statement"
		 * error from the assembler somewhere in this file, check
		 * that you have not accidentally written a "b" instruction
		 * where you should have written W(b).
		 */
		.if (. - proc_types) % PROC_ENTRY_SIZE != 0
		.error "The size of one or more proc_types entries is wrong."
		.endif
959
/*
 * Turn off the Cache and MMU.  ARMv3 does not support
 * reading the control register, but ARMv4 does.
 *
 * On exit,
 *  r0, r1, r2, r3, r9, r12 corrupted
 * This routine must preserve:
 *  r4, r7, r8
 */
		.align	5
cache_off:	mov	r3, #12			@ cache_off function offset
		b	call_cache_fn
972
__armv4_mpu_cache_off:
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d		@ clear WB, D-cache, M bits
		mcr	p15, 0, r0, c1, c0	@ turn MPU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c7, c6, 0	@ flush D-Cache
		mcr	p15, 0, r0, c7, c5, 0	@ flush I-Cache
		mov	pc, lr
982
__armv3_mpu_cache_off:
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d		@ clear WB, cache, M bits
		mcr	p15, 0, r0, c1, c0, 0	@ turn MPU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr
990
__armv4_mmu_cache_off:
#ifdef CONFIG_MMU
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d		@ clear WB, D-cache, M bits
		mcr	p15, 0, r0, c1, c0	@ turn MMU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c7	@ invalidate whole cache v4
		mcr	p15, 0, r0, c8, c7	@ invalidate whole TLB v4
#endif
		mov	pc, lr
1001
__armv7_mmu_cache_off:
		mrc	p15, 0, r0, c1, c0
#ifdef CONFIG_MMU
		bic	r0, r0, #0x000d		@ clear C, WB and M
#else
		bic	r0, r0, #0x000c		@ clear C and WB only
#endif
		mcr	p15, 0, r0, c1, c0	@ turn MMU and cache off
		mov	r12, lr
		bl	__armv7_mmu_cache_flush
		mov	r0, #0
#ifdef CONFIG_MMU
		mcr	p15, 0, r0, c8, c7, 0	@ invalidate whole TLB
#endif
		mcr	p15, 0, r0, c7, c5, 6	@ invalidate BTC
		mcr	p15, 0, r0, c7, c10, 4	@ DSB
		mcr	p15, 0, r0, c7, c5, 4	@ ISB
		mov	pc, r12
1020
/*
 * Clean and flush the cache to maintain consistency.
 *
 * On exit,
 *  r1, r2, r3, r9, r10, r11, r12 corrupted
 * This routine must preserve:
 *  r4, r6, r7, r8
 */
		.align	5
cache_clean_flush:
		mov	r3, #16			@ cache flush function offset
		b	call_cache_fn
1033
__armv4_mpu_cache_flush:
		mov	r2, #1
		mov	r3, #0
		mcr	p15, 0, ip, c7, c6, 0	@ invalidate D cache
		mov	r1, #7 << 5		@ 8 segments
1:		orr	r3, r1, #63 << 26	@ 64 entries
2:		mcr	p15, 0, r3, c7, c14, 2	@ clean & invalidate D index
		subs	r3, r3, #1 << 26
		bcs	2b			@ entries 63 to 0
		subs	r1, r1, #1 << 5
		bcs	1b			@ segments 7 to 0

		teq	r2, #0
		mcrne	p15, 0, ip, c7, c5, 0	@ invalidate I cache
		mcr	p15, 0, ip, c7, c10, 4	@ drain WB
		mov	pc, lr
1050
__fa526_cache_flush:
		mov	r1, #0
		mcr	p15, 0, r1, c7, c14, 0	@ clean and invalidate D cache
		mcr	p15, 0, r1, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr
1057
__armv6_mmu_cache_flush:
		mov	r1, #0
		mcr	p15, 0, r1, c7, c14, 0	@ clean+invalidate D
		mcr	p15, 0, r1, c7, c5, 0	@ invalidate I+BTB
		mcr	p15, 0, r1, c7, c15, 0	@ clean+invalidate unified
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr
1065
__armv7_mmu_cache_flush:
		mrc	p15, 0, r10, c0, c1, 5	@ read ID_MMFR1
		tst	r10, #0xf << 16		@ hierarchical cache (ARMv7)
		mov	r10, #0
		beq	hierarchical
		mcr	p15, 0, r10, c7, c14, 0	@ clean+invalidate D
		b	iflush
hierarchical:
		mcr	p15, 0, r10, c7, c10, 5	@ DMB
		stmfd	sp!, {r0-r7, r9-r11}
		mrc	p15, 1, r0, c0, c0, 1	@ read clidr
		ands	r3, r0, #0x7000000	@ extract loc from clidr
		mov	r3, r3, lsr #23		@ left align loc bit field
		beq	finished		@ if loc is 0, then no need to clean
		mov	r10, #0			@ start clean at cache level 0
loop1:
		add	r2, r10, r10, lsr #1	@ work out 3x current cache level
		mov	r1, r0, lsr r2		@ extract cache type bits from clidr
		and	r1, r1, #7		@ mask of the bits for current cache only
		cmp	r1, #2			@ see what cache we have at this level
		blt	skip			@ skip if no cache, or just i-cache
		mcr	p15, 2, r10, c0, c0, 0	@ select current cache level in cssr
		mcr	p15, 0, r10, c7, c5, 4	@ isb to sych the new cssr&csidr
		mrc	p15, 1, r1, c0, c0, 0	@ read the new csidr
		and	r2, r1, #7		@ extract the length of the cache lines
		add	r2, r2, #4		@ add 4 (line length offset)
		ldr	r4, =0x3ff
		ands	r4, r4, r1, lsr #3	@ find maximum number on the way size
		clz	r5, r4			@ find bit position of way size increment
		ldr	r7, =0x7fff
		ands	r7, r7, r1, lsr #13	@ extract max number of the index size
loop2:
		mov	r9, r4			@ create working copy of max way size
loop3:
 ARM(		orr	r11, r10, r9, lsl r5	) @ factor way and cache number into r11
 ARM(		orr	r11, r11, r7, lsl r2	) @ factor index number into r11
 THUMB(		lsl	r6, r9, r5		)
 THUMB(		orr	r11, r10, r6		) @ factor way and cache number into r11
 THUMB(		lsl	r6, r7, r2		)
 THUMB(		orr	r11, r11, r6		) @ factor index number into r11
		mcr	p15, 0, r11, c7, c14, 2	@ clean & invalidate by set/way
		subs	r9, r9, #1		@ decrement the way
		bge	loop3
		subs	r7, r7, #1		@ decrement the index
		bge	loop2
skip:
		add	r10, r10, #2		@ increment cache number
		cmp	r3, r10
		bgt	loop1
finished:
		ldmfd	sp!, {r0-r7, r9-r11}
		mov	r10, #0			@ switch back to cache level 0
		mcr	p15, 2, r10, c0, c0, 0	@ select current cache level in cssr
iflush:
		mcr	p15, 0, r10, c7, c10, 4	@ DSB
		mcr	p15, 0, r10, c7, c5, 0	@ invalidate I+BTB
		mcr	p15, 0, r10, c7, c10, 4	@ DSB
		mcr	p15, 0, r10, c7, c5, 4	@ ISB
		mov	pc, lr
1125
__armv5tej_mmu_cache_flush:
		@ loop until the test/clean operation reports the D-cache clean
1:		mrc	p15, 0, r15, c7, c14, 3	@ test,clean,invalidate D cache
		bne	1b
		mcr	p15, 0, r0, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r0, c7, c10, 4	@ drain WB
		mov	pc, lr
1132
__armv4_mmu_cache_flush:
		mov	r2, #64*1024		@ default: 32K dcache size (*2)
		mov	r11, #32		@ default: 32 byte line size
		mrc	p15, 0, r3, c0, c0, 1	@ read cache type
		teq	r3, r9			@ cache ID register present?
		beq	no_cache_id
		mov	r1, r3, lsr #18
		and	r1, r1, #7
		mov	r2, #1024
		mov	r2, r2, lsl r1		@ base dcache size *2
		tst	r3, #1 << 14		@ test M bit
		addne	r2, r2, r2, lsr #1	@ +1/2 size if M == 1
		mov	r3, r3, lsr #12
		and	r3, r3, #3
		mov	r11, #8
		mov	r11, r11, lsl r3	@ cache line size in bytes
no_cache_id:
		mov	r1, pc
		bic	r1, r1, #63		@ align to longest cache line
		add	r2, r1, r2
1:
 ARM(		ldr	r3, [r1], r11		) @ s/w flush D cache
 THUMB(		ldr	r3, [r1]		) @ s/w flush D cache
 THUMB(		add	r1, r1, r11		)
		teq	r1, r2
		bne	1b

		mcr	p15, 0, r1, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r1, c7, c6, 0	@ flush D cache
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr
1164
__armv3_mmu_cache_flush:
__armv3_mpu_cache_flush:
		mov	r1, #0
		mcr	p15, 0, r1, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr
1170
1171
1172
1173
1174
#ifdef DEBUG
		.align	2
		.type	phexbuf,#object
phexbuf:	.space	12			@ scratch buffer for phex
		.size	phexbuf, . - phexbuf

@ phex corrupts {r0, r1, r2, r3}
@ r0 = value, r1 = number of hex digits to print
phex:		adr	r3, phexbuf
		mov	r2, #0
		strb	r2, [r3, r1]		@ NUL-terminate the buffer
1:		subs	r1, r1, #1
		movmi	r0, r3
		bmi	puts			@ all digits done: print buffer
		and	r2, r0, #15
		mov	r0, r0, lsr #4
		cmp	r2, #10
		addge	r2, r2, #7		@ 'A'-'9'-1 adjustment
		add	r2, r2, #'0'
		strb	r2, [r3, r1]
		b	1b

@ puts corrupts {r0, r1, r2, r3}
@ r0 = NUL-terminated string
puts:		loadsp	r3, r1
1:		ldrb	r2, [r0], #1
		teq	r2, #0
		moveq	pc, lr
2:		writeb	r2, r3
		mov	r1, #0x00020000		@ crude busy-wait delay
3:		subs	r1, r1, #1
		bne	3b
		teq	r2, #'\n'
		moveq	r2, #'\r'		@ LF -> CRLF
		beq	2b
		teq	r0, #0
		bne	1b
		mov	pc, lr
@ putc corrupts {r0, r1, r2, r3}
putc:
		mov	r2, r0
		mov	r0, #0			@ fake empty string terminator
		loadsp	r3, r1
		b	2b			@ emit the single char via puts

@ memdump corrupts {r0, r1, r2, r3, r10, r11, r12, lr}
@ r0 = start address; dumps 64 words, 8 per line
memdump:	mov	r12, r0
		mov	r10, lr
		mov	r11, #0
2:		mov	r0, r11, lsl #2
		add	r0, r0, r12
		mov	r1, #8
		bl	phex			@ print line address
		mov	r0, #':'
		bl	putc
1:		mov	r0, #' '
		bl	putc
		ldr	r0, [r12, r11, lsl #2]
		mov	r1, #8
		bl	phex			@ print one word
		and	r0, r11, #7
		teq	r0, #3
		moveq	r0, #' '		@ extra space mid-line
		bleq	putc
		and	r0, r11, #7
		add	r11, r11, #1
		teq	r0, #7
		bne	1b
		mov	r0, #'\n'
		bl	putc
		cmp	r11, #64
		blt	2b
		mov	pc, r10
#endif
1247
1248 .ltorg
1249
#ifdef CONFIG_ARM_VIRT_EXT
@ Minimal HYP vector table: only the HVC entry is used, to re-enter the
@ kernel in HYP mode after the decompressor bounced through SVC.
		.align	5
__hyp_reentry_vectors:
		W(b)	.			@ reset
		W(b)	.			@ undef
		W(b)	.			@ svc
		W(b)	.			@ pabort
		W(b)	.			@ dabort
		W(b)	__enter_kernel		@ hyp
		W(b)	.			@ irq
		W(b)	.			@ fiq
#endif
1262
__enter_kernel:
		mov	r0, #0			@ must be 0
 ARM(		mov	pc, r4	)		@ call kernel
 THUMB(		bx	r4	)		@ entry point is always ARM
1267
reloc_code_end:

		.align
		.section ".stack", "aw", %nobits
.L_user_stack:	.space	4096			@ decompressor stack
.L_user_stack_end:
1274