/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/boot/compressed/head.S
 *
 *  Copyright (C) 1996-2002 Russell King
 *  Copyright (C) 2004 Hyok S. Choi (MPU support)
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/v7m.h>

#include "efi-header.S"

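/*
 * A DTB starts with the big-endian magic 0xd00dfeed; define the value
 * as it is seen by a native-endian 32-bit load, so the signature
 * checks below can compare it directly.
 */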
#ifdef __ARMEB__
#define OF_DT_MAGIC 0xd00dfeed
#else
#define OF_DT_MAGIC 0xedfe0dd0
#endif

 AR_CLASS(	.arch	armv7-a	)
 M_CLASS(	.arch	armv7-m	)

/*
 * Debugging stuff
 *
 * Note that these macros must not contain any code which is not
 * 100% relocatable.  Any attempt to do so will result in a crash.
 * Please select one of the following when turning on debugging.
 */
#ifdef DEBUG

#if defined(CONFIG_DEBUG_ICEDCC)

#if defined(CONFIG_CPU_V6) || defined(CONFIG_CPU_V6K) || defined(CONFIG_CPU_V7)
		.macro	loadsp, rb, tmp1, tmp2
		.endm
		.macro	writeb, ch, rb, tmp
		mcr	p14, 0, \ch, c0, c5, 0
		.endm
#elif defined(CONFIG_CPU_XSCALE)
		.macro	loadsp, rb, tmp1, tmp2
		.endm
		.macro	writeb, ch, rb, tmp
		mcr	p14, 0, \ch, c8, c0, 0
		.endm
#else
		.macro	loadsp, rb, tmp1, tmp2
		.endm
		.macro	writeb, ch, rb, tmp
		mcr	p14, 0, \ch, c1, c0, 0
		.endm
#endif

#else

#include CONFIG_DEBUG_LL_INCLUDE

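		/*
		 * writeb waits for the UART to become ready (optionally
		 * honouring CTS flow control), sends one character, and
		 * waits for the transmitter to drain, all via the
		 * platform's DEBUG_LL macros.
		 */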
		.macro	writeb,	ch, rb, tmp
#ifdef CONFIG_DEBUG_UART_FLOW_CONTROL
		waituartcts \tmp, \rb
#endif
		waituarttxrdy \tmp, \rb
		senduart \ch, \rb
		busyuart \tmp, \rb
		.endm

#if defined(CONFIG_ARCH_SA1100)
		.macro	loadsp, rb, tmp1, tmp2
		mov	\rb, #0x80000000	@ physical base address
#ifdef CONFIG_DEBUG_LL_SER3
		add	\rb, \rb, #0x00050000	@ Ser3
#else
		add	\rb, \rb, #0x00010000	@ Ser1
#endif
		.endm
#else
		.macro	loadsp,	rb, tmp1, tmp2
		addruart \rb, \tmp1, \tmp2
		.endm
#endif
#endif
#endif

		.macro	kputc,val
		mov	r0, \val
		bl	putc
		.endm

		.macro	kphex,val,len
		mov	r0, \val
		mov	r1, #\len
		bl	phex
		.endm

		/*
		 * Debug kernel copy by printing the memory addresses involved
		 */
		.macro dbgkc, begin, end, cbegin, cend
#ifdef DEBUG
		kputc	#'C'
		kputc	#':'
		kputc	#'0'
		kputc	#'x'
		kphex	\begin, 8	/* Start of compressed kernel */
		kputc	#'-'
		kputc	#'0'
		kputc	#'x'
		kphex	\end, 8		/* End of compressed kernel */
		kputc	#'-'
		kputc	#'>'
		kputc	#'0'
		kputc	#'x'
		kphex	\cbegin, 8	/* Start of kernel copy */
		kputc	#'-'
		kputc	#'0'
		kputc	#'x'
		kphex	\cend, 8	/* End of kernel copy */
		kputc	#'\n'
#endif
		.endm

		/*
		 * Debug print of the final appended DTB location
		 */
		.macro dbgadtb, begin, size
#ifdef DEBUG
		kputc	#'D'
		kputc	#'T'
		kputc	#'B'
		kputc	#':'
		kputc	#'0'
		kputc	#'x'
		kphex	\begin, 8	/* Start of appended DTB */
		kputc	#' '
		kputc	#'('
		kputc	#'0'
		kputc	#'x'
		kphex	\size, 8	/* Size of appended DTB */
		kputc	#')'
		kputc	#'\n'
#endif
		.endm

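		/*
		 * The cache maintenance code below relies on CP15 barrier
		 * instructions, which ARMv7 allows to be disabled via
		 * SCTLR.CP15BEN; this macro turns them back on if needed.
		 */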
		.macro	enable_cp15_barriers, reg
		mrc	p15, 0, \reg, c1, c0, 0	@ read SCTLR
		tst	\reg, #(1 << 5)		@ CP15BEN bit set?
		bne	.L_\@
		orr	\reg, \reg, #(1 << 5)	@ CP15 barrier instructions
		mcr	p15, 0, \reg, c1, c0, 0	@ write SCTLR
 ARM(		.inst	0xf57ff06f		@ v7+ isb	)
 THUMB(		isb						)
.L_\@:
		.endm

		/*
		 * The kernel build system appends the size of the
		 * decompressed kernel at the end of the compressed data
		 * in little-endian form.
		 */
		.macro	get_inflated_image_size, res:req, tmp1:req, tmp2:req
		adr	\res, .Linflated_image_size_offset
		ldr	\tmp1, [\res]
		add	\tmp1, \tmp1, \res	@ address of inflated image size

		ldrb	\res, [\tmp1]		@ get_unaligned_le32
		ldrb	\tmp2, [\tmp1, #1]
		orr	\res, \res, \tmp2, lsl #8
		ldrb	\tmp2, [\tmp1, #2]
		ldrb	\tmp1, [\tmp1, #3]
		orr	\res, \res, \tmp2, lsl #16
		orr	\res, \res, \tmp1, lsl #24
		.endm

		.macro	be32tocpu, val, tmp
#ifndef __ARMEB__
		/* convert to little endian */
		rev_l	\val, \tmp
#endif
		.endm

		.section ".start", "ax"
/*
 * sort out different calling conventions
 */
		.align
		/*
		 * Always enter in ARM state for CPUs that support the ARM
		 * ISA.  As of today (2014) that's exactly the members of the
		 * A and R classes.
		 */
 AR_CLASS(	.arm	)
start:
		.type	start,#function
		/*
		 * These nop slots, together with the branch below, make up
		 * the traditional zImage header that legacy bootloaders
		 * expect at the start of the image; with CONFIG_EFI_STUB,
		 * __initial_nops also provides the PE/COFF "MZ" signature
		 * so the same image is accepted as an EFI binary.
		 */
		__initial_nops
		.rept	5
		__nop
		.endr
#ifndef CONFIG_THUMB2_KERNEL
		__nop
#else
 AR_CLASS(	sub	pc, pc, #3	)	@ A/R: switch to Thumb2 mode
  M_CLASS(	nop.w			)	@ M: already in Thumb2 mode
		.thumb
#endif
		W(b)	1f

		.word	_magic_sig	@ Magic numbers to help the loader
		.word	_magic_start	@ absolute load/run zImage address
		.word	_magic_end	@ zImage end address
		.word	0x04030201	@ endianness flag
		.word	0x45454545	@ another magic number to indicate
		.word	_magic_table	@ additional data table

		__EFI_HEADER
1:
 ARM_BE8(	setend	be		)	@ go BE8 if compiled for BE8
 AR_CLASS(	mrs	r9, cpsr	)
#ifdef CONFIG_ARM_VIRT_EXT
		bl	__hyp_stub_install	@ get into SVC mode, reversibly
#endif
		mov	r7, r1			@ save architecture ID
		mov	r8, r2			@ save atags pointer

#ifndef CONFIG_CPU_V7M
		/*
		 * Booting from Angel - need to enter SVC mode and disable
		 * FIQs/IRQs (numeric definitions from angel arm.h source).
		 * We only do this if we were in user mode on entry.
		 */
		mrs	r2, cpsr		@ get current mode
		tst	r2, #3			@ not user?
		bne	not_angel
		mov	r0, #0x17		@ angel_SWIreason_EnterSVC
 ARM(		swi	0x123456	)	@ angel_SWI_ARM
 THUMB(		svc	0xab		)	@ angel_SWI_THUMB
not_angel:
		safe_svcmode_maskall r0
		msr	spsr_cxsf, r9		@ Save the CPU boot mode in
						@ SPSR
#endif

		/*
		 * Note that some cache flushing and other stuff may
		 * be needed here - is there an Angel SWI call for this?
		 */

		/*
		 * some architecture specific code can be inserted
		 * by the linker here, but it should preserve r7, r8, and r9.
		 */

		.text

#ifdef CONFIG_AUTO_ZRELADDR
		/*
		 * Find the start of physical memory.  As we are executing
		 * without the MMU on, we are in the physical address space.
		 * We just need to get rid of any offset by aligning the
		 * address.
		 *
		 * This alignment is a balance between the requirements of
		 * different platforms - we have chosen 128MB to allow
		 * platforms which align the start of their physical memory
		 * to 128MB to use this feature, while allowing the zImage
		 * to be placed within the first 128MB of memory on other
		 * platforms.
		 */
		mov	r0, pc
		and	r0, r0, #0xf8000000
#ifdef CONFIG_USE_OF
		adr	r1, LC1
#ifdef CONFIG_ARM_APPENDED_DTB
		/*
		 * Look for an appended DTB.  If found, we cannot use it to
		 * validate the calculated start of physical memory, as its
		 * memory nodes may need to be augmented by ATAGS stored at
		 * an offset from the same start of physical memory.
		 */
		ldr	r2, [r1, #4]	@ get &_edata
		add	r2, r2, r1	@ relocate it
		ldr	r2, [r2]	@ get DTB signature
		ldr	r3, =OF_DT_MAGIC
		cmp	r2, r3		@ do we have a DTB there?
		beq	1f		@ if yes, skip validation
#endif

		/*
		 * Make sure we have some stack before calling C code.
		 * No GOT fixup has occurred yet, but none of the code we're
		 * about to call uses any global variables.
		 */
		ldr	sp, [r1]	@ get stack location
		add	sp, sp, r1	@ apply relocation

		/* Validate calculated start against passed DTB */
		mov	r1, r8
		bl	fdt_check_mem_start
1:
#endif

		add	r4, r0, #TEXT_OFFSET
#else
		ldr	r4, =zreladdr
#endif

		/*
		 * Set up a page table only if it won't overwrite ourself.
		 * That means r4 < pc || r4 - 16k page directory > &_end.
		 * Given that r4 > &_end is most unfrequent, we add a rough
		 * additional 1MB of room for a possible appended DTB.
		 */
		mov	r0, pc
		cmp	r0, r4
		ldrcc	r0, .Lheadroom
		addcc	r0, r0, pc
		cmpcc	r4, r0
		orrcc	r4, r4, #1		@ remember we skipped cache_on
		blcs	cache_on

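		/*
		 * restart is also re-entered after the decompressor has
		 * copied itself out of the way; sp and r6 (= _edata) are
		 * recomputed relative to the current position via LC1.
		 */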
restart:	adr	r0, LC1
		ldr	sp, [r0]
		ldr	r6, [r0, #4]
		add	sp, sp, r0
		add	r6, r6, r0

		get_inflated_image_size	r9, r10, lr

#ifndef CONFIG_ZBOOT_ROM
		/* malloc space is above the relocated stack (64k max) */
		add	r10, sp, #MALLOC_SIZE
#else
		/*
		 * With ZBOOT_ROM the bss/stack is non relocatable,
		 * but someone could still run this code from RAM,
		 * in which case our reference is _edata.
		 */
		mov	r10, r6
#endif

		mov	r5, #0			@ init dtb size to 0
#ifdef CONFIG_ARM_APPENDED_DTB
/*
 *   r4  = final kernel address (possibly with LSB set)
 *   r5  = appended dtb size (still unknown)
 *   r6  = _edata
 *   r7  = architecture ID
 *   r8  = atags/device tree pointer
 *   r9  = size of decompressed image
 *   r10 = end of this image, including bss/stack/malloc space if non XIP
 *   sp  = stack pointer
 *
 * if there are device trees (dtb) appended to zImage, advance r10 so that
 * the dtb data will get relocated along with the kernel if necessary.
 */

		ldr	lr, [r6, #0]
		ldr	r1, =OF_DT_MAGIC
		cmp	lr, r1
		bne	dtb_check_done		@ not found

#ifdef CONFIG_ARM_ATAG_DTB_COMPAT
		/*
		 * OK... Let's do some funky business here.
		 * If we do have a DTB appended to zImage, and we do have
		 * an ATAG list around, we want the later to be translated
		 * and folded into the former here. No GOT fixup has occurred
		 * yet, but none of the code we're about to call uses any
		 * global variable.
		 */

		/* Get the initial DTB size */
		ldr	r5, [r6, #4]
		be32tocpu r5, r1
		dbgadtb	r6, r5
		/* 50% DTB growth should be good enough */
		add	r5, r5, r5, lsr #1
		/* preserve 64-bit alignment */
		add	r5, r5, #7
		bic	r5, r5, #7
		/* clamp to 32KB min and 1MB max */
		cmp	r5, #(1 << 15)
		movlo	r5, #(1 << 15)
		cmp	r5, #(1 << 20)
		movhi	r5, #(1 << 20)
		/* temporarily relocate the stack past the DTB work space */
		add	sp, sp, r5

		mov	r0, r8
		mov	r1, r6
		mov	r2, r5
		bl	atags_to_fdt

		/*
		 * If returned value is 1, there is no ATAG at the location
		 * pointed by r8.  Try the typical 0x100 offset from start
		 * of RAM and hope for the best.
		 */
		cmp	r0, #1
		sub	r0, r4, #TEXT_OFFSET
		bic	r0, r0, #1
		add	r0, r0, #0x100
		mov	r1, r6
		mov	r2, r5
		bleq	atags_to_fdt

		sub	sp, sp, r5
#endif

		mov	r8, r6			@ use the appended device tree

		/*
		 * Make sure that the DTB doesn't end up in the final
		 * kernel's .bss area. To do so, we adjust the decompressed
		 * kernel size to compensate if that .bss size is larger
		 * than the relocated code.
		 */
		ldr	r5, =_kernel_bss_size
		adr	r1, wont_overwrite
		sub	r1, r6, r1
		subs	r1, r5, r1
		addhi	r9, r9, r1

		/* Get the current DTB size */
		ldr	r5, [r6, #4]
		be32tocpu r5, r1

		/* preserve 64-bit alignment */
		add	r5, r5, #7
		bic	r5, r5, #7

		/* relocate some pointers past the appended dtb */
		add	r6, r6, r5
		add	r10, r10, r5
		add	sp, sp, r5
dtb_check_done:
#endif

/*
 * Check to see if we will overwrite ourselves.
 *   r4  = final kernel address (possibly with LSB set)
 *   r9  = size of decompressed image
 *   r10 = end of this image, including bss/stack/malloc space if non XIP
 * We basically want:
 *   r4 - 16k page directory >= r10 -> OK
 *   r4 + image length <= address of wont_overwrite -> OK
 * Note: the possible LSB in r4 is harmless here.
 */
		add	r10, r10, #16384
		cmp	r4, r10
		bhs	wont_overwrite
		add	r10, r4, r9
		adr	r9, wont_overwrite
		cmp	r10, r9
		bls	wont_overwrite

/*
 * Relocate ourselves past the end of the decompressed kernel.
 *   r6  = _edata
 *   r10 = end of the decompressed kernel
 * Because we always copy ahead, we need to do it from the end and go
 * backward in case the source and destination overlap.
 */
		/*
		 * Bump to the next 256-byte boundary with the size of
		 * the relocation code added. This avoids overwriting
		 * ourself when the offset is small.
		 */
		add	r10, r10, #((reloc_code_end - restart + 256) & ~255)
		bic	r10, r10, #255

		/* Get start of code we want to copy and align it down. */
		adr	r5, restart
		bic	r5, r5, #31

/* Relocate the hyp vector base if necessary */
#ifdef CONFIG_ARM_VIRT_EXT
		mrs	r0, spsr
		and	r0, r0, #MODE_MASK
		cmp	r0, #HYP_MODE
		bne	1f

		/*
		 * Compute the address of the hyp vectors after relocation.
		 * Call __hyp_set_vectors with the new address so that we
		 * can HVC again after the copy.
		 */
		adr_l	r0, __hyp_stub_vectors
		sub	r0, r0, r5
		add	r0, r0, r10
		bl	__hyp_set_vectors
1:
#endif

		sub	r9, r6, r5		@ size to copy
		add	r9, r9, #31		@ rounded up to a multiple
		bic	r9, r9, #31		@ ... of 32 bytes
		add	r6, r9, r5
		add	r9, r9, r10

#ifdef DEBUG
		sub	r10, r6, r5
		sub	r10, r9, r10
		/*
		 * We are about to copy the kernel to a new memory area.
		 * The boundaries of the new memory area can be found in
		 * r10 and r9, whilst r5 and r6 contain the boundaries
		 * of the memory we are going to copy.
		 * Calling dbgkc will help with the printing of this
		 * information.
		 */
		dbgkc	r5, r6, r10, r9
#endif

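		/* copy 32 bytes at a time, backwards: src and dst may overlap */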
1:		ldmdb	r6!, {r0 - r3, r10 - r12, lr}
		cmp	r6, r5
		stmdb	r9!, {r0 - r3, r10 - r12, lr}
		bhi	1b

		/* Preserve offset to relocated code. */
		sub	r6, r9, r6

		mov	r0, r9			@ start of relocated zImage
		add	r1, sp, r6		@ end of relocated zImage
		bl	cache_clean_flush

		badr	r0, restart
		add	r0, r0, r6
		mov	pc, r0

wont_overwrite:
		adr	r0, LC0
		ldmia	r0, {r1, r2, r3, r11, r12}
		sub	r0, r0, r1		@ calculate the delta offset

/*
 * If delta is zero, we are running at the address we were linked at.
 *   r0  = delta
 *   r2  = BSS start
 *   r3  = BSS end
 *   r4  = kernel execution address (possibly with LSB set)
 *   r5  = appended dtb size (0 if not present)
 *   r7  = architecture ID
 *   r8  = atags pointer
 *   r11 = GOT start
 *   r12 = GOT end
 *   sp  = stack pointer
 */
		orrs	r1, r0, r5
		beq	not_relocated

		add	r11, r11, r0
		add	r12, r12, r0

#ifndef CONFIG_ZBOOT_ROM
		/*
		 * If we're running fully PIC === CONFIG_ZBOOT_ROM = n,
		 * we need to fix up pointers into the BSS region.
		 * Note that the stack pointer has already been fixed up.
		 */
		add	r2, r2, r0
		add	r3, r3, r0

		/*
		 * Relocate all entries in the GOT table.
		 * Bump bss entries to _edata + dtb size
		 */
1:		ldr	r1, [r11, #0]		@ relocate entries in the GOT
		add	r1, r1, r0		@ This fixes up C references
		cmp	r1, r2			@ if entry >= bss_start &&
		cmphs	r3, r1			@       bss_end > entry
		addhi	r1, r1, r5		@    entry += dtb size
		str	r1, [r11], #4		@ next entry
		cmp	r11, r12
		blo	1b

		/* bump our bss pointers too */
		add	r2, r2, r5
		add	r3, r3, r5

#else

		/*
		 * Relocate entries in the GOT table.  We only relocate
		 * the entries that are outside the (relocated) BSS region.
		 */
1:		ldr	r1, [r11, #0]		@ relocate entries in the GOT
		cmp	r1, r2			@ entry < bss_start ||
		cmphs	r3, r1			@ _end < entry
		addlo	r1, r1, r0		@ table.  This fixes up the
		str	r1, [r11], #4		@ C references.
		cmp	r11, r12
		blo	1b
#endif

not_relocated:	mov	r0, #0
1:		str	r0, [r2], #4		@ clear bss
		str	r0, [r2], #4
		str	r0, [r2], #4
		str	r0, [r2], #4
		cmp	r2, r3
		blo	1b

		/*
		 * Did we skip the cache setup earlier?
		 * That is indicated by the LSB in r4.
		 * Do it now if so.
		 */
		tst	r4, #1
		bic	r4, r4, #1
		blne	cache_on

/*
 * The C runtime environment should now be setup sufficiently.
 * Set up some pointers, and start decompressing.
 *   r4  = kernel execution address
 *   r7  = architecture ID
 *   r8  = atags pointer
 */
		mov	r0, r4
		mov	r1, sp			@ malloc space above stack
		add	r2, sp, #MALLOC_SIZE	@ 64k max
		mov	r3, r7
		bl	decompress_kernel

		get_inflated_image_size	r1, r2, r3

		mov	r0, r4			@ start of inflated image
		add	r1, r1, r0		@ end of inflated image
		bl	cache_clean_flush
		bl	cache_off

#ifdef CONFIG_ARM_VIRT_EXT
		mrs	r0, spsr		@ Get saved CPU boot mode
		and	r0, r0, #MODE_MASK
		cmp	r0, #HYP_MODE		@ if not booted in HYP mode...
		bne	__enter_kernel		@ boot kernel directly

		adr_l	r0, __hyp_reentry_vectors
		bl	__hyp_set_vectors
		__HVC(0)			@ otherwise bounce to hyp mode

		b	.			@ should never be reached
#else
		b	__enter_kernel
#endif

		.align	2
		.type	LC0, #object
LC0:		.word	LC0			@ r1
		.word	__bss_start		@ r2
		.word	_end			@ r3
		.word	_got_start		@ r11
		.word	_got_end		@ ip
		.size	LC0, . - LC0

		.type	LC1, #object
LC1:		.word	.L_user_stack_end - LC1	@ sp
		.word	_edata - LC1		@ r6
		.size	LC1, . - LC1

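/*
 * Worst-case headroom needed below the final kernel address: the whole
 * decompressor image plus a 16k page directory and 1MB of slack for a
 * possible appended DTB (see the cache_on check near the start).
 */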
.Lheadroom:
		.word	_end - restart + 16384 + 1024*1024

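/*
 * Self-relative offset of the little-endian word holding the inflated
 * image size, stored by the build in the last 4 bytes of the compressed
 * payload (see get_inflated_image_size above).
 */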
.Linflated_image_size_offset:
		.long	(input_data_end - 4) - .

#ifdef CONFIG_ARCH_RPC
		.globl	params
params:		ldr	r0, =0x10000100		@ params_phys for RPC
		mov	pc, lr
		.ltorg
		.align
#endif

/*
 * dcache_line_size - get the minimum D-cache line size from the CTR
 * register on ARMv7.
 */
		.macro	dcache_line_size, reg, tmp
#ifdef CONFIG_CPU_V7M
		movw	\tmp, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_CTR
		movt	\tmp, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_CTR
		ldr	\tmp, [\tmp]
#else
		mrc	p15, 0, \tmp, c0, c0, 1		@ read ctr
#endif
		lsr	\tmp, \tmp, #16
		and	\tmp, \tmp, #0xf		@ cache line size encoding
		mov	\reg, #4			@ bytes per word
		mov	\reg, \reg, lsl \tmp		@ actual cache line size
		.endm

/*
 * Turn on the cache.  We need to setup some page tables so that we
 * can have both the I and D caches on.
 *
 * We place the page tables 16k down from the kernel execution address,
 * and we hope that nothing else is using it.  If we're using it, we
 * will go pop!
 *
 * On entry,
 *  r4 = kernel execution address
 *  r7 = architecture number
 *  r8 = atags pointer
 * On exit,
 *  r0, r1, r2, r3, r9, r10, r12 corrupted
 * This routine must preserve:
 *  r4, r7, r8
 */
		.align	5
cache_on:	mov	r3, #8			@ cache_on function
		b	call_cache_fn

/*
 * Initialize the highest priority protection region, PR7
 * to cover all 32bit address and cacheable and bufferable.
 */
__armv4_mpu_cache_on:
		mov	r0, #0x3f		@ 4G, the whole
		mcr	p15, 0, r0, c6, c7, 0	@ PR7 Area Setting
		mcr	p15, 0, r0, c6, c7, 1

		mov	r0, #0x80		@ PR7
		mcr	p15, 0, r0, c2, c0, 0	@ D-cache on
		mcr	p15, 0, r0, c2, c0, 1	@ I-cache on
		mcr	p15, 0, r0, c3, c0, 0	@ write-buffer on

		mov	r0, #0xc000
		mcr	p15, 0, r0, c5, c0, 1	@ I-access permission
		mcr	p15, 0, r0, c5, c0, 0	@ D-access permission

		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c7, c5, 0	@ flush(inval) I-Cache
		mcr	p15, 0, r0, c7, c6, 0	@ flush(inval) D-Cache
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
						@ ...I .... ..D. WC.M
		orr	r0, r0, #0x002d		@ .... .... ..1. 11.1
		orr	r0, r0, #0x1000		@ ...1 .... .... ....

		mcr	p15, 0, r0, c1, c0, 0	@ write control reg

		mov	r0, #0
		mcr	p15, 0, r0, c7, c5, 0	@ flush(inval) I-Cache
		mcr	p15, 0, r0, c7, c6, 0	@ flush(inval) D-Cache
		mov	pc, lr

__armv3_mpu_cache_on:
		mov	r0, #0x3f		@ 4G, the whole
		mcr	p15, 0, r0, c6, c7, 0	@ PR7 Area Setting

		mov	r0, #0x80		@ PR7
		mcr	p15, 0, r0, c2, c0, 0	@ cache on
		mcr	p15, 0, r0, c3, c0, 0	@ write-buffer on

		mov	r0, #0xc000
		mcr	p15, 0, r0, c5, c0, 0	@ access permission

		mov	r0, #0
		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
		/*
		 * ?? ARMv3 MMU does not allow reading the control register,
		 * does this really work on ARMv3 MPU?
		 */
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
						@ .... .... .... WC.M
		orr	r0, r0, #0x000d		@ .... .... .... 11.1
		/* ?? this overwrites the value constructed above?? */
		mov	r0, #0
		mcr	p15, 0, r0, c1, c0, 0	@ write control reg

		/* ?? invalidate for the second time? */
		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr

#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
#define CB_BITS 0x08
#else
#define CB_BITS 0x0c
#endif

__setup_mmu:	sub	r3, r4, #16384		@ Page directory size
		bic	r3, r3, #0xff		@ Align the pointer
		bic	r3, r3, #0x3f00
/*
 * Initialise the page tables, turning on the cacheable and bufferable
 * bits for the RAM area only.
 */
		mov	r0, r3
		mov	r9, r0, lsr #18
		mov	r9, r9, lsl #18		@ start of RAM
		add	r10, r9, #0x10000000	@ a reasonable RAM size
		mov	r1, #0x12		@ XN|U + section mapping
		orr	r1, r1, #3 << 10	@ AP=11
		add	r2, r3, #16384
1:		cmp	r1, r9			@ if virt > start of RAM
		cmphs	r10, r1			@   && end of RAM > virt
		bic	r1, r1, #0x1c		@ clear XN|U + C + B
		orrlo	r1, r1, #0x10		@ Set XN|U for non-RAM
		orrhs	r1, r1, r6		@ set RAM section settings
		str	r1, [r0], #4		@ 1:1 mapping
		add	r1, r1, #1048576
		teq	r0, r2
		bne	1b
/*
 * If ever we are running from Flash, then we surely want the cache
 * to be enabled also for our execution instance...  We map 2MB of it
 * so there is no map overlap problem for up to 1 MB compressed kernel.
 * If the execution is in RAM then we would only be duplicating the above.
 */
		orr	r1, r6, #0x04		@ ensure B is set for this
		orr	r1, r1, #3 << 10
		mov	r2, pc
		mov	r2, r2, lsr #20
		orr	r1, r1, r2, lsl #20
		add	r0, r3, r2, lsl #2
		str	r1, [r0], #4
		add	r1, r1, #1048576
		str	r1, [r0]
		mov	pc, lr
ENDPROC(__setup_mmu)

@ Enable unaligned access on v6, to allow better code generation
@ for the decompressor C code:
__armv6_mmu_cache_on:
		mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
		bic	r0, r0, #2		@ A (no unaligned access fault)
		orr	r0, r0, #1 << 22	@ U (v6 unaligned access model)
		mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
		b	__armv4_mmu_cache_on

__arm926ejs_mmu_cache_on:
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
		mov	r0, #4			@ put dcache in WT mode
		mcr	p15, 7, r0, c15, c0, 0
#endif

__armv4_mmu_cache_on:
		mov	r12, lr
#ifdef CONFIG_MMU
		mov	r6, #CB_BITS | 0x12	@ U
		bl	__setup_mmu
		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
		orr	r0, r0, #0x5000		@ I-cache enable, RR cache replacement
		orr	r0, r0, #0x0030
 ARM_BE8(	orr	r0, r0, #1 << 25 )	@ big-endian page tables
		bl	__common_mmu_cache_on
		mov	r0, #0
		mcr	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
#endif
		mov	pc, r12

__armv7_mmu_cache_on:
		enable_cp15_barriers	r11
		mov	r12, lr
#ifdef CONFIG_MMU
		mrc	p15, 0, r11, c0, c1, 4	@ read ID_MMFR0
		tst	r11, #0xf		@ VMSA
		movne	r6, #CB_BITS | 0x02	@ !XN
		blne	__setup_mmu
		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		tst	r11, #0xf		@ VMSA
		mcrne	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
#endif
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
		bic	r0, r0, #1 << 28	@ clear SCTLR.TRE
		orr	r0, r0, #0x5000		@ I-cache enable, RR cache replacement
		orr	r0, r0, #0x003c		@ write buffer
		bic	r0, r0, #2		@ A (no unaligned access fault)
		orr	r0, r0, #1 << 22	@ U (v6 unaligned access model)
						@ (needed for ARM1176)
#ifdef CONFIG_MMU
 ARM_BE8(	orr	r0, r0, #1 << 25 )	@ big-endian page tables
		mrcne	p15, 0, r6, c2, c0, 2	@ read ttb control reg
		orrne	r0, r0, #1		@ MMU enabled
		movne	r1, #0xfffffffd		@ domain 0 = client
		bic	r6, r6, #1 << 31	@ 32-bit translation system
		bic	r6, r6, #(7 << 0) | (1 << 4)	@ use only ttbr0
		mcrne	p15, 0, r3, c2, c0, 0	@ load page table pointer
		mcrne	p15, 0, r1, c3, c0, 0	@ load domain access control
		mcrne	p15, 0, r6, c2, c0, 2	@ load ttb control
#endif
		mcr	p15, 0, r0, c7, c5, 4	@ ISB
		mcr	p15, 0, r0, c1, c0, 0	@ load control register
		mrc	p15, 0, r0, c1, c0, 0	@ and read it back
		mov	r0, #0
		mcr	p15, 0, r0, c7, c5, 4	@ ISB
		mov	pc, r12

__fa526_cache_on:
		mov	r12, lr
		mov	r6, #CB_BITS | 0x12	@ U
		bl	__setup_mmu
		mov	r0, #0
		mcr	p15, 0, r0, c7, c7, 0	@ Invalidate whole cache
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c8, c7, 0	@ flush UTLB
		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
		orr	r0, r0, #0x1000		@ I-cache enable
		bl	__common_mmu_cache_on
		mov	r0, #0
		mcr	p15, 0, r0, c8, c7, 0	@ flush UTLB
		mov	pc, r12

__common_mmu_cache_on:
#ifndef CONFIG_THUMB2_KERNEL
#ifndef DEBUG
		orr	r0, r0, #0x000d		@ Write buffer, mmu
#endif
		mov	r1, #-1
		mcr	p15, 0, r3, c2, c0, 0	@ load page table pointer
		mcr	p15, 0, r1, c3, c0, 0	@ load domain access control
		b	1f
		.align	5			@ cache line aligned
1:		mcr	p15, 0, r0, c1, c0, 0	@ load control register
		mrc	p15, 0, r0, c1, c0, 0	@ and read it back to
		sub	pc, lr, r0, lsr #32	@ properly flush pipeline
#endif

#define PROC_ENTRY_SIZE (4*5)

/*
 * Here follow the relocatable cache support functions for the
 * various processors.  This is a generic hook for locating an entry
 * and jumping to an instruction at the specified offset from the
 * start of the block.  Please note this is all position independent
 * code.
 *
 *  r1  = corrupted
 *  r2  = corrupted
 *  r3  = block offset
 *  r9  = corrupted
 *  r12 = corrupted
 */

call_cache_fn:	adr	r12, proc_types
#ifdef CONFIG_CPU_CP15
		mrc	p15, 0, r9, c0, c0	@ get processor ID
#elif defined(CONFIG_CPU_V7M)
		/*
		 * On v7-M the processor id is located in the V7M_SCB_CPUID
		 * register, but as cache handling is IMPLEMENTATION DEFINED
		 * on v7-M (if existant at all) we just return early here.
		 * If V7M_SCB_CPUID were used the cpu ID functions (i.e.
		 * __armv7_mmu_cache_{on,off,flush}) would be selected which
		 * use cp15 registers that are not implemented on v7-M.
		 */
		bx	lr
#else
		ldr	r9, =CONFIG_PROCESSOR_ID
#endif
1:		ldr	r1, [r12, #0]		@ get value
		ldr	r2, [r12, #4]		@ get mask
		eor	r1, r1, r9		@ (real ^ match)
		tst	r1, r2			@       & mask
 ARM(		addeq	pc, r12, r3		) @ call cache function
 THUMB(		addeq	r12, r3			)
 THUMB(		moveq	pc, r12			) @ call cache function
		add	r12, r12, #PROC_ENTRY_SIZE
		b	1b

/*
 * Table for cache operations.  This is basically:
 *   - CPU ID match
 *   - CPU ID mask
 *   - 'cache on' method instruction
 *   - 'cache off' method instruction
 *   - 'cache flush' method instruction
 *
 * We match an entry using: ((real_id ^ match) & mask) == 0
 *
 * Writethrough caches generally only need 'on' and 'off'
 * methods.  Writeback caches _must_ have the flush method
 * defined.
 */
		.align	2
		.type	proc_types, #object
proc_types:
		.word	0x41000000		@ old ARM ID
		.word	0xff00f000
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		.word	0x41007000		@ ARM7/710
		.word	0xfff8fe00
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		.word	0x41807200		@ ARM720T (writethrough)
		.word	0xffffff00
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		mov	pc, lr
 THUMB(		nop				)

		.word	0x41007400		@ ARM74x
		.word	0xff00ff00
		W(b)	__armv3_mpu_cache_on
		W(b)	__armv3_mpu_cache_off
		W(b)	__armv3_mpu_cache_flush

		.word	0x41009400		@ ARM94x
		.word	0xff00ff00
		W(b)	__armv4_mpu_cache_on
		W(b)	__armv4_mpu_cache_off
		W(b)	__armv4_mpu_cache_flush

		.word	0x41069260		@ ARM926EJ-S (v5TEJ)
		.word	0xff0ffff0
		W(b)	__arm926ejs_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x00007000		@ ARM7 IDs
		.word	0x0000f000
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		@ Everything from here on will be the new ID system.

		.word	0x4401a100		@ sa110 / sa1100
		.word	0xffffffe0
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x6901b110		@ sa1110
		.word	0xfffffff0
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x56056900
		.word	0xffffff00		@ PXA9xx
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x56158000		@ PXA168
		.word	0xfffff000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x56050000		@ Feroceon
		.word	0xff0f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

#ifdef CONFIG_CPU_FEROCEON_OLD_ID
		/* this conflicts with the standard ARMv5TE entry */
		.long	0x41009260		@ Old Feroceon
		.long	0xff00fff0
		b	__armv4_mmu_cache_on
		b	__armv4_mmu_cache_off
		b	__armv5tej_mmu_cache_flush
#endif

		.word	0x66015261		@ FA526
		.word	0xff01fff1
		W(b)	__fa526_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__fa526_cache_flush

		@ These match on the architecture ID

		.word	0x00020000		@ ARMv4T
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x00050000		@ ARMv5TE
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv4_mmu_cache_flush

		.word	0x00060000		@ ARMv5TEJ
		.word	0x000f0000
		W(b)	__armv4_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv5tej_mmu_cache_flush

		.word	0x0007b000		@ ARMv6
		.word	0x000ff000
		W(b)	__armv6_mmu_cache_on
		W(b)	__armv4_mmu_cache_off
		W(b)	__armv6_mmu_cache_flush

		.word	0x000f0000		@ new CPU Id
		.word	0x000f0000
		W(b)	__armv7_mmu_cache_on
		W(b)	__armv7_mmu_cache_off
		W(b)	__armv7_mmu_cache_flush

		.word	0			@ unrecognised type
		.word	0
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)
		mov	pc, lr
 THUMB(		nop				)

		.size	proc_types, . - proc_types

		/*
		 * Each proc_types entry must be exactly PROC_ENTRY_SIZE
		 * bytes so that call_cache_fn can index into the table;
		 * under Thumb-2 that requires the W(b)/THUMB(nop) forms
		 * above so every slot assembles to a fixed 4 bytes.
		 */
		.if (. - proc_types) % PROC_ENTRY_SIZE != 0
		.error "The size of one or more proc_types entries is wrong."
		.endif

/*
 * Turn off the Cache and MMU.  ARMv3 does not support
 * reading the control register, but ARMv4 does.
 *
 * On exit,
 *  r0, r1, r2, r3, r9, r12 corrupted
 * This routine must preserve:
 *  r4, r7, r8
 */
		.align	5
cache_off:	mov	r3, #12			@ cache_off function
		b	call_cache_fn

__armv4_mpu_cache_off:
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d
		mcr	p15, 0, r0, c1, c0	@ turn MPU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
		mcr	p15, 0, r0, c7, c6, 0	@ flush D-Cache
		mcr	p15, 0, r0, c7, c5, 0	@ flush I-Cache
		mov	pc, lr

__armv3_mpu_cache_off:
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d
		mcr	p15, 0, r0, c1, c0, 0	@ turn MPU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr

__armv4_mmu_cache_off:
#ifdef CONFIG_MMU
		mrc	p15, 0, r0, c1, c0
		bic	r0, r0, #0x000d
		mcr	p15, 0, r0, c1, c0	@ turn MMU and cache off
		mov	r0, #0
		mcr	p15, 0, r0, c7, c7	@ invalidate whole cache v4
		mcr	p15, 0, r0, c8, c7	@ invalidate whole TLB v4
#endif
		mov	pc, lr

__armv7_mmu_cache_off:
		mrc	p15, 0, r0, c1, c0
#ifdef CONFIG_MMU
		bic	r0, r0, #0x000d		@ turn off MMU and caches
#else
		bic	r0, r0, #0x000c		@ turn off caches
#endif
		mcr	p15, 0, r0, c1, c0	@ turn MMU and cache off
		mov	r0, #0
#ifdef CONFIG_MMU
		mcr	p15, 0, r0, c8, c7, 0	@ invalidate whole TLB
#endif
		mcr	p15, 0, r0, c7, c5, 6	@ invalidate BTC
		mcr	p15, 0, r0, c7, c10, 4	@ DSB
		mcr	p15, 0, r0, c7, c5, 4	@ ISB
		mov	pc, lr

/*
 * Clean and flush the cache to maintain consistency.
 *
 * On entry,
 *  r0 = start address
 *  r1 = end address (exclusive)
 * On exit,
 *  r1, r2, r3, r9, r10, r11, r12 corrupted
 * This routine must preserve:
 *  r4, r6, r7, r8
 */
		.align	5
cache_clean_flush:
		mov	r3, #16
		mov	r11, r1
		b	call_cache_fn

__armv4_mpu_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r2, #1
		mov	r3, #0
		mcr	p15, 0, ip, c7, c6, 0	@ invalidate D cache
		mov	r1, #7 << 5		@ 8 segments
1:		orr	r3, r1, #63 << 26	@ 64 entries
2:		mcr	p15, 0, r3, c7, c14, 2	@ clean & invalidate D index
		subs	r3, r3, #1 << 26
		bcs	2b			@ entries 63 to 0
		subs	r1, r1, #1 << 5
		bcs	1b			@ segments 7 to 0

		teq	r2, #0
		mcrne	p15, 0, ip, c7, c5, 0	@ invalidate I cache
		mcr	p15, 0, ip, c7, c10, 4	@ drain WB
		mov	pc, lr

__fa526_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r1, #0
		mcr	p15, 0, r1, c7, c14, 0	@ clean and invalidate D cache
		mcr	p15, 0, r1, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr

__armv6_mmu_cache_flush:
		mov	r1, #0
		tst	r4, #1
		mcreq	p15, 0, r1, c7, c14, 0	@ clean+invalidate D
		mcr	p15, 0, r1, c7, c5, 0	@ invalidate I+BTB
		mcreq	p15, 0, r1, c7, c15, 0	@ clean+invalidate unified
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr

__armv7_mmu_cache_flush:
		enable_cp15_barriers	r10
		tst	r4, #1
		bne	iflush
		mrc	p15, 0, r10, c0, c1, 5	@ read ID_MMFR1
		tst	r10, #0xf << 16		@ hierarchical cache (ARMv7)
		mov	r10, #0
		beq	hierarchical
		mcr	p15, 0, r10, c7, c14, 0	@ clean+invalidate D
		b	iflush
hierarchical:
		dcache_line_size r1, r2		@ r1 := dcache min line size
		sub	r2, r1, #1		@ r2 := line size mask
		bic	r0, r0, r2		@ round down start to line size
		sub	r11, r11, #1		@ end address is exclusive
		bic	r11, r11, r2		@ round down end to line size
0:		cmp	r0, r11			@ finished?
		bgt	iflush
		mcr	p15, 0, r0, c7, c14, 1	@ Dcache clean/invalidate by VA
		add	r0, r0, r1
		b	0b
iflush:
		mcr	p15, 0, r10, c7, c10, 4	@ DSB
		mcr	p15, 0, r10, c7, c5, 0	@ invalidate I+BTB
		mcr	p15, 0, r10, c7, c10, 4	@ DSB
		mcr	p15, 0, r10, c7, c5, 4	@ ISB
		mov	pc, lr

__armv5tej_mmu_cache_flush:
		tst	r4, #1
		movne	pc, lr
1:		mrc	p15, 0, APSR_nzcv, c7, c14, 3	@ test,clean,invalidate D cache
		bne	1b
		mcr	p15, 0, r0, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r0, c7, c10, 4	@ drain WB
		mov	pc, lr

__armv4_mmu_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r2, #64*1024		@ default: 32K dcache size (*2)
		mov	r11, #32		@ default: 32 byte line size
		mrc	p15, 0, r3, c0, c0, 1	@ read cache type
		teq	r3, r9			@ cache ID register present?
		beq	no_cache_id
		mov	r1, r3, lsr #18
		and	r1, r1, #7
		mov	r2, #1024
		mov	r2, r2, lsl r1		@ base dcache size *2
		tst	r3, #1 << 14		@ test M bit
		addne	r2, r2, r2, lsr #1	@ +1/2 size if M == 1
		mov	r3, r3, lsr #12
		and	r3, r3, #3
		mov	r11, #8
		mov	r11, r11, lsl r3	@ cache line size in bytes
no_cache_id:
		mov	r1, pc
		bic	r1, r1, #63		@ align to longest cache line
		add	r2, r1, r2
1:
 ARM(		ldr	r3, [r1], r11	)	@ s/w flush D cache
 THUMB(		ldr	r3, [r1]	)	@ s/w flush D cache
 THUMB(		add	r1, r1, r11	)
		teq	r1, r2
		bne	1b

		mcr	p15, 0, r1, c7, c5, 0	@ flush I cache
		mcr	p15, 0, r1, c7, c6, 0	@ flush D cache
		mcr	p15, 0, r1, c7, c10, 4	@ drain WB
		mov	pc, lr

__armv3_mmu_cache_flush:
__armv3_mpu_cache_flush:
		tst	r4, #1
		movne	pc, lr
		mov	r1, #0
		mcr	p15, 0, r1, c7, c0, 0	@ invalidate whole cache v3
		mov	pc, lr

/*
 * Various debugging routines for printing hex characters and
 * memory, which again must be relocatable.
 */
#ifdef DEBUG
		.align	2
		.type	phexbuf, #object
phexbuf:	.space	12
		.size	phexbuf, . - phexbuf

@ phex corrupts {r0, r1, r2, r3}
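@ prints r0 as an r1-digit hex number: the string is built in phexbuf
@ from the end (least-significant digit first), then emitted via puts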
phex:		adr	r3, phexbuf
		mov	r2, #0
		strb	r2, [r3, r1]
1:		subs	r1, r1, #1
		movmi	r0, r3
		bmi	puts
		and	r2, r0, #15
		mov	r0, r0, lsr #4
		cmp	r2, #10
		addge	r2, r2, #7
		add	r2, r2, #'0'
		strb	r2, [r3, r1]
		b	1b

@ puts corrupts {r0, r1, r2, r3}
puts:		loadsp	r3, r2, r1
1:		ldrb	r2, [r0], #1
		teq	r2, #0
		moveq	pc, lr
2:		writeb	r2, r3, r1
		mov	r1, #0x00020000
3:		subs	r1, r1, #1
		bne	3b
		teq	r2, #'\n'
		moveq	r2, #'\r'
		beq	2b
		teq	r0, #0
		bne	1b
		mov	pc, lr
@ putc corrupts {r0, r1, r2, r3}
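@ writes the single character in r0 by reusing the output path in puts
@ (r0 is cleared so the string loop terminates immediately)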
putc:
		mov	r2, r0
		loadsp	r3, r1, r0
		mov	r0, #0
		b	2b

@ memdump corrupts {r0, r1, r2, r3, r10, r11, r12, lr}
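@ dumps 64 words (256 bytes) starting at r0, eight words per line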
memdump:	mov	r12, r0
		mov	r10, lr
		mov	r11, #0
2:		mov	r0, r11, lsl #2
		add	r0, r0, r12
		mov	r1, #8
		bl	phex
		mov	r0, #':'
		bl	putc
1:		mov	r0, #' '
		bl	putc
		ldr	r0, [r12, r11, lsl #2]
		mov	r1, #8
		bl	phex
		and	r0, r11, #7
		teq	r0, #3
		moveq	r0, #' '
		bleq	putc
		and	r0, r11, #7
		add	r11, r11, #1
		teq	r0, #7
		bne	1b
		mov	r0, #'\n'
		bl	putc
		cmp	r11, #64
		blt	2b
		mov	pc, r10
#endif

		.ltorg

#ifdef CONFIG_ARM_VIRT_EXT
.align 5
__hyp_reentry_vectors:
		W(b)	.			@ reset
		W(b)	.			@ undef
#ifdef CONFIG_EFI_STUB
		W(b)	__enter_kernel_from_hyp	@ hvc from HYP
#else
		W(b)	.			@ svc
#endif
		W(b)	.			@ pabort
		W(b)	.			@ dabort
		W(b)	__enter_kernel		@ hyp
		W(b)	.			@ irq
		W(b)	.			@ fiq
#endif

__enter_kernel:
		mov	r0, #0			@ must be 0
		mov	r1, r7			@ restore architecture number
		mov	r2, r8			@ restore atags pointer
 ARM(		mov	pc, r4		)	@ call kernel
 M_CLASS(	add	r4, r4, #1	)	@ enter in Thumb mode for M class
 THUMB(		bx	r4	)		@ entry point is always ARM for A/R classes

reloc_code_end:

#ifdef CONFIG_EFI_STUB
__enter_kernel_from_hyp:
		mrc	p15, 4, r0, c1, c0, 0	@ read HSCTLR
		bic	r0, r0, #0x5		@ disable MMU and caches
		mcr	p15, 4, r0, c1, c0, 0	@ write HSCTLR
		isb
		b	__enter_kernel

ENTRY(efi_enter_kernel)
		mov	r4, r0			@ preserve image base
		mov	r8, r1			@ preserve DT pointer

		adr_l	r0, call_cache_fn
		adr	r1, 0f			@ clean the region of code we
		bl	cache_clean_flush	@ may run with the MMU off

#ifdef CONFIG_ARM_VIRT_EXT
		@
		@ The EFI spec does not support booting on ARM in HYP mode,
		@ since it mandates that the MMU and caches are on, with all
		@ 32-bit addressable DRAM mapped 1:1 using short descriptors.
		@
		@ While the EDK2 reference implementation adheres to this,
		@ U-Boot might decide to enter the EFI stub in HYP mode
		@ anyway, with the MMU and caches either on or off.
		@
		mrs	r0, cpsr		@ get the current mode
		msr	spsr_cxsf, r0		@ record boot mode
		and	r0, r0, #MODE_MASK	@ are we running in HYP mode?
		cmp	r0, #HYP_MODE
		bne	.Lefi_svc

		mrc	p15, 4, r1, c1, c0, 0	@ read HSCTLR
		tst	r1, #0x1		@ MMU enabled at HYP?
		beq	1f

		@
		@ When running in HYP mode with the caches on, we're better
		@ off just carrying on using the cached 1:1 mapping that the
		@ firmware provided. Set up the HYP vectors so HVC instructions
		@ issued from HYP mode take us to the correct handler code. We
		@ will disable the MMU before jumping to the kernel proper.
		@
 ARM(		bic	r1, r1, #(1 << 30)	) @ clear HSCTLR.TE
 THUMB(		orr	r1, r1, #(1 << 30)	) @ set HSCTLR.TE
		mcr	p15, 4, r1, c1, c0, 0
		adr	r0, __hyp_reentry_vectors
		mcr	p15, 4, r0, c12, c0, 0	@ set HYP vector base (HVBAR)
		isb
		b	.Lefi_hyp

		@
		@ When running in HYP mode with the caches off, we need to drop
		@ into SVC mode now, and let the decompressor set up its cached
		@ 1:1 mapping as usual.
		@
1:		mov	r9, r4			@ preserve image base
		bl	__hyp_stub_install	@ install HYP stub vectors
		safe_svcmode_maskall	r1	@ drop to SVC mode
		msr	spsr_cxsf, r0		@ record boot mode
		orr	r4, r9, #1		@ restore image base and set LSB
		b	.Lefi_hyp
.Lefi_svc:
#endif
		mrc	p15, 0, r0, c1, c0, 0	@ read SCTLR
		tst	r0, #0x1		@ MMU enabled?
		orreq	r4, r4, #1		@ set LSB if not

.Lefi_hyp:
		mov	r0, r8			@ DT start
		add	r1, r8, r2		@ DT end
		bl	cache_clean_flush

		adr	r0, 0f			@ switch to our stack
		ldr	sp, [r0]
		add	sp, sp, r0

		mov	r5, #0			@ appended DTB size
		mov	r7, #0xFFFFFFFF		@ machine ID
		b	wont_overwrite
ENDPROC(efi_enter_kernel)
0:		.long	.L_user_stack_end - .
#endif

		.align
		.section ".stack", "aw", %nobits
.L_user_stack:	.space	4096
.L_user_stack_end: