/*
 * Low-level exception handling code for arm64 (kernel entry/exit paths,
 * syscall dispatch, KPTI trampoline vectors and SDEI handling).
 *
 * NOTE(review): the original file header (description/copyright/license
 * comment) was destroyed during extraction and is replaced by this stub —
 * restore the authoritative header from version control.
 */
21#include <linux/init.h>
22#include <linux/linkage.h>
23
24#include <asm/alternative.h>
25#include <asm/assembler.h>
26#include <asm/asm-offsets.h>
27#include <asm/cpufeature.h>
28#include <asm/errno.h>
29#include <asm/esr.h>
30#include <asm/irq.h>
31#include <asm/memory.h>
32#include <asm/mmu.h>
33#include <asm/processor.h>
34#include <asm/ptrace.h>
35#include <asm/thread_info.h>
36#include <asm/asm-uaccess.h>
37#include <asm/unistd.h>
38
39
40
41
42
43 .macro ct_user_exit, syscall = 0
44#ifdef CONFIG_CONTEXT_TRACKING
45 bl context_tracking_user_exit
46 .if \syscall == 1
47
48
49
50
51 ldp x0, x1, [sp]
52 ldp x2, x3, [sp,
53 ldp x4, x5, [sp,
54 ldp x6, x7, [sp,
55 .endif
56#endif
57 .endm
58
59 .macro ct_user_enter
60#ifdef CONFIG_CONTEXT_TRACKING
61 bl context_tracking_user_enter
62#endif
63 .endm
64
65
66
67
68
69#define BAD_SYNC 0
70#define BAD_IRQ 1
71#define BAD_FIQ 2
72#define BAD_ERROR 3
73
74 .macro kernel_ventry, el, label, regsize = 64
75 .align 7
76#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
77alternative_if ARM64_UNMAP_KERNEL_AT_EL0
78 .if \el == 0
79 .if \regsize == 64
80 mrs x30, tpidrro_el0
81 msr tpidrro_el0, xzr
82 .else
83 mov x30, xzr
84 .endif
85 .endif
86alternative_else_nop_endif
87#endif
88
89 sub sp, sp,
90#ifdef CONFIG_VMAP_STACK
91
92
93
94
95 add sp, sp, x0
96 sub x0, sp, x0
97 tbnz x0,
98 sub x0, sp, x0
99 sub sp, sp, x0
100 b el\()\el\()_\label
101
1020:
103
104
105
106
107
108
109
110 msr tpidr_el0, x0
111
112
113 sub x0, sp, x0
114 msr tpidrro_el0, x0
115
116
117 adr_this_cpu sp, overflow_stack + OVERFLOW_STACK_SIZE, x0
118
119
120
121
122
123 mrs x0, tpidr_el0
124 sub x0, sp, x0
125 tst x0,
126 b.ne __bad_stack
127
128
129 sub sp, sp, x0
130 mrs x0, tpidrro_el0
131#endif
132 b el\()\el\()_\label
133 .endm
134
135 .macro tramp_alias, dst, sym
136 mov_q \dst, TRAMP_VALIAS
137 add \dst, \dst,
138 .endm
139
140 .macro kernel_entry, el, regsize = 64
141 .if \regsize == 32
142 mov w0, w0
143 .endif
144 stp x0, x1, [sp,
145 stp x2, x3, [sp,
146 stp x4, x5, [sp,
147 stp x6, x7, [sp,
148 stp x8, x9, [sp,
149 stp x10, x11, [sp,
150 stp x12, x13, [sp,
151 stp x14, x15, [sp,
152 stp x16, x17, [sp,
153 stp x18, x19, [sp,
154 stp x20, x21, [sp,
155 stp x22, x23, [sp,
156 stp x24, x25, [sp,
157 stp x26, x27, [sp,
158 stp x28, x29, [sp,
159
160 .if \el == 0
161 mrs x21, sp_el0
162 ldr_this_cpu tsk, __entry_task, x20
163 ldr x19, [tsk,
164 disable_step_tsk x19, x20
165
166 mov x29, xzr
167 .else
168 add x21, sp,
169 get_thread_info tsk
170
171 ldr x20, [tsk,
172 str x20, [sp,
173 mov x20,
174 str x20, [tsk,
175
176 .endif
177 mrs x22, elr_el1
178 mrs x23, spsr_el1
179 stp lr, x21, [sp,
180
181
182
183
184
185
186 .if \el == 0
187 stp xzr, xzr, [sp,
188 .else
189 stp x29, x22, [sp,
190 .endif
191 add x29, sp,
192
193#ifdef CONFIG_ARM64_SW_TTBR0_PAN
194
195
196
197
198
199
200
201
202alternative_if ARM64_HAS_PAN
203 b 1f
204alternative_else_nop_endif
205
206 .if \el != 0
207 mrs x21, ttbr0_el1
208 tst x21,
209 orr x23, x23,
210 b.eq 1f
211 and x23, x23,
212 .endif
213
214 __uaccess_ttbr0_disable x21
2151:
216#endif
217
218 stp x22, x23, [sp,
219
220
221 .if \el == 0
222 mov w21,
223 str w21, [sp,
224 .endif
225
226
227
228
229 .if \el == 0
230 msr sp_el0, tsk
231 .endif
232
233
234
235
236
237
238
239
240 .endm
241
242 .macro kernel_exit, el
243 .if \el != 0
244 disable_daif
245
246
247 ldr x20, [sp,
248 str x20, [tsk,
249
250
251 .endif
252
253 ldp x21, x22, [sp,
254 .if \el == 0
255 ct_user_enter
256 .endif
257
258#ifdef CONFIG_ARM64_SW_TTBR0_PAN
259
260
261
262
263alternative_if ARM64_HAS_PAN
264 b 2f
265alternative_else_nop_endif
266
267 .if \el != 0
268 tbnz x22,
269 .endif
270
271 __uaccess_ttbr0_enable x0, x1
272
273 .if \el == 0
274
275
276
277
278
279
280 bl post_ttbr_update_workaround
281 .endif
2821:
283 .if \el != 0
284 and x22, x22,
285 .endif
2862:
287#endif
288
289 .if \el == 0
290 ldr x23, [sp,
291 msr sp_el0, x23
292 tst x22,
293 b.eq 3f
294
295#ifdef CONFIG_ARM64_ERRATUM_845719
296alternative_if ARM64_WORKAROUND_845719
297#ifdef CONFIG_PID_IN_CONTEXTIDR
298 mrs x29, contextidr_el1
299 msr contextidr_el1, x29
300#else
301 msr contextidr_el1, xzr
302#endif
303alternative_else_nop_endif
304#endif
3053:
306 .endif
307
308 msr elr_el1, x21
309 msr spsr_el1, x22
310 ldp x0, x1, [sp,
311 ldp x2, x3, [sp,
312 ldp x4, x5, [sp,
313 ldp x6, x7, [sp,
314 ldp x8, x9, [sp,
315 ldp x10, x11, [sp,
316 ldp x12, x13, [sp,
317 ldp x14, x15, [sp,
318 ldp x16, x17, [sp,
319 ldp x18, x19, [sp,
320 ldp x20, x21, [sp,
321 ldp x22, x23, [sp,
322 ldp x24, x25, [sp,
323 ldp x26, x27, [sp,
324 ldp x28, x29, [sp,
325 ldr lr, [sp,
326 add sp, sp,
327
328
329
330
331
332 .if \el == 0
333alternative_insn eret, nop, ARM64_UNMAP_KERNEL_AT_EL0
334#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
335 bne 4f
336 msr far_el1, x30
337 tramp_alias x30, tramp_exit_native
338 br x30
3394:
340 tramp_alias x30, tramp_exit_compat
341 br x30
342#endif
343 .else
344 eret
345 .endif
346 .endm
347
348 .macro irq_stack_entry
349 mov x19, sp
350
351
352
353
354
355
356 ldr x25, [tsk, TSK_STACK]
357 eor x25, x25, x19
358 and x25, x25,
359 cbnz x25, 9998f
360
361 ldr_this_cpu x25, irq_stack_ptr, x26
362 mov x26,
363 add x26, x25, x26
364
365
366 mov sp, x26
3679998:
368 .endm
369
370
371
372
373
374 .macro irq_stack_exit
375 mov sp, x19
376 .endm
377
378
379
380
381
382
383
384wsc_nr .req w25
385xsc_nr .req x25
386wscno .req w26
387xscno .req x26
388stbl .req x27
389tsk .req x28
390
391
392
393
394 .macro irq_handler
395 ldr_l x1, handle_arch_irq
396 mov x0, sp
397 irq_stack_entry
398 blr x1
399 irq_stack_exit
400 .endm
401
402 .text
403
404
405
406
407 .pushsection ".entry.text", "ax"
408
	.align	11
ENTRY(vectors)
	/* 16 entries, 128 bytes each: 4 exception types x 4 source states. */
	kernel_ventry	1, sync_invalid			// Synchronous EL1t
	kernel_ventry	1, irq_invalid			// IRQ EL1t
	kernel_ventry	1, fiq_invalid			// FIQ EL1t
	kernel_ventry	1, error_invalid		// Error EL1t

	kernel_ventry	1, sync				// Synchronous EL1h
	kernel_ventry	1, irq				// IRQ EL1h
	kernel_ventry	1, fiq_invalid			// FIQ EL1h
	kernel_ventry	1, error			// Error EL1h

	kernel_ventry	0, sync				// Synchronous 64-bit EL0
	kernel_ventry	0, irq				// IRQ 64-bit EL0
	kernel_ventry	0, fiq_invalid			// FIQ 64-bit EL0
	kernel_ventry	0, error			// Error 64-bit EL0

#ifdef CONFIG_COMPAT
	kernel_ventry	0, sync_compat, 32		// Synchronous 32-bit EL0
	kernel_ventry	0, irq_compat, 32		// IRQ 32-bit EL0
	kernel_ventry	0, fiq_invalid_compat, 32	// FIQ 32-bit EL0
	kernel_ventry	0, error_compat, 32		// Error 32-bit EL0
#else
	kernel_ventry	0, sync_invalid, 32		// Synchronous 32-bit EL0
	kernel_ventry	0, irq_invalid, 32		// IRQ 32-bit EL0
	kernel_ventry	0, fiq_invalid, 32		// FIQ 32-bit EL0
	kernel_ventry	0, error_invalid, 32		// Error 32-bit EL0
#endif
END(vectors)
438
439#ifdef CONFIG_VMAP_STACK
440
441
442
443
444
/*
 * We detected an overflow in kernel_ventry, which switched to the
 * overflow stack. Stash the exception regs, and head to our overflow
 * handler.
 */
__bad_stack:
	/* Restore the original x0 value */
	mrs	x0, tpidrro_el0

	/*
	 * Store the original GPRs to the new stack. The orginal SP (minus
	 * S_FRAME_SIZE) was stashed in tpidr_el0 by kernel_ventry.
	 */
	sub	sp, sp, #S_FRAME_SIZE
	kernel_entry 1
	mrs	x0, tpidr_el0
	add	x0, x0, #S_FRAME_SIZE
	str	x0, [sp, #S_SP]

	/* Stash the regs for handle_bad_stack */
	mov	x0, sp

	/* Time to die */
	bl	handle_bad_stack
	ASM_BUG()
#endif /* CONFIG_VMAP_STACK */
466
467
468
469
470 .macro inv_entry, el, reason, regsize = 64
471 kernel_entry \el, \regsize
472 mov x0, sp
473 mov x1,
474 mrs x2, esr_el1
475 bl bad_mode
476 ASM_BUG()
477 .endm
478
/* Unexpected-exception stubs: each reports via bad_mode and dies. */
el0_sync_invalid:
	inv_entry 0, BAD_SYNC
ENDPROC(el0_sync_invalid)

el0_irq_invalid:
	inv_entry 0, BAD_IRQ
ENDPROC(el0_irq_invalid)

el0_fiq_invalid:
	inv_entry 0, BAD_FIQ
ENDPROC(el0_fiq_invalid)

el0_error_invalid:
	inv_entry 0, BAD_ERROR
ENDPROC(el0_error_invalid)

#ifdef CONFIG_COMPAT
el0_fiq_invalid_compat:
	inv_entry 0, BAD_FIQ, 32
ENDPROC(el0_fiq_invalid_compat)
#endif

el1_sync_invalid:
	inv_entry 1, BAD_SYNC
ENDPROC(el1_sync_invalid)

el1_irq_invalid:
	inv_entry 1, BAD_IRQ
ENDPROC(el1_irq_invalid)

el1_fiq_invalid:
	inv_entry 1, BAD_FIQ
ENDPROC(el1_fiq_invalid)

el1_error_invalid:
	inv_entry 1, BAD_ERROR
ENDPROC(el1_error_invalid)
516
517
518
519
520 .align 6
521el1_sync:
522 kernel_entry 1
523 mrs x1, esr_el1
524 lsr x24, x1,
525 cmp x24,
526 b.eq el1_da
527 cmp x24,
528 b.eq el1_ia
529 cmp x24,
530 b.eq el1_undef
531 cmp x24,
532 b.eq el1_sp_pc
533 cmp x24,
534 b.eq el1_sp_pc
535 cmp x24,
536 b.eq el1_undef
537 cmp x24,
538 b.ge el1_dbg
539 b el1_inv
540
541el1_ia:
542
543
544
545el1_da:
546
547
548
549 mrs x3, far_el1
550 inherit_daif pstate=x23, tmp=x2
551 clear_address_tag x0, x3
552 mov x2, sp
553 bl do_mem_abort
554
555 kernel_exit 1
556el1_sp_pc:
557
558
559
560 mrs x0, far_el1
561 inherit_daif pstate=x23, tmp=x2
562 mov x2, sp
563 bl do_sp_pc_abort
564 ASM_BUG()
565el1_undef:
566
567
568
569 inherit_daif pstate=x23, tmp=x2
570 mov x0, sp
571 bl do_undefinstr
572 ASM_BUG()
573el1_dbg:
574
575
576
577 cmp x24,
578 cinc x24, x24, eq
579 tbz x24,
580 mrs x0, far_el1
581 mov x2, sp
582 bl do_debug_exception
583 kernel_exit 1
584el1_inv:
585
586 inherit_daif pstate=x23, tmp=x2
587 mov x0, sp
588 mov x2, x1
589 mov x1,
590 bl bad_mode
591 ASM_BUG()
592ENDPROC(el1_sync)
593
594 .align 6
595el1_irq:
596 kernel_entry 1
597 enable_da_f
598#ifdef CONFIG_TRACE_IRQFLAGS
599 bl trace_hardirqs_off
600#endif
601
602 irq_handler
603
604#ifdef CONFIG_PREEMPT
605 ldr w24, [tsk,
606 cbnz w24, 1f
607 ldr x0, [tsk,
608 tbz x0,
609 bl el1_preempt
6101:
611#endif
612#ifdef CONFIG_TRACE_IRQFLAGS
613 bl trace_hardirqs_on
614#endif
615 kernel_exit 1
616ENDPROC(el1_irq)
617
618#ifdef CONFIG_PREEMPT
619el1_preempt:
620 mov x24, lr
6211: bl preempt_schedule_irq
622 ldr x0, [tsk,
623 tbnz x0,
624 ret x24
625#endif
626
627
628
629
630 .align 6
631el0_sync:
632 kernel_entry 0
633 mrs x25, esr_el1
634 lsr x24, x25,
635 cmp x24,
636 b.eq el0_svc
637 cmp x24,
638 b.eq el0_da
639 cmp x24,
640 b.eq el0_ia
641 cmp x24,
642 b.eq el0_fpsimd_acc
643 cmp x24,
644 b.eq el0_sve_acc
645 cmp x24,
646 b.eq el0_fpsimd_exc
647 cmp x24,
648 b.eq el0_sys
649 cmp x24,
650 b.eq el0_sp_pc
651 cmp x24,
652 b.eq el0_sp_pc
653 cmp x24,
654 b.eq el0_undef
655 cmp x24,
656 b.ge el0_dbg
657 b el0_inv
658
659#ifdef CONFIG_COMPAT
660 .align 6
661el0_sync_compat:
662 kernel_entry 0, 32
663 mrs x25, esr_el1
664 lsr x24, x25,
665 cmp x24,
666 b.eq el0_svc_compat
667 cmp x24,
668 b.eq el0_da
669 cmp x24,
670 b.eq el0_ia
671 cmp x24,
672 b.eq el0_fpsimd_acc
673 cmp x24,
674 b.eq el0_fpsimd_exc
675 cmp x24,
676 b.eq el0_sp_pc
677 cmp x24,
678 b.eq el0_undef
679 cmp x24,
680 b.eq el0_undef
681 cmp x24,
682 b.eq el0_undef
683 cmp x24,
684 b.eq el0_undef
685 cmp x24,
686 b.eq el0_undef
687 cmp x24,
688 b.eq el0_undef
689 cmp x24,
690 b.ge el0_dbg
691 b el0_inv
692el0_svc_compat:
693
694
695
696 ldr x16, [tsk,
697 adrp stbl, compat_sys_call_table
698 mov wscno, w7
699 mov wsc_nr,
700 b el0_svc_naked
701
702 .align 6
703el0_irq_compat:
704 kernel_entry 0, 32
705 b el0_irq_naked
706
707el0_error_compat:
708 kernel_entry 0, 32
709 b el0_error_naked
710#endif
711
el0_da:
	/*
	 * Data abort handling
	 */
	mrs	x26, far_el1
	enable_daif
	ct_user_exit
	clear_address_tag x0, x26
	mov	x1, x25
	mov	x2, sp
	bl	do_mem_abort
	b	ret_to_user
el0_ia:
	/*
	 * Instruction abort handling
	 */
	mrs	x26, far_el1
	enable_da_f
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_off
#endif
	ct_user_exit
	mov	x0, x26
	mov	x1, x25
	mov	x2, sp
	bl	do_el0_ia_bp_hardening
	b	ret_to_user
el0_fpsimd_acc:
	/*
	 * Floating Point or Advanced SIMD access
	 */
	enable_daif
	ct_user_exit
	mov	x0, x25
	mov	x1, sp
	bl	do_fpsimd_acc
	b	ret_to_user
el0_sve_acc:
	/*
	 * Scalable Vector Extension access
	 */
	enable_daif
	ct_user_exit
	mov	x0, x25
	mov	x1, sp
	bl	do_sve_acc
	b	ret_to_user
el0_fpsimd_exc:
	/*
	 * Floating Point, Advanced SIMD or SVE exception
	 */
	enable_daif
	ct_user_exit
	mov	x0, x25
	mov	x1, sp
	bl	do_fpsimd_exc
	b	ret_to_user
el0_sp_pc:
	/*
	 * Stack or PC alignment exception handling
	 */
	mrs	x26, far_el1
	enable_da_f
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_off
#endif
	ct_user_exit
	mov	x0, x26
	mov	x1, x25
	mov	x2, sp
	bl	do_sp_pc_abort
	b	ret_to_user
el0_undef:
	/*
	 * Undefined instruction
	 */
	enable_daif
	ct_user_exit
	mov	x0, sp
	bl	do_undefinstr
	b	ret_to_user
el0_sys:
	/*
	 * System instructions, for trapped cache maintenance instructions
	 */
	enable_daif
	ct_user_exit
	mov	x0, x25
	mov	x1, sp
	bl	do_sysinstr
	b	ret_to_user
el0_dbg:
	/*
	 * Debug exception handling
	 */
	tbnz	x24, #0, el0_inv		// EL0 only
	mrs	x0, far_el1
	mov	x1, x25
	mov	x2, sp
	bl	do_debug_exception
	enable_daif
	ct_user_exit
	b	ret_to_user
el0_inv:
	enable_daif
	ct_user_exit
	mov	x0, sp
	mov	x1, #BAD_SYNC
	mov	x2, x25
	bl	bad_el0_sync
	b	ret_to_user
ENDPROC(el0_sync)
824
825 .align 6
826el0_irq:
827 kernel_entry 0
828el0_irq_naked:
829 enable_da_f
830#ifdef CONFIG_TRACE_IRQFLAGS
831 bl trace_hardirqs_off
832#endif
833
834 ct_user_exit
835#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
836 tbz x22,
837 bl do_el0_irq_bp_hardening
8381:
839#endif
840 irq_handler
841
842#ifdef CONFIG_TRACE_IRQFLAGS
843 bl trace_hardirqs_on
844#endif
845 b ret_to_user
846ENDPROC(el0_irq)
847
/* SError taken from EL1: report via do_serror, then resume the kernel. */
el1_error:
	kernel_entry 1
	mrs	x1, esr_el1
	enable_dbg
	mov	x0, sp
	bl	do_serror
	kernel_exit 1
ENDPROC(el1_error)

/* SError taken from EL0; el0_error_naked is the compat re-entry point. */
el0_error:
	kernel_entry 0
el0_error_naked:
	mrs	x1, esr_el1
	enable_dbg
	mov	x0, sp
	bl	do_serror
	enable_daif
	ct_user_exit
	b	ret_to_user
ENDPROC(el0_error)
868
869
870
871
872
873
/*
 * This is the fast syscall return path.  We do as little as possible here,
 * and this includes saving x0 back into the kernel stack.
 */
ret_fast_syscall:
	disable_daif
	str	x0, [sp, #S_X0]			// returned x0
	ldr	x1, [tsk, #TSK_TI_FLAGS]	// re-check for syscall tracing
	and	x2, x1, #_TIF_SYSCALL_WORK
	cbnz	x2, ret_fast_syscall_trace
	and	x2, x1, #_TIF_WORK_MASK
	cbnz	x2, work_pending
	enable_step_tsk x1, x2
	kernel_exit 0
ret_fast_syscall_trace:
	enable_daif
	b	__sys_trace_return_skipped	// we already saved x0
887
888
889
890
/*
 * Ok, we need to do extra processing, enter the slow path.
 */
work_pending:
	mov	x0, sp				// 'regs'
	bl	do_notify_resume
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_on		// enabled while in userspace
#endif
	ldr	x1, [tsk, #TSK_TI_FLAGS]	// re-check for single-step
	b	finish_ret_to_user
/*
 * "slow" syscall return path.
 */
ret_to_user:
	disable_daif
	ldr	x1, [tsk, #TSK_TI_FLAGS]
	and	x2, x1, #_TIF_WORK_MASK
	cbnz	x2, work_pending
finish_ret_to_user:
	enable_step_tsk x1, x2
	kernel_exit 0
ENDPROC(ret_to_user)
911
912
913
914
915 .align 6
916el0_svc:
917 ldr x16, [tsk,
918 adrp stbl, sys_call_table
919 mov wscno, w8
920 mov wsc_nr,
921
922#ifdef CONFIG_ARM64_SVE
923alternative_if_not ARM64_SVE
924 b el0_svc_naked
925alternative_else_nop_endif
926 tbz x16,
927 bic x16, x16,
928 str x16, [tsk,
929
930
931
932
933
934
935
936
937 mrs x9, cpacr_el1
938 bic x9, x9,
939 msr cpacr_el1, x9
940#endif
941
942el0_svc_naked:
943 stp x0, xscno, [sp,
944 enable_daif
945 ct_user_exit 1
946
947 tst x16,
948 b.ne __sys_trace
949 cmp wscno, wsc_nr
950 b.hs ni_sys
951 mask_nospec64 xscno, xsc_nr, x19
952 ldr x16, [stbl, xscno, lsl
953 blr x16
954 b ret_fast_syscall
955ni_sys:
956 mov x0, sp
957 bl do_ni_syscall
958 b ret_fast_syscall
959ENDPROC(el0_svc)
960
961
962
963
964
965__sys_trace:
966 cmp wscno,
967 b.ne 1f
968 mov x0,
969 str x0, [sp,
9701: mov x0, sp
971 bl syscall_trace_enter
972 cmp w0,
973 b.eq __sys_trace_return_skipped
974 mov wscno, w0
975 mov x1, sp
976 cmp wscno, wsc_nr
977 b.hs __ni_sys_trace
978 ldp x0, x1, [sp]
979 ldp x2, x3, [sp,
980 ldp x4, x5, [sp,
981 ldp x6, x7, [sp,
982 ldr x16, [stbl, xscno, lsl
983 blr x16
984
985__sys_trace_return:
986 str x0, [sp,
987__sys_trace_return_skipped:
988 mov x0, sp
989 bl syscall_trace_exit
990 b ret_to_user
991
992__ni_sys_trace:
993 mov x0, sp
994 bl do_ni_syscall
995 b __sys_trace_return
996
997 .popsection
998
999#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
1000
1001
1002
1003 .pushsection ".entry.tramp.text", "ax"
1004
1005 .macro tramp_map_kernel, tmp
1006 mrs \tmp, ttbr1_el1
1007 add \tmp, \tmp,
1008 bic \tmp, \tmp,
1009 msr ttbr1_el1, \tmp
1010#ifdef CONFIG_QCOM_FALKOR_ERRATUM_1003
1011alternative_if ARM64_WORKAROUND_QCOM_FALKOR_E1003
1012
1013 movk \tmp,
1014 movk \tmp,
1015
1016 movk \tmp,
1017 isb
1018 tlbi vae1, \tmp
1019 dsb nsh
1020alternative_else_nop_endif
1021#endif
1022 .endm
1023
1024 .macro tramp_unmap_kernel, tmp
1025 mrs \tmp, ttbr1_el1
1026 sub \tmp, \tmp,
1027 orr \tmp, \tmp,
1028 msr ttbr1_el1, \tmp
1029
1030
1031
1032
1033
1034 .endm
1035
1036 .macro tramp_ventry, regsize = 64
1037 .align 7
10381:
1039 .if \regsize == 64
1040 msr tpidrro_el0, x30
1041 .endif
1042
1043
1044
1045
1046
1047 bl 2f
1048 b .
10492:
1050 tramp_map_kernel x30
1051#ifdef CONFIG_RANDOMIZE_BASE
1052 adr x30, tramp_vectors + PAGE_SIZE
1053alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003
1054 ldr x30, [x30]
1055#else
1056 ldr x30, =vectors
1057#endif
1058 prfm plil1strm, [x30,
1059 msr vbar_el1, x30
1060 add x30, x30,
1061 isb
1062 ret
1063 .endm
1064
1065 .macro tramp_exit, regsize = 64
1066 adr x30, tramp_vectors
1067 msr vbar_el1, x30
1068 tramp_unmap_kernel x30
1069 .if \regsize == 64
1070 mrs x30, far_el1
1071 .endif
1072 eret
1073 .endm
1074
	.align	11
ENTRY(tramp_vectors)
	/* EL1 entries are never taken via the trampoline page. */
	.space	0x400

	tramp_ventry
	tramp_ventry
	tramp_ventry
	tramp_ventry

	tramp_ventry	32
	tramp_ventry	32
	tramp_ventry	32
	tramp_ventry	32
END(tramp_vectors)

/* Exit stubs branched to by kernel_exit via tramp_alias. */
ENTRY(tramp_exit_native)
	tramp_exit
END(tramp_exit_native)

ENTRY(tramp_exit_compat)
	tramp_exit	32
END(tramp_exit_compat)
1097
1098 .ltorg
1099 .popsection
#ifdef CONFIG_RANDOMIZE_BASE
	/* Per-boot-randomized address of the real vectors, read by tramp_ventry. */
	.pushsection ".rodata", "a"
	.align PAGE_SHIFT
	.globl	__entry_tramp_data_start
__entry_tramp_data_start:
	.quad	vectors
	.popsection				// .rodata
#endif /* CONFIG_RANDOMIZE_BASE */
1108#endif
1109
1110
1111
1112
/* Special system call wrapper: passes struct pt_regs to sys_rt_sigreturn. */
ENTRY(sys_rt_sigreturn_wrapper)
	mov	x0, sp
	b	sys_rt_sigreturn
ENDPROC(sys_rt_sigreturn_wrapper)
1117
1118
1119
1120
1121
1122
1123
1124
1125
/*
 * Register switch for AArch64. The callee-saved registers need to be saved
 * and restored. On entry:
 *   x0 = previous task_struct (must be preserved across the call)
 *   x1 = next task_struct
 * Previous and next are guaranteed not to be the same.
 */
ENTRY(cpu_switch_to)
	mov	x10, #THREAD_CPU_CONTEXT
	add	x8, x0, x10
	mov	x9, sp
	stp	x19, x20, [x8], #16		// store callee-saved registers
	stp	x21, x22, [x8], #16
	stp	x23, x24, [x8], #16
	stp	x25, x26, [x8], #16
	stp	x27, x28, [x8], #16
	stp	x29, x9, [x8], #16
	str	lr, [x8]
	add	x8, x1, x10
	ldp	x19, x20, [x8], #16		// restore callee-saved registers
	ldp	x21, x22, [x8], #16
	ldp	x23, x24, [x8], #16
	ldp	x25, x26, [x8], #16
	ldp	x27, x28, [x8], #16
	ldp	x29, x9, [x8], #16
	ldr	lr, [x8]
	mov	sp, x9
	msr	sp_el0, x1			// sp_el0 tracks 'current'
	ret
ENDPROC(cpu_switch_to)
NOKPROBE(cpu_switch_to)
1150
1151
1152
1153
/*
 * This is how we return from a fork. x19 holds the kernel-thread function
 * (NULL for user tasks), x20 its argument — both set up by copy_thread().
 */
ENTRY(ret_from_fork)
	bl	schedule_tail
	cbz	x19, 1f				// not a kernel thread
	mov	x0, x20
	blr	x19
1:	get_thread_info tsk
	b	ret_to_user
ENDPROC(ret_from_fork)
NOKPROBE(ret_from_fork)
1163
1164#ifdef CONFIG_ARM_SDE_INTERFACE
1165
1166#include <asm/sdei.h>
1167#include <uapi/linux/arm_sdei.h>
1168
.macro sdei_handler_exit exit_mode
	/* On success, this call never returns... */
	cmp	\exit_mode, #SDEI_EXIT_SMC
	b.ne	99f
	smc	#0
	b	.
99:	hvc	#0
	b	.
.endm
1178
1179#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
1180
1181
1182
1183
1184
1185
1186
1187
.ltorg
.pushsection ".entry.tramp.text", "ax"
ENTRY(__sdei_asm_entry_trampoline)
	mrs	x4, ttbr1_el1
	tbz	x4, #USER_ASID_BIT, 1f		// kernel already mapped?

	tramp_map_kernel tmp=x4
	isb
	mov	x4, xzr

	/*
	 * Use reg->interrupted_regs.addr_limit to remember whether to unmap
	 * the kernel on exit.
	 */
1:	str	x4, [x1, #(SDEI_EVENT_INTREGS + S_ORIG_ADDR_LIMIT)]

#ifdef CONFIG_RANDOMIZE_BASE
	adr	x4, tramp_vectors + PAGE_SIZE
	add	x4, x4, #:lo12:__sdei_asm_trampoline_next_handler
	ldr	x4, [x4]
#else
	ldr	x4, =__sdei_asm_handler
#endif
	br	x4
ENDPROC(__sdei_asm_entry_trampoline)
NOKPROBE(__sdei_asm_entry_trampoline)
1214
1215
1216
1217
1218
1219
1220
1221
/*
 * Make the exit call and restore the original ttbr1_el1 if the kernel was
 * mapped on entry (flag stashed in interrupted_regs.addr_limit above).
 *
 * x0 & x1: setup for the exit API call
 * x2: exit_mode
 * x4: struct sdei_registered_event argument from registration time.
 */
ENTRY(__sdei_asm_exit_trampoline)
	ldr	x4, [x4, #(SDEI_EVENT_INTREGS + S_ORIG_ADDR_LIMIT)]
	cbnz	x4, 1f

	tramp_unmap_kernel	tmp=x4

1:	sdei_handler_exit exit_mode=x2
ENDPROC(__sdei_asm_exit_trampoline)
NOKPROBE(__sdei_asm_exit_trampoline)
	.ltorg
.popsection		// .entry.tramp.text
#ifdef CONFIG_RANDOMIZE_BASE
/* Randomized-base address of the full handler, read by the entry trampoline. */
.pushsection ".rodata", "a"
__sdei_asm_trampoline_next_handler:
	.quad	__sdei_asm_handler
.popsection		// .rodata
#endif /* CONFIG_RANDOMIZE_BASE */
1239#endif
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
/*
 * Software Delegated Exception entry point.
 *
 * x0: argument from registration time
 * x1: struct sdei_registered_event (x2, x3 of the interrupted context were
 *     already saved into it by firmware-side glue; we save the rest here)
 * x2: interrupted PC
 * x3: interrupted PSTATE
 */
ENTRY(__sdei_asm_handler)
	stp     x2, x3, [x1, #SDEI_EVENT_INTREGS + S_PC]
	stp     x4, x5, [x1, #SDEI_EVENT_INTREGS + 16 * 2]
	stp     x6, x7, [x1, #SDEI_EVENT_INTREGS + 16 * 3]
	stp     x8, x9, [x1, #SDEI_EVENT_INTREGS + 16 * 4]
	stp     x10, x11, [x1, #SDEI_EVENT_INTREGS + 16 * 5]
	stp     x12, x13, [x1, #SDEI_EVENT_INTREGS + 16 * 6]
	stp     x14, x15, [x1, #SDEI_EVENT_INTREGS + 16 * 7]
	stp     x16, x17, [x1, #SDEI_EVENT_INTREGS + 16 * 8]
	stp     x18, x19, [x1, #SDEI_EVENT_INTREGS + 16 * 9]
	stp     x20, x21, [x1, #SDEI_EVENT_INTREGS + 16 * 10]
	stp     x22, x23, [x1, #SDEI_EVENT_INTREGS + 16 * 11]
	stp     x24, x25, [x1, #SDEI_EVENT_INTREGS + 16 * 12]
	stp     x26, x27, [x1, #SDEI_EVENT_INTREGS + 16 * 13]
	stp     x28, x29, [x1, #SDEI_EVENT_INTREGS + 16 * 14]
	mov	x4, sp
	stp     lr, x4, [x1, #SDEI_EVENT_INTREGS + S_LR]

	mov	x19, x1

#ifdef CONFIG_VMAP_STACK
	/*
	 * entry.S may have been using sp as a scratch register, find whether
	 * this is a normal or critical event and switch to the appropriate
	 * stack for this CPU.
	 */
	ldrb	w4, [x19, #SDEI_EVENT_PRIORITY]
	cbnz	w4, 1f
	ldr_this_cpu dst=x5, sym=sdei_stack_normal_ptr, tmp=x6
	b	2f
1:	ldr_this_cpu dst=x5, sym=sdei_stack_critical_ptr, tmp=x6
2:	mov	x6, #SDEI_STACK_SIZE
	add	x5, x5, x6
	mov	sp, x5
#endif

	/*
	 * We may have interrupted userspace, or a guest, or exit-from or
	 * return-to either of these. We can't trust sp_el0, restore it.
	 */
	mrs	x28, sp_el0
	ldr_this_cpu	dst=x0, sym=__entry_task, tmp=x1
	msr	sp_el0, x0

	/* If we interrupted the kernel point to the previous stack/frame. */
	and     x0, x3, #0xc			// interrupted EL, from SPSR
	mrs     x1, CurrentEL
	cmp     x0, x1
	csel	x29, x29, xzr, eq		// fp, or zero
	csel	x4, x2, xzr, eq			// elr, or zero

	stp	x29, x4, [sp, #-16]!
	mov	x29, sp

	add	x0, x19, #SDEI_EVENT_INTREGS
	mov	x1, x19
	bl	__sdei_handler

	msr	sp_el0, x28
	/* restore regs >x17 that we clobbered */
	mov	x4, x19         // keep x4 for __sdei_asm_exit_trampoline
	ldp	x28, x29, [x4, #SDEI_EVENT_INTREGS + 16 * 14]
	ldp	x18, x19, [x4, #SDEI_EVENT_INTREGS + 16 * 9]
	ldp	lr, x1, [x4, #SDEI_EVENT_INTREGS + S_LR]
	mov	sp, x1

	mov	x1, x0			// address to complete_and_resume
	/* x0 = (x0 <= 1) ? EVENT_COMPLETE:EVENT_COMPLETE_AND_RESUME */
	cmp	x0, #1
	mov_q	x2, SDEI_1_0_FN_SDEI_EVENT_COMPLETE
	mov_q	x3, SDEI_1_0_FN_SDEI_EVENT_COMPLETE_AND_RESUME
	csel	x0, x2, x3, ls

	ldr_l	x2, sdei_exit_mode

alternative_if_not ARM64_UNMAP_KERNEL_AT_EL0
	sdei_handler_exit exit_mode=x2
alternative_else_nop_endif

#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
	tramp_alias	dst=x5, sym=__sdei_asm_exit_trampoline
	br	x5
#endif
ENDPROC(__sdei_asm_handler)
NOKPROBE(__sdei_asm_handler)
1339#endif
1340