1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21#include <linux/arm-smccc.h>
22#include <linux/init.h>
23#include <linux/linkage.h>
24
25#include <asm/alternative.h>
26#include <asm/assembler.h>
27#include <asm/asm-offsets.h>
28#include <asm/cpufeature.h>
29#include <asm/errno.h>
30#include <asm/esr.h>
31#include <asm/irq.h>
32#include <asm/memory.h>
33#include <asm/mmu.h>
34#include <asm/processor.h>
35#include <asm/ptrace.h>
36#include <asm/thread_info.h>
37#include <asm/asm-uaccess.h>
38#include <asm/unistd.h>
39
40
41
42
43
/*
 * Tell context tracking we have left userspace (EL0 -> EL1 transition).
 * With \syscall == 1, x0-x7 (the syscall argument registers) are reloaded
 * from the saved pt_regs because the C call may have clobbered them.
 * NOTE(review): the ldp offsets after "[sp," are truncated in this copy of
 * the file — confirm against the S_X0-based pt_regs offsets upstream.
 */
44 .macro ct_user_exit, syscall = 0
45#ifdef CONFIG_CONTEXT_TRACKING
46 bl context_tracking_user_exit
47 .if \syscall == 1
48
49
50
51
// Restore syscall arguments from the exception stack frame.
52 ldp x0, x1, [sp]
53 ldp x2, x3, [sp,
54 ldp x4, x5, [sp,
55 ldp x6, x7, [sp,
56 .endif
57#endif
58 .endm
59
/*
 * Tell context tracking we are about to return to userspace.
 * Compiled out when CONFIG_CONTEXT_TRACKING is disabled.
 */
60 .macro ct_user_enter
61#ifdef CONFIG_CONTEXT_TRACKING
62 bl context_tracking_user_enter
63#endif
64 .endm
65
66
67
68
69
/* Reason codes passed (via inv_entry) to bad_mode() for unhandled vectors. */
70#define BAD_SYNC 0
71#define BAD_IRQ 1
72#define BAD_FIQ 2
73#define BAD_ERROR 3
74
/*
 * One 128-byte exception-vector slot (.align 7) that branches to the real
 * handler el<el>_<label>.  Two extra jobs are done here:
 *  - KPTI (CONFIG_UNMAP_KERNEL_AT_EL0): on EL0 entry, recover x30 stashed in
 *    tpidrro_el0 by the trampoline vector, then scrub the register.
 *  - CONFIG_VMAP_STACK: detect kernel-stack overflow using only sp and x0
 *    (no general registers are free this early), diverting to __bad_stack
 *    via the per-CPU overflow stack when the check fails.
 * NOTE(review): several immediates (the sub sp size, tbnz bit number, tst
 * mask) are truncated in this copy — upstream these are S_FRAME_SIZE and
 * THREAD_SHIFT-derived values; verify before reuse.
 */
75 .macro kernel_ventry, el, label, regsize = 64
76 .align 7
77#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
78alternative_if ARM64_UNMAP_KERNEL_AT_EL0
79 .if \el == 0
80 .if \regsize == 64
// x30 was saved in tpidrro_el0 by the KPTI trampoline; restore and clear it.
81 mrs x30, tpidrro_el0
82 msr tpidrro_el0, xzr
83 .else
// AArch32 task: x30 is not live, just scrub it.
84 mov x30, xzr
85 .endif
86 .endif
87alternative_else_nop_endif
88#endif
89
// Reserve the pt_regs frame on the stack (size operand truncated here).
90 sub sp, sp,
91#ifdef CONFIG_VMAP_STACK
92
93
94
95
// Stack-overflow probe: test sp against the stack limit using only x0,
// restoring x0 and sp to their entry values on the fast (no-overflow) path.
96 add sp, sp, x0
97 sub x0, sp, x0
98 tbnz x0,
99 sub x0, sp, x0
100 sub sp, sp, x0
101 b el\()\el\()_\label
102
1030:
104
105
106
107
108
109
110
// Overflow path: stash scratch in EL0 thread registers (safe: we came from
// the kernel or will die anyway), compute the faulting sp for reporting.
111 msr tpidr_el0, x0
112
113
114 sub x0, sp, x0
115 msr tpidrro_el0, x0
116
117
// Switch to this CPU's dedicated overflow stack.
118 adr_this_cpu sp, overflow_stack + OVERFLOW_STACK_SIZE, x0
119
120
121
122
123
// Re-check: if sp was not merely past the guard page, the stack is corrupt.
124 mrs x0, tpidr_el0
125 sub x0, sp, x0
126 tst x0,
127 b.ne __bad_stack
128
129
130 sub sp, sp, x0
131 mrs x0, tpidrro_el0
132#endif
133 b el\()\el\()_\label
134 .endm
135
/*
 * Load \dst with the fixmap (TRAMP_VALIAS) alias of trampoline symbol \sym,
 * so it can be reached after the kernel mappings are hidden from EL0.
 * NOTE(review): the offset operand of the add is truncated in this copy.
 */
136 .macro tramp_alias, dst, sym
137 mov_q \dst, TRAMP_VALIAS
138 add \dst, \dst,
139 .endm
140
141
142
/*
 * Speculative Store Bypass Disable (Spectre-v4) mitigation toggle.
 * \state: 1 = enable mitigation on kernel entry, 0 = disable on exit.
 * Branches to \targ when no firmware call is needed (feature patched out,
 * not required on this CPU, or forced per-task).  The SMCCC conduit
 * instruction (smc/hvc) is patched in at the nop by
 * arm64_update_smccc_conduit.  \tmp1/\tmp2 are scratch.
 * NOTE(review): the ldr offset, tbnz bit and mov immediates (the
 * ARM_SMCCC_ARCH_WORKAROUND_2 function id) are truncated in this copy.
 */
143 .macro apply_ssbd, state, targ, tmp1, tmp2
144#ifdef CONFIG_ARM64_SSBD
145alternative_cb arm64_enable_wa2_handling
146 b \targ
147alternative_cb_end
148 ldr_this_cpu \tmp2, arm64_ssbd_callback_required, \tmp1
149 cbz \tmp2, \targ
150 ldr \tmp2, [tsk,
151 tbnz \tmp2,
152 mov w0,
153 mov w1,
154alternative_cb arm64_update_smccc_conduit
155 nop
156alternative_cb_end
157#endif
158 .endm
159
/*
 * Common exception-entry sequence: build a struct pt_regs on the stack.
 * \el is the EL we came from (0 = user, 1 = kernel); \regsize == 32 means
 * a 32-bit (compat) task.  Saves x0-x29, the entry sp, elr_el1 and
 * spsr_el1, sets up the 'tsk' (x28) current-task register and the frame
 * pointer chain.  Also applies entry-time mitigations (SSBD) and SW PAN.
 * NOTE(review): nearly every store/load offset into the frame is truncated
 * in this copy (upstream: #16 * n / S_* offsets) — do not trust spacing.
 */
160 .macro kernel_entry, el, regsize = 64
161 .if \regsize == 32
// Zero the upper 32 bits of x0 for a 32-bit task (w-register write).
162 mov w0, w0
163 .endif
// Spill all general-purpose registers into the pt_regs frame.
164 stp x0, x1, [sp,
165 stp x2, x3, [sp,
166 stp x4, x5, [sp,
167 stp x6, x7, [sp,
168 stp x8, x9, [sp,
169 stp x10, x11, [sp,
170 stp x12, x13, [sp,
171 stp x14, x15, [sp,
172 stp x16, x17, [sp,
173 stp x18, x19, [sp,
174 stp x20, x21, [sp,
175 stp x22, x23, [sp,
176 stp x24, x25, [sp,
177 stp x26, x27, [sp,
178 stp x28, x29, [sp,
179
180 .if \el == 0
// Entry from userspace: capture the user sp, load 'tsk' from the per-CPU
// __entry_task, and disable single-step while in the kernel.
181 mrs x21, sp_el0
182 ldr_this_cpu tsk, __entry_task, x20
183 ldr x19, [tsk,
184 disable_step_tsk x19, x20
185
// Enable the SSBD mitigation for kernel execution.
186 apply_ssbd 1, 1f, x22, x23
187
188#ifdef CONFIG_ARM64_SSBD
// The SMCCC call above clobbered x0-x3; reload them from the saved frame.
189 ldp x0, x1, [sp,
190 ldp x2, x3, [sp,
191#endif
1921:
193
// Terminate the frame-pointer chain for entries from EL0.
194 mov x29, xzr
195 .else
// Entry from the kernel: reconstruct the pre-exception sp.
196 add x21, sp,
197 get_thread_info tsk
198
// Save and re-initialise the task's addr_limit for the kernel
// (presumably TSK_TI_ADDR_LIMIT — offsets truncated here; confirm).
199 ldr x20, [tsk,
200 str x20, [sp,
201 mov x20,
202 str x20, [tsk,
203
204 .endif
// Capture return state: exception link register and saved PSTATE.
205 mrs x22, elr_el1
206 mrs x23, spsr_el1
207 stp lr, x21, [sp,
208
209
210
211
212
213
// Record a stackframe so the unwinder can terminate (EL0) or chain (EL1).
214 .if \el == 0
215 stp xzr, xzr, [sp,
216 .else
217 stp x29, x22, [sp,
218 .endif
219 add x29, sp,
220
221#ifdef CONFIG_ARM64_SW_TTBR0_PAN
222
223
224
225
226
227
228
229
// Software PAN emulation: unless HW PAN is present, disable the user
// TTBR0 mapping while in the kernel, tracking its state in spsr (x23).
230alternative_if ARM64_HAS_PAN
231 b 1f
232alternative_else_nop_endif
233
234 .if \el != 0
235 mrs x21, ttbr0_el1
236 tst x21,
237 orr x23, x23,
238 b.eq 1f
239 and x23, x23,
240 .endif
241
242 __uaccess_ttbr0_disable x21
2431:
244#endif
245
// Save elr/spsr into the frame (pt_regs->pc / pt_regs->pstate).
246 stp x22, x23, [sp,
247
248
249 .if \el == 0
// Seed pt_regs->syscallno with an "invalid" marker (value truncated here).
250 mov w21,
251 str w21, [sp,
252 .endif
253
254
255
// In the kernel, sp_el0 doubles as the 'current' task pointer.
256 .if \el == 0
257 msr sp_el0, tsk
258 .endif
259
260
// Pseudo-NMI support: preserve the interrupt priority mask in the frame.
261alternative_if ARM64_HAS_IRQ_PRIO_MASKING
262 mrs_s x20, SYS_ICC_PMR_EL1
263 str x20, [sp,
264alternative_else_nop_endif
265
266
267
268
269
270
271
272
273
274
275 .endm
276
/*
 * Common exception-return sequence: unwind the pt_regs frame built by
 * kernel_entry and eret back to EL\el.  Restores addr_limit (EL1),
 * PMR (pseudo-NMI), SW PAN TTBR0 state, the user sp/single-step state
 * (EL0), applies errata workarounds (845719, 1418040), re-disables the
 * SSBD mitigation for userspace, and finally returns — either directly
 * or via the KPTI exit trampoline for EL0.
 * NOTE(review): frame offsets, immediates and tbz/tbnz bit numbers are
 * truncated throughout this copy; treat all "[sp," operands as S_* values
 * to be confirmed upstream.
 */
277 .macro kernel_exit, el
278 .if \el != 0
// Returning within the kernel: mask all DAIF bits for the restore.
279 disable_daif
280
281
// Restore the saved addr_limit into the task.
282 ldr x20, [sp,
283 str x20, [tsk,
284
285
286 .endif
287
288
// Pseudo-NMI: restore the saved priority mask; a dsb is needed only when
// ICC_CTLR_EL1 indicates non-self-synchronising PMR writes.
289alternative_if ARM64_HAS_IRQ_PRIO_MASKING
290 ldr x20, [sp,
291 msr_s SYS_ICC_PMR_EL1, x20
292 mrs_s x21, SYS_ICC_CTLR_EL1
293 tbz x21,
294 dsb sy
295.L__skip_pmr_sync\@:
296alternative_else_nop_endif
297
// x21 = saved elr, x22 = saved spsr.
298 ldp x21, x22, [sp,
299 .if \el == 0
300 ct_user_enter
301 .endif
302
303#ifdef CONFIG_ARM64_SW_TTBR0_PAN
304
305
306
307
// SW PAN: re-enable the user TTBR0 mapping before returning to EL0
// (or when the saved state says it was enabled at EL1).
308alternative_if ARM64_HAS_PAN
309 b 2f
310alternative_else_nop_endif
311
312 .if \el != 0
313 tbnz x22,
314 .endif
315
316 __uaccess_ttbr0_enable x0, x1
317
318 .if \el == 0
319
320
321
322
323
324
// The TTBR update may require CPU-specific fixups before hitting EL0.
325 bl post_ttbr_update_workaround
326 .endif
3271:
328 .if \el != 0
329 and x22, x22,
330 .endif
3312:
332#endif
333
334 .if \el == 0
// Restore the user stack pointer.
335 ldr x23, [sp,
336 msr sp_el0, x23
// Skip the 845719 workaround for 64-bit tasks (tst operand truncated).
337 tst x22,
338 b.eq 3f
339
340#ifdef CONFIG_ARM64_ERRATUM_845719
// Erratum 845719: a contextidr_el1 write is required before returning to
// a 32-bit task on affected cores.
341alternative_if ARM64_WORKAROUND_845719
342#ifdef CONFIG_PID_IN_CONTEXTIDR
343 mrs x29, contextidr_el1
344 msr contextidr_el1, x29
345#else
346 msr contextidr_el1, xzr
347#endif
348alternative_else_nop_endif
349#endif
3503:
351#ifdef CONFIG_ARM64_ERRATUM_1418040
// Erratum 1418040: hide the EL0 virtual counter from 32-bit tasks by
// flipping cntkctl_el1 when the return state width changed.
352alternative_if_not ARM64_WORKAROUND_1418040
353 b 4f
354alternative_else_nop_endif
355
356
357
358
359 mrs x1, cntkctl_el1
360 eon x0, x1, x22, lsr
361 tbz x0,
362 eor x1, x1,
363 msr cntkctl_el1, x1
3644:
365#endif
// Turn the SSBD mitigation back off for userspace.
366 apply_ssbd 0, 5f, x0, x1
3675:
368 .endif
369
// Reload return state and all GPRs from the frame.
370 msr elr_el1, x21
371 msr spsr_el1, x22
372 ldp x0, x1, [sp,
373 ldp x2, x3, [sp,
374 ldp x4, x5, [sp,
375 ldp x6, x7, [sp,
376 ldp x8, x9, [sp,
377 ldp x10, x11, [sp,
378 ldp x12, x13, [sp,
379 ldp x14, x15, [sp,
380 ldp x16, x17, [sp,
381 ldp x18, x19, [sp,
382 ldp x20, x21, [sp,
383 ldp x22, x23, [sp,
384 ldp x24, x25, [sp,
385 ldp x26, x27, [sp,
386 ldp x28, x29, [sp,
387 ldr lr, [sp,
388 add sp, sp,
389
390 .if \el == 0
// KPTI: patched to a plain eret when the kernel is not unmapped at EL0;
// otherwise exit through the trampoline, passing elr via far_el1.
391alternative_insn eret, nop, ARM64_UNMAP_KERNEL_AT_EL0
392#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
393 bne 5f
394 msr far_el1, x30
395 tramp_alias x30, tramp_exit_native
396 br x30
3975:
398 tramp_alias x30, tramp_exit_compat
399 br x30
400#endif
401 .else
402 eret
403 .endif
// Speculation barrier after eret.
404 sb
405 .endm
406
/*
 * Switch to this CPU's IRQ stack unless we are already on it (nested IRQ
 * processing or an IRQ taken on the overflow stack).  The entry sp is
 * preserved in x19 for irq_stack_exit.  Clobbers x25/x26.
 * NOTE(review): the mask immediates (and x25 / mov x26) are truncated in
 * this copy — upstream they derive from THREAD/IRQ_STACK sizes.
 */
407 .macro irq_stack_entry
408 mov x19, sp
409
410
411
412
413
414
// Already on the task stack?  Compare against tsk->stack.
415 ldr x25, [tsk, TSK_STACK]
416 eor x25, x25, x19
417 and x25, x25,
418 cbnz x25, 9998f
419
// Switch to the top of the per-CPU IRQ stack.
420 ldr_this_cpu x25, irq_stack_ptr, x26
421 mov x26,
422 add x26, x25, x26
423
424
425 mov sp, x26
4269998:
427 .endm
428
429
430
431
432
/*
 * Undo irq_stack_entry: restore the stack pointer saved in x19.
 */
433 .macro irq_stack_exit
434 mov sp, x19
435 .endm
436
437
// 'tsk' aliases x28 and holds the current task_struct pointer in kernel mode.
438tsk .req x28
439
440
441
442
/*
 * Dispatch an interrupt: call the root handler installed in
 * handle_arch_irq with x0 = pt_regs, running on the IRQ stack.
 */
443 .macro irq_handler
444 ldr_l x1, handle_arch_irq
445 mov x0, sp
446 irq_stack_entry
447 blr x1
448 irq_stack_exit
449 .endm
450
451#ifdef CONFIG_ARM64_PSEUDO_NMI
452
453
454
455
/*
 * Pseudo-NMI helper: set \res to zero iff IRQs were unmasked in \pmr.
 * Patched to "always unmasked" (xzr) when priority masking is absent.
 * NOTE(review): the sub immediate (GIC_PRIO_IRQON) is truncated here.
 */
456 .macro test_irqs_unmasked res:req, pmr:req
457alternative_if ARM64_HAS_IRQ_PRIO_MASKING
458 sub \res, \pmr,
459alternative_else
460 mov \res, xzr
461alternative_endif
462 .endm
463#endif
464
/*
 * Pseudo-NMI: raise PMR to the kernel-entry priority so normal IRQs stay
 * masked while handling this exception.  \tmp is scratch; the mov
 * immediate (GIC_PRIO_PSR_I_SET-style value) is truncated in this copy.
 */
465 .macro gic_prio_kentry_setup, tmp:req
466#ifdef CONFIG_ARM64_PSEUDO_NMI
467 alternative_if ARM64_HAS_IRQ_PRIO_MASKING
468 mov \tmp,
469 msr_s SYS_ICC_PMR_EL1, \tmp
470 alternative_else_nop_endif
471#endif
472 .endm
473
/*
 * Pseudo-NMI: \pmr holds the PMR value saved at entry; mask IRQs for the
 * duration of the handler by OR-ing in the PSR.I-equivalent priority bit
 * (orr immediate truncated in this copy) and writing it back.
 */
474 .macro gic_prio_irq_setup, pmr:req, tmp:req
475#ifdef CONFIG_ARM64_PSEUDO_NMI
476 alternative_if ARM64_HAS_IRQ_PRIO_MASKING
477 orr \tmp, \pmr,
478 msr_s SYS_ICC_PMR_EL1, \tmp
479 alternative_else_nop_endif
480#endif
481 .endm
482
483 .text
484
485
486
487
488 .pushsection ".entry.text", "ax"
489
/*
 * The EL1 exception vector table (2KB aligned, 16 slots of 128 bytes):
 * four groups of {sync, irq, fiq, serror} for, in order: current EL with
 * SP_EL0 (all invalid), current EL with SP_ELx, lower EL AArch64, and
 * lower EL AArch32 (compat, or invalid when !CONFIG_COMPAT).
 */
490 .align 11
491ENTRY(vectors)
492 kernel_ventry 1, sync_invalid
493 kernel_ventry 1, irq_invalid
494 kernel_ventry 1, fiq_invalid
495 kernel_ventry 1, error_invalid
496
497 kernel_ventry 1, sync
498 kernel_ventry 1, irq
499 kernel_ventry 1, fiq_invalid
500 kernel_ventry 1, error
501
502 kernel_ventry 0, sync
503 kernel_ventry 0, irq
504 kernel_ventry 0, fiq_invalid
505 kernel_ventry 0, error
506
507#ifdef CONFIG_COMPAT
508 kernel_ventry 0, sync_compat, 32
509 kernel_ventry 0, irq_compat, 32
510 kernel_ventry 0, fiq_invalid_compat, 32
511 kernel_ventry 0, error_compat, 32
512#else
513 kernel_ventry 0, sync_invalid, 32
514 kernel_ventry 0, irq_invalid, 32
515 kernel_ventry 0, fiq_invalid, 32
516 kernel_ventry 0, error_invalid, 32
#endif is on the next source line; table ends at END(vectors).
517#endif
518END(vectors)
519
520#ifdef CONFIG_VMAP_STACK
521
522
523
524
525
/*
 * Kernel stack overflow path (reached from kernel_ventry while running on
 * the per-CPU overflow stack).  Recovers the scratch register from
 * tpidrro_el0, builds a pt_regs frame, stores the faulting original sp
 * into the frame, and calls handle_bad_stack(regs) — which must not return.
 * NOTE(review): sub/add/str operands are truncated in this copy.
 */
526__bad_stack:
527
// Retrieve the original x0 stashed by kernel_ventry.
528 mrs x0, tpidrro_el0
529
530
531
532
533
534 sub sp, sp,
535 kernel_entry 1
// Reconstruct the pre-overflow sp (delta was stashed in tpidr_el0) and
// record it in pt_regs->sp.
536 mrs x0, tpidr_el0
537 add x0, x0,
538 str x0, [sp,
539
540
541 mov x0, sp
542
543
544 bl handle_bad_stack
545 ASM_BUG()
546#endif
547
548
549
550
/*
 * Handler body for an unexpected vector: save state, then call
 * bad_mode(regs, reason, esr).  \reason is one of the BAD_* codes
 * (the mov x1 operand is truncated in this copy).  Never returns.
 */
551 .macro inv_entry, el, reason, regsize = 64
552 kernel_entry \el, \regsize
553 mov x0, sp
554 mov x1,
555 mrs x2, esr_el1
556 bl bad_mode
557 ASM_BUG()
558 .endm
559
/*
 * Stub entry points for vectors that should never fire; each simply
 * reports via inv_entry with the matching BAD_* reason code.
 */
560el0_sync_invalid:
561 inv_entry 0, BAD_SYNC
562ENDPROC(el0_sync_invalid)
563
564el0_irq_invalid:
565 inv_entry 0, BAD_IRQ
566ENDPROC(el0_irq_invalid)
567
568el0_fiq_invalid:
569 inv_entry 0, BAD_FIQ
570ENDPROC(el0_fiq_invalid)
571
572el0_error_invalid:
573 inv_entry 0, BAD_ERROR
574ENDPROC(el0_error_invalid)
575
576#ifdef CONFIG_COMPAT
577el0_fiq_invalid_compat:
578 inv_entry 0, BAD_FIQ, 32
579ENDPROC(el0_fiq_invalid_compat)
580#endif
581
582el1_sync_invalid:
583 inv_entry 1, BAD_SYNC
584ENDPROC(el1_sync_invalid)
585
586el1_irq_invalid:
587 inv_entry 1, BAD_IRQ
588ENDPROC(el1_irq_invalid)
589
590el1_fiq_invalid:
591 inv_entry 1, BAD_FIQ
592ENDPROC(el1_fiq_invalid)
593
594el1_error_invalid:
595 inv_entry 1, BAD_ERROR
596ENDPROC(el1_error_invalid)
597
598
599
600
/*
 * Synchronous exception taken from EL1.  Decodes the exception class
 * from ESR_EL1 (x24 = EC field) and dispatches to the per-class handler.
 * NOTE(review): all cmp/lsr immediates (ESR_ELx_EC_* values) are
 * truncated in this copy — the dispatch order upstream is: data abort,
 * instruction abort, undef/trap classes, SP/PC alignment, then debug.
 */
601 .align 6
602el1_sync:
603 kernel_entry 1
// x1 = full ESR, x24 = exception class (EC) field.
604 mrs x1, esr_el1
605 lsr x24, x1,
606 cmp x24,
607 b.eq el1_da
608 cmp x24,
609 b.eq el1_ia
610 cmp x24,
611 b.eq el1_undef
612 cmp x24,
613 b.eq el1_sp_pc
614 cmp x24,
615 b.eq el1_sp_pc
616 cmp x24,
617 b.eq el1_undef
618 cmp x24,
619 b.ge el1_dbg
620 b el1_inv
621
// Instruction aborts fall through into the data-abort handler.
622el1_ia:
623
624
625
626el1_da:
627
628
629
// do_mem_abort(addr, esr, regs); x3/x0 = (untagged) fault address.
630 mrs x3, far_el1
631 inherit_daif pstate=x23, tmp=x2
632 clear_address_tag x0, x3
633 mov x2, sp
634 bl do_mem_abort
635
636 kernel_exit 1
637el1_sp_pc:
638
639
640
// Stack or PC alignment fault in the kernel: fatal.
641 mrs x0, far_el1
642 inherit_daif pstate=x23, tmp=x2
643 mov x2, sp
644 bl do_sp_pc_abort
645 ASM_BUG()
646el1_undef:
647
648
649
// Undefined instruction in the kernel.
650 inherit_daif pstate=x23, tmp=x2
651 mov x0, sp
652 bl do_undefinstr
653 kernel_exit 1
654el1_dbg:
655
656
657
// Debug exception (BRK / hw breakpoint / watchpoint / single step).
658 cmp x24,
659 cinc x24, x24, eq
660 tbz x24,
661 gic_prio_kentry_setup tmp=x3
662 mrs x0, far_el1
663 mov x2, sp
664 bl do_debug_exception
665 kernel_exit 1
666el1_inv:
// Unrecognised EC: bad_mode(regs, BAD_SYNC, esr) — mov x1 operand truncated.
667
668 inherit_daif pstate=x23, tmp=x2
669 mov x0, sp
670 mov x2, x1
671 mov x1,
672 bl bad_mode
673 ASM_BUG()
674ENDPROC(el1_sync)
675
/*
 * IRQ taken from EL1.  Handles pseudo-NMI accounting around the handler,
 * irqflags tracing, and (CONFIG_PREEMPT) preemption of kernel code when
 * the preempt count is zero and IRQs were not priority-masked at entry.
 */
676 .align 6
677el1_irq:
678 kernel_entry 1
679 gic_prio_irq_setup pmr=x20, tmp=x1
680 enable_da_f
681
682#ifdef CONFIG_ARM64_PSEUDO_NMI
// If IRQs were masked via PMR at entry, this interrupt is an NMI.
683 test_irqs_unmasked res=x0, pmr=x20
684 cbz x0, 1f
685 bl asm_nmi_enter
6861:
687#endif
688
689#ifdef CONFIG_TRACE_IRQFLAGS
690 bl trace_hardirqs_off
691#endif
692
693 irq_handler
694
695#ifdef CONFIG_PREEMPT
// x24 = preempt count (ldr offset truncated; upstream TSK_TI_PREEMPT).
696 ldr x24, [tsk,
697alternative_if ARM64_HAS_IRQ_PRIO_MASKING
698
699
700
701
// Fold the DAIF I-bit state in so we never preempt with IRQs masked.
702 mrs x0, daif
703 orr x24, x24, x0
704alternative_else_nop_endif
705 cbnz x24, 1f
706 bl preempt_schedule_irq
7071:
708#endif
709
710#ifdef CONFIG_ARM64_PSEUDO_NMI
711
712
713
714
715
716 test_irqs_unmasked res=x0, pmr=x20
717 cbz x0, 1f
718 bl asm_nmi_exit
7191:
720#endif
721
722#ifdef CONFIG_TRACE_IRQFLAGS
#ifdef-guarded skip: don't trace hardirqs-on when returning to a masked context.
723#ifdef CONFIG_ARM64_PSEUDO_NMI
724 test_irqs_unmasked res=x0, pmr=x20
725 cbnz x0, 1f
726#endif
727 bl trace_hardirqs_on
7281:
729#endif
730
731 kernel_exit 1
732ENDPROC(el1_irq)
733
734
735
736
/*
 * Synchronous exception from 64-bit userspace.  x25 = full ESR_EL1,
 * x24 = exception class; dispatch to the per-class el0_* handlers.
 * NOTE(review): the lsr/cmp immediates (ESR_ELx_EC_*) are truncated in
 * this copy; upstream order is svc, da, ia, fp/simd access, sve access,
 * fp exception, sys insn, sp/pc alignment, undef, then debug.
 */
737 .align 6
738el0_sync:
739 kernel_entry 0
740 mrs x25, esr_el1
741 lsr x24, x25,
742 cmp x24,
743 b.eq el0_svc
744 cmp x24,
745 b.eq el0_da
746 cmp x24,
747 b.eq el0_ia
748 cmp x24,
749 b.eq el0_fpsimd_acc
750 cmp x24,
751 b.eq el0_sve_acc
752 cmp x24,
753 b.eq el0_fpsimd_exc
754 cmp x24,
755 b.eq el0_sys
756 cmp x24,
757 b.eq el0_sp_pc
758 cmp x24,
759 b.eq el0_sp_pc
760 cmp x24,
761 b.eq el0_undef
762 cmp x24,
763 b.ge el0_dbg
764 b el0_inv
765
766#ifdef CONFIG_COMPAT
/*
 * Synchronous exception from 32-bit (AArch32) userspace; same dispatch
 * pattern as el0_sync but with the compat exception classes (several
 * AArch32 trap classes all map onto el0_undef).  Immediates truncated.
 */
767 .align 6
768el0_sync_compat:
769 kernel_entry 0, 32
770 mrs x25, esr_el1
771 lsr x24, x25,
772 cmp x24,
773 b.eq el0_svc_compat
774 cmp x24,
775 b.eq el0_da
776 cmp x24,
777 b.eq el0_ia
778 cmp x24,
779 b.eq el0_fpsimd_acc
780 cmp x24,
781 b.eq el0_fpsimd_exc
782 cmp x24,
783 b.eq el0_sp_pc
784 cmp x24,
785 b.eq el0_undef
786 cmp x24,
787 b.eq el0_undef
788 cmp x24,
789 b.eq el0_undef
790 cmp x24,
791 b.eq el0_undef
792 cmp x24,
793 b.eq el0_undef
794 cmp x24,
795 b.eq el0_undef
796 cmp x24,
797 b.ge el0_dbg
798 b el0_inv
// 32-bit syscall: handled entirely in C via el0_svc_compat_handler(regs).
799el0_svc_compat:
800 gic_prio_kentry_setup tmp=x1
801 mov x0, sp
802 bl el0_svc_compat_handler
803 b ret_to_user
804
805 .align 6
806el0_irq_compat:
807 kernel_entry 0, 32
808 b el0_irq_naked
809
810el0_error_compat:
811 kernel_entry 0, 32
812 b el0_error_naked
813#endif
814
/*
 * Per-class handlers for synchronous EL0 exceptions.  Each re-enables
 * the appropriate DAIF bits, exits context tracking, calls the C handler
 * with (address, esr, regs) as needed, and returns via ret_to_user.
 * x25 = ESR saved by the dispatcher; x26 = FAR where relevant.
 */
815el0_da:
816
817
818
// Data abort from EL0: do_mem_abort(untagged FAR, esr, regs).
819 mrs x26, far_el1
820 enable_daif
821 ct_user_exit
822 clear_address_tag x0, x26
823 mov x1, x25
824 mov x2, sp
825 bl do_mem_abort
826 b ret_to_user
827el0_ia:
828
829
830
// Instruction abort: routed through the branch-predictor hardening entry.
831 mrs x26, far_el1
832 gic_prio_kentry_setup tmp=x0
833 enable_da_f
834#ifdef CONFIG_TRACE_IRQFLAGS
835 bl trace_hardirqs_off
836#endif
837 ct_user_exit
838 mov x0, x26
839 mov x1, x25
840 mov x2, sp
841 bl do_el0_ia_bp_hardening
842 b ret_to_user
843el0_fpsimd_acc:
844
845
846
// FP/SIMD access trap (lazy FP state restore).
847 enable_daif
848 ct_user_exit
849 mov x0, x25
850 mov x1, sp
851 bl do_fpsimd_acc
852 b ret_to_user
853el0_sve_acc:
854
855
856
// SVE access trap.
857 enable_daif
858 ct_user_exit
859 mov x0, x25
860 mov x1, sp
861 bl do_sve_acc
862 b ret_to_user
863el0_fpsimd_exc:
864
865
866
// FP/SIMD exception.
867 enable_daif
868 ct_user_exit
869 mov x0, x25
870 mov x1, sp
871 bl do_fpsimd_exc
872 b ret_to_user
873el0_sp_pc:
874
875
876
// SP or PC alignment fault from EL0.
877 mrs x26, far_el1
878 gic_prio_kentry_setup tmp=x0
879 enable_da_f
880#ifdef CONFIG_TRACE_IRQFLAGS
881 bl trace_hardirqs_off
882#endif
883 ct_user_exit
884 mov x0, x26
885 mov x1, x25
886 mov x2, sp
887 bl do_sp_pc_abort
888 b ret_to_user
889el0_undef:
890
891
892
// Undefined instruction from EL0.
893 enable_daif
894 ct_user_exit
895 mov x0, sp
896 bl do_undefinstr
897 b ret_to_user
898el0_sys:
899
900
901
// Trapped system-register access from EL0.
902 enable_daif
903 ct_user_exit
904 mov x0, x25
905 mov x1, sp
906 bl do_sysinstr
907 b ret_to_user
908el0_dbg:
909
910
911
// Debug exception from EL0 (tbnz operand truncated in this copy).
912 tbnz x24,
913 gic_prio_kentry_setup tmp=x3
914 mrs x0, far_el1
915 mov x1, x25
916 mov x2, sp
917 bl do_debug_exception
918 enable_da_f
919 ct_user_exit
920 b ret_to_user
921el0_inv:
// Unrecognised EC from EL0: bad_el0_sync(regs, reason, esr).
922 enable_daif
923 ct_user_exit
924 mov x0, sp
925 mov x1,
926 mov x2, x25
927 bl bad_el0_sync
928 b ret_to_user
929ENDPROC(el0_sync)
930
/*
 * IRQ taken from userspace.  el0_irq_naked is entered directly by the
 * compat path (which has already done kernel_entry 0, 32).  Includes the
 * branch-predictor hardening call gated on the saved PSTATE (tbz operand
 * truncated in this copy).
 */
931 .align 6
932el0_irq:
933 kernel_entry 0
934el0_irq_naked:
935 gic_prio_irq_setup pmr=x20, tmp=x0
936 enable_da_f
937
938#ifdef CONFIG_TRACE_IRQFLAGS
939 bl trace_hardirqs_off
940#endif
941
942 ct_user_exit
943#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
944 tbz x22,
945 bl do_el0_irq_bp_hardening
9461:
947#endif
948 irq_handler
949
950#ifdef CONFIG_TRACE_IRQFLAGS
951 bl trace_hardirqs_on
952#endif
953 b ret_to_user
954ENDPROC(el0_irq)
955
/*
 * SError taken from EL1: do_serror(regs, esr), then normal kernel exit.
 */
956el1_error:
957 kernel_entry 1
958 mrs x1, esr_el1
959 gic_prio_kentry_setup tmp=x2
960 enable_dbg
961 mov x0, sp
962 bl do_serror
963 kernel_exit 1
964ENDPROC(el1_error)
965
/*
 * SError taken from EL0 (el0_error_naked is the compat entry after its
 * own kernel_entry): do_serror(regs, esr) then return to userspace.
 */
966el0_error:
967 kernel_entry 0
968el0_error_naked:
969 mrs x1, esr_el1
970 gic_prio_kentry_setup tmp=x2
971 enable_dbg
972 mov x0, sp
973 bl do_serror
974 enable_da_f
975 ct_user_exit
976 b ret_to_user
977ENDPROC(el0_error)
978
979
980
981
/*
 * Return-to-user slow/fast paths.  ret_to_user checks the thread flags
 * for pending work (signals, reschedule, tracing — the 'and' mask is
 * truncated in this copy; upstream _TIF_WORK_MASK) and loops through
 * do_notify_resume() until none remains, then restores single-step state
 * and performs kernel_exit 0.
 */
982work_pending:
983 mov x0, sp
984 bl do_notify_resume
985#ifdef CONFIG_TRACE_IRQFLAGS
// do_notify_resume() returns with IRQs disabled; note it for lockdep.
986 bl trace_hardirqs_on
987#endif
// Reload the (possibly changed) thread flags before finishing.
988 ldr x1, [tsk,
989 b finish_ret_to_user
990
991
992
993ret_to_user:
994 disable_daif
995 gic_prio_kentry_setup tmp=x3
996 ldr x1, [tsk,
997 and x2, x1,
998 cbnz x2, work_pending
999finish_ret_to_user:
1000 enable_step_tsk x1, x2
1001 kernel_exit 0
1002ENDPROC(ret_to_user)
1003
1004
1005
1006
/*
 * 64-bit syscall entry: the entire syscall path is handled in C by
 * el0_svc_handler(regs).
 */
1007 .align 6
1008el0_svc:
1009 gic_prio_kentry_setup tmp=x1
1010 mov x0, sp
1011 bl el0_svc_handler
1012 b ret_to_user
1013ENDPROC(el0_svc)
1014
1015 .popsection
1016
1017#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
1018
1019
1020
1021 .pushsection ".entry.tramp.text", "ax"
1022
/*
 * KPTI trampoline helper: switch TTBR1 from the trampoline page tables to
 * the full kernel swapper page tables (add/bic operands — the page-table
 * offset and ASID bit — are truncated in this copy).  Includes the Falkor
 * E1003 workaround: a dummy TLBI to a reserved ASID after the TTBR write.
 */
1023 .macro tramp_map_kernel, tmp
1024 mrs \tmp, ttbr1_el1
1025 add \tmp, \tmp,
1026 bic \tmp, \tmp,
1027 msr ttbr1_el1, \tmp
1028#ifdef CONFIG_QCOM_FALKOR_ERRATUM_1003
1029alternative_if ARM64_WORKAROUND_QCOM_FALKOR_E1003
1030
// Build the reserved-ASID TLBI operand (movk immediates truncated here).
1031 movk \tmp,
1032 movk \tmp,
1033
1034 movk \tmp,
1035 isb
1036 tlbi vae1, \tmp
1037 dsb nsh
1038alternative_else_nop_endif
1039#endif
1040 .endm
1041
/*
 * KPTI trampoline helper: switch TTBR1 back to the (minimal) trampoline
 * page tables before ereting to userspace.  Inverse of tramp_map_kernel;
 * sub/orr operands truncated in this copy.
 */
1042 .macro tramp_unmap_kernel, tmp
1043 mrs \tmp, ttbr1_el1
1044 sub \tmp, \tmp,
1045 orr \tmp, \tmp,
1046 msr ttbr1_el1, \tmp
1047
1048
1049
1050
1051
1052 .endm
1053
/*
 * One trampoline vector slot for EL0 entries under KPTI.  Stashes x30 in
 * tpidrro_el0 (64-bit tasks only), maps the kernel, locates the real
 * 'vectors' table (via a literal next to the trampoline page when
 * CONFIG_RANDOMIZE_BASE, else a plain literal load), installs it in
 * VBAR_EL1 and re-dispatches into the corresponding real vector slot via
 * the bl/ret pair.  Includes the Cavium TX2 219 prfm workaround.
 */
1054 .macro tramp_ventry, regsize = 64
1055 .align 7
10561:
1057 .if \regsize == 64
// Free up x30; kernel_ventry will recover it from tpidrro_el0.
1058 msr tpidrro_el0, x30
1059 .endif
1060
1061
1062
1063
1064
// bl sets x30 = 1b-relative return address used to index the real vectors.
1065 bl 2f
1066 b .
10672:
1068 tramp_map_kernel x30
1069#ifdef CONFIG_RANDOMIZE_BASE
// Real vectors address is stored in a literal page next to the trampoline.
1070 adr x30, tramp_vectors + PAGE_SIZE
1071alternative_insn isb, nop, ARM64_WORKAROUND_QCOM_FALKOR_E1003
1072 ldr x30, [x30]
1073#else
1074 ldr x30, =vectors
1075#endif
1076alternative_if_not ARM64_WORKAROUND_CAVIUM_TX2_219_PRFM
1077 prfm plil1strm, [x30,
1078alternative_else_nop_endif
1079 msr vbar_el1, x30
// Compute the matching slot in the real table (add operand truncated).
1080 add x30, x30,
1081 isb
1082 ret
1083 .endm
1084
/*
 * KPTI exit path: point VBAR back at the trampoline vectors, unmap the
 * kernel, recover x30 (stashed in far_el1 by kernel_exit for 64-bit
 * tasks), then eret to EL0.  'sb' is a post-eret speculation barrier.
 */
1085 .macro tramp_exit, regsize = 64
1086 adr x30, tramp_vectors
1087 msr vbar_el1, x30
1088 tramp_unmap_kernel x30
1089 .if \regsize == 64
1090 mrs x30, far_el1
1091 .endif
1092 eret
1093 sb
1094 .endm
1095
/*
 * The trampoline vector table mapped into the user page tables under
 * KPTI.  The first 0x400 bytes (current-EL slots) are unused; only the
 * lower-EL AArch64 and AArch32 groups are populated.
 */
1096 .align 11
1097ENTRY(tramp_vectors)
1098 .space 0x400
1099
1100 tramp_ventry
1101 tramp_ventry
1102 tramp_ventry
1103 tramp_ventry
1104
1105 tramp_ventry 32
1106 tramp_ventry 32
1107 tramp_ventry 32
1108 tramp_ventry 32
1109END(tramp_vectors)
1110
// Exit-trampoline entry points reached via tramp_alias from kernel_exit:
// one for native (64-bit) tasks, one for compat (32-bit) tasks.
1111ENTRY(tramp_exit_native)
1112 tramp_exit
1113END(tramp_exit_native)
1114
1115ENTRY(tramp_exit_compat)
1116 tramp_exit 32
1117END(tramp_exit_compat)
1118
1119 .ltorg
1120 .popsection
1121#ifdef CONFIG_RANDOMIZE_BASE
// With KASLR the trampoline cannot embed the kernel vectors address;
// it is kept in this page-aligned literal, mapped next to the trampoline.
1122 .pushsection ".rodata", "a"
1123 .align PAGE_SHIFT
1124 .globl __entry_tramp_data_start
1125__entry_tramp_data_start:
1126 .quad vectors
1127 .popsection
1128#endif
1129#endif
1130
1131
1132
1133
1134
1135
1136
1137
1138
/*
 * cpu_switch_to(prev (x0), next (x1)) — context switch.
 * Saves the callee-saved registers (x19-x28), fp, sp and lr into prev's
 * thread.cpu_context, restores the same set from next, then sets sp_el0
 * to the next task pointer ('current').  Returns on next's stack.
 * NOTE(review): the cpu_context offset (mov x10) and the post-index
 * increments on the stp/ldp pairs are truncated in this copy.
 */
1139ENTRY(cpu_switch_to)
1140 mov x10,
1141 add x8, x0, x10
1142 mov x9, sp
1143 stp x19, x20, [x8],
1144 stp x21, x22, [x8],
1145 stp x23, x24, [x8],
1146 stp x25, x26, [x8],
1147 stp x27, x28, [x8],
1148 stp x29, x9, [x8],
1149 str lr, [x8]
1150 add x8, x1, x10
1151 ldp x19, x20, [x8],
1152 ldp x21, x22, [x8],
1153 ldp x23, x24, [x8],
1154 ldp x25, x26, [x8],
1155 ldp x27, x28, [x8],
1156 ldp x29, x9, [x8],
1157 ldr lr, [x8]
1158 mov sp, x9
1159 msr sp_el0, x1
1160 ret
1161ENDPROC(cpu_switch_to)
1162NOKPROBE(cpu_switch_to)
1163
1164
1165
1166
/*
 * First code run by a newly forked thread (lr set up by copy_thread).
 * x19 = kernel-thread function (NULL for user threads), x20 = its
 * argument.  User threads fall straight through to ret_to_user.
 */
1167ENTRY(ret_from_fork)
1168 bl schedule_tail
1169 cbz x19, 1f
1170 mov x0, x20
1171 blr x19
11721: get_thread_info tsk
1173 b ret_to_user
1174ENDPROC(ret_from_fork)
1175NOKPROBE(ret_from_fork)
1176
1177#ifdef CONFIG_ARM_SDE_INTERFACE
1178
1179#include <asm/sdei.h>
1180#include <uapi/linux/arm_sdei.h>
1181
/*
 * Complete an SDEI event via the firmware conduit held in \exit_mode:
 * smc for SDEI_EXIT_SMC, else hvc (the cmp immediate is truncated in
 * this copy).  The firmware call does not return here ('b .' traps).
 */
1182.macro sdei_handler_exit exit_mode
1183
1184 cmp \exit_mode,
1185 b.ne 99f
1186 smc
1187 b .
118899: hvc
1189 b .
1190.endm
1191
1192#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
1193
1194
1195
1196
1197
1198
1199
1200
1201.ltorg
1202.pushsection ".entry.tramp.text", "ax"
/*
 * SDEI event entry under KPTI: firmware may deliver the event while the
 * userspace (trampoline) page tables are live.  Detect which tables were
 * active from TTBR1 (tbz bit truncated here), map the kernel if needed,
 * record that fact in the event argument block (x1) so the exit
 * trampoline knows whether to unmap, then jump to the real handler —
 * located via the trampoline data page under CONFIG_RANDOMIZE_BASE.
 */
1203ENTRY(__sdei_asm_entry_trampoline)
1204 mrs x4, ttbr1_el1
1205 tbz x4,
1206
1207 tramp_map_kernel tmp=x4
1208 isb
1209 mov x4, xzr
1210
1211
1212
1213
1214
// Remember (in the SDEI argument area) whether we had to map the kernel.
12151: str x4, [x1,
1216
1217#ifdef CONFIG_RANDOMIZE_BASE
1218 adr x4, tramp_vectors + PAGE_SIZE
1219 add x4, x4,
1220 ldr x4, [x4]
1221#else
1222 ldr x4, =__sdei_asm_handler
1223#endif
1224 br x4
1225ENDPROC(__sdei_asm_entry_trampoline)
1226NOKPROBE(__sdei_asm_entry_trampoline)
1227
1228
1229
1230
1231
1232
1233
1234
/*
 * SDEI exit under KPTI: if the entry trampoline recorded that it mapped
 * the kernel (flag reloaded from [x4, <offset truncated>]), unmap it
 * again before completing the event via sdei_handler_exit.
 */
1235ENTRY(__sdei_asm_exit_trampoline)
1236 ldr x4, [x4,
1237 cbnz x4, 1f
1238
1239 tramp_unmap_kernel tmp=x4
1240
12411: sdei_handler_exit exit_mode=x2
1242ENDPROC(__sdei_asm_exit_trampoline)
1243NOKPROBE(__sdei_asm_exit_trampoline)
1244 .ltorg
1245.popsection
1246#ifdef CONFIG_RANDOMIZE_BASE
// KASLR: the SDEI entry trampoline loads the real handler address from
// this literal (placed in the trampoline data page) instead of embedding it.
1247.pushsection ".rodata", "a"
1248__sdei_asm_trampoline_next_handler:
1249 .quad __sdei_asm_handler
1250.popsection
1251#endif
1252#endif
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
/*
 * Main SDEI (Software Delegated Exception Interface) event handler,
 * invoked by firmware with x0 = event number, x1 = struct sdei_registered
 * event argument, x2 = interrupted PC, x3 = interrupted PSTATE.
 * Saves x2-x29/lr/sp into the argument block (x1 — store offsets
 * truncated in this copy), switches to the per-CPU SDEI normal or
 * critical stack (CONFIG_VMAP_STACK), sets sp_el0 to __entry_task,
 * builds a frame record only if we interrupted the same EL, calls
 * __sdei_handler(regs, arg), restores state and completes the event —
 * COMPLETE vs COMPLETE_AND_RESUME chosen from the handler's return value
 * — via the firmware conduit, through the KPTI exit trampoline if needed.
 */
1267ENTRY(__sdei_asm_handler)
1268 stp x2, x3, [x1,
1269 stp x4, x5, [x1,
1270 stp x6, x7, [x1,
1271 stp x8, x9, [x1,
1272 stp x10, x11, [x1,
1273 stp x12, x13, [x1,
1274 stp x14, x15, [x1,
1275 stp x16, x17, [x1,
1276 stp x18, x19, [x1,
1277 stp x20, x21, [x1,
1278 stp x22, x23, [x1,
1279 stp x24, x25, [x1,
1280 stp x26, x27, [x1,
1281 stp x28, x29, [x1,
1282 mov x4, sp
1283 stp lr, x4, [x1,
1284
// Keep the argument block pointer in a callee-saved register.
1285 mov x19, x1
1286
1287#ifdef CONFIG_VMAP_STACK
1288
1289
1290
1291
1292
// Select the normal or critical SDEI stack based on the event priority
// flag in the argument block (ldrb offset truncated in this copy).
1293 ldrb w4, [x19,
1294 cbnz w4, 1f
1295 ldr_this_cpu dst=x5, sym=sdei_stack_normal_ptr, tmp=x6
1296 b 2f
12971: ldr_this_cpu dst=x5, sym=sdei_stack_critical_ptr, tmp=x6
12982: mov x6,
1299 add x5, x5, x6
1300 mov sp, x5
1301#endif
1302
1303
1304
1305
1306
// sp_el0 is 'current' in the kernel; stash the interrupted value in x28.
1307 mrs x28, sp_el0
1308 ldr_this_cpu dst=x0, sym=__entry_task, tmp=x1
1309 msr sp_el0, x0
1310
1311
// Only chain a frame record / report the interrupted PC if the event
// interrupted the EL we are running at (and mask truncated here).
1312 and x0, x3,
1313 mrs x1, CurrentEL
1314 cmp x0, x1
1315 csel x29, x29, xzr, eq
1316 csel x4, x2, xzr, eq
1317
1318 stp x29, x4, [sp,
1319 mov x29, sp
1320
// __sdei_handler(regs, registered_event) — x0 offset truncated here.
1321 add x0, x19,
1322 mov x1, x19
1323 bl __sdei_handler
1324
1325 msr sp_el0, x28
1326
// Restore only what firmware does not reinstate: x28/x29, x18/x19,
// lr and the interrupted sp (ldp offsets truncated in this copy).
1327 mov x4, x19
1328 ldp x28, x29, [x4,
1329 ldp x18, x19, [x4,
1330 ldp lr, x1, [x4,
1331 mov sp, x1
1332
1333 mov x1, x0
1334
// Handler return value selects COMPLETE (<= threshold, 'ls') vs
// COMPLETE_AND_RESUME for the firmware call.
1335 cmp x0,
1336 mov_q x2, SDEI_1_0_FN_SDEI_EVENT_COMPLETE
1337 mov_q x3, SDEI_1_0_FN_SDEI_EVENT_COMPLETE_AND_RESUME
1338 csel x0, x2, x3, ls
1339
1340 ldr_l x2, sdei_exit_mode
1341
1342alternative_if_not ARM64_UNMAP_KERNEL_AT_EL0
1343 sdei_handler_exit exit_mode=x2
1344alternative_else_nop_endif
1345
1346#ifdef CONFIG_UNMAP_KERNEL_AT_EL0
// KPTI: leave via the exit trampoline so the kernel is unmapped first.
1347 tramp_alias dst=x5, sym=__sdei_asm_exit_trampoline
1348 br x5
1349#endif
1350ENDPROC(__sdei_asm_handler)
1351NOKPROBE(__sdei_asm_handler)
1352#endif
1353