/*
 * Low-level exception handling code
 *
 * Copyright (C) 2012 ARM Ltd.
 * Authors:	Catalin Marinas <catalin.marinas@arm.com>
 *		Will Deacon <will.deacon@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
21#include <linux/init.h>
22#include <linux/linkage.h>
23
24#include <asm/assembler.h>
25#include <asm/asm-offsets.h>
26#include <asm/errno.h>
27#include <asm/esr.h>
28#include <asm/thread_info.h>
29#include <asm/unistd.h>
30#include <asm/unistd32.h>
31
/*
 * Bad Abort numbers, passed to bad_mode() as the reason code.
 *-----------------
 */
36#define BAD_SYNC 0
37#define BAD_IRQ 1
38#define BAD_FIQ 2
39#define BAD_ERROR 3
40
41 .macro kernel_entry, el, regsize = 64
42 sub sp, sp,
43 .if \regsize == 32
44 mov w0, w0
45 .endif
46 push x28, x29
47 push x26, x27
48 push x24, x25
49 push x22, x23
50 push x20, x21
51 push x18, x19
52 push x16, x17
53 push x14, x15
54 push x12, x13
55 push x10, x11
56 push x8, x9
57 push x6, x7
58 push x4, x5
59 push x2, x3
60 push x0, x1
61 .if \el == 0
62 mrs x21, sp_el0
63 .else
64 add x21, sp,
65 .endif
66 mrs x22, elr_el1
67 mrs x23, spsr_el1
68 stp lr, x21, [sp,
69 stp x22, x23, [sp,
70
71
72
73
74 .if \el == 0
75 mvn x21, xzr
76 str x21, [sp,
77 .endif
78
79
80
81
82
83
84
85
86 .endm
87
88 .macro kernel_exit, el, ret = 0
89 ldp x21, x22, [sp,
90 .if \el == 0
91 ldr x23, [sp,
92 .endif
93 .if \ret
94 ldr x1, [sp,
95 add sp, sp, S_X2
96 .else
97 pop x0, x1
98 .endif
99 pop x2, x3
100 pop x4, x5
101 pop x6, x7
102 pop x8, x9
103 msr elr_el1, x21
104 msr spsr_el1, x22
105 .if \el == 0
106 msr sp_el0, x23
107 .endif
108 pop x10, x11
109 pop x12, x13
110 pop x14, x15
111 pop x16, x17
112 pop x18, x19
113 pop x20, x21
114 pop x22, x23
115 pop x24, x25
116 pop x26, x27
117 pop x28, x29
118 ldr lr, [sp],
119 eret
120 .endm
121
122 .macro get_thread_info, rd
123 mov \rd, sp
124 and \rd, \rd,
125 .endm
126
/*
 * Register aliases used by the syscall entry/exit paths below.  They are
 * callee-saved so they survive calls into C code.
 */
sc_nr	.req	x25		// number of system calls
scno	.req	x26		// syscall number
stbl	.req	x27		// syscall table pointer
tsk	.req	x28		// current thread_info
137
/*
 * Interrupt handling.
 */
	/*
	 * Call the registered arch IRQ handler with x0 = pt_regs (== SP here).
	 * handle_arch_irq is the literal pool slot at the bottom of this file;
	 * NOTE(review): presumably filled in by the irqchip init code - confirm.
	 */
	.macro	irq_handler
	ldr	x1, handle_arch_irq
	mov	x0, sp
	blr	x1
	.endm
146
147 .text
148
/*
 * Exception vectors.
 */
	.align	11
ENTRY(vectors)
	ventry	el1_sync_invalid		// Synchronous EL1t
	ventry	el1_irq_invalid			// IRQ EL1t
	ventry	el1_fiq_invalid			// FIQ EL1t
	ventry	el1_error_invalid		// Error EL1t

	ventry	el1_sync			// Synchronous EL1h
	ventry	el1_irq				// IRQ EL1h
	ventry	el1_fiq_invalid			// FIQ EL1h
	ventry	el1_error_invalid		// Error EL1h

	ventry	el0_sync			// Synchronous 64-bit EL0
	ventry	el0_irq				// IRQ 64-bit EL0
	ventry	el0_fiq_invalid			// FIQ 64-bit EL0
	ventry	el0_error_invalid		// Error 64-bit EL0

#ifdef CONFIG_COMPAT
	ventry	el0_sync_compat			// Synchronous 32-bit EL0
	ventry	el0_irq_compat			// IRQ 32-bit EL0
	ventry	el0_fiq_invalid_compat		// FIQ 32-bit EL0
	ventry	el0_error_invalid_compat	// Error 32-bit EL0
#else
	ventry	el0_sync_invalid		// Synchronous 32-bit EL0
	ventry	el0_irq_invalid			// IRQ 32-bit EL0
	ventry	el0_fiq_invalid			// FIQ 32-bit EL0
	ventry	el0_error_invalid		// Error 32-bit EL0
#endif
END(vectors)
182
183
184
185
186 .macro inv_entry, el, reason, regsize = 64
187 kernel_entry el, \regsize
188 mov x0, sp
189 mov x1,
190 mrs x2, esr_el1
191 b bad_mode
192 .endm
193
/*
 * Handlers for exceptions that should never happen in the current
 * configuration; each one routes to bad_mode() with the matching reason.
 */
el0_sync_invalid:
	inv_entry 0, BAD_SYNC
ENDPROC(el0_sync_invalid)

el0_irq_invalid:
	inv_entry 0, BAD_IRQ
ENDPROC(el0_irq_invalid)

el0_fiq_invalid:
	inv_entry 0, BAD_FIQ
ENDPROC(el0_fiq_invalid)

el0_error_invalid:
	inv_entry 0, BAD_ERROR
ENDPROC(el0_error_invalid)

#ifdef CONFIG_COMPAT
el0_fiq_invalid_compat:
	inv_entry 0, BAD_FIQ, 32
ENDPROC(el0_fiq_invalid_compat)

el0_error_invalid_compat:
	inv_entry 0, BAD_ERROR, 32
ENDPROC(el0_error_invalid_compat)
#endif

el1_sync_invalid:
	inv_entry 1, BAD_SYNC
ENDPROC(el1_sync_invalid)

el1_irq_invalid:
	inv_entry 1, BAD_IRQ
ENDPROC(el1_irq_invalid)

el1_fiq_invalid:
	inv_entry 1, BAD_FIQ
ENDPROC(el1_fiq_invalid)

el1_error_invalid:
	inv_entry 1, BAD_ERROR
ENDPROC(el1_error_invalid)
235
236
237
238
/*
 * Synchronous exception taken from EL1: decode the ESR exception class
 * (asm/esr.h) and dispatch to the appropriate handler.
 */
	.align	6
el1_sync:
	kernel_entry 1
	mrs	x1, esr_el1			// read the syndrome register
	lsr	x24, x1, #ESR_EL1_EC_SHIFT	// exception class
	cmp	x24, #ESR_EL1_EC_DABT_EL1	// data abort in EL1
	b.eq	el1_da
	cmp	x24, #ESR_EL1_EC_SYS64		// configurable trap
	b.eq	el1_undef
	cmp	x24, #ESR_EL1_EC_SP_ALIGN	// stack alignment exception
	b.eq	el1_sp_pc
	cmp	x24, #ESR_EL1_EC_PC_ALIGN	// pc alignment exception
	b.eq	el1_sp_pc
	cmp	x24, #ESR_EL1_EC_UNKNOWN	// unknown exception in EL1
	b.eq	el1_undef
	cmp	x24, #ESR_EL1_EC_BREAKPT_EL1	// debug exception in EL1
	b.ge	el1_dbg
	b	el1_inv
el1_da:
	/*
	 * Data abort handling
	 */
	mrs	x0, far_el1
	enable_dbg_if_not_stepping x2
	// re-enable interrupts if they were enabled in the aborted context
	tbnz	x23, #7, 1f			// PSR_I_BIT
	enable_irq
1:
	mov	x2, sp				// struct pt_regs
	bl	do_mem_abort

	// disable interrupts before pulling preserved data off the stack
	disable_irq
	kernel_exit 1
el1_sp_pc:
	/*
	 * Stack or PC alignment exception handling
	 */
	mrs	x0, far_el1
	mov	x1, x25
	mov	x2, sp
	b	do_sp_pc_abort
el1_undef:
	/*
	 * Undefined instruction
	 */
	mov	x0, sp
	b	do_undefinstr
el1_dbg:
	/*
	 * Debug exception handling
	 */
	tbz	x24, #0, el1_inv		// EL1 only
	mrs	x0, far_el1
	mov	x2, sp				// struct pt_regs
	bl	do_debug_exception

	kernel_exit 1
el1_inv:
	// TODO: add support for undefined instructions in kernel mode
	mov	x0, sp
	mov	x1, #BAD_SYNC
	mrs	x2, esr_el1
	b	bad_mode
ENDPROC(el1_sync)
304
/*
 * IRQ taken from EL1.  Under CONFIG_PREEMPT the preempt count is bumped
 * around the handler and a reschedule is triggered when it drops to zero
 * with TIF_NEED_RESCHED set.
 */
	.align	6
el1_irq:
	kernel_entry 1
	enable_dbg_if_not_stepping x0
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_off
#endif
#ifdef CONFIG_PREEMPT
	get_thread_info tsk
	ldr	x24, [tsk, #TI_PREEMPT]		// get preempt count
	add	x0, x24, #1			// increment it
	str	x0, [tsk, #TI_PREEMPT]
#endif
	irq_handler
#ifdef CONFIG_PREEMPT
	str	x24, [tsk, #TI_PREEMPT]		// restore preempt count
	cbnz	x24, 1f				// preempt count != 0
	ldr	x0, [tsk, #TI_FLAGS]		// get flags
	tbz	x0, #TIF_NEED_RESCHED, 1f	// needs rescheduling?
	bl	el1_preempt
1:
#endif
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_on
#endif
	kernel_exit 1
ENDPROC(el1_irq)
332
#ifdef CONFIG_PREEMPT
/*
 * Kernel preemption helper, called from el1_irq with lr preserved in x24.
 * Loops while TIF_NEED_RESCHED remains set after preempt_schedule_irq().
 */
el1_preempt:
	mov	x24, lr
1:	enable_dbg
	bl	preempt_schedule_irq		// irq en/disable is done inside
	ldr	x0, [tsk, #TI_FLAGS]		// get new tasks TI_FLAGS
	tbnz	x0, #TIF_NEED_RESCHED, 1b	// needs rescheduling?
	ret	x24
#endif
342
343
344
345
/*
 * Synchronous exception taken from 64-bit EL0: decode the ESR exception
 * class and dispatch.  lr is preloaded with ret_from_exception for all
 * non-syscall paths so the handlers can simply "b" into C code.
 */
	.align	6
el0_sync:
	kernel_entry 0
	mrs	x25, esr_el1			// read the syndrome register
	lsr	x24, x25, #ESR_EL1_EC_SHIFT	// exception class
	cmp	x24, #ESR_EL1_EC_SVC64		// SVC in 64-bit state
	b.eq	el0_svc
	adr	lr, ret_from_exception
	cmp	x24, #ESR_EL1_EC_DABT_EL0	// data abort in EL0
	b.eq	el0_da
	cmp	x24, #ESR_EL1_EC_IABT_EL0	// instruction abort in EL0
	b.eq	el0_ia
	cmp	x24, #ESR_EL1_EC_FP_ASIMD	// FP/ASIMD access
	b.eq	el0_fpsimd_acc
	cmp	x24, #ESR_EL1_EC_FP_EXC64	// FP/ASIMD exception
	b.eq	el0_fpsimd_exc
	cmp	x24, #ESR_EL1_EC_SYS64		// configurable trap
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_SP_ALIGN	// stack alignment exception
	b.eq	el0_sp_pc
	cmp	x24, #ESR_EL1_EC_PC_ALIGN	// pc alignment exception
	b.eq	el0_sp_pc
	cmp	x24, #ESR_EL1_EC_UNKNOWN	// unknown exception in EL0
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_BREAKPT_EL0	// debug exception in EL0
	b.ge	el0_dbg
	b	el0_inv
373
#ifdef CONFIG_COMPAT
/*
 * Synchronous exception taken from 32-bit (AArch32) EL0.  Same dispatch
 * structure as el0_sync, with the AArch32 exception classes (SVC32,
 * CP15/CP14 traps) folded in.
 */
	.align	6
el0_sync_compat:
	kernel_entry 0, 32
	mrs	x25, esr_el1			// read the syndrome register
	lsr	x24, x25, #ESR_EL1_EC_SHIFT	// exception class
	cmp	x24, #ESR_EL1_EC_SVC32		// SVC in 32-bit state
	b.eq	el0_svc_compat
	adr	lr, ret_from_exception
	cmp	x24, #ESR_EL1_EC_DABT_EL0	// data abort in EL0
	b.eq	el0_da
	cmp	x24, #ESR_EL1_EC_IABT_EL0	// instruction abort in EL0
	b.eq	el0_ia
	cmp	x24, #ESR_EL1_EC_FP_ASIMD	// FP/ASIMD access
	b.eq	el0_fpsimd_acc
	cmp	x24, #ESR_EL1_EC_FP_EXC32	// FP/ASIMD exception
	b.eq	el0_fpsimd_exc
	cmp	x24, #ESR_EL1_EC_UNKNOWN	// unknown exception in EL0
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_CP15_32	// CP15 MRC/MCR trap
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_CP15_64	// CP15 MRRC/MCRR trap
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_CP14_MR	// CP14 MRC/MCR trap
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_CP14_LS	// CP14 LDC/STC trap
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_CP14_64	// CP14 MRRC/MCRR trap
	b.eq	el0_undef
	cmp	x24, #ESR_EL1_EC_BREAKPT_EL0	// debug exception in EL0
	b.ge	el0_dbg
	b	el0_inv
el0_svc_compat:
	/*
	 * AArch32 syscall handling
	 */
	adr	stbl, compat_sys_call_table	// load compat syscall table pointer
	uxtw	scno, w7			// syscall number in w7 (r7)
	mov	sc_nr, #__NR_compat_syscalls
	b	el0_svc_naked

	.align	6
el0_irq_compat:
	kernel_entry 0, 32
	b	el0_irq_naked
#endif
420
el0_da:
	/*
	 * Data abort handling
	 */
	mrs	x0, far_el1
	bic	x0, x0, #(0xff << 56)		// clear the tag bits of the address
	disable_step x1
	isb
	enable_dbg
	// enable interrupts before calling the main handler
	enable_irq
	mov	x1, x25				// ESR
	mov	x2, sp				// struct pt_regs
	b	do_mem_abort
el0_ia:
	/*
	 * Instruction abort handling
	 */
	mrs	x0, far_el1
	disable_step x1
	isb
	enable_dbg
	// enable interrupts before calling the main handler
	enable_irq
	orr	x1, x25, #1 << 24		// use reserved ISS bit for instruction aborts
	mov	x2, sp
	b	do_mem_abort
el0_fpsimd_acc:
	/*
	 * Floating Point or Advanced SIMD access
	 */
	mov	x0, x25
	mov	x1, sp
	b	do_fpsimd_acc
el0_fpsimd_exc:
	/*
	 * Floating Point or Advanced SIMD exception
	 */
	mov	x0, x25
	mov	x1, sp
	b	do_fpsimd_exc
el0_sp_pc:
	/*
	 * Stack or PC alignment exception handling
	 */
	mrs	x0, far_el1
	disable_step x1
	isb
	enable_dbg
	// enable interrupts before calling the main handler
	enable_irq
	mov	x1, x25
	mov	x2, sp
	b	do_sp_pc_abort
el0_undef:
	/*
	 * Undefined instruction
	 */
	mov	x0, sp
	// enable interrupts before calling the main handler
	enable_irq
	b	do_undefinstr
el0_dbg:
	/*
	 * Debug exception handling
	 */
	tbnz	x24, #0, el0_inv		// EL0 only
	mrs	x0, far_el1
	disable_step x1
	mov	x1, x25
	mov	x2, sp
	b	do_debug_exception
el0_inv:
	mov	x0, sp
	mov	x1, #BAD_SYNC
	mrs	x2, esr_el1
	b	bad_mode
ENDPROC(el0_sync)
499
/*
 * IRQ taken from EL0.  Under CONFIG_PREEMPT the preempt count is bumped
 * around the handler; a mismatch afterwards indicates a broken count and
 * is reported by a deliberate NULL store.
 */
	.align	6
el0_irq:
	kernel_entry 0
el0_irq_naked:
	disable_step x1
	isb
	enable_dbg
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_off
#endif
	get_thread_info tsk
#ifdef CONFIG_PREEMPT
	ldr	x24, [tsk, #TI_PREEMPT]		// get preempt count
	add	x23, x24, #1			// increment it
	str	x23, [tsk, #TI_PREEMPT]
#endif
	irq_handler
#ifdef CONFIG_PREEMPT
	ldr	x0, [tsk, #TI_PREEMPT]
	str	x24, [tsk, #TI_PREEMPT]		// restore preempt count
	cmp	x0, x23
	b.eq	1f
	mov	x1, #0				// broken preempt count
	str	x1, [x1]			// deliberate fault
1:
#endif
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_on
#endif
	b	ret_to_user
ENDPROC(el0_irq)
531
532
533
534
/*
 * Common return path for EL0 exception handlers (preloaded into lr by
 * el0_sync/el0_sync_compat): reload the thread_info pointer and take the
 * slow return-to-user path.
 */
ret_from_exception:
	get_thread_info tsk
	b	ret_to_user
ENDPROC(ret_from_exception)
539
540
541
542
543
544
545
546
547
/*
 * Register switch for AArch64. The callee-saved registers need to be saved
 * and restored. On entry:
 *   x0 = previous task_struct
 *   x1 = next task_struct
 */
ENTRY(cpu_switch_to)
	add	x8, x0, #THREAD_CPU_CONTEXT	// &prev->thread.cpu_context
	mov	x9, sp
	stp	x19, x20, [x8], #16		// store callee-saved registers
	stp	x21, x22, [x8], #16
	stp	x23, x24, [x8], #16
	stp	x25, x26, [x8], #16
	stp	x27, x28, [x8], #16
	stp	x29, x9, [x8], #16
	str	lr, [x8]
	add	x8, x1, #THREAD_CPU_CONTEXT	// &next->thread.cpu_context
	ldp	x19, x20, [x8], #16		// restore callee-saved registers
	ldp	x21, x22, [x8], #16
	ldp	x23, x24, [x8], #16
	ldp	x25, x26, [x8], #16
	ldp	x27, x28, [x8], #16
	ldp	x29, x9, [x8], #16
	ldr	lr, [x8]
	mov	sp, x9
	ret
ENDPROC(cpu_switch_to)
569
570
571
572
573
/*
 * Fast syscall return path: no pending work, x0 already holds the syscall
 * return value (kernel_exit is invoked with ret = 1).
 */
ret_fast_syscall:
	disable_irq				// disable interrupts
	ldr	x1, [tsk, #TI_FLAGS]
	and	x2, x1, #_TIF_WORK_MASK		// any pending work?
	cbnz	x2, fast_work_pending
	tbz	x1, #TIF_SINGLESTEP, fast_exit
	disable_dbg
	enable_step x2
fast_exit:
	kernel_exit 0, ret = 1
584
585
586
587
/*
 * Pending-work handling before returning to user space.  x1 holds the
 * thread flags on entry to work_pending.
 */
fast_work_pending:
	str	x0, [sp, #S_X0]			// returned x0
work_pending:
	tbnz	x1, #TIF_NEED_RESCHED, work_resched
	/* TIF_SIGPENDING or TIF_NOTIFY_RESUME case */
	ldr	x2, [sp, #S_PSTATE]
	mov	x0, sp				// 'regs'
	tst	x2, #PSR_MODE_MASK		// user mode regs?
	b.ne	no_work_pending			// returning to kernel
	enable_irq				// enable interrupts for do_notify_resume()
	bl	do_notify_resume
	b	ret_to_user
work_resched:
	enable_dbg
	bl	schedule
603
604
605
606
/*
 * "Slow" return-to-user path: loop through work_pending until no work
 * flags remain, then restore the full user context.
 */
ret_to_user:
	disable_irq				// disable interrupts
	ldr	x1, [tsk, #TI_FLAGS]
	and	x2, x1, #_TIF_WORK_MASK		// any pending work?
	cbnz	x2, work_pending
	tbz	x1, #TIF_SINGLESTEP, no_work_pending
	disable_dbg
	enable_step x2
no_work_pending:
	kernel_exit 0, ret = 0
ENDPROC(ret_to_user)
618
619
620
621
/*
 * First-run path for a newly forked task.  x19/x20 are set up by the fork
 * code before this runs: NOTE(review): x19 appears to be a kernel-thread
 * entry function (called with x20 as argument) and is zero for user
 * tasks - confirm against copy_thread().
 */
ENTRY(ret_from_fork)
	bl	schedule_tail
	cbz	x19, 1f				// not a kernel thread
	mov	x0, x20				// kernel thread argument
	blr	x19				// call the kernel thread function
1:	get_thread_info tsk
	b	ret_to_user
ENDPROC(ret_from_fork)
630
631
632
633
/*
 * 64-bit SVC (system call) handling.  scno/stbl/sc_nr are the aliases
 * declared above; el0_svc_naked is shared with the compat entry.
 * NOTE(review): adrp yields the page base of sys_call_table, so the table
 * must be page-aligned - confirm in the table's definition.
 */
	.align	6
el0_svc:
	adrp	stbl, sys_call_table		// load syscall table pointer
	uxtw	scno, w8			// syscall number in w8
	mov	sc_nr, #__NR_syscalls
el0_svc_naked:					// compat entry point
	stp	x0, scno, [sp, #S_ORIG_X0]	// save the original x0 and syscall number
	disable_step x16
	isb
	enable_dbg
	enable_irq

	get_thread_info tsk
	ldr	x16, [tsk, #TI_FLAGS]		// check for syscall tracing
	tbnz	x16, #TIF_SYSCALL_TRACE, __sys_trace // are we tracing syscalls?
	adr	lr, ret_fast_syscall		// return address
	cmp	scno, sc_nr			// check upper syscall limit
	b.hs	ni_sys
	ldr	x16, [stbl, scno, lsl #3]	// address in the syscall table
	br	x16				// call sys_* routine
ni_sys:
	mov	x0, sp
	b	do_ni_syscall
ENDPROC(el0_svc)
658
659
660
661
662
/*
 * Traced syscall path: notify the tracer on entry, re-read the (possibly
 * rewritten) syscall number and arguments, dispatch, then notify the
 * tracer again on exit.
 */
__sys_trace:
	mov	x1, sp
	mov	w0, #0				// trace entry
	bl	syscall_trace
	adr	lr, __sys_trace_return		// return address
	uxtw	scno, w0			// syscall number (possibly new)
	mov	x1, sp				// pointer to regs
	cmp	scno, sc_nr			// check upper syscall limit
	b.hs	ni_sys
	ldp	x0, x1, [sp]			// restore the syscall args
	ldp	x2, x3, [sp, #S_X2]
	ldp	x4, x5, [sp, #S_X4]
	ldp	x6, x7, [sp, #S_X6]
	ldr	x16, [stbl, scno, lsl #3]	// address in the syscall table
	br	x16				// call sys_* routine

__sys_trace_return:
	str	x0, [sp]			// save returned x0
	mov	x1, sp
	mov	w0, #1				// trace exit
	bl	syscall_trace
	b	ret_to_user
685
686
687
688
/*
 * sys_rt_sigreturn needs the full pt_regs: pass SP (which points at the
 * saved exception frame here) as the first argument.
 */
ENTRY(sys_rt_sigreturn_wrapper)
	mov	x0, sp
	b	sys_rt_sigreturn
ENDPROC(sys_rt_sigreturn_wrapper)
693
/*
 * Literal pool slot holding the arch IRQ handler pointer, loaded by the
 * irq_handler macro above.  Zero until initialised elsewhere -
 * NOTE(review): presumably written by the interrupt controller setup code;
 * confirm at the site that assigns it.
 */
ENTRY(handle_arch_irq)
	.quad	0
696