/*
 *  linux/arch/arm/kernel/entry-armv.S
 *
 *  Low-level vector interface routines
 *
 *  Note:  there is a StrongARM bug in the STMIA rn, {regs}^ instruction
 *  that causes it to save wrong values...  Be aware!
 */
#include <asm/assembler.h>
#include <asm/memory.h>
#include <asm/glue-df.h>
#include <asm/glue-pf.h>
#include <asm/vfpmacros.h>
#ifndef CONFIG_MULTI_IRQ_HANDLER
#include <mach/entry-macro.S>
#endif
#include <asm/thread_notify.h>
#include <asm/unwind.h>
#include <asm/unistd.h>
#include <asm/tls.h>
#include <asm/system_info.h>

#include "entry-header.S"
#include <asm/entry-macro-multi.S>

/*
 * Interrupt handling.
 */
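@ With CONFIG_MULTI_IRQ_HANDLER the platform installs its top-level IRQ
@ dispatcher in the handle_arch_irq pointer at boot; the macro below calls
@ it indirectly with r0 = pt_regs and arranges for it to return to the
@ local 9997 label.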
	.macro	irq_handler
#ifdef CONFIG_MULTI_IRQ_HANDLER
	ldr	r1, =handle_arch_irq
	mov	r0, sp
	adr	lr, BSYM(9997f)
	ldr	pc, [r1]
#else
	arch_irq_handler_default
#endif
9997:
	.endm

	.macro	pabt_helper
	@ PABORT handler takes pt_regs in r2, fault address in r4 and psr in r5
#ifdef MULTI_PABORT
	ldr	ip, .LCprocfns
	mov	lr, pc
	ldr	pc, [ip, #PROCESSOR_PABT_FUNC]
#else
	bl	CPU_PABORT_HANDLER
#endif
	.endm

	.macro	dabt_helper

	@
	@ Call the processor-specific abort handler:
	@
	@  r2 - pt_regs
	@  r4 - aborted context pc
	@  r5 - aborted context psr
	@
	@ The abort handler must return the aborted address in r0, and
	@ the fault status register in r1.  r9 must be preserved.
	@
#ifdef MULTI_DABORT
	ldr	ip, .LCprocfns
	mov	lr, pc
	ldr	pc, [ip, #PROCESSOR_DABT_FUNC]
#else
	bl	CPU_DABORT_HANDLER
#endif
	.endm

#ifdef CONFIG_KPROBES
	.section	.kprobes.text,"ax",%progbits
#else
	.text
#endif

/*
 * Invalid mode handlers
 */
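@ inv_entry builds a partial pt_regs frame on the exception stack and
@ records the reason code (BAD_*) in r1 for bad_mode() to report.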
	.macro	inv_entry, reason
	sub	sp, sp, #S_FRAME_SIZE
 ARM(	stmib	sp, {r1 - lr}		)
 THUMB(	stmia	sp, {r0 - r12}		)
 THUMB(	str	sp, [sp, #S_SP]		)
 THUMB(	str	lr, [sp, #S_LR]		)
	mov	r1, #\reason
	.endm

__pabt_invalid:
	inv_entry BAD_PREFETCH
	b	common_invalid
ENDPROC(__pabt_invalid)

__dabt_invalid:
	inv_entry BAD_DATA
	b	common_invalid
ENDPROC(__dabt_invalid)

__irq_invalid:
	inv_entry BAD_IRQ
	b	common_invalid
ENDPROC(__irq_invalid)

__und_invalid:
	inv_entry BAD_UNDEFINSTR

	@
	@ XXX fall through to common_invalid
	@

@
@ common_invalid - generic code for failed exception (re-entrant version of handlers)
@
common_invalid:
	zero_fp

	ldmia	r0, {r4 - r6}
	add	r0, sp, #S_PC		@ here for interlock avoidance
	mov	r7, #-1			@  ""   ""    ""        ""
	str	r4, [sp]		@ save preserved r0
	stmia	r0, {r5 - r7}		@ lr_<exception>,
					@ cpsr_<exception>, "old_r0"

	mov	r0, sp
	b	bad_mode
ENDPROC(__und_invalid)

/*
 * SVC mode handlers
 */

#if defined(CONFIG_AEABI) && (__LINUX_ARM_ARCH__ >= 5)
#define SPFIX(code...) code
#else
#define SPFIX(code...)
#endif

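@ svc_entry builds a struct pt_regs frame on the SVC stack.  The SPFIX()
@ lines keep sp 8-byte aligned across the exception, as the EABI requires.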
	.macro	svc_entry, stack_hole=0
 UNWIND(.fnstart		)
 UNWIND(.save {r0 - pc}		)
	sub	sp, sp, #(S_FRAME_SIZE + \stack_hole - 4)
#ifdef CONFIG_THUMB2_KERNEL
 SPFIX(	str	r0, [sp]	)	@ temporarily saved
 SPFIX(	mov	r0, sp		)
 SPFIX(	tst	r0, #4		)	@ test original stack alignment
 SPFIX(	ldr	r0, [sp]	)	@ restored
#else
 SPFIX(	tst	sp, #4		)
#endif
 SPFIX(	subeq	sp, sp, #4	)
	stmia	sp, {r1 - r12}

	ldmia	r0, {r3 - r5}
	add	r7, sp, #S_SP - 4	@ here for interlock avoidance
	mov	r6, #-1			@  ""  ""      ""       ""
	add	r2, sp, #(S_FRAME_SIZE + \stack_hole - 4)
 SPFIX(	addeq	r2, r2, #4	)
	str	r3, [sp, #-4]!		@ save the "real" r0 copied
					@ from the exception stack

	mov	r3, lr

	@
	@ We are now ready to fill in the remaining blanks on the stack:
	@
	@  r2 - sp_svc
	@  r3 - lr_svc
	@  r4 - lr_<exception>, already fixed up for correct return/restart
	@  r5 - spsr_<exception>
	@  r6 - orig_r0 (see pt_regs definition in ptrace.h)
	@
	stmia	r7, {r2 - r6}

#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_off
#endif
	.endm

	.align	5
__dabt_svc:
	svc_entry
	mov	r2, sp
	dabt_helper
 THUMB(	ldr	r5, [sp, #S_PSR]	)	@ potentially updated CPSR
	svc_exit r5				@ return from exception
 UNWIND(.fnend		)
ENDPROC(__dabt_svc)

	.align	5
__irq_svc:
	svc_entry
	irq_handler

#ifdef CONFIG_PREEMPT
	get_thread_info tsk
	ldr	r8, [tsk, #TI_PREEMPT]		@ get preempt count
	ldr	r0, [tsk, #TI_FLAGS]		@ get flags
	teq	r8, #0				@ if preempt count != 0
	movne	r0, #0				@ force flags to 0
	tst	r0, #_TIF_NEED_RESCHED
	blne	svc_preempt
#endif

	svc_exit r5, irq = 1			@ return from exception
 UNWIND(.fnend		)
ENDPROC(__irq_svc)

	.ltorg

#ifdef CONFIG_PREEMPT
svc_preempt:
	mov	r8, lr
1:	bl	preempt_schedule_irq		@ irq en/disable is done inside
	ldr	r0, [tsk, #TI_FLAGS]		@ get new tasks TI_FLAGS
	tst	r0, #_TIF_NEED_RESCHED
	moveq	pc, r8				@ go again
	b	1b
#endif

__und_fault:
	@ Correct the PC such that it is pointing at the instruction
	@ which caused the fault.  If the faulting instruction was ARM,
	@ the PC will be pointing at the next instruction and we have to
	@ subtract 4.  Otherwise, it is Thumb, and the PC will be
	@ pointing at the second half of the Thumb instruction, so we
	@ have to subtract 2.
	ldr	r2, [r0, #S_PC]
	sub	r2, r2, r1
	str	r2, [r0, #S_PC]
	b	do_undefinstr
ENDPROC(__und_fault)

	.align	5
__und_svc:
#ifdef CONFIG_KPROBES
	@ If a kprobe is about to simulate a "stmdb sp..." instruction,
	@ it obviously needs free stack space which then will belong to
	@ the saved context.
	svc_entry 64
#else
	svc_entry
#endif
	@
	@ call emulation code, which returns using r9 if it has emulated
	@ the instruction, or the more conventional lr if we are to treat
	@ this as a real undefined instruction
	@
	@  r0 - instruction
	@
#ifndef CONFIG_THUMB2_KERNEL
	ldr	r0, [r4, #-4]
#else
	mov	r1, #2
	ldrh	r0, [r4, #-2]			@ Thumb instruction at LR - 2
	cmp	r0, #0xe800			@ 32-bit instruction if xx >= 0
	blo	__und_svc_fault
	ldrh	r9, [r4]			@ bottom 16 bits
	add	r4, r4, #2
	str	r4, [sp, #S_PC]
	orr	r0, r9, r0, lsl #16
#endif
	adr	r9, BSYM(__und_svc_finish)
	mov	r2, r4
	bl	call_fpe

	mov	r1, #4				@ PC correction to apply
__und_svc_fault:
	mov	r0, sp				@ struct pt_regs *regs
	bl	__und_fault

__und_svc_finish:
	ldr	r5, [sp, #S_PSR]		@ Get SVC cpsr
	svc_exit r5				@ return from exception
 UNWIND(.fnend		)
ENDPROC(__und_svc)

	.align	5
__pabt_svc:
	svc_entry
	mov	r2, sp				@ regs
	pabt_helper
	svc_exit r5				@ return from exception
 UNWIND(.fnend		)
ENDPROC(__pabt_svc)

	.align	5
.LCcralign:
	.word	cr_alignment
#ifdef MULTI_DABORT
.LCprocfns:
	.word	processor
#endif
.LCfp:
	.word	fp_enter

/*
 * User mode handlers
 *
 * EABI note: sp_svc is always 64-bit aligned (see svc_entry), therefore
 * S_FRAME_SIZE needs to be a multiple of 8.
 */

#if defined(CONFIG_AEABI) && (__LINUX_ARM_ARCH__ >= 5) && (S_FRAME_SIZE & 7)
#error "sizeof(struct pt_regs) must be a multiple of 8"
#endif

	.macro	usr_entry
 UNWIND(.fnstart	)
 UNWIND(.cantunwind	)	@ don't unwind the user space
	sub	sp, sp, #S_FRAME_SIZE
 ARM(	stmib	sp, {r1 - r12}	)
 THUMB(	stmia	sp, {r0 - r12}	)

	ldmia	r0, {r3 - r5}
	add	r0, sp, #S_PC		@ here for interlock avoidance
	mov	r6, #-1			@  ""  ""     ""        ""

	str	r3, [sp]		@ save the "real" r0 copied
					@ from the exception stack

	@
	@ We are now ready to fill in the remaining blanks on the stack:
	@
	@  r4 - lr_<exception>, already fixed up for correct return/restart
	@  r5 - spsr_<exception>
	@  r6 - orig_r0 (see pt_regs definition in ptrace.h)
	@
	@ Also, separately save sp_usr and lr_usr
	@
	stmia	r0, {r4 - r6}
 ARM(	stmdb	r0, {sp, lr}^			)
 THUMB(	store_user_sp_lr r0, r1, S_SP - S_PC	)

	@
	@ Enable the alignment trap while in kernel mode
	@
	alignment_trap r0, .LCcralign

	@
	@ Clear FP to mark the first stack frame
	@
	zero_fp

#ifdef CONFIG_IRQSOFF_TRACER
	bl	trace_hardirqs_off
#endif
	ct_user_exit save = 0
	.endm

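@ On CPUs without load/store-exclusive the kuser cmpxchg helpers are only
@ atomic with respect to preemption, so exception entry from user space
@ must check whether it interrupted a helper's critical section and, if
@ so, rewind the saved user PC to the start of that section.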
	.macro	kuser_cmpxchg_check
#if !defined(CONFIG_CPU_32v6K) && defined(CONFIG_KUSER_HELPERS) && \
    !defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)
#ifndef CONFIG_MMU
#warning "NPTL on non MMU needs fixing"
#else
	@ Make sure our user space atomic helper is restarted
	@ if it was interrupted in a critical region.  Here we
	@ perform a quick test inline since it should be false
	@ 99.9999% of the time.  The rest is done out of line.
	cmp	r4, #TASK_SIZE
	blhs	kuser_cmpxchg64_fixup
#endif
#endif
	.endm

	.align	5
__dabt_usr:
	usr_entry
	kuser_cmpxchg_check
	mov	r2, sp
	dabt_helper
	b	ret_from_exception
 UNWIND(.fnend		)
ENDPROC(__dabt_usr)

	.align	5
__irq_usr:
	usr_entry
	kuser_cmpxchg_check
	irq_handler
	get_thread_info tsk
	mov	why, #0
	b	ret_to_user_from_irq
 UNWIND(.fnend		)
ENDPROC(__irq_usr)

	.ltorg

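@ Undefined instruction entry from user mode: fetch the faulting
@ instruction (with IRQs enabled, since the user-space read may fault),
@ then hand it to call_fpe for possible coprocessor/VFP/NEON handling.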
	.align	5
__und_usr:
	usr_entry

	mov	r2, r4
	mov	r3, r5

	@ r2 = regs->ARM_pc, which is either 2 or 4 bytes ahead of the
	@      faulting instruction depending on Thumb mode.
	@ r3 = regs->ARM_cpsr
	@
	@ The emulation code returns using r9 if it has emulated the
	@ instruction, or the more conventional lr if we are to treat
	@ this as a real undefined instruction
	@
	adr	r9, BSYM(ret_from_exception)

	@ IRQs must be enabled before attempting to read the instruction from
	@ user space since that could cause a page/translation fault if the
	@ page table was modified by another CPU.
	enable_irq

	tst	r3, #PSR_T_BIT			@ Thumb mode?
	bne	__und_usr_thumb
	sub	r4, r2, #4			@ ARM instr at LR - 4
1:	ldrt	r0, [r4]
 ARM_BE8(rev	r0, r0)				@ little endian instruction

	@ r0 = 32-bit ARM instruction which caused the exception
	@ r2 = PC value for the following instruction (:= regs->ARM_pc)
	@ r4 = PC value for the faulting instruction
	@ lr = 32-bit undefined instruction function
	adr	lr, BSYM(__und_usr_fault_32)
	b	call_fpe

__und_usr_thumb:
	@ Thumb instruction
	sub	r4, r2, #2			@ First half of thumb instr at LR - 2

#if CONFIG_ARM_THUMB && __LINUX_ARM_ARCH__ >= 6 && CONFIG_CPU_V7
/*
 * Thumb-2 instruction handling.  Note that because pre-v6 and >= v6 platforms
 * can never be supported in a single kernel, this code is not applicable at
 * all when __LINUX_ARM_ARCH__ < 6.  This allows simplifying assumptions to be
 * made about .arch directives.
 */
#if __LINUX_ARM_ARCH__ < 7
/* If the target CPU may not be Thumb-2-capable, a run-time check is needed: */
#define NEED_CPU_ARCHITECTURE
	ldr	r5, .LCcpu_architecture
	ldr	r5, [r5]
	cmp	r5, #CPU_ARCH_ARMv7
	blo	__und_usr_fault_16		@ 16bit undefined instruction
/*
 * The following code won't get run unless the running CPU really is v7, so
 * coding round the lack of ldrht on older arches is pointless.  Temporarily
 * override the assembler target arch with the minimum required arch version.
 */
	.arch	armv6t2
#endif
2:	ldrht	r5, [r4]
ARM_BE8(rev16	r5, r5)				@ little endian instruction
	cmp	r5, #0xe800			@ 32bit instruction if xx != 0
	blo	__und_usr_fault_16		@ 16bit undefined instruction
3:	ldrht	r0, [r2]
ARM_BE8(rev16	r0, r0)				@ little endian instruction
	add	r2, r2, #2			@ r2 is PC + 2, make it PC + 4
	str	r2, [sp, #S_PC]			@ it's a 2x16bit instr, update
	orr	r0, r0, r5, lsl #16
	adr	lr, BSYM(__und_usr_fault_32)
	@ r0 = the two 16-bit Thumb instructions which caused the exception
	@ r2 = PC value for the following Thumb instruction (:= regs->ARM_pc)
	@ r4 = PC value for the first 16-bit Thumb instruction
	@ lr = 32bit undefined instruction function

#if __LINUX_ARM_ARCH__ < 7
/* If the target arch was overridden, change it back: */
#ifdef CONFIG_CPU_32v6K
	.arch	armv6k
#else
	.arch	armv6
#endif
#endif
#else
	b	__und_usr_fault_16
#endif
 UNWIND(.fnend)
ENDPROC(__und_usr)

/*
 * The out of line fixup for the ldrt instructions above.
 */
	.pushsection .fixup, "ax"
	.align	2
4:	str	r4, [sp, #S_PC]			@ retry current instruction
	mov	pc, r9
	.popsection
	.pushsection __ex_table,"a"
	.long	1b, 4b
#if CONFIG_ARM_THUMB && __LINUX_ARM_ARCH__ >= 6 && CONFIG_CPU_V7
	.long	2b, 4b
	.long	3b, 4b
#endif
	.popsection

/*
 * Check whether the instruction is a co-processor instruction.
 * If yes, we need to call the relevant co-processor handler.
 *
 * Note that we don't do a full check here for the co-processor
 * instructions; all instructions with bit 27 set are well
 * defined.  The only instructions that should fault are the
 * co-processor instructions.
 *
 * NEON is a special case that has to be handled here.  Not all
 * NEON instructions are co-processor instructions, so we have
 * to make a special case of checking for them.  Plus, there's
 * five groups of them, so we have a table of mask/opcode pairs
 * to check against, and if any match then we branch off into the
 * NEON handler code.
 *
 * Emulators may wish to make use of the following registers:
 *  r0  = instruction opcode (32-bit ARM or two 16-bit Thumb)
 *  r2  = PC value to resume execution after successful emulation
 *  r9  = normal "successful" return address
 *  r10 = this threads thread_info structure
 *  lr  = unrecognised instruction return address
 * IRQs enabled, FIQs enabled.
 */
	@
	@ Fall-through from Thumb-2 __und_usr
	@
#ifdef CONFIG_NEON
	get_thread_info r10			@ get current thread
	adr	r6, .LCneon_thumb_opcodes
	b	2f
#endif
call_fpe:
	get_thread_info r10			@ get current thread
#ifdef CONFIG_NEON
	adr	r6, .LCneon_arm_opcodes
2:	ldr	r5, [r6], #4			@ mask value
	ldr	r7, [r6], #4			@ opcode bits matching in mask
	cmp	r5, #0				@ end mask?
	beq	1f
	and	r8, r0, r5
	cmp	r8, r7				@ NEON instruction?
	bne	2b
	mov	r7, #1
	strb	r7, [r10, #TI_USED_CP + 10]	@ mark CP#10 as used
	strb	r7, [r10, #TI_USED_CP + 11]	@ mark CP#11 as used
	b	do_vfp				@ let VFP handler handle this
1:
#endif
	tst	r0, #0x08000000			@ only CDP/CPRT/LDC/STC have bit 27
	tstne	r0, #0x04000000			@ bit 26 set on both ARM and Thumb-2
	moveq	pc, lr
	and	r8, r0, #0x00000f00		@ mask out CP number
 THUMB(	lsr	r8, r8, #8		)
	mov	r7, #1
	add	r6, r10, #TI_USED_CP
 ARM(	strb	r7, [r6, r8, lsr #8]	)	@ set appropriate used_cp[]
 THUMB(	strb	r7, [r6, r8]		)	@ set appropriate used_cp[]
#ifdef CONFIG_IWMMXT
	@ Test if we need to give access to iWMMXt coprocessors
	ldr	r5, [r10, #TI_FLAGS]
	rsbs	r7, r8, #(1 << 8)		@ CP 0 or 1 only
	movcss	r7, r5, lsr #(TIF_USING_IWMMXT + 1)
	bcs	iwmmxt_task_enable
#endif
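	@ Dispatch on the coprocessor number: r8 holds CP# << 8, so
	@ "r8, lsr #6" is CP# * 4.  In ARM state reading pc yields the
	@ address of the current instruction + 8, i.e. the first entry of
	@ the table below, so pc := table base + CP# * 4.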
 ARM(	add	pc, pc, r8, lsr #6	)
 THUMB(	lsl	r8, r8, #2		)
 THUMB(	add	pc, r8			)
	nop

	movw_pc	lr				@ CP#0
	W(b)	do_fpe				@ CP#1 (FPE)
	W(b)	do_fpe				@ CP#2 (FPE)
	movw_pc	lr				@ CP#3
#ifdef CONFIG_CRUNCH
	b	crunch_task_enable		@ CP#4 (MaverickCrunch)
	b	crunch_task_enable		@ CP#5 (MaverickCrunch)
	b	crunch_task_enable		@ CP#6 (MaverickCrunch)
#else
	movw_pc	lr				@ CP#4
	movw_pc	lr				@ CP#5
	movw_pc	lr				@ CP#6
#endif
	movw_pc	lr				@ CP#7
	movw_pc	lr				@ CP#8
	movw_pc	lr				@ CP#9
#ifdef CONFIG_VFP
	W(b)	do_vfp				@ CP#10 (VFP)
	W(b)	do_vfp				@ CP#11 (VFP)
#else
	movw_pc	lr				@ CP#10 (VFP)
	movw_pc	lr				@ CP#11 (VFP)
#endif
	movw_pc	lr				@ CP#12
	movw_pc	lr				@ CP#13
	movw_pc	lr				@ CP#14 (Debug)
	movw_pc	lr				@ CP#15 (Control)

#ifdef NEED_CPU_ARCHITECTURE
	.align	2
.LCcpu_architecture:
	.word	__cpu_architecture
#endif

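@ Mask/opcode pairs scanned by the NEON check in call_fpe above; an entry
@ matches when (insn & mask) == opcode, and a zero mask terminates the table.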
#ifdef CONFIG_NEON
	.align	6

.LCneon_arm_opcodes:
	.word	0xfe000000			@ mask
	.word	0xf2000000			@ opcode

	.word	0xff100000			@ mask
	.word	0xf4000000			@ opcode

	.word	0x00000000			@ mask
	.word	0x00000000			@ opcode

.LCneon_thumb_opcodes:
	.word	0xef000000			@ mask
	.word	0xef000000			@ opcode

	.word	0xff100000			@ mask
	.word	0xf9000000			@ opcode

	.word	0x00000000			@ mask
	.word	0x00000000			@ opcode
#endif

do_fpe:
	ldr	r4, .LCfp
	add	r10, r10, #TI_FPSTATE		@ r10 = workspace
	ldr	pc, [r4]			@ Call FP module USR entry point

/*
 * The FP module is called with these registers set:
 *  r0  = instruction
 *  r2  = PC+4
 *  r9  = normal "successful" return address
 *  r10 = FP workspace
 *  lr  = unrecognised FP instruction return address
 */

	.pushsection .data
ENTRY(fp_enter)
	.word	no_fp
	.popsection

ENTRY(no_fp)
	mov	pc, lr
ENDPROC(no_fp)

__und_usr_fault_32:
	mov	r1, #4
	b	1f
__und_usr_fault_16:
	mov	r1, #2
1:	mov	r0, sp
	adr	lr, BSYM(ret_from_exception)
	b	__und_fault
ENDPROC(__und_usr_fault_32)
ENDPROC(__und_usr_fault_16)

	.align	5
__pabt_usr:
	usr_entry
	mov	r2, sp				@ regs
	pabt_helper
 UNWIND(.fnend		)
	/* fall through */

/*
 * This is the return code to user mode for abort handlers
 */
ENTRY(ret_from_exception)
 UNWIND(.fnstart	)
 UNWIND(.cantunwind	)
	get_thread_info tsk
	mov	why, #0
	b	ret_to_user
 UNWIND(.fnend		)
ENDPROC(__pabt_usr)
ENDPROC(ret_from_exception)

/*
 * Register switch for ARMv3 and ARMv4 processors
 * r0 = previous task_struct, r1 = previous thread_info, r2 = next thread_info
 * previous and next are guaranteed not to be the same.
 */
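@ The previous task_struct pointer (r0) is kept in r5 across the
@ thread_notify_head call and moved back to r0 before the new register
@ context is loaded, since __switch_to must return the previous task.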
ENTRY(__switch_to)
 UNWIND(.fnstart	)
 UNWIND(.cantunwind	)
	add	ip, r1, #TI_CPU_SAVE
 ARM(	stmia	ip!, {r4 - sl, fp, sp, lr} )	@ Store most regs on stack
 THUMB(	stmia	ip!, {r4 - sl, fp}	   )	@ Store most regs on stack
 THUMB(	str	sp, [ip], #4		   )
 THUMB(	str	lr, [ip], #4		   )
	ldr	r4, [r2, #TI_TP_VALUE]
	ldr	r5, [r2, #TI_TP_VALUE + 4]
#ifdef CONFIG_CPU_USE_DOMAINS
	ldr	r6, [r2, #TI_CPU_DOMAIN]
#endif
	switch_tls r1, r4, r5, r3, r7
#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
	ldr	r7, [r2, #TI_TASK]
	ldr	r8, =__stack_chk_guard
	ldr	r7, [r7, #TSK_STACK_CANARY]
#endif
#ifdef CONFIG_CPU_USE_DOMAINS
	mcr	p15, 0, r6, c3, c0, 0		@ Set domain register
#endif
	mov	r5, r0
	add	r4, r2, #TI_CPU_SAVE
	ldr	r0, =thread_notify_head
	mov	r1, #THREAD_NOTIFY_SWITCH
	bl	atomic_notifier_call_chain
#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
	str	r7, [r8]
#endif
 THUMB(	mov	ip, r4			   )
	mov	r0, r5
 ARM(	ldmia	r4, {r4 - sl, fp, sp, pc}  )	@ Load all regs saved previously
 THUMB(	ldmia	ip!, {r4 - sl, fp}	   )	@ Load all regs saved previously
 THUMB(	ldr	sp, [ip], #4		   )
 THUMB(	ldr	pc, [ip]		   )
 UNWIND(.fnend		)
ENDPROC(__switch_to)

	__INIT

/*
 * User helpers.
 *
 * Each segment is 32-byte aligned and will be moved to the top of the high
 * vector page.  New segments (if ever needed) must be added in front of
 * existing ones.  This mechanism should be used only for things that are
 * really small and justified, and not be abused freely.
 *
 * See Documentation/arm/kernel_user_helpers.txt for formal definitions.
 */
 THUMB(	.arm	)

	.macro	usr_ret, reg
#ifdef CONFIG_ARM_THUMB
	bx	\reg
#else
	mov	pc, \reg
#endif
	.endm

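@ kuser_pad pads a helper out to \size bytes: zero bytes up to the next
@ word boundary, then words of 0xe7fddef1 (a permanently undefined
@ instruction encoding) so a stray jump into the padding traps.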
	.macro	kuser_pad, sym, size
	.if	(. - \sym) & 3
	.rept	4 - (. - \sym) & 3
	.byte	0
	.endr
	.endif
	.rept	(\size - (. - \sym)) / 4
	.word	0xe7fddef1
	.endr
	.endm

#ifdef CONFIG_KUSER_HELPERS
	.align	5
	.globl	__kuser_helper_start
__kuser_helper_start:

/*
 * Due to the length of some sequences, __kuser_cmpxchg64 spans 2 regular
 * kuser "slots", therefore 0xffff0f80 is not used as a valid entry point.
 */

__kuser_cmpxchg64:				@ 0xffff0f60

#if defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)

	/*
	 * Poor you.  No fast solution possible...
	 * The kernel itself must perform the operation.
	 * A special ghost syscall is used for that (see traps.c).
	 */
	stmfd	sp!, {r7, lr}
	ldr	r7, 1f			@ it's 20 bits
	swi	__ARM_NR_cmpxchg64
	ldmfd	sp!, {r7, pc}
1:	.word	__ARM_NR_cmpxchg64

#elif defined(CONFIG_CPU_32v6K)

	stmfd	sp!, {r4, r5, r6, r7}
	ldrd	r4, r5, [r0]			@ load old val
	ldrd	r6, r7, [r1]			@ load new val
	smp_dmb	arm
1:	ldrexd	r0, r1, [r2]			@ load current val
	eors	r3, r0, r4			@ compare with oldval (1)
	eoreqs	r3, r1, r5			@ compare with oldval (2)
	strexdeq r3, r6, r7, [r2]		@ store newval if eq
	teqeq	r3, #1				@ success?
	beq	1b				@ if no then retry
	smp_dmb	arm
	rsbs	r0, r3, #0			@ set returned val and C flag
	ldmfd	sp!, {r4, r5, r6, r7}
	usr_ret	lr

#elif !defined(CONFIG_SMP)

#ifdef CONFIG_MMU

	/*
	 * The only thing that can break atomicity in this cmpxchg64
	 * implementation is either an IRQ or a data abort exception
	 * causing another process/thread to be scheduled in the middle of
	 * the critical sequence.  The same strategy as for cmpxchg is used.
	 */
	stmfd	sp!, {r4, r5, r6, lr}
	ldmia	r0, {r4, r5}			@ load old val
	ldmia	r1, {r6, lr}			@ load new val
1:	ldmia	r2, {r0, r1}			@ load current val
	eors	r3, r0, r4			@ compare with oldval (1)
	eoreqs	r3, r1, r5			@ compare with oldval (2)
2:	stmeqia	r2, {r6, lr}			@ store newval if eq
	rsbs	r0, r3, #0			@ set return val and C flag
	ldmfd	sp!, {r4, r5, r6, pc}

	.text
kuser_cmpxchg64_fixup:
	@ Called from kuser_cmpxchg_check macro.
	@ r4 = address of interrupted insn (must be preserved).
	@ sp = saved regs. r7 and r8 are clobbered.
	@ 1b = first critical insn, 2b = last critical insn.
	@ If r4 >= 1b and r4 <= 2b then saved pc_usr is set to 1b.
	mov	r7, #0xffff0fff
	sub	r7, r7, #(0xffff0fff - (0xffff0f60 + (1b - __kuser_cmpxchg64)))
	subs	r8, r4, r7
	rsbcss	r8, r8, #(2b - 1b)
	strcs	r7, [sp, #S_PC]
#if __LINUX_ARM_ARCH__ < 6
	bcc	kuser_cmpxchg32_fixup
#endif
	mov	pc, lr
	.previous

#else
#warning "NPTL on non MMU needs fixing"
	mov	r0, #-1
	adds	r0, r0, #0
	usr_ret	lr
#endif

#else
#error "incoherent kernel configuration"
#endif

	kuser_pad __kuser_cmpxchg64, 64

__kuser_memory_barrier:				@ 0xffff0fa0
	smp_dmb	arm
	usr_ret	lr

	kuser_pad __kuser_memory_barrier, 32

__kuser_cmpxchg:				@ 0xffff0fc0

#if defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)

	/*
	 * Poor you.  No fast solution possible...
	 * The kernel itself must perform the operation.
	 * A special ghost syscall is used for that (see traps.c).
	 */
	stmfd	sp!, {r7, lr}
	ldr	r7, 1f			@ it's 20 bits
	swi	__ARM_NR_cmpxchg
	ldmfd	sp!, {r7, pc}
1:	.word	__ARM_NR_cmpxchg

#elif __LINUX_ARM_ARCH__ < 6

#ifdef CONFIG_MMU

	/*
	 * The only thing that can break atomicity in this cmpxchg
	 * implementation is either an IRQ or a data abort exception
	 * causing another process/thread to be scheduled in the middle
	 * of the critical sequence.  To prevent this, code is added to
	 * the IRQ and data abort exception handlers to set the pc back
	 * to the beginning of the critical section if it is found to be
	 * within that critical section (see kuser_cmpxchg32_fixup).
	 */
1:	ldr	r3, [r2]			@ load current val
	subs	r3, r3, r0			@ compare with oldval
2:	streq	r1, [r2]			@ store newval if eq
	rsbs	r0, r3, #0			@ set return val and C flag
	usr_ret	lr

	.text
kuser_cmpxchg32_fixup:
	@ Called from kuser_cmpxchg_check macro.
	@ r4 = address of interrupted insn (must be preserved).
	@ sp = saved regs. r7 and r8 are clobbered.
	@ 1b = first critical insn, 2b = last critical insn.
	@ If r4 >= 1b and r4 <= 2b then saved pc_usr is set to 1b.
	mov	r7, #0xffff0fff
	sub	r7, r7, #(0xffff0fff - (0xffff0fc0 + (1b - __kuser_cmpxchg)))
	subs	r8, r4, r7
	rsbcss	r8, r8, #(2b - 1b)
	strcs	r7, [sp, #S_PC]
	mov	pc, lr
	.previous

#else
#warning "NPTL on non MMU needs fixing"
	mov	r0, #-1
	adds	r0, r0, #0
	usr_ret	lr
#endif

#else

	smp_dmb	arm
1:	ldrex	r3, [r2]
	subs	r3, r3, r0
	strexeq	r3, r1, [r2]
	teqeq	r3, #1
	beq	1b
	rsbs	r0, r3, #0
	/* beware -- each __kuser slot must be 8 instructions max */
	ALT_SMP(b	__kuser_memory_barrier)
	ALT_UP(usr_ret	lr)

#endif

	kuser_pad __kuser_cmpxchg, 32

__kuser_get_tls:				@ 0xffff0fe0
	ldr	r0, [pc, #(16 - 8)]	@ read TLS, set in kuser_get_tls_init
	usr_ret	lr
	mrc	p15, 0, r0, c13, c0, 3	@ 0xffff0fe8 hardware TLS code
	kuser_pad __kuser_get_tls, 16
	.rep	3
	.word	0			@ 0xffff0ff0 software TLS value, then
	.endr				@ pad up to __kuser_helper_version

__kuser_helper_version:				@ 0xffff0ffc
	.word	((__kuser_helper_end - __kuser_helper_start) >> 5)

	.globl	__kuser_helper_end
__kuser_helper_end:

#endif

 THUMB(	.thumb	)

/*
 * Vector stubs.
 *
 * This code is copied to 0xffff1000 so we can use branches in the
 * vectors, rather than ldr's.  Note that this code must not exceed
 * a page size.
 *
 * Common stub entry macro:
 *   Enter in IRQ mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
 *
 * SP points to a minimal amount of processor-specific scratch space in
 * which r0, lr and the spsr are saved before switching to SVC mode.
 */
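@ vector_stub builds one exception entry: \correction is subtracted from
@ lr to undo the architectural offset, r0/lr/spsr are saved in the scratch
@ area, the SPSR is rewritten so the final "movs pc, lr" lands in SVC
@ mode, and the low four mode bits of the interrupted CPSR index the
@ branch table that follows each stub.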
	.macro	vector_stub, name, mode, correction=0
	.align	5

vector_\name:
	.if \correction
	sub	lr, lr, #\correction
	.endif

	@
	@ Save r0, lr_<exception> (parent PC) and spsr_<exception>
	@ (parent CPSR)
	@
	stmia	sp, {r0, lr}		@ save r0, lr
	mrs	lr, spsr
	str	lr, [sp, #8]		@ save spsr

	@
	@ Prepare for SVC32 mode.  IRQs remain disabled.
	@
	mrs	r0, cpsr
	eor	r0, r0, #(\mode ^ SVC_MODE | PSR_ISETSTATE)
	msr	spsr_cxsf, r0

	@
	@ the branch table must immediately follow this code
	@
	and	lr, lr, #0x0f
 THUMB(	adr	r0, 1f			)
 THUMB(	ldr	lr, [r0, lr, lsl #2]	)
	mov	r0, sp
 ARM(	ldr	lr, [pc, lr, lsl #2]	)
	movs	pc, lr			@ branch to handler in SVC mode
ENDPROC(vector_\name)

	.align	2
	@ handler addresses follow this label
1:
	.endm

	.section .stubs, "ax", %progbits
__stubs_start:
	@ This must be the first word
	.word	vector_swi

vector_rst:
 ARM(	swi	SYS_ERROR0	)
 THUMB(	svc	#0		)
 THUMB(	nop			)
	b	vector_und

/*
 * Interrupt dispatcher
 */
	vector_stub	irq, IRQ_MODE, 4

	.long	__irq_usr			@  0  (USR_26 / USR_32)
	.long	__irq_invalid			@  1  (FIQ_26 / FIQ_32)
	.long	__irq_invalid			@  2  (IRQ_26 / IRQ_32)
	.long	__irq_svc			@  3  (SVC_26 / SVC_32)
	.long	__irq_invalid			@  4
	.long	__irq_invalid			@  5
	.long	__irq_invalid			@  6
	.long	__irq_invalid			@  7
	.long	__irq_invalid			@  8
	.long	__irq_invalid			@  9
	.long	__irq_invalid			@  a
	.long	__irq_invalid			@  b
	.long	__irq_invalid			@  c
	.long	__irq_invalid			@  d
	.long	__irq_invalid			@  e
	.long	__irq_invalid			@  f

/*
 * Data abort dispatcher
 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
 */
	vector_stub	dabt, ABT_MODE, 8

	.long	__dabt_usr			@  0  (USR_26 / USR_32)
	.long	__dabt_invalid			@  1  (FIQ_26 / FIQ_32)
	.long	__dabt_invalid			@  2  (IRQ_26 / IRQ_32)
	.long	__dabt_svc			@  3  (SVC_26 / SVC_32)
	.long	__dabt_invalid			@  4
	.long	__dabt_invalid			@  5
	.long	__dabt_invalid			@  6
	.long	__dabt_invalid			@  7
	.long	__dabt_invalid			@  8
	.long	__dabt_invalid			@  9
	.long	__dabt_invalid			@  a
	.long	__dabt_invalid			@  b
	.long	__dabt_invalid			@  c
	.long	__dabt_invalid			@  d
	.long	__dabt_invalid			@  e
	.long	__dabt_invalid			@  f

/*
 * Prefetch abort dispatcher
 * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
 */
	vector_stub	pabt, ABT_MODE, 4

	.long	__pabt_usr			@  0  (USR_26 / USR_32)
	.long	__pabt_invalid			@  1  (FIQ_26 / FIQ_32)
	.long	__pabt_invalid			@  2  (IRQ_26 / IRQ_32)
	.long	__pabt_svc			@  3  (SVC_26 / SVC_32)
	.long	__pabt_invalid			@  4
	.long	__pabt_invalid			@  5
	.long	__pabt_invalid			@  6
	.long	__pabt_invalid			@  7
	.long	__pabt_invalid			@  8
	.long	__pabt_invalid			@  9
	.long	__pabt_invalid			@  a
	.long	__pabt_invalid			@  b
	.long	__pabt_invalid			@  c
	.long	__pabt_invalid			@  d
	.long	__pabt_invalid			@  e
	.long	__pabt_invalid			@  f

/*
 * Undef instr entry dispatcher
 * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
 */
	vector_stub	und, UND_MODE

	.long	__und_usr			@  0  (USR_26 / USR_32)
	.long	__und_invalid			@  1  (FIQ_26 / FIQ_32)
	.long	__und_invalid			@  2  (IRQ_26 / IRQ_32)
	.long	__und_svc			@  3  (SVC_26 / SVC_32)
	.long	__und_invalid			@  4
	.long	__und_invalid			@  5
	.long	__und_invalid			@  6
	.long	__und_invalid			@  7
	.long	__und_invalid			@  8
	.long	__und_invalid			@  9
	.long	__und_invalid			@  a
	.long	__und_invalid			@  b
	.long	__und_invalid			@  c
	.long	__und_invalid			@  d
	.long	__und_invalid			@  e
	.long	__und_invalid			@  f

	.align	5

/*=============================================================================
 * Address exception handler
 *-----------------------------------------------------------------------------
 * These aren't too critical.
 * (they're not supposed to happen, and won't happen in 32-bit mode).
 */

vector_addrexcptn:
	b	vector_addrexcptn

/*=============================================================================
 * Undefined FIQs
 *-----------------------------------------------------------------------------
 * Enter in FIQ mode, spsr = ANY CPSR, lr = ANY PC
 * MUST PRESERVE SVC SPSR, but need to switch to SVC mode to show our msg.
 * Basically to switch modes, we *HAVE* to clobber one register...  brain
 * death ensues.  We may not even be able to report the offending
 * instruction, since we won't be able to load the saved SPSR value.
 */
	.align	5
vector_fiq:
	subs	pc, lr, #4

	.globl	vector_fiq_offset
	.equ	vector_fiq_offset, vector_fiq

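@ The eight-entry hardware vector table.  On most configurations it is
@ mapped at the high vectors address (0xffff0000); the stubs above are
@ copied to the page immediately after it, so plain branches reach them.
@ The SWI slot loads vector_swi's address from the first word of the
@ stubs page (__vectors_start + 0x1000).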
	.section .vectors, "ax", %progbits
__vectors_start:
	W(b)	vector_rst
	W(b)	vector_und
	W(ldr)	pc, __vectors_start + 0x1000
	W(b)	vector_pabt
	W(b)	vector_dabt
	W(b)	vector_addrexcptn
	W(b)	vector_irq
	W(b)	vector_fiq

	.data

	.globl	cr_alignment
cr_alignment:
	.space	4

#ifdef CONFIG_MULTI_IRQ_HANDLER
	.globl	handle_arch_irq
handle_arch_irq:
	.space	4
#endif