1
2
3
4
5
6
7
8#ifndef __ASSEMBLY__
9#error "Only include this from assembly code"
10#endif
11
12#ifndef __ASM_ASSEMBLER_H
13#define __ASM_ASSEMBLER_H
14
15#include <asm-generic/export.h>
16
17#include <asm/asm-offsets.h>
18#include <asm/cpufeature.h>
19#include <asm/cputype.h>
20#include <asm/debug-monitors.h>
21#include <asm/page.h>
22#include <asm/pgtable-hwdef.h>
23#include <asm/ptrace.h>
24#include <asm/thread_info.h>
25
	/*
	 * Save the current DAIF flags in \flags, then mask all four
	 * exception classes (Debug, SError, IRQ, FIQ).
	 */
	.macro save_and_disable_daif, flags
	mrs	\flags, daif
	msr	daifset, #0xf
	.endm

	/* Mask all of D, A, I and F. */
	.macro disable_daif
	msr	daifset, #0xf
	.endm

	/* Unmask all of D, A, I and F. */
	.macro enable_daif
	msr	daifclr, #0xf
	.endm

	/* Write \flags (as saved by save_and_disable_daif) back to DAIF. */
	.macro restore_daif, flags:req
	msr	daif, \flags
	.endm

	/* Copy only the D/A/I/F mask bits out of a saved PSTATE into DAIF. */
	.macro inherit_daif, pstate:req, tmp:req
	and	\tmp, \pstate, #(PSR_D_BIT | PSR_A_BIT | PSR_I_BIT | PSR_F_BIT)
	msr	daif, \tmp
	.endm

	/*
	 * Unmask Debug (8), SError (4) and FIQ (1) while leaving IRQ (2)
	 * masked - hence "da_f".
	 */
	.macro enable_da_f
	msr	daifclr, #(8 | 4 | 1)
	.endm

	/* Save DAIF in \flags, then mask IRQs only (bit 2). */
	.macro save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #2
	.endm

	/* Restore the mask state saved by save_and_disable_irq. */
	.macro restore_irq, flags
	msr	daif, \flags
	.endm

	/* Unmask debug exceptions. */
	.macro enable_dbg
	msr	daifclr, #8
	.endm
69
	/*
	 * Clear MDSCR_EL1.SS (hardware single-step) if TIF_SINGLESTEP is
	 * set in \flgs; the trailing isb makes the change take effect
	 * immediately.  \tmp is clobbered.
	 */
	.macro disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
	isb
9990:
	.endm

	/*
	 * Set MDSCR_EL1.SS if TIF_SINGLESTEP is set in \flgs.  \tmp is
	 * clobbered.  NOTE(review): no isb here - presumably callers
	 * rely on a subsequent exception return to synchronise; confirm.
	 */
	.macro enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
9990:
	.endm
87
88
89
90
	/* SMP data memory barrier with the given domain/type option. */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm

	/*
	 * RAS Error Synchronization Barrier, emitted as its HINT #16
	 * encoding (HINT space behaves as NOP where unimplemented).
	 */
	.macro	esb
	hint	#16
	.endm

	/*
	 * Value-prediction barrier (CSDB), emitted as its HINT #20
	 * encoding.
	 */
	.macro	csdb
	hint	#20
	.endm

	/*
	 * Speculation barrier.  Patched via alternatives: DSB NSH + ISB
	 * on CPUs without the SB instruction, SB + NOP otherwise (the
	 * nop keeps both alternative sequences the same length).
	 */
	.macro	sb
alternative_if_not ARM64_HAS_SB
	dsb	nsh
	isb
alternative_else
	SB_BARRIER_INSN
	nop
alternative_endif
	.endm

	/*
	 * Sanitise \idx with respect to speculation: branchlessly clamp
	 * \idx to 0 unless 0 <= \idx < \limit, then csdb so speculative
	 * execution cannot consume an unclamped value.
	 */
	.macro	mask_nospec64, idx, limit, tmp
	sub	\tmp, \idx, \limit		// negative iff idx < limit
	bic	\tmp, \tmp, \idx		// force positive if idx itself negative
	and	\idx, \idx, \tmp, asr #63	// all-ones mask iff in range
	csdb
	.endm
132
133
134
135
	/* Emit \num NOP instructions. */
	.macro	nops, num
	.rept	\num
	nop
	.endr
	.endm

	/*
	 * Emit an exception-table entry: a fault at \from is fixed up by
	 * branching to \to.  Both offsets are stored relative to the
	 * entry itself (". -" based) in the __ex_table section.
	 */
	.macro		_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\from - .), (\to - .)
	.popsection
	.endm
151
/*
 * Wrap a (user-space access) instruction sequence 'x' with an
 * exception-table entry: a fault in 'x' branches to the fixup label 'l'.
 */
#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l
155
156
157
158
/*
 * Register alias: link register.
 */
lr	.req	x30		// link register

	/*
	 * Exception vector entry: a 128-byte aligned (2^7) slot that
	 * simply branches to the real handler.
	 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm
168
169
170
171
/*
 * CPU_BE(code...) - emit the given code only on big-endian builds.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * CPU_LE(code...) - emit the given code only on little-endian builds.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif
186
187
188
189
190
191
/*
 * regs_to_64 - construct a 64-bit value in \rd from two 32-bit register
 * halves.  The low/high parameter order is swapped on big-endian builds
 * so callers can always pass the pair in memory order.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm
199
200
201
202
203
204
205
206
207
	/*
	 * adr_l - load the address of \sym into \dst, with a range of
	 * +/- 4 GB of the PC (adrp + add :lo12: pair).
	 */
	.macro	adr_l, dst, sym
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.endm

	/*
	 * ldr_l - load the value stored at \sym into \dst.  \tmp is an
	 * optional scratch register; supply it when \dst cannot be used
	 * as the base of the adrp (e.g. it is not a 64-bit GPR).
	 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

	/*
	 * str_l - store \src to \sym.  \tmp is a mandatory scratch
	 * register used to form the address.
	 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm
240
241
242
243
244
245
	/*
	 * adr_this_cpu - compute \dst = &\sym + this CPU's per-CPU
	 * offset.  The offset register is TPIDR_EL1, patched by
	 * alternatives to TPIDR_EL2 when running with VHE.  \tmp is
	 * clobbered.
	 */
	.macro adr_this_cpu, dst, sym, tmp
	adrp	\tmp, \sym
	add	\dst, \tmp, #:lo12:\sym
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\tmp, tpidr_el1
alternative_else
	mrs	\tmp, tpidr_el2
alternative_endif
	add	\dst, \dst, \tmp
	.endm

	/*
	 * ldr_this_cpu - load this CPU's copy of the per-CPU variable
	 * \sym into \dst.  \tmp is clobbered.
	 */
	.macro ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\tmp, tpidr_el1
alternative_else
	mrs	\tmp, tpidr_el2
alternative_endif
	ldr	\dst, [\dst, \tmp]
	.endm
271
272
273
274
	/*
	 * vma_vm_mm - load the vm_mm field (offset VMA_VM_MM) of the
	 * vma pointed to by \rn into \rd.
	 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

	/*
	 * mmid - load the context id field (offset MM_CONTEXT_ID) of the
	 * mm pointed to by \rn into \rd.
	 */
	.macro	mmid, rd, rn
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
	.endm

	/*
	 * read_ctr - read CTR_EL0.  On systems with mismatched cache
	 * types, alternatives patch in a load of the system-wide safe
	 * value instead (the nop keeps both sequences the same length).
	 */
	.macro	read_ctr, reg
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
	.endm
297
298
299
300
301
302
	/*
	 * raw_dcache_line_size - minimum D-cache line size in bytes on
	 * this CPU, from CTR_EL0.DminLine (bits [19:16], log2 of the
	 * number of 4-byte words).  \tmp is clobbered.
	 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// D line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual line size in bytes
	.endm

	/*
	 * dcache_line_size - safe D-cache line size across all CPUs
	 * (uses read_ctr, which may be patched to the system-wide safe
	 * value).  \tmp is clobbered.
	 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm	\tmp, \tmp, #16, #19		// D line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual line size in bytes
	.endm

	/*
	 * raw_icache_line_size - minimum I-cache line size in bytes on
	 * this CPU, from CTR_EL0.IminLine (bits [3:0]).  \tmp is
	 * clobbered.
	 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// I line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual line size in bytes
	.endm

	/*
	 * icache_line_size - safe I-cache line size across all CPUs.
	 * \tmp is clobbered.
	 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and	\tmp, \tmp, #0xf		// I line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual line size in bytes
	.endm
340
341
342
343
	/*
	 * tcr_set_t0sz - insert \t0sz into the T0SZ field of the TCR
	 * value held in \valreg.
	 */
	.macro	tcr_set_t0sz, valreg, t0sz
	bfi	\valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

	/*
	 * tcr_compute_pa_size - set the 3-bit IPS/PS field at bit \pos
	 * of TCR \tcr from ID_AA64MMFR0_EL1.PARange, capped at
	 * ID_AA64MMFR0_PARANGE_MAX so we never program a larger size
	 * than the kernel supports.  Clobbers \tmp0 and \tmp1.
	 */
	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
	mrs	\tmp0, ID_AA64MMFR0_EL1
	// Narrow PARange to fit the PS field in TCR_ELx
	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
	cmp	\tmp0, \tmp1
	csel	\tmp0, \tmp1, \tmp0, hi		// min(PARange, MAX)
	bfi	\tcr, \tmp0, \pos, #3
	.endm
365
366
367
368
369
370
371
372
373
374
375
	/*
	 * Promote a D-cache clean (\op = cvau/cvac) to clean+invalidate
	 * on CPUs affected by ARM64_WORKAROUND_CLEAN_CACHE (patched via
	 * alternatives).
	 */
	.macro __dcache_op_workaround_clean_cache, op, kaddr
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \kaddr
alternative_else
	dc	civac, \kaddr
alternative_endif
	.endm

	/*
	 * dcache_by_line_op - apply D-cache maintenance \op to every
	 * line in [\kaddr, \kaddr + \size), then dsb \domain.  The op
	 * is selected at assembly time; cvap/cvadp are emitted as raw
	 * SYS encodings.  Clobbers \kaddr, \size, \tmp1 and \tmp2.
	 */
	.macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
	dcache_line_size \tmp1, \tmp2
	add	\size, \kaddr, \size		// \size = end address
	sub	\tmp2, \tmp1, #1
	bic	\kaddr, \kaddr, \tmp2		// round down to line start
9998:
	.ifc	\op, cvau
	__dcache_op_workaround_clean_cache \op, \kaddr
	.else
	.ifc	\op, cvac
	__dcache_op_workaround_clean_cache \op, \kaddr
	.else
	.ifc	\op, cvap
	sys	3, c7, c12, 1, \kaddr	// dc cvap
	.else
	.ifc	\op, cvadp
	sys	3, c7, c13, 1, \kaddr	// dc cvadp
	.else
	dc	\op, \kaddr
	.endif
	.endif
	.endif
	.endif
	add	\kaddr, \kaddr, \tmp1		// next line
	cmp	\kaddr, \size
	b.lo	9998b
	dsb	\domain
	.endm
412
413
414
415
416
417
418
419
420
	/*
	 * invalidate_icache_by_line - invalidate the I-cache for
	 * [\start, \end).  The ic ivau is wrapped in USER() so a fault
	 * (e.g. on a user mapping) branches to \label via the exception
	 * table.  Clobbers \tmp1 and \tmp2.
	 */
	.macro invalidate_icache_by_line start, end, tmp1, tmp2, label
	icache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\tmp2, \start, \tmp2		// round down to line start
9997:
USER(\label, ic	ivau, \tmp2)			// invalidate I line to PoU
	add	\tmp2, \tmp2, \tmp1		// next line
	cmp	\tmp2, \end
	b.lo	9997b
	dsb	ish
	isb
	.endm
433
434
435
436
	/*
	 * reset_pmuserenr_el0 - if a PMU is implemented, disable EL0
	 * access to it.  ID_AA64DFR0_EL1.PMUVer is extracted with sbfx
	 * (sign-extending), so the 0xf "IMPLEMENTATION DEFINED" value
	 * reads as -1 and is skipped along with 0 (not implemented).
	 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	\tmpreg, #1			// skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// disable PMU access from EL0
9000:
	.endm
445
446
447
448
	/*
	 * copy_page - copy one page from \src to \dest, 64 bytes per
	 * iteration, using non-temporal (stnp) stores.  The loop stops
	 * when \src crosses a PAGE_SIZE boundary, so \src is assumed to
	 * be page aligned on entry.  \src and \dest are advanced past
	 * the page; \t1-\t8 are clobbered.
	 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)		// stop at page boundary
	b.ne	9998b
	.endm
463
464
465
466
467
/*
 * ENDPIPROC(x) - end procedure x and additionally emit a __pi_<x>
 * alias (same address, function type, same size) for use from
 * position-independent code.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type 	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)
474
475
476
477
/*
 * NOKPROBE(x) - record the address of symbol x in the
 * _kprobe_blacklist section so it cannot be kprobed (no-op without
 * CONFIG_KPROBES).
 */
#ifdef CONFIG_KPROBES
#define NOKPROBE(x)				\
	.pushsection "_kprobe_blacklist", "aw";	\
	.quad	x;				\
	.popsection;
#else
#define NOKPROBE(x)
#endif

/*
 * EXPORT_SYMBOL_NOKASAN(name) - export the symbol only on builds
 * without CONFIG_KASAN.
 */
#ifdef CONFIG_KASAN
#define EXPORT_SYMBOL_NOKASAN(name)
#else
#define EXPORT_SYMBOL_NOKASAN(name)	EXPORT_SYMBOL(name)
#endif
492
493
494
495
496
497
498
	/*
	 * Emit a little-endian 64-bit value built from the absolute
	 * symbols \sym\()_lo32 (low word first) and \sym\()_hi32.
	 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm
503
504
505
506
507
508
	/*
	 * mov_q - move a constant \val into a 64-bit register using the
	 * fewest movz/movk instructions for its magnitude (2-4, decided
	 * at assembly time).  The >> 31 / >> 47 comparisons against
	 * all-ones also accept sign-extended negative values via the
	 * :abs_g*_s: relocations.
	 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
523
524
525
526
	/*
	 * Return the current task_struct pointer in \rd; the arm64
	 * kernel keeps it in SP_EL0.
	 */
	.macro	get_current_task, rd
	mrs	\rd, sp_el0
	.endm
530
531
532
533
534
535
536
	/*
	 * offset_ttbr1 - apply the TTBR1 base-address offset required
	 * when 52-bit user VAs are configured (no-op otherwise).
	 */
	.macro	offset_ttbr1, ttbr
#ifdef CONFIG_ARM64_USER_VA_BITS_52
	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm

	/*
	 * restore_ttbr1 - undo offset_ttbr1, recovering the original
	 * TTBR1 base address.
	 */
	.macro	restore_ttbr1, ttbr
#ifdef CONFIG_ARM64_USER_VA_BITS_52
	bic	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm
553
554
555
556
557
558
559
560
	/*
	 * phys_to_ttbr - convert physical address \phys into a TTBR
	 * BADDR value.  With 52-bit PAs, PA bits [51:48] are folded
	 * down into BADDR bits [5:2] (the lsr #46 places them there).
	 */
	.macro	phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	orr	\ttbr, \phys, \phys, lsr #46
	and	\ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
	mov	\ttbr, \phys
#endif
	.endm

	/* phys_to_pte - convert physical address \phys into a PTE
	 * address field. */
	.macro	phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	/*
	 * With 52-bit PAs, PA bits [51:48] are stored in PTE bits
	 * [15:12] (lsr #36).  \phys is assumed 64K aligned, which holds
	 * because 52-bit PAs are only supported with 64K pages.
	 */
	orr	\pte, \phys, \phys, lsr #36
	and	\pte, \pte, #PTE_ADDR_MASK
#else
	mov	\pte, \phys
#endif
	.endm

	/*
	 * pte_to_phys - inverse of phys_to_pte: reassemble the physical
	 * address, moving PTE bits [15:12] back up to PA bits [51:48]
	 * (low 16 PA bits end up zero - 64K aligned).
	 */
	.macro	pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
	ubfiz	\phys, \pte, #(48 - 16 - 12), #16
	bfxil	\phys, \pte, #16, #32
	lsl	\phys, \phys, #16
#else
	and	\phys, \pte, #PTE_ADDR_MASK
#endif
	.endm
592
593
594
595
	/*
	 * tcr_clear_errata_bits - clear TCR bits that must not be set
	 * on CPUs affected by Fujitsu erratum 010001 (identified by
	 * masked MIDR match).  Clobbers \tmp1 and \tmp2.
	 */
	.macro	tcr_clear_errata_bits, tcr, tmp1, tmp2
#ifdef CONFIG_FUJITSU_ERRATUM_010001
	mrs	\tmp1, midr_el1
	// Compare masked MIDR against the affected part's value
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001_MASK
	and	\tmp1, \tmp1, \tmp2
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001
	cmp	\tmp1, \tmp2
	b.ne	10f				// unaffected CPU: leave \tcr alone
	// Affected CPU: clear the offending TCR bits
	mov_q	\tmp2, TCR_CLEAR_FUJITSU_ERRATUM_010001
	bic	\tcr, \tcr, \tmp2
10:
#endif
	.endm
611
612
613
614
615
	/*
	 * pre_disable_mmu_workaround - barrier required before turning
	 * the MMU off on CPUs with Qualcomm Falkor erratum E1041
	 * (no-op otherwise).
	 */
	.macro	pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
	isb
#endif
	.endm
621
622
623
624
625
626
627
	/*
	 * frame_push - push a stack frame and save \regcount (0..10)
	 * callee-saved registers starting at x19, reserving \extra
	 * additional bytes below them.  Must be paired with frame_pop.
	 */
	.macro		frame_push, regcount:req, extra
	__frame		st, \regcount, \extra
	.endm

	/*
	 * frame_pop - restore the registers and stack frame set up by
	 * the matching frame_push.
	 */
	.macro		frame_pop
	__frame		ld
	.endm

	/*
	 * __frame_regs - helper: load/store (\op = ld/st) the pair
	 * \reg1/\reg2 at frame slot \num, degrading to a single-register
	 * access when \num is the last requested register.
	 */
	.macro __frame_regs, reg1, reg2, op, num
	.if .Lframe_regcount == \num
	\op\()r	\reg1, [sp, #(\num + 1) * 8]
	.elseif .Lframe_regcount > \num
	\op\()p	\reg1, \reg2, [sp, #(\num + 1) * 8]
	.endif
	.endm

	/*
	 * __frame - implementation behind frame_push/frame_pop.  On
	 * 'st' it validates the arguments, records the layout in the
	 * assembler symbols .Lframe_regcount / .Lframe_extra /
	 * .Lframe_local_offset (frame size rounded up so sp stays
	 * 16-byte aligned), and pushes x29/x30 plus the requested
	 * registers.  On 'ld' it restores everything and sets
	 * .Lframe_regcount to -1 so improper nesting is caught at
	 * assembly time.
	 */
	.macro __frame, op, regcount, extra=0
	.ifc \op, st
	.if (\regcount) < 0 || (\regcount) > 10
	.error "regcount should be in the range [0 ... 10]"
	.endif
	.if ((\extra) % 16) != 0
	.error "extra should be a multiple of 16 bytes"
	.endif
	.ifdef .Lframe_regcount
	.if .Lframe_regcount != -1
	.error "frame_push/frame_pop may not be nested"
	.endif
	.endif
	.set .Lframe_regcount, \regcount
	.set .Lframe_extra, \extra
	.set .Lframe_local_offset, ((\regcount + 3) / 2) * 16
	stp	x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
	mov	x29, sp
	.endif

	__frame_regs	x19, x20, \op, 1
	__frame_regs	x21, x22, \op, 3
	__frame_regs	x23, x24, \op, 5
	__frame_regs	x25, x26, \op, 7
	__frame_regs	x27, x28, \op, 9

	.ifc \op, ld
	.if .Lframe_regcount == -1
	.error "frame_push/frame_pop may not be nested"
	.endif
	ldp	x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
	.set .Lframe_regcount, -1
	.endif
	.endm
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
	/*
	 * cond_yield_neon - inside a kernel-mode NEON section, yield the
	 * NEON unit (kernel_neon_end / kernel_neon_begin) when the
	 * check in if_will_cond_yield_neon fires, then resume at \lbl
	 * (or straight after the macro when \lbl is omitted).  Only
	 * active under CONFIG_PREEMPT; clobbers x0.
	 */
	.macro cond_yield_neon, lbl
	if_will_cond_yield_neon
	do_cond_yield_neon
	endif_yield_neon \lbl
	.endm

	/*
	 * if_will_cond_yield_neon - open the conditional-yield sequence.
	 * Branches to the out-of-line yield path (subsection 1) when the
	 * task's TSK_TI_PREEMPT word minus PREEMPT_DISABLE_OFFSET reads
	 * zero; otherwise falls through without yielding.  NOTE(review):
	 * the single 64-bit load appears to combine the preempt count
	 * with the need-resched state - confirm against thread_info
	 * layout.  Without CONFIG_PREEMPT the enclosed code is emitted
	 * into a .discard section and never runs.
	 */
	.macro if_will_cond_yield_neon
#ifdef CONFIG_PREEMPT
	get_current_task x0
	ldr	x0, [x0, #TSK_TI_PREEMPT]
	sub	x0, x0, #PREEMPT_DISABLE_OFFSET
	cbz	x0, .Lyield_\@
	/* fall through: no yield needed */
	.subsection 1
.Lyield_\@ :
#else
	.section ".discard.cond_yield_neon", "ax"
#endif
	.endm

	/*
	 * do_cond_yield_neon - perform the yield: end and re-begin the
	 * kernel-mode NEON section.
	 */
	.macro do_cond_yield_neon
	bl	kernel_neon_end
	bl	kernel_neon_begin
	.endm

	/*
	 * endif_yield_neon - close the sequence: branch back to \lbl
	 * (or just past the macro when \lbl is blank) and return to the
	 * previous section.
	 */
	.macro endif_yield_neon, lbl
	.ifnb	\lbl
	b	\lbl
	.else
	b	.Lyield_out_\@
	.endif
	.previous
.Lyield_out_\@ :
	.endm
754
755#endif
756