1#ifndef __ASM_ARM_SYSTEM_H
2#define __ASM_ARM_SYSTEM_H
3
4#include <common.h>
5#include <linux/compiler.h>
6#include <asm/barriers.h>
7
#ifdef CONFIG_ARM64

/*
 * SCTLR_ELx bits (System Control Register, AArch64)
 */
#define CR_M (1 << 0) /* MMU enable */
#define CR_A (1 << 1) /* Alignment abort enable */
#define CR_C (1 << 2) /* Dcache enable */
#define CR_SA (1 << 3) /* Stack alignment check enable */
#define CR_I (1 << 12) /* Icache enable */
#define CR_WXN (1 << 19) /* Write permission implies XN */
#define CR_EE (1 << 25) /* Exception (big) endian */

/* Target execution state for the armv8 EL-switch helpers below */
#define ES_TO_AARCH64 1
#define ES_TO_AARCH32 0

/*
 * SCR_EL3 bits definitions
 */
#define SCR_EL3_RW_AARCH64 (1 << 10) /* Next lower level is AArch64 */
#define SCR_EL3_RW_AARCH32 (0 << 10) /* Lower levels are AArch32 */
#define SCR_EL3_HCE_EN (1 << 8) /* Hypervisor Call enable */
#define SCR_EL3_SMD_DIS (1 << 7) /* Secure Monitor Call disable */
#define SCR_EL3_RES1 (3 << 4) /* Reserved, RES1 */
#define SCR_EL3_NS_EN (1 << 0) /* EL0 and EL1 in Non-secure state */

/*
 * SPSR_EL3/SPSR_EL2 bits definitions
 */
#define SPSR_EL_END_LE (0 << 9) /* Exception little-endian */
#define SPSR_EL_DEBUG_MASK (1 << 9) /* Debug exception masked */
#define SPSR_EL_ASYN_MASK (1 << 8) /* Asynchronous data abort masked */
#define SPSR_EL_SERR_MASK (1 << 8) /* System Error exception masked */
#define SPSR_EL_IRQ_MASK (1 << 7) /* IRQ exception masked */
#define SPSR_EL_FIQ_MASK (1 << 6) /* FIQ exception masked */
#define SPSR_EL_T_A32 (0 << 5) /* AArch32 instruction set A32 */
#define SPSR_EL_M_AARCH64 (0 << 4) /* Exception taken from AArch64 */
#define SPSR_EL_M_AARCH32 (1 << 4) /* Exception taken from AArch32 */
#define SPSR_EL_M_SVC (0x3) /* Exception taken from SVC mode */
#define SPSR_EL_M_HYP (0xa) /* Exception taken from HYP mode */
#define SPSR_EL_M_EL1H (5) /* Exception taken from EL1h mode */
#define SPSR_EL_M_EL2H (9) /* Exception taken from EL2h mode */

/*
 * CPTR_EL2 bits definitions
 */
#define CPTR_EL2_RES1 (3 << 12 | 0x3ff) /* Reserved, RES1 */

/*
 * SCTLR_EL2 bits definitions
 */
#define SCTLR_EL2_RES1 (3 << 28 | 3 << 22 | 1 << 18 | 1 << 16 |\
			1 << 11 | 3 << 4) /* Reserved, RES1 */
#define SCTLR_EL2_EE_LE (0 << 25) /* Exception little-endian */
#define SCTLR_EL2_WXN_DIS (0 << 19) /* Write permission is not XN */
#define SCTLR_EL2_ICACHE_DIS (0 << 12) /* Icache disabled */
#define SCTLR_EL2_SA_DIS (0 << 3) /* Stack alignment check disabled */
#define SCTLR_EL2_DCACHE_DIS (0 << 2) /* Dcache disabled */
#define SCTLR_EL2_ALIGN_DIS (0 << 1) /* Alignment check disabled */
#define SCTLR_EL2_MMU_DIS (0) /* MMU disabled */

/*
 * CNTHCTL_EL2 bits definitions
 */
#define CNTHCTL_EL2_EL1PCEN_EN (1 << 1) /* Physical timer regs accessible */
#define CNTHCTL_EL2_EL1PCTEN_EN (1 << 0) /* Physical counter accessible */

/*
 * HCR_EL2 bits definitions
 */
#define HCR_EL2_RW_AARCH64 (1 << 31) /* EL1 is AArch64 */
#define HCR_EL2_RW_AARCH32 (0 << 31) /* Lower levels are AArch32 */
#define HCR_EL2_HCD_DIS (1 << 29) /* Hypervisor Call disabled */

/*
 * CPACR_EL1 bits definitions
 */
#define CPACR_EL1_FPEN_EN (3 << 20) /* SIMD and FP instructions enabled */

/*
 * SCTLR_EL1 bits definitions
 */
#define SCTLR_EL1_RES1 (3 << 28 | 3 << 22 | 1 << 20 |\
			1 << 11) /* Reserved, RES1 */
#define SCTLR_EL1_UCI_DIS (0 << 26) /* EL0 cache maintenance disabled */
#define SCTLR_EL1_EE_LE (0 << 25) /* Exception little-endian */
#define SCTLR_EL1_WXN_DIS (0 << 19) /* Write permission is not XN */
#define SCTLR_EL1_NTWE_DIS (0 << 18) /* EL0 WFE trapped */
#define SCTLR_EL1_NTWI_DIS (0 << 16) /* EL0 WFI trapped */
#define SCTLR_EL1_UCT_DIS (0 << 15) /* EL0 CTR_EL0 access disabled */
#define SCTLR_EL1_DZE_DIS (0 << 14) /* EL0 DC ZVA disabled */
#define SCTLR_EL1_ICACHE_DIS (0 << 12) /* Icache disabled */
#define SCTLR_EL1_UMA_DIS (0 << 9) /* User mask access disabled */
#define SCTLR_EL1_SED_EN (0 << 8) /* A32 SETEND instruction enabled */
#define SCTLR_EL1_ITD_EN (0 << 7) /* T32 IT instructions enabled */
#define SCTLR_EL1_CP15BEN_DIS (0 << 5) /* CP15 barrier operations disabled */
#define SCTLR_EL1_SA0_DIS (0 << 4) /* EL0 stack alignment check disabled */
#define SCTLR_EL1_SA_DIS (0 << 3) /* EL1 stack alignment check disabled */
#define SCTLR_EL1_DCACHE_DIS (0 << 2) /* Dcache disabled */
#define SCTLR_EL1_ALIGN_DIS (0 << 1) /* Alignment check disabled */
#define SCTLR_EL1_MMU_DIS (0) /* MMU disabled */
109
#ifndef __ASSEMBLY__

/* Page-table size is computed at runtime (board/SoC dependent) */
u64 get_page_table_size(void);
#define PGTABLE_SIZE get_page_table_size()

/* 2MB granularity */
#define MMU_SECTION_SHIFT 21
#define MMU_SECTION_SIZE (1 << MMU_SECTION_SHIFT)

/*
 * Cache options ORed into page-table entries (attribute index << 2).
 * NOTE(review): these values presumably must stay in sync with the memory
 * attribute/MAIR setup used by the armv8 MMU code - confirm before changing.
 */
enum dcache_option {
	DCACHE_OFF = 0 << 2,
	DCACHE_WRITETHROUGH = 3 << 2,
	DCACHE_WRITEBACK = 4 << 2,
	DCACHE_WRITEALLOC = 4 << 2,
};

/* Wait for interrupt: idle the core until an interrupt/event arrives */
#define wfi() \
	({asm volatile( \
	"wfi" : : : "memory"); \
	})
131
132static inline unsigned int current_el(void)
133{
134 unsigned int el;
135 asm volatile("mrs %0, CurrentEL" : "=r" (el) : : "cc");
136 return el >> 2;
137}
138
139static inline unsigned int get_sctlr(void)
140{
141 unsigned int el, val;
142
143 el = current_el();
144 if (el == 1)
145 asm volatile("mrs %0, sctlr_el1" : "=r" (val) : : "cc");
146 else if (el == 2)
147 asm volatile("mrs %0, sctlr_el2" : "=r" (val) : : "cc");
148 else
149 asm volatile("mrs %0, sctlr_el3" : "=r" (val) : : "cc");
150
151 return val;
152}
153
154static inline void set_sctlr(unsigned int val)
155{
156 unsigned int el;
157
158 el = current_el();
159 if (el == 1)
160 asm volatile("msr sctlr_el1, %0" : : "r" (val) : "cc");
161 else if (el == 2)
162 asm volatile("msr sctlr_el2, %0" : : "r" (val) : "cc");
163 else
164 asm volatile("msr sctlr_el3, %0" : : "r" (val) : "cc");
165
166 asm volatile("isb");
167}
168
169static inline unsigned long read_mpidr(void)
170{
171 unsigned long val;
172
173 asm volatile("mrs %0, mpidr_el1" : "=r" (val));
174
175 return val;
176}
177
#define BSP_COREID 0

/* Cache maintenance primitives implemented in assembly */
void __asm_flush_dcache_all(void);
void __asm_invalidate_dcache_all(void);
void __asm_flush_dcache_range(u64 start, u64 end);

/*
 * __asm_invalidate_dcache_range() - invalidate the dcache for the range
 * [start, end)
 *
 * @start: start of the range to invalidate
 * @end:   end of the range (exclusive)
 *
 * NOTE(review): cache-line alignment of start/end is presumably the
 * caller's responsibility - confirm against the asm implementation.
 */
void __asm_invalidate_dcache_range(u64 start, u64 end);
void __asm_invalidate_tlb_all(void);
void __asm_invalidate_icache_all(void);
/*
 * L3 cache maintenance; the int return presumably reports status -
 * verify against the platform's assembly implementation.
 */
int __asm_invalidate_l3_dcache(void);
int __asm_flush_l3_dcache(void);
int __asm_invalidate_l3_icache(void);
/* Switch to a new translation table base address */
void __asm_switch_ttbr(u64 new_ttbr);

/*
 * armv8_switch_to_el2() - drop to EL2 and branch to entry_point, passing
 * (args, mach_nr, fdt_addr, arg4) through.
 *
 * @es_flag: target execution state, ES_TO_AARCH64 or ES_TO_AARCH32
 *
 * Never returns.
 */
void __noreturn armv8_switch_to_el2(u64 args, u64 mach_nr, u64 fdt_addr,
			u64 arg4, u64 entry_point, u64 es_flag);

/*
 * armv8_switch_to_el1() - drop from EL2 to EL1 and branch to entry_point;
 * same argument convention as armv8_switch_to_el2().
 */
void armv8_switch_to_el1(u64 args, u64 mach_nr, u64 fdt_addr,
			u64 arg4, u64 entry_point, u64 es_flag);
/* armv8_el2_to_aarch32() - enter a 32-bit entry point from EL2 */
void armv8_el2_to_aarch32(u64 args, u64 mach_nr, u64 fdt_addr,
			u64 arg4, u64 entry_point);
/* GIC and multiprocessor bring-up helpers */
void gic_init(void);
void gic_send_sgi(unsigned long sgino);
void wait_for_wakeup(void);
void protect_secure_region(void);
void smp_kick_all_cpus(void);

void flush_l3_cache(void);
/*
 * mmu_change_region_attr() - change the page-table attributes of a region
 *
 * @start: physical start address of the region
 * @size:  size of the region in bytes
 * @attrs: new attribute bits for the region's entries
 */
void mmu_change_region_attr(phys_addr_t start, size_t size, u64 attrs);

/*
 * smc_call() - issue a Secure Monitor Call
 *
 * @args: pt_regs holding the call arguments on entry and the results on
 *        return (register convention defined by the asm implementation)
 */
void smc_call(struct pt_regs *args);

/* PSCI-backed reset and power-off; never return */
void __noreturn psci_system_reset(void);
void __noreturn psci_system_off(void);

#ifdef CONFIG_ARMV8_PSCI
/* Bounds of the secure code and secure stack (presumably linker symbols) */
extern char __secure_start[];
extern char __secure_end[];
extern char __secure_stack_start[];
extern char __secure_stack_end[];

void armv8_setup_psci(void);
void psci_setup_vectors(void);
void psci_arch_init(void);
#endif
268
269#endif
270
271#else
272
#ifdef __KERNEL__

/* CPU architecture generations (as decoded from the CPU ID registers) */
#define CPU_ARCH_UNKNOWN 0
#define CPU_ARCH_ARMv3 1
#define CPU_ARCH_ARMv4 2
#define CPU_ARCH_ARMv4T 3
#define CPU_ARCH_ARMv5 4
#define CPU_ARCH_ARMv5T 5
#define CPU_ARCH_ARMv5TE 6
#define CPU_ARCH_ARMv5TEJ 7
#define CPU_ARCH_ARMv6 8
#define CPU_ARCH_ARMv7 9

/*
 * CR1 bits (CP#15 CR1)
 */
#define CR_M (1 << 0) /* MMU enable */
#define CR_A (1 << 1) /* Alignment abort enable */
#define CR_C (1 << 2) /* Dcache enable */
#define CR_W (1 << 3) /* Write buffer enable */
#define CR_P (1 << 4) /* 32-bit exception handler */
#define CR_D (1 << 5) /* 32-bit data address range */
#define CR_L (1 << 6) /* Implementation defined */
#define CR_B (1 << 7) /* Big endian */
#define CR_S (1 << 8) /* System MMU protection */
#define CR_R (1 << 9) /* ROM MMU protection */
#define CR_F (1 << 10) /* Implementation defined */
#define CR_Z (1 << 11) /* Implementation defined */
#define CR_I (1 << 12) /* Icache enable */
#define CR_V (1 << 13) /* Vectors relocated to 0xffff0000 */
#define CR_RR (1 << 14) /* Round Robin cache replacement */
#define CR_L4 (1 << 15) /* LDR pc can set T bit */
#define CR_DT (1 << 16)
#define CR_IT (1 << 18)
#define CR_ST (1 << 19)
#define CR_FI (1 << 21) /* Fast interrupt (lower latency mode) */
#define CR_U (1 << 22) /* Unaligned access operation */
#define CR_XP (1 << 23) /* Extended page tables */
#define CR_VE (1 << 24) /* Vectored interrupts */
#define CR_EE (1 << 25) /* Exception (big) endian */
#define CR_TRE (1 << 28) /* TEX remap enable */
#define CR_AFE (1 << 29) /* Access flag enable */
#define CR_TE (1 << 30) /* Thumb exception enable */

/*
 * Translation table size: 5 pages with LPAE (presumably first level plus
 * four second-level tables - confirm against the MMU setup code), a single
 * 16KB first-level table otherwise.
 */
#if defined(CONFIG_ARMV7_LPAE) && !defined(PGTABLE_SIZE)
#define PGTABLE_SIZE (4096 * 5)
#elif !defined(PGTABLE_SIZE)
#define PGTABLE_SIZE (4096 * 4)
#endif

/*
 * Fail the assembly (.err) if the register the compiler actually allocated
 * for an inline-asm operand (x) is not the register the sequence needs (y).
 */
#define __asmeq(x, y) ".ifnc " x "," y " ; .err ; .endif\n\t"
332
#ifndef __ASSEMBLY__

#ifdef CONFIG_ARMV7_LPAE
void switch_to_hypervisor_ret(void);
#endif

/* Architectural no-op */
#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

/* Wait-for-interrupt is only available from ARMv7-A on; no-op otherwise */
#ifdef __ARM_ARCH_7A__
#define wfi() __asm__ __volatile__ ("wfi" : : : "memory")
#else
#define wfi()
#endif
346
347static inline unsigned long get_cpsr(void)
348{
349 unsigned long cpsr;
350
351 asm volatile("mrs %0, cpsr" : "=r"(cpsr): );
352 return cpsr;
353}
354
/*
 * is_hyp() - report whether the CPU is executing in HYP mode.
 *
 * Without CONFIG_ARMV7_LPAE this build never runs in HYP, so the answer
 * is a constant 0.
 */
static inline int is_hyp(void)
{
#ifdef CONFIG_ARMV7_LPAE
	/* HYP is CPSR mode 0x1a; the mode field is CPSR[4:0] */
	unsigned long mode = get_cpsr() & 0x1f;

	return mode == 0x1a;
#else
	return 0;
#endif
}
365
/*
 * get_cr() - read the System Control Register.
 *
 * Uses the HYP-banked copy (coprocessor opc1 == 4) when running in HYP
 * mode, the normal copy (opc1 == 0) otherwise.
 */
static inline unsigned int get_cr(void)
{
	unsigned int val;

	if (is_hyp())
		asm volatile("mrc p15, 4, %0, c1, c0, 0 @ get CR" : "=r" (val)
				      :
				      : "cc");
	else
		asm volatile("mrc p15, 0, %0, c1, c0, 0 @ get CR" : "=r" (val)
				      :
				      : "cc");
	return val;
}
380
/*
 * set_cr() - write the System Control Register.
 * @val: new control register value
 *
 * Writes the HYP-banked copy (opc1 == 4) in HYP mode, the normal copy
 * otherwise; the trailing ISB makes the change take effect before the
 * caller continues.
 */
static inline void set_cr(unsigned int val)
{
	if (is_hyp())
		asm volatile("mcr p15, 4, %0, c1, c0, 0 @ set CR" :
				      : "r" (val)
				      : "cc");
	else
		asm volatile("mcr p15, 0, %0, c1, c0, 0 @ set CR" :
				      : "r" (val)
				      : "cc");
	isb();
}
393
394static inline unsigned int get_dacr(void)
395{
396 unsigned int val;
397 asm("mrc p15, 0, %0, c3, c0, 0 @ get DACR" : "=r" (val) : : "cc");
398 return val;
399}
400
/*
 * set_dacr() - write the Domain Access Control Register.
 * @val: new domain access permissions
 *
 * The ISB makes the new permissions visible before the caller continues.
 */
static inline void set_dacr(unsigned int val)
{
	asm volatile("mcr p15, 0, %0, c3, c0, 0 @ set DACR"
		  : : "r" (val) : "cc");
	isb();
}
407
#ifdef CONFIG_ARMV7_LPAE
/* Long-descriptor (LPAE) section/block descriptor bits */
#define TTB_SECT_XN_MASK (1ULL << 54) /* Execute Never */
#define TTB_SECT_NG_MASK (1 << 11) /* Not Global */
#define TTB_SECT_AF (1 << 10) /* Access Flag */
#define TTB_SECT_SH_MASK (3 << 8) /* Shareability */
#define TTB_SECT_NS_MASK (1 << 5) /* Non-Secure */
#define TTB_SECT_AP (1 << 6) /* Access Permission */

/* AttrIndx[2:0]: index into the MAIR0/MAIR1 memory attribute bytes */
#define TTB_SECT_MAIR(x) ((x & 0x7) << 2)
#define TTB_SECT (1 << 0) /* Block descriptor */
#define TTB_PAGETABLE (3 << 0) /* Next level is a page table */

/* TTBCR (Translation Table Base Control Register) flags */
#define TTBCR_EAE (1 << 31) /* Extended Address Enable (long descriptors) */
#define TTBCR_T0SZ(x) ((x) << 0)
#define TTBCR_T1SZ(x) ((x) << 16)
#define TTBCR_USING_TTBR0 (TTBCR_T0SZ(0) | TTBCR_T1SZ(0))
#define TTBCR_IRGN0_NC (0 << 8) /* Inner non-cacheable table walks */
#define TTBCR_IRGN0_WBWA (1 << 8)
#define TTBCR_IRGN0_WT (2 << 8)
#define TTBCR_IRGN0_WBNWA (3 << 8)
#define TTBCR_IRGN0_MASK (3 << 8)
#define TTBCR_ORGN0_NC (0 << 10) /* Outer non-cacheable table walks */
#define TTBCR_ORGN0_WBWA (1 << 10)
#define TTBCR_ORGN0_WT (2 << 10)
#define TTBCR_ORGN0_WBNWA (3 << 10)
#define TTBCR_ORGN0_MASK (3 << 10)
#define TTBCR_SHARED_NON (0 << 12)
#define TTBCR_SHARED_OUTER (2 << 12)
#define TTBCR_SHARED_INNER (3 << 12)
#define TTBCR_EPD0 (0 << 7)

/*
 * MAIR attribute bytes, matching the TTB_SECT_MAIR() indices used by the
 * dcache_option enum below:
 *   0: 0x00 device / strongly-ordered
 *   1: 0x88 normal, write-through
 *   2: 0xcc normal, write-back
 *   3: 0xff normal, write-back, write-allocate
 */
#define MEMORY_ATTRIBUTES ((0x00 << (0 * 8)) | (0x88 << (1 * 8)) | \
			   (0xcc << (2 * 8)) | (0xff << (3 * 8)))

/* Options available for data cache on each page */
enum dcache_option {
	DCACHE_OFF = TTB_SECT | TTB_SECT_MAIR(0) | TTB_SECT_XN_MASK,
	DCACHE_WRITETHROUGH = TTB_SECT | TTB_SECT_MAIR(1),
	DCACHE_WRITEBACK = TTB_SECT | TTB_SECT_MAIR(2),
	DCACHE_WRITEALLOC = TTB_SECT | TTB_SECT_MAIR(3),
};
#elif defined(CONFIG_CPU_V7)
/* Short-descriptor section bits for ARMv7 (non-LPAE) */
#define TTB_SECT_NS_MASK (1 << 19) /* Non-Secure */
#define TTB_SECT_NG_MASK (1 << 17) /* Not Global */
#define TTB_SECT_S_MASK (1 << 16) /* Shareable */

#define TTB_SECT_AP (3 << 10) /* AP[1:0] access permissions */
#define TTB_SECT_TEX(x) ((x & 0x7) << 12) /* Type extension */
#define TTB_SECT_DOMAIN(x) ((x & 0xf) << 5)
#define TTB_SECT_XN_MASK (1 << 4) /* Execute Never */
#define TTB_SECT_C_MASK (1 << 3) /* Cacheable */
#define TTB_SECT_B_MASK (1 << 2) /* Bufferable */
#define TTB_SECT (2 << 0) /* Section descriptor */

/* Options available for data cache on each page */
enum dcache_option {
	DCACHE_OFF = TTB_SECT_DOMAIN(0) | TTB_SECT_XN_MASK | TTB_SECT,
	DCACHE_WRITETHROUGH = DCACHE_OFF | TTB_SECT_C_MASK,
	DCACHE_WRITEBACK = DCACHE_WRITETHROUGH | TTB_SECT_B_MASK,
	DCACHE_WRITEALLOC = DCACHE_WRITEBACK | TTB_SECT_TEX(1),
};
#else
#define TTB_SECT_AP (3 << 10)
/* Legacy (pre-v7) section attribute encodings */
enum dcache_option {
	DCACHE_OFF = 0x12,
	DCACHE_WRITETHROUGH = 0x1a,
	DCACHE_WRITEBACK = 0x1e,
	DCACHE_WRITEALLOC = 0x16,
};
#endif

/* Size of an MMU section: 2MB with LPAE, 1MB otherwise */
enum {
#ifdef CONFIG_ARMV7_LPAE
	MMU_SECTION_SHIFT = 21,
#else
	MMU_SECTION_SHIFT = 20,
#endif
	MMU_SECTION_SIZE = 1 << MMU_SECTION_SHIFT,
};

#ifdef CONFIG_CPU_V7
/* TTBR0 bits: table base address and cacheability of table walks */
#define TTBR0_BASE_ADDR_MASK 0xFFFFC000
#define TTBR0_RGN_NC (0 << 3) /* Outer non-cacheable */
#define TTBR0_RGN_WBWA (1 << 3)
#define TTBR0_RGN_WT (2 << 3)
#define TTBR0_RGN_WB (3 << 3)
/* The inner-cacheability field is split between bits 0 and 6 of TTBR0 */
#define TTBR0_IRGN_NC (0 << 0 | 0 << 6)
#define TTBR0_IRGN_WBWA (0 << 0 | 1 << 6)
#define TTBR0_IRGN_WT (1 << 0 | 0 << 6)
#define TTBR0_IRGN_WB (1 << 0 | 1 << 6)
#endif
509
510
511
512
513
514
515
/*
 * mmu_page_table_flush() - flush the page-table memory in [start, stop)
 * so the MMU's table walker observes the updated entries.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop);

#endif

/* No stack randomization: the stack pointer is used unchanged */
#define arch_align_stack(x) (x)
521
522#endif
523
524#endif
525
526#ifndef __ASSEMBLY__
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
/*
 * save_boot_params_ret() - return point for save_boot_params()
 * implementations (presumably defined in the start-up assembly - confirm).
 */
void save_boot_params_ret(void);

/*
 * mmu_set_region_dcache_behaviour() - set the data-cache behaviour of a
 * memory region
 *
 * @start:  physical start address of the region
 * @size:   size of the region in bytes
 * @option: cache behaviour to apply (see enum dcache_option)
 */
void mmu_set_region_dcache_behaviour(phys_addr_t start, size_t size,
				     enum dcache_option option);

#ifdef CONFIG_SYS_NONCACHED_MEMORY
/* Allocator for a region mapped non-cacheable; init once, then alloc */
void noncached_init(void);
phys_addr_t noncached_alloc(size_t size, size_t align);
#endif
572
573#endif
574
575#endif
576