// SPDX-License-Identifier: GPL-2.0
/*
 * This file contains KASAN shadow initialization code: it sets up the
 * early shadow page tables that map all of shadow memory to a single
 * read-only zero page until real shadow memory can be allocated, and
 * the helpers to add and remove such zero shadow mappings later on.
 */
#include <linux/memblock.h>
#include <linux/init.h>
#include <linux/kasan.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/pfn.h>
#include <linux/slab.h>

#include <asm/page.h>
#include <asm/pgalloc.h>

#include "kasan.h"

/*
 * This page serves two purposes:
 *   - It is used as early shadow memory: before real shadow can be set
 *     up, the entire shadow region is populated with this single page.
 *   - Later it is reused as the zero shadow, covering large ranges of
 *     memory that are allowed to be accessed but are not otherwise
 *     tracked by kasan (vmalloc/vmemmap ...).
 */
unsigned char kasan_early_shadow_page[PAGE_SIZE] __page_aligned_bss;

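/*
 * Shared, statically allocated page tables for the early shadow.  Wiring
 * a higher-level entry to one of these tables maps a large range of
 * shadow to kasan_early_shadow_page without allocating any memory.  The
 * kasan_*_table() helpers below test whether an entry still points at
 * one of the shared tables.
 */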
#if CONFIG_PGTABLE_LEVELS > 4
p4d_t kasan_early_shadow_p4d[MAX_PTRS_PER_P4D] __page_aligned_bss;
static inline bool kasan_p4d_table(pgd_t pgd)
{
	return pgd_page(pgd) == virt_to_page(lm_alias(kasan_early_shadow_p4d));
}
#else
static inline bool kasan_p4d_table(pgd_t pgd)
{
	return false;
}
#endif
#if CONFIG_PGTABLE_LEVELS > 3
pud_t kasan_early_shadow_pud[PTRS_PER_PUD] __page_aligned_bss;
static inline bool kasan_pud_table(p4d_t p4d)
{
	return p4d_page(p4d) == virt_to_page(lm_alias(kasan_early_shadow_pud));
}
#else
static inline bool kasan_pud_table(p4d_t p4d)
{
	return false;
}
#endif
#if CONFIG_PGTABLE_LEVELS > 2
pmd_t kasan_early_shadow_pmd[PTRS_PER_PMD] __page_aligned_bss;
static inline bool kasan_pmd_table(pud_t pud)
{
	return pud_page(pud) == virt_to_page(lm_alias(kasan_early_shadow_pmd));
}
#else
static inline bool kasan_pmd_table(pud_t pud)
{
	return false;
}
#endif
pte_t kasan_early_shadow_pte[PTRS_PER_PTE] __page_aligned_bss;

static inline bool kasan_pte_table(pmd_t pmd)
{
	return pmd_page(pmd) == virt_to_page(lm_alias(kasan_early_shadow_pte));
}

static inline bool kasan_early_shadow_page_entry(pte_t pte)
{
	return pte_page(pte) == virt_to_page(lm_alias(kasan_early_shadow_page));
}

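/*
 * Allocate a zeroed, naturally aligned page-table page from memblock.
 * Used before the slab allocator is available; allocation failure this
 * early in boot is unrecoverable, hence the panic().
 */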
static __init void *early_alloc(size_t size, int node)
{
	void *ptr = memblock_alloc_try_nid(size, size, __pa(MAX_DMA_ADDRESS),
					   MEMBLOCK_ALLOC_ACCESSIBLE, node);

	if (!ptr)
		panic("%s: Failed to allocate %zu bytes align=%zx nid=%d from=%llx\n",
		      __func__, size, size, node, (u64)__pa(MAX_DMA_ADDRESS));

	return ptr;
}

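/*
 * Point every PTE covering [addr, end) at the shared zero shadow page.
 * The PTEs are write-protected: the page is shared by all early shadow
 * mappings, so it must never be written through any of them.
 */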
static void __ref zero_pte_populate(pmd_t *pmd, unsigned long addr,
				unsigned long end)
{
	pte_t *pte = pte_offset_kernel(pmd, addr);
	pte_t zero_pte;

	zero_pte = pfn_pte(PFN_DOWN(__pa_symbol(kasan_early_shadow_page)),
				PAGE_KERNEL);
	zero_pte = pte_wrprotect(zero_pte);

	while (addr + PAGE_SIZE <= end) {
		set_pte_at(&init_mm, addr, pte, zero_pte);
		addr += PAGE_SIZE;
		pte = pte_offset_kernel(pmd, addr);
	}
}

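/*
 * Populate the PMD level for [addr, end).  Chunks covering a whole,
 * aligned PMD are wired to the shared kasan_early_shadow_pte table;
 * partial chunks get a real PTE page (from slab once available, from
 * memblock before that) which is then filled by zero_pte_populate().
 */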
static int __ref zero_pmd_populate(pud_t *pud, unsigned long addr,
				unsigned long end)
{
	pmd_t *pmd = pmd_offset(pud, addr);
	unsigned long next;

	do {
		next = pmd_addr_end(addr, end);

		if (IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) {
			pmd_populate_kernel(&init_mm, pmd,
					lm_alias(kasan_early_shadow_pte));
			continue;
		}

		if (pmd_none(*pmd)) {
			pte_t *p;

			if (slab_is_available())
				p = pte_alloc_one_kernel(&init_mm);
			else
				p = early_alloc(PAGE_SIZE, NUMA_NO_NODE);
			if (!p)
				return -ENOMEM;

			pmd_populate_kernel(&init_mm, pmd, p);
		}
		zero_pte_populate(pmd, addr, next);
	} while (pmd++, addr = next, addr != end);

	return 0;
}

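/*
 * Same pattern, one level up: whole PUD-sized chunks are wired to the
 * shared kasan_early_shadow_pmd.  The lower shared table is populated
 * as well, which keeps its entries correct and also covers folded
 * configurations where pud_populate() is a no-op.
 */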
static int __ref zero_pud_populate(p4d_t *p4d, unsigned long addr,
				unsigned long end)
{
	pud_t *pud = pud_offset(p4d, addr);
	unsigned long next;

	do {
		next = pud_addr_end(addr, end);
		if (IS_ALIGNED(addr, PUD_SIZE) && end - addr >= PUD_SIZE) {
			pmd_t *pmd;

			pud_populate(&init_mm, pud,
					lm_alias(kasan_early_shadow_pmd));
			pmd = pmd_offset(pud, addr);
			pmd_populate_kernel(&init_mm, pmd,
					lm_alias(kasan_early_shadow_pte));
			continue;
		}

		if (pud_none(*pud)) {
			pmd_t *p;

			if (slab_is_available()) {
				p = pmd_alloc(&init_mm, pud, addr);
				if (!p)
					return -ENOMEM;
			} else {
				pud_populate(&init_mm, pud,
					early_alloc(PAGE_SIZE, NUMA_NO_NODE));
			}
		}
		zero_pmd_populate(pud, addr, next);
	} while (pud++, addr = next, addr != end);

	return 0;
}

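/*
 * And one more level up: whole P4D-sized chunks are wired through the
 * shared PUD, PMD and PTE tables; partial chunks get a real PUD page
 * and descend into zero_pud_populate().
 */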
static int __ref zero_p4d_populate(pgd_t *pgd, unsigned long addr,
				unsigned long end)
{
	p4d_t *p4d = p4d_offset(pgd, addr);
	unsigned long next;

	do {
		next = p4d_addr_end(addr, end);
		if (IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) {
			pud_t *pud;
			pmd_t *pmd;

			p4d_populate(&init_mm, p4d,
					lm_alias(kasan_early_shadow_pud));
			pud = pud_offset(p4d, addr);
			pud_populate(&init_mm, pud,
					lm_alias(kasan_early_shadow_pmd));
			pmd = pmd_offset(pud, addr);
			pmd_populate_kernel(&init_mm, pmd,
					lm_alias(kasan_early_shadow_pte));
			continue;
		}

		if (p4d_none(*p4d)) {
			pud_t *p;

			if (slab_is_available()) {
				p = pud_alloc(&init_mm, p4d, addr);
				if (!p)
					return -ENOMEM;
			} else {
				p4d_populate(&init_mm, p4d,
					early_alloc(PAGE_SIZE, NUMA_NO_NODE));
			}
		}
		zero_pud_populate(p4d, addr, next);
	} while (p4d++, addr = next, addr != end);

	return 0;
}

/**
 * kasan_populate_early_shadow - populate shadow memory region with
 *                               kasan_early_shadow_page
 * @shadow_start: start of the memory range to populate
 * @shadow_end: end of the memory range to populate
 */
int __ref kasan_populate_early_shadow(const void *shadow_start,
					const void *shadow_end)
{
	unsigned long addr = (unsigned long)shadow_start;
	unsigned long end = (unsigned long)shadow_end;
	pgd_t *pgd = pgd_offset_k(addr);
	unsigned long next;

	do {
		next = pgd_addr_end(addr, end);

		if (IS_ALIGNED(addr, PGDIR_SIZE) && end - addr >= PGDIR_SIZE) {
			p4d_t *p4d;
			pud_t *pud;
			pmd_t *pmd;

			/*
			 * Map the whole PGDIR-sized chunk with the shared
			 * early shadow tables.  Every level is populated
			 * explicitly because, with folded page-table
			 * levels, the higher-level populate calls can be
			 * no-ops and the real work happens further down.
			 */
			pgd_populate(&init_mm, pgd,
					lm_alias(kasan_early_shadow_p4d));
			p4d = p4d_offset(pgd, addr);
			p4d_populate(&init_mm, p4d,
					lm_alias(kasan_early_shadow_pud));
			pud = pud_offset(p4d, addr);
			pud_populate(&init_mm, pud,
					lm_alias(kasan_early_shadow_pmd));
			pmd = pmd_offset(pud, addr);
			pmd_populate_kernel(&init_mm, pmd,
					lm_alias(kasan_early_shadow_pte));
			continue;
		}

		if (pgd_none(*pgd)) {
			p4d_t *p;

			if (slab_is_available()) {
				p = p4d_alloc(&init_mm, pgd, addr);
				if (!p)
					return -ENOMEM;
			} else {
				pgd_populate(&init_mm, pgd,
					early_alloc(PAGE_SIZE, NUMA_NO_NODE));
			}
		}
		zero_p4d_populate(pgd, addr, next);
	} while (pgd++, addr = next, addr != end);

	return 0;
}

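/*
 * The kasan_free_*() helpers below free one page-table page: if every
 * entry in the given table is empty, the page is returned to the
 * allocator and the entry pointing at it in the level above is cleared.
 */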
static void kasan_free_pte(pte_t *pte_start, pmd_t *pmd)
{
	pte_t *pte;
	int i;

	for (i = 0; i < PTRS_PER_PTE; i++) {
		pte = pte_start + i;
		if (!pte_none(*pte))
			return;
	}

	pte_free_kernel(&init_mm, (pte_t *)page_to_virt(pmd_page(*pmd)));
	pmd_clear(pmd);
}

static void kasan_free_pmd(pmd_t *pmd_start, pud_t *pud)
{
	pmd_t *pmd;
	int i;

	for (i = 0; i < PTRS_PER_PMD; i++) {
		pmd = pmd_start + i;
		if (!pmd_none(*pmd))
			return;
	}

	pmd_free(&init_mm, (pmd_t *)page_to_virt(pud_page(*pud)));
	pud_clear(pud);
}

static void kasan_free_pud(pud_t *pud_start, p4d_t *p4d)
{
	pud_t *pud;
	int i;

	for (i = 0; i < PTRS_PER_PUD; i++) {
		pud = pud_start + i;
		if (!pud_none(*pud))
			return;
	}

	pud_free(&init_mm, (pud_t *)page_to_virt(p4d_page(*p4d)));
	p4d_clear(p4d);
}

static void kasan_free_p4d(p4d_t *p4d_start, pgd_t *pgd)
{
	p4d_t *p4d;
	int i;

	for (i = 0; i < PTRS_PER_P4D; i++) {
		p4d = p4d_start + i;
		if (!p4d_none(*p4d))
			return;
	}

	p4d_free(&init_mm, (p4d_t *)page_to_virt(pgd_page(*pgd)));
	pgd_clear(pgd);
}

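/*
 * Clear PTEs that map the zero shadow.  Entries that do not point at
 * kasan_early_shadow_page are unexpected here and are skipped with a
 * warning: only zero shadow may be removed by this path.
 */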
static void kasan_remove_pte_table(pte_t *pte, unsigned long addr,
				unsigned long end)
{
	unsigned long next;

	for (; addr < end; addr = next, pte++) {
		next = (addr + PAGE_SIZE) & PAGE_MASK;
		if (next > end)
			next = end;

		if (!pte_present(*pte))
			continue;

		if (WARN_ON(!kasan_early_shadow_page_entry(*pte)))
			continue;
		pte_clear(&init_mm, addr, pte);
	}
}

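/*
 * The kasan_remove_*_table() helpers walk one level each.  An entry
 * that still points at a shared early shadow table is simply cleared,
 * but only when the range covers the whole entry; a real table is
 * descended into and freed once it has been emptied.
 */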
static void kasan_remove_pmd_table(pmd_t *pmd, unsigned long addr,
				unsigned long end)
{
	unsigned long next;

	for (; addr < end; addr = next, pmd++) {
		pte_t *pte;

		next = pmd_addr_end(addr, end);

		if (!pmd_present(*pmd))
			continue;

		if (kasan_pte_table(*pmd)) {
			if (IS_ALIGNED(addr, PMD_SIZE) &&
			    IS_ALIGNED(next, PMD_SIZE))
				pmd_clear(pmd);
			continue;
		}
		pte = pte_offset_kernel(pmd, addr);
		kasan_remove_pte_table(pte, addr, next);
		kasan_free_pte(pte_offset_kernel(pmd, 0), pmd);
	}
}

static void kasan_remove_pud_table(pud_t *pud, unsigned long addr,
				unsigned long end)
{
	unsigned long next;

	for (; addr < end; addr = next, pud++) {
		pmd_t *pmd, *pmd_base;

		next = pud_addr_end(addr, end);

		if (!pud_present(*pud))
			continue;

		if (kasan_pmd_table(*pud)) {
			if (IS_ALIGNED(addr, PUD_SIZE) &&
			    IS_ALIGNED(next, PUD_SIZE))
				pud_clear(pud);
			continue;
		}
		pmd = pmd_offset(pud, addr);
		pmd_base = pmd_offset(pud, 0);
		kasan_remove_pmd_table(pmd, addr, next);
		kasan_free_pmd(pmd_base, pud);
	}
}

static void kasan_remove_p4d_table(p4d_t *p4d, unsigned long addr,
				unsigned long end)
{
	unsigned long next;

	for (; addr < end; addr = next, p4d++) {
		pud_t *pud;

		next = p4d_addr_end(addr, end);

		if (!p4d_present(*p4d))
			continue;

		if (kasan_pud_table(*p4d)) {
			if (IS_ALIGNED(addr, P4D_SIZE) &&
			    IS_ALIGNED(next, P4D_SIZE))
				p4d_clear(p4d);
			continue;
		}
		pud = pud_offset(p4d, addr);
		kasan_remove_pud_table(pud, addr, next);
		kasan_free_pud(pud_offset(p4d, 0), p4d);
	}
}

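/*
 * Tear down the zero shadow covering [start, start + size) and free any
 * page tables that become empty as a result.  Both start and size must
 * be aligned to KASAN_SHADOW_SCALE_SIZE * PAGE_SIZE, which the WARN_ONs
 * below enforce.
 */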
void kasan_remove_zero_shadow(void *start, unsigned long size)
{
	unsigned long addr, end, next;
	pgd_t *pgd;

	addr = (unsigned long)kasan_mem_to_shadow(start);
	end = addr + (size >> KASAN_SHADOW_SCALE_SHIFT);

	if (WARN_ON((unsigned long)start %
			(KASAN_SHADOW_SCALE_SIZE * PAGE_SIZE)) ||
	    WARN_ON(size % (KASAN_SHADOW_SCALE_SIZE * PAGE_SIZE)))
		return;

	for (; addr < end; addr = next) {
		p4d_t *p4d;

		next = pgd_addr_end(addr, end);

		pgd = pgd_offset_k(addr);
		if (!pgd_present(*pgd))
			continue;

		if (kasan_p4d_table(*pgd)) {
			if (IS_ALIGNED(addr, PGDIR_SIZE) &&
			    IS_ALIGNED(next, PGDIR_SIZE))
				pgd_clear(pgd);
			continue;
		}

		p4d = p4d_offset(pgd, addr);
		kasan_remove_p4d_table(p4d, addr, next);
		kasan_free_p4d(p4d_offset(pgd, 0), pgd);
	}
}

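/*
 * Map the shadow of [start, start + size) to the shared zero shadow
 * page, so the range reads as fully accessible without backing it with
 * real shadow memory.  A caller such as memory hotplug or device memory
 * setup might use it roughly like this (hypothetical sketch, error
 * handling elided):
 *
 *	if (kasan_add_zero_shadow(__va(new_range_start), new_range_size))
 *		return -ENOMEM;
 *
 * On failure, the partially installed shadow is removed again.  Note
 * that kasan_remove_zero_shadow() takes the original address and size,
 * not the shadow ones.
 */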
int kasan_add_zero_shadow(void *start, unsigned long size)
{
	int ret;
	void *shadow_start, *shadow_end;

	shadow_start = kasan_mem_to_shadow(start);
	shadow_end = shadow_start + (size >> KASAN_SHADOW_SCALE_SHIFT);

	if (WARN_ON((unsigned long)start %
			(KASAN_SHADOW_SCALE_SIZE * PAGE_SIZE)) ||
	    WARN_ON(size % (KASAN_SHADOW_SCALE_SIZE * PAGE_SIZE)))
		return -EINVAL;

	ret = kasan_populate_early_shadow(shadow_start, shadow_end);
	if (ret)
		/*
		 * Tear down with the original address and size, not the
		 * shadow ones: kasan_remove_zero_shadow() does its own
		 * shadow translation.
		 */
		kasan_remove_zero_shadow(start, size);
	return ret;
}