/* linux/arch/arm64/include/asm/kasan.h */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_KASAN_H
#define __ASM_KASAN_H

/* C-only declarations; assembly includers see nothing below this guard. */
#ifndef __ASSEMBLY__

#include <linux/linkage.h>
#include <asm/memory.h>
#include <asm/pgtable-types.h>

/*
 * Wire the generic KASAN arch hooks to the arm64 pointer-tag helpers.
 * __tag_set/__tag_reset/__tag_get are defined in <asm/memory.h>;
 * NOTE(review): presumably they manipulate the tag stored in a pointer's
 * top byte (TBI) for software tag-based KASAN — confirm in <asm/memory.h>.
 */
#define arch_kasan_set_tag(addr, tag)   __tag_set(addr, tag)
#define arch_kasan_reset_tag(addr)      __tag_reset(addr)
#define arch_kasan_get_tag(addr)        __tag_get(addr)
#ifdef CONFIG_KASAN

/*
 * KASAN_SHADOW_START: beginning of the kernel virtual addresses.
 * KASAN_SHADOW_END: KASAN_SHADOW_START + 1/N of kernel virtual addresses,
 * where N = (1 << KASAN_SHADOW_SCALE_SHIFT).
 *
 * VA_START and KASAN_SHADOW_SIZE come from <asm/memory.h> (not visible in
 * this header), so the shadow region sits at the very bottom of the kernel
 * VA range.
 */
#define KASAN_SHADOW_START      (VA_START)
#define KASAN_SHADOW_END        (KASAN_SHADOW_START + KASAN_SHADOW_SIZE)

/*
 * This value is used to map an address to the corresponding shadow
 * address by the following formula:
 *     shadow_addr = (address >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
 *
 * (1 << (64 - KASAN_SHADOW_SCALE_SHIFT)) shadow addresses that lie in range
 * [KASAN_SHADOW_OFFSET, KASAN_SHADOW_END) cover all 64-bits of virtual
 * addresses. So KASAN_SHADOW_OFFSET should satisfy the following equation:
 *      KASAN_SHADOW_OFFSET = KASAN_SHADOW_END -
 *                              (1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT))
 */
#define KASAN_SHADOW_OFFSET     (KASAN_SHADOW_END - (1ULL << \
                                        (64 - KASAN_SHADOW_SCALE_SHIFT)))

/* Set up the real shadow mappings (implementation lives elsewhere). */
void kasan_init(void);

/* Copy the kernel's KASAN shadow PGD entries into @pgdir. */
void kasan_copy_shadow(pgd_t *pgdir);

/*
 * Early shadow setup; asmlinkage marks it as called from assembly
 * (before the normal C environment/kasan_init() is available).
 */
asmlinkage void kasan_early_init(void);
#else
/*
 * !CONFIG_KASAN: empty stubs so callers can invoke these unconditionally
 * without sprinkling #ifdef CONFIG_KASAN at every call site.
 */
static inline void kasan_init(void) { }
static inline void kasan_copy_shadow(pgd_t *pgdir) { }
#endif /* CONFIG_KASAN */

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_KASAN_H */
  50