/* linux/arch/powerpc/include/asm/cache.h */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__


/*
 * bytes per L1 cache line
 *
 * L1_CACHE_SHIFT is log2 of the L1 cache line size for the platform
 * selected at configuration time.  IFETCH_ALIGN_SHIFT is log2 of the
 * instruction-fetch alignment used for code placement.
 * MAX_COPY_PREFETCH presumably bounds how far ahead the copy routines
 * prefetch -- confirm against the memcpy/copy_page implementations.
 */
#if defined(CONFIG_PPC_8xx)
#define L1_CACHE_SHIFT		4	/* 16-byte lines */
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6	/* 64-byte lines */
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7	/* 128-byte lines */
#else
#define L1_CACHE_SHIFT		5	/* 32-byte lines */
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7	/* 128-byte lines */
#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/* Line size generic code uses to pad/align data shared between CPUs. */
#define SMP_CACHE_BYTES		L1_CACHE_BYTES

#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)

#if !defined(__ASSEMBLY__)
#ifdef CONFIG_PPC64
/*
 * Geometry of one level of the cache hierarchy.  The instances inside
 * struct ppc64_caches are presumably populated during early boot from
 * firmware-provided information -- confirm against the arch setup code;
 * this header only declares the layout.
 */
struct ppc_cache_info {
	u32 size;		/* total capacity in bytes */
	u32 line_size;		/* coherency line size in bytes */
	u32 block_size; /* L1 only */
	u32 log_block_size;	/* log2(block_size) */
	u32 blocks_per_page;	/* cache blocks per page */
	u32 sets;		/* number of sets */
	u32 assoc;		/* associativity (ways per set) */
};
  48
/* Cache geometry for each level visible to the 64-bit kernel. */
struct ppc64_caches {
	struct ppc_cache_info l1d;	/* L1 data cache */
	struct ppc_cache_info l1i;	/* L1 instruction cache */
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

/* Single global instance; defined and filled in elsewhere (not in this header). */
extern struct ppc64_caches ppc64_caches;
  57
  58static inline u32 l1_dcache_shift(void)
  59{
  60        return ppc64_caches.l1d.log_block_size;
  61}
  62
  63static inline u32 l1_dcache_bytes(void)
  64{
  65        return ppc64_caches.l1d.block_size;
  66}
  67
  68static inline u32 l1_icache_shift(void)
  69{
  70        return ppc64_caches.l1i.log_block_size;
  71}
  72
  73static inline u32 l1_icache_bytes(void)
  74{
  75        return ppc64_caches.l1i.block_size;
  76}
  77#else
  78static inline u32 l1_dcache_shift(void)
  79{
  80        return L1_CACHE_SHIFT;
  81}
  82
  83static inline u32 l1_dcache_bytes(void)
  84{
  85        return L1_CACHE_BYTES;
  86}
  87
  88static inline u32 l1_icache_shift(void)
  89{
  90        return L1_CACHE_SHIFT;
  91}
  92
  93static inline u32 l1_icache_bytes(void)
  94{
  95        return L1_CACHE_BYTES;
  96}
  97
#endif /* CONFIG_PPC64 */

/*
 * Data that is read often but written rarely goes into its own section,
 * so frequently-written data elsewhere does not share (and bounce) its
 * cache lines.
 */
#define __read_mostly __section(".data..read_mostly")

#ifdef CONFIG_PPC_BOOK3S_32
/*
 * Accessors for the L2CR/L3CR cache control registers on Book3S 32-bit
 * parts; implemented elsewhere (not visible in this header).
 */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* No L2CR/L3CR on other platforms: reads yield 0, writes are no-ops. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
 113
/*
 * dcbz: zero the data cache block containing @addr.  The "0, %0" form
 * uses register 0 as RA, which the instruction treats as literal zero,
 * so the effective address is just @addr.  The "memory" clobber stops
 * the compiler from moving memory accesses across the instruction.
 */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}
 118
/*
 * dcbi: invalidate the data cache block containing @addr without
 * writing it back -- any modified data in the block is discarded.
 */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}
 123
/*
 * dcbf: flush the data cache block containing @addr -- write it back
 * to memory if modified, then invalidate it.
 */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}
 128
/*
 * dcbst: store the data cache block containing @addr -- write it back
 * to memory if modified, but leave it valid in the cache.
 */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
 133
/* icbi: invalidate the instruction cache block containing @addr. */
static inline void icbi(void *addr)
{
	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
}
 138
/*
 * iccci: instruction cache invalidate (4xx/Book-E class cores).
 * NOTE(review): on some implementations the operand is ignored and the
 * entire instruction cache is invalidated -- confirm against the
 * specific core's manual before relying on per-block behaviour.
 */
static inline void iccci(void *addr)
{
	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
}

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */
 147