uboot/arch/arm/cpu/armv7/cache_v7.c
/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 *
 * SPDX-License-Identifier:     GPL-2.0+
 */
#include <linux/types.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/utils.h>

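/*
 * Operation codes used to select between the set/way ("all") and
 * MVA ("range") maintenance paths implemented below.
 */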
#define ARMV7_DCACHE_INVAL_ALL          1
#define ARMV7_DCACHE_CLEAN_INVAL_ALL    2
#define ARMV7_DCACHE_INVAL_RANGE        3
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE  4

#ifndef CONFIG_SYS_DCACHE_OFF
/*
 * Write the level and type you want to the Cache Size Selection Register
 * (CSSELR) so that size details can be read back from the Current Cache
 * Size ID Register (CCSIDR)
 */
static void set_csselr(u32 level, u32 type)
{
        u32 csselr = level << 1 | type;

        /* Write to Cache Size Selection Register (CSSELR) */
        asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
}

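/*
 * Read the Cache Size ID Register (CCSIDR); it describes the line
 * length, associativity and number of sets of the cache currently
 * selected by CSSELR.
 */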
static u32 get_ccsidr(void)
{
        u32 ccsidr;

        /* Read current CP15 Cache Size ID Register */
        asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
        return ccsidr;
}

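/*
 * Read the Cache Level ID Register (CLIDR); it reports which type of
 * cache (none, instruction, data, separate or unified) is implemented
 * at each level.
 */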
static u32 get_clidr(void)
{
        u32 clidr;

        /* Read current CP15 Cache Level ID Register */
        asm volatile ("mrc p15, 1, %0, c0, c0, 1" : "=r" (clidr));
        return clidr;
}

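/*
 * Per the ARMv7 ARM, the operand for DCISW/DCCISW packs the way index
 * into the top bits (starting at way_shift = 32 - log2(num_ways)), the
 * set index from bit log2(line length in bytes) upwards, and the cache
 * level into bits [3:1].
 */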
static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
                                         u32 num_ways, u32 way_shift,
                                         u32 log2_line_len)
{
        int way, set;
        u32 setway;

        /*
         * For optimal assembly code:
         *      a. count down
         *      b. have bigger loop inside
         */
        for (way = num_ways - 1; way >= 0; way--) {
                for (set = num_sets - 1; set >= 0; set--) {
                        setway = (level << 1) | (set << log2_line_len) |
                                 (way << way_shift);
                        /* Invalidate data/unified cache line by set/way */
                        asm volatile ("mcr p15, 0, %0, c7, c6, 2"
                                        : : "r" (setway));
                }
        }
        /* DSB to make sure the operation is complete */
        CP15DSB;
}

static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets,
                                               u32 num_ways, u32 way_shift,
                                               u32 log2_line_len)
{
        int way, set;
        u32 setway;

        /*
         * For optimal assembly code:
         *      a. count down
         *      b. have bigger loop inside
         */
        for (way = num_ways - 1; way >= 0; way--) {
                for (set = num_sets - 1; set >= 0; set--) {
                        setway = (level << 1) | (set << log2_line_len) |
                                 (way << way_shift);
                        /*
                         * Clean & Invalidate data/unified
                         * cache line by set/way
                         */
                        asm volatile ("mcr p15, 0, %0, c7, c14, 2"
                                        : : "r" (setway));
                }
        }
        /* DSB to make sure the operation is complete */
        CP15DSB;
}

static void v7_maint_dcache_level_setway(u32 level, u32 operation)
{
        u32 ccsidr;
        u32 num_sets, num_ways, log2_line_len, log2_num_ways;
        u32 way_shift;

        set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED);

        ccsidr = get_ccsidr();

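        /* CCSIDR LineSize field holds log2(words per line) - 2 */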
        log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
                                CCSIDR_LINE_SIZE_OFFSET) + 2;
        /* Converting from words to bytes */
        log2_line_len += 2;

        num_ways  = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
                        CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
        num_sets  = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
                        CCSIDR_NUM_SETS_OFFSET) + 1;
        /*
         * According to the ARMv7 ARM, the number of sets and the number
         * of ways need not be a power of 2
         */
        log2_num_ways = log_2_n_round_up(num_ways);

        way_shift = (32 - log2_num_ways);
        if (operation == ARMV7_DCACHE_INVAL_ALL) {
                v7_inval_dcache_level_setway(level, num_sets, num_ways,
                                      way_shift, log2_line_len);
        } else if (operation == ARMV7_DCACHE_CLEAN_INVAL_ALL) {
                v7_clean_inval_dcache_level_setway(level, num_sets, num_ways,
                                                   way_shift, log2_line_len);
        }
}

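/*
 * CLIDR holds a 3-bit cache-type field for each of up to seven cache
 * levels; walk them and apply the operation to every level that
 * implements a data or unified cache.
 */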
static void v7_maint_dcache_all(u32 operation)
{
        u32 level, cache_type, level_start_bit = 0;
        u32 clidr = get_clidr();

        for (level = 0; level < 7; level++) {
                cache_type = (clidr >> level_start_bit) & 0x7;
                if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) ||
                    (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) ||
                    (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED))
                        v7_maint_dcache_level_setway(level, operation);
                level_start_bit += 3;
        }
}

static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
        u32 mva;

        /* Align start to cache line boundary */
        start &= ~(line_len - 1);
        for (mva = start; mva < stop; mva = mva + line_len) {
                /* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
                asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
        }
}

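/*
 * Pure invalidation must not touch a partially covered line: discarding
 * it would also discard any dirty neighbouring data outside the range.
 * Unaligned boundaries are therefore reported and skipped rather than
 * invalidated.
 */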
static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
        u32 mva;

        /*
         * If the start address is not aligned to a cache-line, do not
         * invalidate the first cache-line
         */
        if (start & (line_len - 1)) {
                printf("ERROR: %s - start address is not aligned - 0x%08x\n",
                        __func__, start);
                /* move to next cache line */
                start = (start + line_len - 1) & ~(line_len - 1);
        }

        /*
         * If the stop address is not aligned to a cache-line, do not
         * invalidate the last cache-line
         */
        if (stop & (line_len - 1)) {
                printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
                        __func__, stop);
                /* align to the beginning of this cache line */
                stop &= ~(line_len - 1);
        }

        for (mva = start; mva < stop; mva = mva + line_len) {
                /* DCIMVAC - Invalidate data cache by MVA to PoC */
                asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
        }
}

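/*
 * Note: the line length below is read from CCSIDR for whichever cache
 * CSSELR currently selects; this assumes a data/unified cache is still
 * the selected one when a range operation is invoked.
 */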
static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
        u32 line_len, ccsidr;

        ccsidr = get_ccsidr();
        line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
                        CCSIDR_LINE_SIZE_OFFSET) + 2;
        /* Converting from words to bytes */
        line_len += 2;
        /* converting from log2(linelen) to linelen */
        line_len = 1 << line_len;

        switch (range_op) {
        case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
                v7_dcache_clean_inval_range(start, stop, line_len);
                break;
        case ARMV7_DCACHE_INVAL_RANGE:
                v7_dcache_inval_range(start, stop, line_len);
                break;
        }

        /* DSB to make sure the operation is complete */
        CP15DSB;
}

/* Invalidate TLB */
static void v7_inval_tlb(void)
{
        /* Invalidate entire unified TLB */
        asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
        /* Invalidate entire data TLB */
        asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
        /* Invalidate entire instruction TLB */
        asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
        /* Full system DSB - make sure that the invalidation is complete */
        CP15DSB;
        /* Full system ISB - make sure the instruction stream sees it */
        CP15ISB;
}

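/*
 * Invalidates the entire data cache at all levels, then the outer
 * cache if one is present
 */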
void invalidate_dcache_all(void)
{
        v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);

        v7_outer_cache_inval_all();
}

/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels
 */
void flush_dcache_all(void)
{
        v7_maint_dcache_all(ARMV7_DCACHE_CLEAN_INVAL_ALL);

        v7_outer_cache_flush_all();
}

/*
 * Invalidates the range in all levels of D-cache/unified cache used:
 * affects the range [start, stop - 1]
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
        v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

        v7_outer_cache_inval_range(start, stop);
}

/*
 * Flushes (cleans & invalidates) the range in all levels of
 * D-cache/unified cache used:
 * affects the range [start, stop - 1]
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
        v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

        v7_outer_cache_flush_range(start, stop);
}
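
/*
 * Illustrative usage (buf and len are hypothetical): a driver cleans a
 * buffer before handing it to a DMA master and invalidates it before
 * reading back data the master wrote:
 *
 *      flush_dcache_range((unsigned long)buf, (unsigned long)buf + len);
 *      ... device DMA runs over buf ...
 *      invalidate_dcache_range((unsigned long)buf, (unsigned long)buf + len);
 */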

void arm_init_before_mmu(void)
{
        v7_outer_cache_enable();
        invalidate_dcache_all();
        v7_inval_tlb();
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
        flush_dcache_range(start, stop);
        v7_inval_tlb();
}

/*
 * Flushes the range in all levels of D-cache/unified cache used:
 * affects the range [start, start + size - 1]
 */
void flush_cache(unsigned long start, unsigned long size)
{
        flush_dcache_range(start, start + size);
}
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
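/* D-cache disabled: all maintenance operations collapse to no-ops */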
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void flush_cache(unsigned long start, unsigned long size)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */

#ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
        /*
         * Invalidate all instruction caches to PoU.
         * Also flushes branch target cache.
         */
        asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

        /* Invalidate entire branch predictor array */
        asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

        /* Full system DSB - make sure that the invalidation is complete */
        CP15DSB;

        /* ISB - make sure the instruction stream sees it */
        CP15ISB;
}
#else
void invalidate_icache_all(void)
{
}
#endif

/*
 * Stub implementations for the outer cache operations; boards with an
 * outer (L2) cache such as the PL310 override these with real ones.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}