uboot/arch/m68k/lib/cache.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2002
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 */

#include <common.h>
#include <cpu_func.h>
#include <asm/immap.h>
#include <asm/cache.h>

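/* Software cache status flags, kept at the addresses defined in asm/cache.h */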
volatile int *cf_icache_status = (int *)ICACHE_STATUS;
volatile int *cf_dcache_status = (int *)DCACHE_STATUS;

void flush_cache(ulong start_addr, ulong size)
{
        /* Must be implemented for all M68k processors with copy-back data cache */
}

int icache_status(void)
{
        return *cf_icache_status;
}

int dcache_status(void)
{
        return *cf_dcache_status;
}
void icache_enable(void)
{
        icache_invalid();

        *cf_icache_status = 1;

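        /* V4 cores use ACR2/ACR3 (plus ACR6/ACR7 on V4e) for the instruction space; other cores use ACR0/ACR1 */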
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr2"::"r"(CONFIG_SYS_CACHE_ACR2));
        __asm__ __volatile__("movec %0, %%acr3"::"r"(CONFIG_SYS_CACHE_ACR3));
#if defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr6"::"r"(CONFIG_SYS_CACHE_ACR6));
        __asm__ __volatile__("movec %0, %%acr7"::"r"(CONFIG_SYS_CACHE_ACR7));
#endif
#else
        __asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
        __asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
#endif

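        /* Turn the instruction cache on with the configured CACR value */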
        __asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_ICACR));
}

void icache_disable(void)
{
        u32 temp = 0;

        *cf_icache_status = 0;
        icache_invalid();

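        /* Clear the instruction ACRs so no address ranges remain marked cacheable */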
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr2"::"r"(temp));
        __asm__ __volatile__("movec %0, %%acr3"::"r"(temp));
#if defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr6"::"r"(temp));
        __asm__ __volatile__("movec %0, %%acr7"::"r"(temp));
#endif
#else
        __asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
        __asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
#endif
}

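/*
 * Invalidate the complete instruction cache. CACR cannot be read back,
 * so the enable bits are re-applied when the cache is currently on.
 */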
void icache_invalid(void)
{
        u32 temp;

        temp = CONFIG_SYS_ICACHE_INV;
        if (*cf_icache_status)
                temp |= CONFIG_SYS_CACHE_ICACR;

        __asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
}

/*
 * The data cache is only available on ColdFire V4 cores such as the
 * MCF547x_8x and MCF5445x; on ColdFire V2 and V3 these dcache routines
 * are effectively no-ops.
 */
void dcache_enable(void)
{
        dcache_invalid();
        *cf_dcache_status = 1;

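        /* On V4 cores ACR0/ACR1 (plus ACR4/ACR5 on V4e) cover the data space */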
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr0"::"r"(CONFIG_SYS_CACHE_ACR0));
        __asm__ __volatile__("movec %0, %%acr1"::"r"(CONFIG_SYS_CACHE_ACR1));
#if defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr4"::"r"(CONFIG_SYS_CACHE_ACR4));
        __asm__ __volatile__("movec %0, %%acr5"::"r"(CONFIG_SYS_CACHE_ACR5));
#endif
#endif

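        /* Turn the data cache on with the configured CACR value */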
        __asm__ __volatile__("movec %0, %%cacr"::"r"(CONFIG_SYS_CACHE_DCACR));
}

void dcache_disable(void)
{
        u32 temp = 0;

        *cf_dcache_status = 0;
        dcache_invalid();

        __asm__ __volatile__("movec %0, %%cacr"::"r"(temp));

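        /* Clear the data ACRs used by V4 cores */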
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr0"::"r"(temp));
        __asm__ __volatile__("movec %0, %%acr1"::"r"(temp));
#if defined(CONFIG_CF_V4E)
        __asm__ __volatile__("movec %0, %%acr4"::"r"(temp));
        __asm__ __volatile__("movec %0, %%acr5"::"r"(temp));
#endif
#endif
}

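/*
 * V4 only: a single CACR controls both caches, so the current icache and
 * dcache enable bits are written back together with the invalidate command.
 */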
void dcache_invalid(void)
{
#if defined(CONFIG_CF_V4) || defined(CONFIG_CF_V4E)
        u32 temp;

        temp = CONFIG_SYS_DCACHE_INV;
        if (*cf_dcache_status)
                temp |= CONFIG_SYS_CACHE_DCACR;
        if (*cf_icache_status)
                temp |= CONFIG_SYS_CACHE_ICACR;

        __asm__ __volatile__("movec %0, %%cacr"::"r"(temp));
#endif
}

__weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
        /* An empty stub; the real implementation should be in platform code */
}

__weak void flush_dcache_range(unsigned long start, unsigned long stop)
{
        /* An empty stub; the real implementation should be in platform code */
}