/*
 * arch/xtensa/include/asm/initialize_mmu.h
 *
 * Initializes MMU:
 *
 *      For the new V3 MMU we remap the TLB from virtual == physical
 *      to the standard Linux mapping used in earlier MMUs.
 *
 *      For the new MMU we also support a configuration register that
 *      specifies how the S32C1I instruction operates with the cache
 *      controller.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica, Inc.
 *
 *   Marc Gauthier <marc@tensilica.com>
 *   Pete Delaney <piet@tensilica.com>
 */

#ifndef _XTENSA_INITIALIZE_MMU_H
#define _XTENSA_INITIALIZE_MMU_H

#include <asm/pgtable.h>
#include <asm/vectors.h>

#if XCHAL_HAVE_PTP_MMU
#define CA_BYPASS       (_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#define CA_WRITEBACK    (_PAGE_CA_WB     | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#else
#define CA_WRITEBACK    (0x4)
#endif

#ifndef XCHAL_SPANNING_WAY
#define XCHAL_SPANNING_WAY 0
#endif

#ifdef __ASSEMBLY__

#define XTENSA_HWVERSION_RC_2009_0 230000

        .macro  initialize_mmu

#if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
/*
 * We have the Atomic Operation Control (ATOMCTL) register; initialize it.
 * For details see Documentation/xtensa/atomctl.txt
 */
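/*
 * Roughly: ATOMCTL holds a 2-bit field per memory type (WB, WT, BY);
 * each field selects whether S32C1I raises an exception, issues an RCW
 * bus transaction, or is handled internally for that type. See the
 * document above for the exact encoding of the values used here.
 */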
#if XCHAL_DCACHE_IS_COHERENT
        movi    a3, 0x25        /* For SMP/MX -- internal for writeback,
                                 * RCW otherwise
                                 */
#else
        movi    a3, 0x29        /* non-MX -- Most cores use standard memory
                                 * controllers which usually can't use RCW
                                 */
#endif
        wsr     a3, atomctl
#endif  /* XCHAL_HAVE_S32C1I &&
         * (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
         */

#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
/*
 * Have MMU v3
 */

#if !XCHAL_HAVE_VECBASE
# error "MMU v3 requires reloc vectors"
#endif

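        /*
         * The kernel may be running at a physical address different
         * from its link address, so take nothing for granted: _call0
         * leaves the address of the following instruction in a0, which
         * tells us where this code actually executes.
         */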
        movi    a1, 0
        _call0  1f
        _j      2f

        .align  4
1:

#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
#define TEMP_MAPPING_VADDR 0x40000000
#else
#define TEMP_MAPPING_VADDR 0x00000000
#endif
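        /*
         * The temporary mapping must live in a 512MB region that does
         * not contain the kernel load address, so that replacing its
         * TLB entries cannot unmap the code currently executing.
         */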

        /* Step 1: invalidate the 512MB mapping at TEMP_MAPPING_VADDR. */

        movi    a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
        idtlb   a2
        iitlb   a2
        isync

        /* Step 2: map the first 128MB at TEMP_MAPPING_VADDR to the paddr
         * containing this code and jump to the new mapping.
         */

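        /*
         * a0 still holds the address captured by _call0: align it down
         * to a 128MB boundary, tag it as an uncached (bypass) mapping,
         * and write it into way 5 at TEMP_MAPPING_VADDR.
         */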
        srli    a3, a0, 27
        slli    a3, a3, 27
        addi    a3, a3, CA_BYPASS
        addi    a7, a2, 5 - XCHAL_SPANNING_WAY
        wdtlb   a3, a7
        witlb   a3, a7
        isync

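        /*
         * Compute this code's offset within its 128MB region (clear
         * the top five address bits), rebase it onto the temporary
         * mapping, and continue executing there.
         */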
        slli    a4, a0, 5
        srli    a4, a4, 5
        addi    a5, a2, -XCHAL_SPANNING_WAY
        add     a4, a4, a5
        jx      a4

        /* Step 3: unmap everything other than the current area.
         *         Walk the spanning way in 512MB steps, starting one
         *         region above the temporary mapping and wrapping
         *         around until one region below it.
         */
2:      movi    a4, 0x20000000
        add     a5, a2, a4
3:      idtlb   a5
        iitlb   a5
        add     a5, a5, a4
        bne     a5, a2, 3b

        /* Step 4: Setup MMU with the requested static mappings. */

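        /*
         * Program the page-size fields of the configurable TLB ways
         * before loading the static entries; 0x01000000 is intended to
         * select 256MB pages for the KIO way so that each 256MB KIO
         * view below fits in a single entry.
         */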
        movi    a6, 0x01000000
        wsr     a6, ITLBCFG
        wsr     a6, DTLBCFG
        isync

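        /*
         * KSEG: install a cached (writeback) view and an uncached
         * (bypass) view of the same physical memory, each in way
         * XCHAL_KSEG_TLB_WAY at its own virtual address.
         */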
        movi    a5, XCHAL_KSEG_CACHED_VADDR + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + CA_WRITEBACK
        wdtlb   a4, a5
        witlb   a4, a5

        movi    a5, XCHAL_KSEG_BYPASS_VADDR + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + CA_BYPASS
        wdtlb   a4, a5
        witlb   a4, a5

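        /*
         * With a 512MB KSEG each view needs a second entry covering
         * the upper half, 0x10000000 (256MB) above the first.
         */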
#ifdef CONFIG_XTENSA_KSEG_512M
        movi    a5, XCHAL_KSEG_CACHED_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_WRITEBACK
        wdtlb   a4, a5
        witlb   a4, a5

        movi    a5, XCHAL_KSEG_BYPASS_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_BYPASS
        wdtlb   a4, a5
        witlb   a4, a5
#endif

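        /*
         * KIO: cached and bypass views of the I/O region at its
         * default physical address, both in way XCHAL_KIO_TLB_WAY.
         */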
        movi    a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
        movi    a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
        wdtlb   a4, a5
        witlb   a4, a5

        movi    a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
        movi    a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
        wdtlb   a4, a5
        witlb   a4, a5

        isync

        /* Jump to self, using the final mappings: the literal address
         * of 1f is a link-time virtual address that is now valid.
         */
        movi    a4, 1f
        jx      a4

1:
        /* Step 5: remove temporary mapping. */
        idtlb   a7
        iitlb   a7
        isync

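        /*
         * Reset PTEVADDR, which the MMU uses to form the virtual
         * address of the page-table entry on a TLB miss.
         */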
        movi    a0, 0
        wsr     a0, ptevaddr
        rsync

#endif /* defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU &&
          XCHAL_HAVE_SPANNING_WAY */

        .endm

        .macro  initialize_cacheattr

#if !defined(CONFIG_MMU) && XCHAL_HAVE_TLBS
#if CONFIG_MEMMAP_CACHEATTR == 0x22222222 && XCHAL_HAVE_PTP_MMU
#error Default MEMMAP_CACHEATTR of 0x22222222 does not work with full MMU.
#endif

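        /*
         * Walk the eight 512MB entries of the spanning way, taking one
         * 4-bit cache attribute per region from
         * CONFIG_MEMMAP_CACHEATTR, lowest nibble first.
         */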
        movi    a5, XCHAL_SPANNING_WAY
        movi    a6, ~_PAGE_ATTRIB_MASK
        movi    a4, CONFIG_MEMMAP_CACHEATTR
        movi    a8, 0x20000000
1:
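        /*
         * The xor/and/xor sequence below computes
         * (entry & ~_PAGE_ATTRIB_MASK) | (attr & _PAGE_ATTRIB_MASK),
         * i.e. it replaces only the attribute bits of each entry.
         */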
        rdtlb1  a3, a5
        xor     a3, a3, a4
        and     a3, a3, a6
        xor     a3, a3, a4
        wdtlb   a3, a5
        ritlb1  a3, a5
        xor     a3, a3, a4
        and     a3, a3, a6
        xor     a3, a3, a4
        witlb   a3, a5

        add     a5, a5, a8
        srli    a4, a4, 4
        bgeu    a5, a8, 1b

        isync
#endif

        .endm

#endif /*__ASSEMBLY__*/

#endif /* _XTENSA_INITIALIZE_MMU_H */