linux/arch/arm/mm/proc-arm1022.S
/*
 *  linux/arch/arm/mm/proc-arm1022.S: MMU functions for ARM1022E
 *
 *  Copyright (C) 2000 ARM Limited
 *  Copyright (C) 2000 Deep Blue Solutions Ltd.
 *  hacked for non-paged-MM by Hyok S. Choi, 2003.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 *
 * This is the low-level assembler for performing cache and TLB
 * functions on the ARM1022E.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/ptrace.h>

#include "proc-macros.S"

/*
 * This is the maximum size of an area which will be invalidated
 * using the single invalidate entry instructions.  Anything larger
 * than this, and we go for the whole cache.
 *
 * This value should be chosen so that we always use the cheapest
 * alternative.
 */
#define MAX_AREA_SIZE   32768

/*
 * The size of one data cache line.
 */
#define CACHE_DLINESIZE 32

/*
 * The number of data cache segments.
 */
#define CACHE_DSEGMENTS 16

/*
 * The number of lines in a cache segment.
 */
#define CACHE_DENTRIES  64

/*
 * This is the size at which it becomes more efficient to
 * clean the whole cache, rather than using the individual
 * cache line maintenance instructions.
 */
#define CACHE_DLIMIT    32768
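
/*
 * Illustrative sketch (pseudo-code, not part of the build) of how the
 * range functions below apply this threshold: a request of CACHE_DLIMIT
 * bytes or more becomes a whole-cache operation, since the set/way walk
 * is then cheaper than line-by-line maintenance.
 *
 *      if (end - start >= CACHE_DLIMIT)        // 32 KiB here
 *              flush_whole_cache();            // set/way walk
 *      else
 *              for (addr = start; addr < end; addr += CACHE_DLINESIZE)
 *                      clean_and_invalidate_line(addr);
 */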

        .text
/*
 * cpu_arm1022_proc_init()
 */
ENTRY(cpu_arm1022_proc_init)
        ret     lr

/*
 * cpu_arm1022_proc_fin()
 */
ENTRY(cpu_arm1022_proc_fin)
        mrc     p15, 0, r0, c1, c0, 0           @ ctrl register
        bic     r0, r0, #0x1000                 @ ...i............
        bic     r0, r0, #0x000e                 @ ............wca.
        mcr     p15, 0, r0, c1, c0, 0           @ disable caches
        ret     lr

/*
 * cpu_arm1022_reset(loc)
 *
 * Perform a soft reset of the system.  Put the CPU into the
 * same state as it would be if it had been reset, and branch
 * to what would be the reset vector.
 *
 * loc: location to jump to for soft reset
 */
        .align  5
        .pushsection    .idmap.text, "ax"
ENTRY(cpu_arm1022_reset)
        mov     ip, #0
        mcr     p15, 0, ip, c7, c7, 0           @ invalidate I,D caches
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
#ifdef CONFIG_MMU
        mcr     p15, 0, ip, c8, c7, 0           @ invalidate I & D TLBs
#endif
        mrc     p15, 0, ip, c1, c0, 0           @ ctrl register
        bic     ip, ip, #0x000f                 @ ............wcam
        bic     ip, ip, #0x1100                 @ ...i...s........
        mcr     p15, 0, ip, c1, c0, 0           @ ctrl register
        ret     r0
ENDPROC(cpu_arm1022_reset)
        .popsection
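
/*
 * Note (added for clarity): the reset routine lives in .idmap.text,
 * which the kernel maps with an identity (virtual == physical) mapping,
 * so that execution can continue at the same address while the M bit
 * is being cleared in the control register.
 */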

/*
 * cpu_arm1022_do_idle()
 */
        .align  5
ENTRY(cpu_arm1022_do_idle)
        mcr     p15, 0, r0, c7, c0, 4           @ Wait for interrupt
        ret     lr

/* ================================= CACHE ================================ */

        .align  5

/*
 *      flush_icache_all()
 *
 *      Unconditionally invalidate the entire icache.
 */
ENTRY(arm1022_flush_icache_all)
#ifndef CONFIG_CPU_ICACHE_DISABLE
        mov     r0, #0
        mcr     p15, 0, r0, c7, c5, 0           @ invalidate I cache
#endif
        ret     lr
ENDPROC(arm1022_flush_icache_all)

/*
 *      flush_user_cache_all()
 *
 *      Invalidate all cache entries in a particular address
 *      space.
 */
ENTRY(arm1022_flush_user_cache_all)
        /* FALLTHROUGH */
/*
 *      flush_kern_cache_all()
 *
 *      Clean and invalidate the entire cache.
 */
ENTRY(arm1022_flush_kern_cache_all)
        mov     r2, #VM_EXEC
        mov     ip, #0
__flush_whole_cache:
#ifndef CONFIG_CPU_DCACHE_DISABLE
        mov     r1, #(CACHE_DSEGMENTS - 1) << 5 @ 16 segments
1:      orr     r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:      mcr     p15, 0, r3, c7, c14, 2          @ clean+invalidate D index
        subs    r3, r3, #1 << 26
        bcs     2b                              @ entries 63 to 0
        subs    r1, r1, #1 << 5
        bcs     1b                              @ segments 15 to 0
#endif
        tst     r2, #VM_EXEC
#ifndef CONFIG_CPU_ICACHE_DISABLE
        mcrne   p15, 0, ip, c7, c5, 0           @ invalidate I cache
#endif
        mcrne   p15, 0, ip, c7, c10, 4          @ drain WB
        ret     lr
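
/*
 * Note (added for clarity): the nested loop above walks the D cache by
 * set/way.  As constructed by the code, the index written to c7, c14, 2
 * carries the line (entry) number in bits [31:26] and the segment
 * number in bits [8:5], hence the "1 << 26" and "1 << 5" step sizes:
 *
 *       31      26 25          9 8      5 4     0
 *      +----------+-------------+--------+------+
 *      |  entry   |      -      |segment |  -   |
 *      +----------+-------------+--------+------+
 */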

/*
 *      flush_user_cache_range(start, end, flags)
 *
 *      Invalidate a range of cache entries in the specified
 *      address space.
 *
 *      - start - start address (inclusive)
 *      - end   - end address (exclusive)
 *      - flags - vm_flags for this space
 */
ENTRY(arm1022_flush_user_cache_range)
        mov     ip, #0
        sub     r3, r1, r0                      @ calculate total size
        cmp     r3, #CACHE_DLIMIT
        bhs     __flush_whole_cache

#ifndef CONFIG_CPU_DCACHE_DISABLE
1:      mcr     p15, 0, r0, c7, c14, 1          @ clean+invalidate D entry
        add     r0, r0, #CACHE_DLINESIZE
        cmp     r0, r1
        blo     1b
#endif
        tst     r2, #VM_EXEC
#ifndef CONFIG_CPU_ICACHE_DISABLE
        mcrne   p15, 0, ip, c7, c5, 0           @ invalidate I cache
#endif
        mcrne   p15, 0, ip, c7, c10, 4          @ drain WB
        ret     lr

/*
 *      coherent_kern_range(start, end)
 *
 *      Ensure coherency between the Icache and the Dcache in the
 *      region described by start and end.  If you have non-snooping
 *      Harvard caches, you need to implement this function.
 *
 *      - start - virtual start address
 *      - end   - virtual end address
 */
ENTRY(arm1022_coherent_kern_range)
        /* FALLTHROUGH */

/*
 *      coherent_user_range(start, end)
 *
 *      Ensure coherency between the Icache and the Dcache in the
 *      region described by start and end.  If you have non-snooping
 *      Harvard caches, you need to implement this function.
 *
 *      - start - virtual start address
 *      - end   - virtual end address
 */
ENTRY(arm1022_coherent_user_range)
        mov     ip, #0
        bic     r0, r0, #CACHE_DLINESIZE - 1
1:
#ifndef CONFIG_CPU_DCACHE_DISABLE
        mcr     p15, 0, r0, c7, c10, 1          @ clean D entry
#endif
#ifndef CONFIG_CPU_ICACHE_DISABLE
        mcr     p15, 0, r0, c7, c5, 1           @ invalidate I entry
#endif
        add     r0, r0, #CACHE_DLINESIZE
        cmp     r0, r1
        blo     1b
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        mov     r0, #0
        ret     lr
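
/*
 * Note (added for clarity): on a Harvard-cache CPU such as the
 * ARM1022E the I and D caches do not snoop each other, so newly
 * written instructions must be cleaned out of the D cache and the
 * stale lines invalidated from the I cache, one line at a time as
 * above, before the new code may be executed (e.g. after loading a
 * module or writing a signal trampoline).
 */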

/*
 *      flush_kern_dcache_area(void *addr, size_t size)
 *
 *      Ensure no D cache aliasing occurs, either with itself or
 *      the I cache
 *
 *      - addr  - kernel address
 *      - size  - region size
 */
ENTRY(arm1022_flush_kern_dcache_area)
        mov     ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
        add     r1, r0, r1
1:      mcr     p15, 0, r0, c7, c14, 1          @ clean+invalidate D entry
        add     r0, r0, #CACHE_DLINESIZE
        cmp     r0, r1
        blo     1b
#endif
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        ret     lr

/*
 *      dma_inv_range(start, end)
 *
 *      Invalidate (discard) the specified virtual address range.
 *      May not write back any entries.  If 'start' or 'end'
 *      are not cache line aligned, those lines must be written
 *      back.
 *
 *      - start - virtual start address
 *      - end   - virtual end address
 *
 * (same as v4wb)
 */
arm1022_dma_inv_range:
        mov     ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
        tst     r0, #CACHE_DLINESIZE - 1
        bic     r0, r0, #CACHE_DLINESIZE - 1
        mcrne   p15, 0, r0, c7, c10, 1          @ clean D entry
        tst     r1, #CACHE_DLINESIZE - 1
        mcrne   p15, 0, r1, c7, c10, 1          @ clean D entry
1:      mcr     p15, 0, r0, c7, c6, 1           @ invalidate D entry
        add     r0, r0, #CACHE_DLINESIZE
        cmp     r0, r1
        blo     1b
#endif
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        ret     lr
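
/*
 * Note (added for clarity): the tst/mcrne pairs above implement the
 * partial-line rule from the comment: a boundary line that is only
 * partly covered by [start, end) may also hold unrelated dirty data,
 * so it is cleaned (written back) first, and only then is every line
 * in the range discarded.
 */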

/*
 *      dma_clean_range(start, end)
 *
 *      Clean the specified virtual address range.
 *
 *      - start - virtual start address
 *      - end   - virtual end address
 *
 * (same as v4wb)
 */
arm1022_dma_clean_range:
        mov     ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
        bic     r0, r0, #CACHE_DLINESIZE - 1
1:      mcr     p15, 0, r0, c7, c10, 1          @ clean D entry
        add     r0, r0, #CACHE_DLINESIZE
        cmp     r0, r1
        blo     1b
#endif
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        ret     lr

/*
 *      dma_flush_range(start, end)
 *
 *      Clean and invalidate the specified virtual address range.
 *
 *      - start - virtual start address
 *      - end   - virtual end address
 */
ENTRY(arm1022_dma_flush_range)
        mov     ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
        bic     r0, r0, #CACHE_DLINESIZE - 1
1:      mcr     p15, 0, r0, c7, c14, 1          @ clean+invalidate D entry
        add     r0, r0, #CACHE_DLINESIZE
        cmp     r0, r1
        blo     1b
#endif
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        ret     lr

/*
 *      dma_map_area(start, size, dir)
 *      - start - kernel virtual start address
 *      - size  - size of region
 *      - dir   - DMA direction
 */
ENTRY(arm1022_dma_map_area)
        add     r1, r1, r0
        cmp     r2, #DMA_TO_DEVICE
        beq     arm1022_dma_clean_range
        bcs     arm1022_dma_inv_range
        b       arm1022_dma_flush_range
ENDPROC(arm1022_dma_map_area)
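
/*
 * Note (added for clarity): the cmp/beq/bcs sequence dispatches on
 * enum dma_data_direction, where DMA_BIDIRECTIONAL = 0,
 * DMA_TO_DEVICE = 1 and DMA_FROM_DEVICE = 2:
 *
 *      r2 == DMA_TO_DEVICE   -> clean only (CPU wrote, device reads)
 *      r2 >  DMA_TO_DEVICE   -> invalidate only (device writes)
 *      r2 <  DMA_TO_DEVICE   -> clean+invalidate (bidirectional)
 */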

/*
 *      dma_unmap_area(start, size, dir)
 *      - start - kernel virtual start address
 *      - size  - size of region
 *      - dir   - DMA direction
 */
ENTRY(arm1022_dma_unmap_area)
        ret     lr
ENDPROC(arm1022_dma_unmap_area)

        .globl  arm1022_flush_kern_cache_louis
        .equ    arm1022_flush_kern_cache_louis, arm1022_flush_kern_cache_all

        @ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
        define_cache_functions arm1022

        .align  5
ENTRY(cpu_arm1022_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_DISABLE
        mov     ip, #0
1:      mcr     p15, 0, r0, c7, c10, 1          @ clean D entry
        add     r0, r0, #CACHE_DLINESIZE
        subs    r1, r1, #CACHE_DLINESIZE
        bhi     1b
#endif
        ret     lr

/* =============================== PageTable ============================== */

/*
 * cpu_arm1022_switch_mm(pgd)
 *
 * Set the translation base pointer to be as described by pgd.
 *
 * pgd: new page tables
 */
        .align  5
ENTRY(cpu_arm1022_switch_mm)
#ifdef CONFIG_MMU
#ifndef CONFIG_CPU_DCACHE_DISABLE
        mov     r1, #(CACHE_DSEGMENTS - 1) << 5 @ 16 segments
1:      orr     r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:      mcr     p15, 0, r3, c7, c14, 2          @ clean+invalidate D index
        subs    r3, r3, #1 << 26
        bcs     2b                              @ entries 63 to 0
        subs    r1, r1, #1 << 5
        bcs     1b                              @ segments 15 to 0
#endif
        mov     r1, #0
#ifndef CONFIG_CPU_ICACHE_DISABLE
        mcr     p15, 0, r1, c7, c5, 0           @ invalidate I cache
#endif
        mcr     p15, 0, r1, c7, c10, 4          @ drain WB
        mcr     p15, 0, r0, c2, c0, 0           @ load page table pointer
        mcr     p15, 0, r1, c8, c7, 0           @ invalidate I & D TLBs
#endif
        ret     lr
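
/*
 * Note (added for clarity): the ARM1022E has virtually indexed,
 * virtually tagged (VIVT) caches, so cached data is only meaningful
 * under the current translation.  The D cache is therefore cleaned
 * and the I cache invalidated before the new page table base is
 * written and the TLBs are flushed.
 */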

/*
 * cpu_arm1022_set_pte_ext(ptep, pte, ext)
 *
 * Set a PTE and flush it out
 */
        .align  5
ENTRY(cpu_arm1022_set_pte_ext)
#ifdef CONFIG_MMU
        armv3_set_pte_ext
        mov     r0, r0
#ifndef CONFIG_CPU_DCACHE_DISABLE
        mcr     p15, 0, r0, c7, c10, 1          @ clean D entry
#endif
#endif /* CONFIG_MMU */
        ret     lr

        .type   __arm1022_setup, #function
__arm1022_setup:
        mov     r0, #0
        mcr     p15, 0, r0, c7, c7              @ invalidate I,D caches on v4
        mcr     p15, 0, r0, c7, c10, 4          @ drain write buffer on v4
#ifdef CONFIG_MMU
        mcr     p15, 0, r0, c8, c7              @ invalidate I,D TLBs on v4
#endif
        adr     r5, arm1022_crval
        ldmia   r5, {r5, r6}
        mrc     p15, 0, r0, c1, c0              @ get control register v4
        bic     r0, r0, r5
        orr     r0, r0, r6
#ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
        orr     r0, r0, #0x4000                 @ .R..............
#endif
        ret     lr
        .size   __arm1022_setup, . - __arm1022_setup

        /*
         *  R
         * .RVI ZFRS BLDP WCAM
         * .011 1001 ..11 0101
         *
         */
        .type   arm1022_crval, #object
arm1022_crval:
        crval   clear=0x00007f3f, mmuset=0x00003935, ucset=0x00001930
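
/*
 * Note (added for clarity): the crval macro (see proc-macros.S) emits
 * the 'clear' word followed by 'mmuset' (or 'ucset' on !CONFIG_MMU
 * builds).  __arm1022_setup applies them, in effect, as:
 *
 *      ctrl = (ctrl & ~clear) | set;
 *
 * via the bic/orr pair after the ldmia.
 */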

        __INITDATA
        @ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
        define_processor_functions arm1022, dabort=v4t_early_abort, pabort=legacy_pabort

        .section ".rodata"

        string  cpu_arch_name, "armv5te"
        string  cpu_elf_name, "v5"
        string  cpu_arm1022_name, "ARM1022"

        .align

        .section ".proc.info.init", #alloc

        .type   __arm1022_proc_info,#object
__arm1022_proc_info:
        .long   0x4105a220                      @ ARM 1022E (v5TE)
        .long   0xff0ffff0
        .long   PMD_TYPE_SECT | \
                PMD_BIT4 | \
                PMD_SECT_AP_WRITE | \
                PMD_SECT_AP_READ
        .long   PMD_TYPE_SECT | \
                PMD_BIT4 | \
                PMD_SECT_AP_WRITE | \
                PMD_SECT_AP_READ
        initfn  __arm1022_setup, __arm1022_proc_info
        .long   cpu_arch_name
        .long   cpu_elf_name
        .long   HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | HWCAP_EDSP
        .long   cpu_arm1022_name
        .long   arm1022_processor_functions
        .long   v4wbi_tlb_fns
        .long   v4wb_user_fns
        .long   arm1022_cache_fns
        .size   __arm1022_proc_info, . - __arm1022_proc_info
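
/*
 * Note (added for clarity): at boot the core is matched against this
 * record by masking the CP15 c0 Main ID register with the second word
 * and comparing with the first, i.e.
 *
 *      (main_id & 0xff0ffff0) == 0x4105a220
 *
 * which ignores the variant and revision fields.
 */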