linux/arch/arm64/mm/cache.S
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Cache maintenance
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>
#include <asm/asm-uaccess.h>

/*
 *      caches_clean_inval_pou_macro(start,end) [fixup]
 *
 *      Ensure that the I and D caches are coherent within the specified
 *      region. This is typically used when code has been written to a
 *      memory region, and will be executed.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 *      - fixup   - optional label to branch to on user fault
 */
.macro  caches_clean_inval_pou_macro, fixup
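// With CTR_EL0.IDC set, D-cache cleaning to the PoU is not required for
// instruction/data coherence: a dsb to complete earlier stores suffices,
// so the dsb and branch are patched in and the clean-by-VA loop is skipped.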
alternative_if ARM64_HAS_CACHE_IDC
        dsb     ishst
        b       .Ldc_skip_\@
alternative_else_nop_endif
        mov     x2, x0
        mov     x3, x1
        dcache_by_line_op cvau, ish, x2, x3, x4, x5, \fixup
.Ldc_skip_\@:
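// With CTR_EL0.DIC set, I-cache invalidation to the PoU is not required
// for instruction/data coherence: an isb to resynchronise instruction
// fetch suffices, and the invalidate-by-VA loop is skipped.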
alternative_if ARM64_HAS_CACHE_DIC
        isb
        b       .Lic_skip_\@
alternative_else_nop_endif
        invalidate_icache_by_line x0, x1, x2, x3, \fixup
.Lic_skip_\@:
.endm

/*
 *      caches_clean_inval_pou(start,end)
 *
 *      Ensure that the I and D caches are coherent within the specified
 *      region. This is typically used when code has been written to a
 *      memory region, and will be executed.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 */
SYM_FUNC_START(caches_clean_inval_pou)
        caches_clean_inval_pou_macro
        ret
SYM_FUNC_END(caches_clean_inval_pou)
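// Note: typically reached via flush_icache_range() after the kernel has
// modified executable text (e.g. module loading or kprobes); the call
// sites live in the C headers, not in this file.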

/*
 *      caches_clean_inval_user_pou(start,end)
 *
 *      Ensure that the I and D caches are coherent within the specified
 *      region. This is typically used when code has been written to a
 *      user memory region, and will be executed.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 *
 *      Returns 0 on success, or -EFAULT if a user space fault occurred.
 */
SYM_FUNC_START(caches_clean_inval_user_pou)
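        // With CONFIG_ARM64_SW_TTBR0_PAN, user mappings are normally
        // unreachable from the kernel; temporarily restore TTBR0 so the
        // maintenance loops can run on user addresses.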
        uaccess_ttbr0_enable x2, x3, x4

        caches_clean_inval_pou_macro 2f
        mov     x0, xzr
1:
        uaccess_ttbr0_disable x1, x2
        ret
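        // Fixup path: a fault on a user address in the loops above lands
        // here; report -EFAULT after disabling user access again.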
2:
        mov     x0, #-EFAULT
        b       1b
SYM_FUNC_END(caches_clean_inval_user_pou)

/*
 *      icache_inval_pou(start,end)
 *
 *      Ensure that the I-cache is invalidated for the specified region.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 */
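// Only the instruction side is touched here; the caller is expected to
// have already made the data side visible to the PoU where needed.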
SYM_FUNC_START(icache_inval_pou)
alternative_if ARM64_HAS_CACHE_DIC
        isb
        ret
alternative_else_nop_endif

        invalidate_icache_by_line x0, x1, x2, x3
        ret
SYM_FUNC_END(icache_inval_pou)

/*
 *      dcache_clean_inval_poc(start, end)
 *
 *      Ensure that any D-cache lines for the interval [start, end)
 *      are cleaned and invalidated to the PoC.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_inval_poc)
        dcache_by_line_op civac, sy, x0, x1, x2, x3
        ret
SYM_FUNC_END(__pi_dcache_clean_inval_poc)
SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)
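// The __pi_ symbol is an alias of the same body so that position
// independent callers (e.g. the early boot code) can use these routines
// as well; the pattern repeats for the other PoC helpers below.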

/*
 *      dcache_clean_pou(start, end)
 *
 *      Ensure that any D-cache lines for the interval [start, end)
 *      are cleaned to the PoU.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 */
SYM_FUNC_START(dcache_clean_pou)
alternative_if ARM64_HAS_CACHE_IDC
        dsb     ishst
        ret
alternative_else_nop_endif
        dcache_by_line_op cvau, ish, x0, x1, x2, x3
        ret
SYM_FUNC_END(dcache_clean_pou)

/*
 *      dcache_inval_poc(start, end)
 *
 *      Ensure that any D-cache lines for the interval [start, end)
 *      are invalidated. Any partial lines at the ends of the interval are
 *      also cleaned to PoC to prevent data loss.
 *
 *      - start   - kernel start address of region
 *      - end     - kernel end address of region
 */
SYM_FUNC_START(__pi_dcache_inval_poc)
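        // A start or end that is not cache-line aligned shares its line
        // with data outside the region, so those lines are cleaned and
        // invalidated (dc civac) rather than just invalidated (dc ivac),
        // to avoid throwing away the neighbouring data.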
        dcache_line_size x2, x3
        sub     x3, x2, #1
        tst     x1, x3                          // end cache line aligned?
        bic     x1, x1, x3
        b.eq    1f
        dc      civac, x1                       // clean & invalidate D / U line
1:      tst     x0, x3                          // start cache line aligned?
        bic     x0, x0, x3
        b.eq    2f
        dc      civac, x0                       // clean & invalidate D / U line
        b       3f
2:      dc      ivac, x0                        // invalidate D / U line
3:      add     x0, x0, x2
        cmp     x0, x1
        b.lo    2b
        dsb     sy
        ret
SYM_FUNC_END(__pi_dcache_inval_poc)
SYM_FUNC_ALIAS(dcache_inval_poc, __pi_dcache_inval_poc)

/*
 *      dcache_clean_poc(start, end)
 *
 *      Ensure that any D-cache lines for the interval [start, end)
 *      are cleaned to the PoC.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_poc)
        dcache_by_line_op cvac, sy, x0, x1, x2, x3
        ret
SYM_FUNC_END(__pi_dcache_clean_poc)
SYM_FUNC_ALIAS(dcache_clean_poc, __pi_dcache_clean_poc)

/*
 *      dcache_clean_pop(start, end)
 *
 *      Ensure that any D-cache lines for the interval [start, end)
 *      are cleaned to the PoP.
 *
 *      - start   - virtual start address of region
 *      - end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_pop)
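        // Without FEAT_DPB (the ARMv8.2 DC CVAP instruction) there is no
        // direct clean to the Point of Persistence; cleaning to the PoC
        // is the best fallback available.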
        alternative_if_not ARM64_HAS_DCPOP
        b       dcache_clean_poc
        alternative_else_nop_endif
        dcache_by_line_op cvap, sy, x0, x1, x2, x3
        ret
SYM_FUNC_END(__pi_dcache_clean_pop)
SYM_FUNC_ALIAS(dcache_clean_pop, __pi_dcache_clean_pop)

/*
 *      __dma_flush_area(start, size)
 *
 *      Clean and invalidate the given region to the PoC.
 *
 *      - start   - virtual start address of region
 *      - size    - size of region in bytes
 */
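// Used, e.g., when preparing a buffer for non-coherent DMA, where no
// dirty or stale lines may remain above the PoC.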
SYM_FUNC_START(__pi___dma_flush_area)
        add     x1, x0, x1
        dcache_by_line_op civac, sy, x0, x1, x2, x3
        ret
SYM_FUNC_END(__pi___dma_flush_area)
SYM_FUNC_ALIAS(__dma_flush_area, __pi___dma_flush_area)

/*
 *      __dma_map_area(start, size, dir)
 *      - start - kernel virtual start address
 *      - size  - size of region
 *      - dir   - DMA direction
 */
SYM_FUNC_START(__pi___dma_map_area)
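        // Clean to the PoC regardless of direction: this publishes CPU
        // writes to the device and ensures no dirty line can later be
        // evicted on top of data the device writes ("dir" is unused).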
        add     x1, x0, x1
        b       __pi_dcache_clean_poc
SYM_FUNC_END(__pi___dma_map_area)
SYM_FUNC_ALIAS(__dma_map_area, __pi___dma_map_area)

/*
 *      __dma_unmap_area(start, size, dir)
 *      - start - kernel virtual start address
 *      - size  - size of region
 *      - dir   - DMA direction
 */
SYM_FUNC_START(__pi___dma_unmap_area)
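        // Unless the transfer was DMA_TO_DEVICE, the device may have
        // written to the buffer, so stale lines must be invalidated for
        // the CPU to observe the new data; a TO_DEVICE unmap needs no
        // maintenance at all.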
        add     x1, x0, x1
        cmp     w2, #DMA_TO_DEVICE
        b.ne    __pi_dcache_inval_poc
        ret
SYM_FUNC_END(__pi___dma_unmap_area)
SYM_FUNC_ALIAS(__dma_unmap_area, __pi___dma_unmap_area)