/* linux/arch/arm64/lib/clear_user.S */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2021 Arm Ltd.
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

	.text

/* Prototype: int __arch_clear_user(void *addr, size_t sz)
 * Purpose  : clear some user memory
 * Params   : addr - user memory address to clear
 *          : sz   - number of bytes to clear
 * Returns  : number of bytes NOT cleared
 *
 * Alignment fixed up by hardware.
 */
	.p2align 4
	// Alignment is for the loop, but since the prologue (including BTI)
	// is also 16 bytes we can keep any padding outside the function
SYM_FUNC_START(__arch_clear_user)
	// x0 = current store address (also the fixup cursor), x1 = bytes
	// remaining minus 8 after the subs below, x2 = one past the end of
	// the buffer. Every store is an unprivileged STTR* wrapped in
	// USER(label, ...) so a fault branches to the .fixup entry that
	// computes the not-cleared byte count.
	add	x2, x0, x1		// x2 = addr + sz (end of buffer)
	subs	x1, x1, #8		// x1 = sz - 8; negative when sz < 8
	b.mi	2f			// sz < 8: small-size tail handling
1:	// Main loop: clear 8 bytes per iteration from the front.
USER(9f, sttr	xzr, [x0])
	add	x0, x0, #8
	subs	x1, x1, #8
	b.hi	1b
	// Final 1-8 bytes: one doubleword store anchored at the end of the
	// buffer, possibly overlapping the last loop iteration's store.
USER(9f, sttr	xzr, [x2, #-8])
	mov	x0, #0			// success: 0 bytes NOT cleared
	ret

	// sz is 0-7 here. Bit 2 of x1 (== bit 2 of sz) set means 4-7
	// bytes: cover them with two (possibly overlapping) word stores,
	// one at the start and one anchored at the end.
2:	tbz	x1, #2, 3f
USER(9f, sttr	wzr, [x0])
USER(8f, sttr	wzr, [x2, #-4])
	mov	x0, #0
	ret

	// sz is 0-3: a halfword store at the start if bit 1 of sz is set,
	// then a byte store at the end if bit 0 of sz is set.
3:	tbz	x1, #1, 4f
USER(9f, sttrh	wzr, [x0])
4:	tbz	x1, #0, 5f
USER(7f, sttrb	wzr, [x2, #-1])
5:	mov	x0, #0
	ret
SYM_FUNC_END(__arch_clear_user)
EXPORT_SYMBOL(__arch_clear_user)
  50
	.section .fixup,"ax"
	.align	2
	// Fault handlers: return the number of bytes NOT cleared. x2 still
	// holds the end of the buffer; x0 holds the address base used by
	// the faulting store. The entries deliberately fall through so the
	// final `sub x0, x2, x0` serves all three cases:
	//   9: plain case — remaining = end - current address
	//   8: fault on the end-anchored word store ([x2, #-4]) after the
	//      word at [x0] succeeded — advance x0 past the 4 cleared bytes
	//   7: fault on the final byte store ([x2, #-1]) — bias x0 so the
	//      fall-through yields exactly 1 remaining byte
7:	sub	x0, x2, #5	// Adjust for faulting on the final byte...
8:	add	x0, x0, #4	// ...or the second word of the 4-7 byte case
9:	sub	x0, x2, x0	// bytes remaining = end - adjusted cursor
	ret
	.previous
  58