linux/arch/arc/include/asm/futex.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * Vineetg: August 2010: From Android kernel work
 */

#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

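/*
 * Futex helpers for ARC: an atomic read-modify-write primitive backing the
 * FUTEX_OP_* operations and a cmpxchg on a user-space word. Each comes in
 * two flavours selected by CONFIG_ARC_HAS_LLSC: LLOCK/SCOND based, or plain
 * LD/ST guarded by preempt_disable().
 */
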
#ifdef CONFIG_ARC_HAS_LLSC

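/*
 * LLSC variant: load the user word with LLOCK into %1 (oldval), apply "insn"
 * into %0 and try to commit it with SCOND, retrying from 1: if the
 * reservation was lost. Labels 1: and 2: are listed in __ex_table so that a
 * fault on the user access jumps to the fixup at 4:, which returns -EFAULT
 * in %0 (ret).
 */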
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
                                                        \
        smp_mb();                                       \
        __asm__ __volatile__(                           \
        "1:     llock   %1, [%2]                \n"     \
                insn                            "\n"    \
        "2:     scond   %0, [%2]                \n"     \
        "       bnz     1b                      \n"     \
        "       mov %0, 0                       \n"     \
        "3:                                     \n"     \
        "       .section .fixup,\"ax\"          \n"     \
        "       .align  4                       \n"     \
        "4:     mov %0, %4                      \n"     \
        "       j   3b                          \n"     \
        "       .previous                       \n"     \
        "       .section __ex_table,\"a\"       \n"     \
        "       .align  4                       \n"     \
        "       .word   1b, 4b                  \n"     \
        "       .word   2b, 4b                  \n"     \
        "       .previous                       \n"     \
                                                        \
        : "=&r" (ret), "=&r" (oldval)                   \
        : "r" (uaddr), "r" (oparg), "ir" (-EFAULT)      \
        : "cc", "memory");                              \
        smp_mb()                                        \

#else   /* !CONFIG_ARC_HAS_LLSC */

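/*
 * Non-LLSC variant: a plain LD/ST read-modify-write. Atomicity of the r-m-w
 * relies on the preempt_disable() done by the callers below; the same
 * fixup/__ex_table scheme turns a faulting user access into -EFAULT.
 */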
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
                                                        \
        smp_mb();                                       \
        __asm__ __volatile__(                           \
        "1:     ld      %1, [%2]                \n"     \
                insn                            "\n"    \
        "2:     st      %0, [%2]                \n"     \
        "       mov %0, 0                       \n"     \
        "3:                                     \n"     \
        "       .section .fixup,\"ax\"          \n"     \
        "       .align  4                       \n"     \
        "4:     mov %0, %4                      \n"     \
        "       j   3b                          \n"     \
        "       .previous                       \n"     \
        "       .section __ex_table,\"a\"       \n"     \
        "       .align  4                       \n"     \
        "       .word   1b, 4b                  \n"     \
        "       .word   2b, 4b                  \n"     \
        "       .previous                       \n"     \
                                                        \
        : "=&r" (ret), "=&r" (oldval)                   \
        : "r" (uaddr), "r" (oparg), "ir" (-EFAULT)      \
        : "cc", "memory");                              \
        smp_mb()                                        \

#endif

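/*
 * Atomically perform the FUTEX_OP_* operation @op with operand @oparg on the
 * user word at @uaddr. On success the previous value is stored in @oval and
 * 0 is returned; -EFAULT is returned on a faulting user access and -ENOSYS
 * for an unknown op.
 */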
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
                u32 __user *uaddr)
{
        int oldval = 0, ret;

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
        preempt_disable();      /* to guarantee atomic r-m-w of futex op */
#endif

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                /* oldval = *uaddr; *uaddr += oparg */
                __futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("or  %0, %1, %3", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

#ifndef CONFIG_ARC_HAS_LLSC
        preempt_enable();
#endif

        if (!ret)
                *oval = oldval;

        return ret;
}

/*
 * cmpxchg of futex (pagefaults disabled by caller)
 * Return 0 for success, -EFAULT otherwise
 */
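/*
 * Note: if the current value does not match @expval, the store is skipped
 * (branch to 3f), ret stays 0 and the observed value is still returned via
 * @uval, so the caller detects the failed compare by inspecting *uval.
 */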
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 expval,
                              u32 newval)
{
        int ret = 0;
        u32 existval;

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
        preempt_disable();      /* to guarantee atomic r-m-w of futex op */
#endif
        smp_mb();

        __asm__ __volatile__(
#ifdef CONFIG_ARC_HAS_LLSC
        "1:     llock   %1, [%4]                \n"
        "       brne    %1, %2, 3f              \n"
        "2:     scond   %3, [%4]                \n"
        "       bnz     1b                      \n"
#else
        "1:     ld      %1, [%4]                \n"
        "       brne    %1, %2, 3f              \n"
        "2:     st      %3, [%4]                \n"
#endif
        "3:     \n"
        "       .section .fixup,\"ax\"  \n"
        "4:     mov %0, %5      \n"
        "       j   3b  \n"
        "       .previous       \n"
        "       .section __ex_table,\"a\"       \n"
        "       .align  4       \n"
        "       .word   1b, 4b  \n"
        "       .word   2b, 4b  \n"
        "       .previous\n"
        : "+&r"(ret), "=&r"(existval)
        : "r"(expval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
        : "cc", "memory");

        smp_mb();

#ifndef CONFIG_ARC_HAS_LLSC
        preempt_enable();
#endif
        *uval = existval;
        return ret;
}
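
/*
 * Usage sketch (illustrative, not part of this header): the generic futex
 * code invokes this helper with pagefaults disabled, roughly:
 *
 *      u32 curval;
 *      pagefault_disable();
 *      ret = futex_atomic_cmpxchg_inatomic(&curval, uaddr, oldval, newval);
 *      pagefault_enable();
 *
 * A non-zero return here only ever means the user access faulted; a value
 * mismatch shows up as ret == 0 with curval != oldval.
 */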

#endif