/* linux/arch/alpha/include/asm/futex.h */
   1/* SPDX-License-Identifier: GPL-2.0 */
   2#ifndef _ASM_ALPHA_FUTEX_H
   3#define _ASM_ALPHA_FUTEX_H
   4
   5#ifdef __KERNEL__
   6
   7#include <linux/futex.h>
   8#include <linux/uaccess.h>
   9#include <asm/errno.h>
  10#include <asm/barrier.h>
  11
/*
 * __futex_atomic_op - LL/SC read-modify-write on a user-space futex word.
 *
 * Loads the 32-bit word at @uaddr with ldl_l, lets @insn compute the new
 * value into %1 from the old value (%0) and @oparg (%3), then attempts the
 * store with stl_c.  A failed store-conditional branches to label 4 (placed
 * out of line in .subsection 2) which retries from label 1.  On success,
 * "mov $31,%1" clears @ret ($31 is the Alpha zero register).  If either
 * user access faults, the EXC fixup entries redirect execution to label 3
 * (the fall-through exit) with the error code written into %1 (@ret).
 *
 * __ASM_SMP_MB presumably expands to a memory barrier on SMP builds — see
 * asm/barrier.h.
 *
 * Operands: %0 = @oldval (value read from user memory),
 *           %1 = @ret (new value for stl_c, store status, final error code),
 *           %2 = @uaddr, %3 = @oparg.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)      \
	__asm__ __volatile__(                                   \
		__ASM_SMP_MB                                    \
	"1:     ldl_l   %0,0(%2)\n"                             \
		insn                                            \
	"2:     stl_c   %1,0(%2)\n"                             \
	"       beq     %1,4f\n"                                \
	"       mov     $31,%1\n"                               \
	"3:     .subsection 2\n"                                \
	"4:     br      1b\n"                                   \
	"       .previous\n"                                    \
	EXC(1b,3b,$31,%1)                                       \
	EXC(2b,3b,$31,%1)                                       \
	:       "=&r" (oldval), "=&r"(ret)                      \
	:       "r" (uaddr), "r"(oparg)                         \
	:       "memory")
  28
  29static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
  30                u32 __user *uaddr)
  31{
  32        int oldval = 0, ret;
  33
  34        if (!access_ok(uaddr, sizeof(u32)))
  35                return -EFAULT;
  36
  37        switch (op) {
  38        case FUTEX_OP_SET:
  39                __futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
  40                break;
  41        case FUTEX_OP_ADD:
  42                __futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
  43                break;
  44        case FUTEX_OP_OR:
  45                __futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
  46                break;
  47        case FUTEX_OP_ANDN:
  48                __futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
  49                break;
  50        case FUTEX_OP_XOR:
  51                __futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
  52                break;
  53        default:
  54                ret = -ENOSYS;
  55        }
  56
  57        if (!ret)
  58                *oval = oldval;
  59
  60        return ret;
  61}
  62
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user futex word.
 *
 * Atomically: load the 32-bit word at @uaddr; if it equals @oldval, store
 * @newval.  The value found at @uaddr is reported through @uval so the
 * caller can detect a compare mismatch (*uval != oldval).
 *
 * Returns 0 on success or compare mismatch, -EFAULT if @uaddr is not a
 * valid user pointer or the access faults.  NOTE(review): on a fault at
 * the initial load, *uval is still written from the never-loaded 'prev'.
 *
 * ASM notes:
 *   %0 = ret   (starts 0; EXC fixups write the fault error code here)
 *   %1 = prev  (value loaded from *uaddr by ldl_l)
 *   %2 = cmp   (comparison result, then reused as the stl_c source)
 *   %3 = uaddr, %4 = oldval, %5 = newval
 * A compare mismatch branches to label 3 (the exit); a failed
 * store-conditional branches to the out-of-line label 4, which retries
 * from label 1.  @oldval is passed as (long)(int)oldval so cmpeq sees the
 * same sign-extended 64-bit form that ldl_l produces for a 32-bit load.
 * __ASM_SMP_MB is presumably an SMP memory barrier — see asm/barrier.h.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0, cmp;
	u32 prev;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__ (
		__ASM_SMP_MB
	"1:     ldl_l   %1,0(%3)\n"
	"       cmpeq   %1,%4,%2\n"
	"       beq     %2,3f\n"
	"       mov     %5,%2\n"
	"2:     stl_c   %2,0(%3)\n"
	"       beq     %2,4f\n"
	"3:     .subsection 2\n"
	"4:     br      1b\n"
	"       .previous\n"
	EXC(1b,3b,$31,%0)
	EXC(2b,3b,$31,%0)
	:       "+r"(ret), "=&r"(prev), "=&r"(cmp)
	:       "r"(uaddr), "r"((long)(int)oldval), "r"(newval)
	:       "memory");

	*uval = prev;
	return ret;
}
  93
  94#endif /* __KERNEL__ */
  95#endif /* _ASM_ALPHA_FUTEX_H */
  96