/* linux/arch/alpha/include/asm/futex.h */
   1/* SPDX-License-Identifier: GPL-2.0 */
   2#ifndef _ASM_ALPHA_FUTEX_H
   3#define _ASM_ALPHA_FUTEX_H
   4
   5#ifdef __KERNEL__
   6
   7#include <linux/futex.h>
   8#include <linux/uaccess.h>
   9#include <asm/errno.h>
  10#include <asm/barrier.h>
  11
/*
 * __futex_atomic_op - atomically apply @insn to the user word at @uaddr.
 *
 * Implements a load-locked/store-conditional retry loop:
 *   1: ldl_l loads the current 32-bit value into %0 (@oldval);
 *      @insn then computes the new value into %1 from %0 and %3 (@oparg)
 *   2: stl_c conditionally stores %1; if the store-conditional fails
 *      (%1 becomes 0), branch to 4, which jumps back to 1 to retry.
 *      The retry branch lives in .subsection 2 so the success path
 *      stays straight-line.
 *   On success "mov $31,%1" clears @ret ($31 is the Alpha zero register).
 *   The EXC() entries register exception-table fixups for the user-space
 *   accesses at 1 and 2: a fault resumes at label 3 with an error code
 *   (-EFAULT) placed in %1 (@ret) by the fixup handler.
 *
 * __ASM_SMP_MB emits the memory barrier the futex contract requires on
 * SMP builds (empty on UP).
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)      \
        __asm__ __volatile__(                                   \
                __ASM_SMP_MB                                    \
        "1:     ldl_l   %0,0(%2)\n"                             \
                insn                                            \
        "2:     stl_c   %1,0(%2)\n"                             \
        "       beq     %1,4f\n"                                \
        "       mov     $31,%1\n"                               \
        "3:     .subsection 2\n"                                \
        "4:     br      1b\n"                                   \
        "       .previous\n"                                    \
        EXC(1b,3b,$31,%1)                                       \
        EXC(2b,3b,$31,%1)                                       \
        :       "=&r" (oldval), "=&r"(ret)                      \
        :       "r" (uaddr), "r"(oparg)                         \
        :       "memory")
  28
/*
 * arch_futex_atomic_op_inuser - atomically perform futex op @op on the
 * user word @uaddr, returning the previous value through @oval.
 *
 * @op:    one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}; the matching Alpha
 *         instruction (mov/addl/or/andnot/xor) computes the new value
 *         from the old value and @oparg inside the LL/SC loop.
 * @oparg: operand for the selected operation.
 * @oval:  out parameter; written with the old value only on success.
 * @uaddr: user-space address of the futex word.
 *
 * Returns 0 on success, -EFAULT if the user access faults (set by the
 * exception fixup inside __futex_atomic_op), or -ENOSYS for an
 * unsupported @op.
 *
 * Page faults are disabled across the access: the caller (futex core)
 * handles -EFAULT by faulting the page in and retrying, so we must not
 * sleep here.
 */
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
                u32 __user *uaddr)
{
        int oldval = 0, ret;

        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov %3,%1\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("addl %0,%3,%1\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("or %0,%3,%1\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("andnot %0,%3,%1\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("xor %0,%3,%1\n", ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

        /* Only report the old value if the operation actually succeeded. */
        if (!ret)
                *oval = oldval;

        return ret;
}
  63
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user futex word.
 *
 * If the word at @uaddr equals @oldval, atomically replace it with
 * @newval.  The value actually found is always returned through @uval,
 * so the caller can detect a comparison mismatch.
 *
 * Returns 0 on success (including the compare-failed case) or -EFAULT
 * if the user access faults.
 *
 * Asm walkthrough (LL/SC loop):
 *   1: ldl_l loads the current value into %1 (prev)
 *      cmpeq compares it with the sign-extended @oldval (%4); on
 *      mismatch branch to 3 and fall through to the exit without
 *      storing.
 *      mov places @newval (%5) into %2
 *   2: stl_c conditionally stores; on SC failure branch to 4, which
 *      retries from 1 (retry path kept out of line in .subsection 2).
 *   The EXC() fixups cover the user accesses at 1 and 2: a fault
 *   resumes at 3 with -EFAULT written into %0 (ret).
 *
 * @oldval is passed as "(long)(int)oldval" so the 32-bit value is
 * sign-extended to match what ldl_l produces in a 64-bit register,
 * keeping cmpeq meaningful for values with the high bit set.
 *
 * __ASM_SMP_MB provides the required barrier on SMP.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret = 0, cmp;
        u32 prev;

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__ (
                __ASM_SMP_MB
        "1:     ldl_l   %1,0(%3)\n"
        "       cmpeq   %1,%4,%2\n"
        "       beq     %2,3f\n"
        "       mov     %5,%2\n"
        "2:     stl_c   %2,0(%3)\n"
        "       beq     %2,4f\n"
        "3:     .subsection 2\n"
        "4:     br      1b\n"
        "       .previous\n"
        EXC(1b,3b,$31,%0)
        EXC(2b,3b,$31,%0)
        :       "+r"(ret), "=&r"(prev), "=&r"(cmp)
        :       "r"(uaddr), "r"((long)(int)oldval), "r"(newval)
        :       "memory");

        *uval = prev;
        return ret;
}
  94
  95#endif /* __KERNEL__ */
  96#endif /* _ASM_ALPHA_FUTEX_H */
  97