linux/arch/hexagon/include/asm/futex.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_HEXAGON_FUTEX_H
#define _ASM_HEXAGON_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

/* XXX TODO-- need to add sync barriers! */

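/*
 * Hexagon builds its atomics from a load-locked/store-conditional
 * pair: "Rd = memw_locked(Rs)" loads a word and takes out a
 * reservation on the address, and "memw_locked(Rs,Pd) = Rt" stores
 * only if the reservation is still held, setting predicate Pd on
 * success.  The __ex_table entries emitted below pair each
 * user-memory access (labels 1: and 2:) with the fixup code at
 * label 4:, so a fault on *uaddr is turned into -EFAULT rather
 * than a kernel oops.
 */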
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
        __asm__ __volatile__( \
        "1: %0 = memw_locked(%3);\n" \
            /* For example: %1 = %4 */ \
            insn \
        "2: memw_locked(%3,p2) = %1;\n" \
        "   if (!p2) jump 1b;\n" \
        "   %1 = #0;\n" \
        "3:\n" \
        ".section .fixup,\"ax\"\n" \
        "4: %1 = #%5;\n" \
        "   jump ##3b\n" \
        ".previous\n" \
        ".section __ex_table,\"a\"\n" \
        ".long 1b,4b,2b,4b\n" \
        ".previous\n" \
        : "=&r" (oldval), "=&r" (ret), "+m" (*uaddr) \
        : "r" (uaddr), "r" (oparg), "i" (-EFAULT) \
        : "p2", "memory")
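
/*
 * Illustrative expansion (derived from the FUTEX_OP_ADD case below,
 * where insn is "%1 = add(%0,%4)\n"); the retry loop is roughly:
 *
 *      1: %0 = memw_locked(%3)         // load-locked old value
 *         %1 = add(%0,%4)              // compute new value from oparg
 *      2: memw_locked(%3,p2) = %1      // store-conditional new value
 *         if (!p2) jump 1b             // reservation lost: retry
 *         %1 = #0                      // success: ret = 0
 *
 * A fault at label 1: or 2: jumps to the fixup, which sets
 * ret = -EFAULT instead.
 */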

static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
        int oldval = 0, ret;

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("%1 = %4\n", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("%1 = add(%0,%4)\n", ret, oldval, uaddr,
                                  oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("%1 = or(%0,%4)\n", ret, oldval, uaddr,
                                  oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("%1 = not(%4); %1 = and(%0,%1)\n", ret,
                                  oldval, uaddr, oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("%1 = xor(%0,%4)\n", ret, oldval, uaddr,
                                  oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        if (!ret)
                *oval = oldval;

        return ret;
}
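
/*
 * Usage sketch (illustrative only, not part of this header): the
 * generic futex code is the intended caller, in roughly this shape,
 * with page faults disabled around the atomic op:
 *
 *      int oldval, ret;
 *
 *      pagefault_disable();
 *      ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, &oldval, uaddr);
 *      pagefault_enable();
 *      if (!ret)
 *              ...  // oldval holds the value *uaddr had before the add
 */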

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
                              u32 newval)
{
        int prev;
        int ret = 0;    /* only the fault fixup below writes ret */

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__ (
        "1: %1 = memw_locked(%3)\n"
        "   {\n"
        "      p2 = cmp.eq(%1,%4)\n"
        "      if (!p2.new) jump:NT 3f\n"
        "   }\n"
        "2: memw_locked(%3,p2) = %5\n"
        "   if (!p2) jump 1b\n"
        "3:\n"
        ".section .fixup,\"ax\"\n"
        "4: %0 = #%6\n"
        "   jump ##3b\n"
        ".previous\n"
        ".section __ex_table,\"a\"\n"
        ".long 1b,4b,2b,4b\n"
        ".previous\n"
        : "+r" (ret), "=&r" (prev), "+m" (*uaddr)
        : "r" (uaddr), "r" (oldval), "r" (newval), "i"(-EFAULT)
        : "p2", "memory");

        *uval = prev;
        return ret;
}
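
/*
 * Semantics sketch (illustrative): on a 0 return, *uval holds the
 * value observed at *uaddr, and the store of newval happened only if
 * that value equalled oldval.  A typical caller would look like:
 *
 *      u32 curval;
 *
 *      if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, old, new))
 *              return -EFAULT;         // faulted on the user address
 *      if (curval != old)
 *              ...  // lost a race with another updater; retry or bail
 */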

#endif /* __KERNEL__ */
#endif /* _ASM_HEXAGON_FUTEX_H */