linux/arch/arm/include/asm/futex.h
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#if defined(CONFIG_CPU_USE_DOMAINS) && defined(CONFIG_SMP)
/* ARM doesn't provide unprivileged exclusive memory accessors */
#include <asm-generic/futex.h>
#else

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

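/*
 * Shared exception-table plumbing for the user accesses below: the
 * instructions at labels 1 and 2 may fault, so both are registered
 * with the fixup at label 4, which loads err_reg (-EFAULT at every
 * call site) into the result register %0 and resumes at label 3,
 * just past the access sequence.
 */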
#define __futex_atomic_ex_table(err_reg)                        \
        "3:\n"                                                  \
        "       .pushsection __ex_table,\"a\"\n"                \
        "       .align  3\n"                                    \
        "       .long   1b, 4f, 2b, 4f\n"                       \
        "       .popsection\n"                                  \
        "       .pushsection .fixup,\"ax\"\n"                   \
        "4:     mov     %0, " err_reg "\n"                      \
        "       b       3b\n"                                   \
        "       .popsection"

#ifdef CONFIG_SMP

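/*
 * LDREX/STREX retry loop: load the current value into %1 (oldval),
 * let "insn" compute the new value in %0 from %1 and oparg (%4),
 * then try to store it. A failed STREX (another observer touched
 * the location) sets %2 (tmp) non-zero and restarts the loop; once
 * the store succeeds, %0 is cleared so the caller sees no error.
 */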
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        smp_mb();                                               \
        __asm__ __volatile__(                                   \
        "1:     ldrex   %1, [%3]\n"                             \
        "       " insn "\n"                                     \
        "2:     strex   %2, %0, [%3]\n"                         \
        "       teq     %2, #0\n"                               \
        "       bne     1b\n"                                   \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")

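/*
 * Compare the user word at uaddr with oldval and, only if they
 * match, atomically replace it with newval. Returns 0 unless the
 * user access faults (-EFAULT); callers detect a failed comparison
 * by checking *uval against oldval.
 */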
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        smp_mb();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     ldrex   %1, [%4]\n"
        "       teq     %1, %2\n"
        "       ite     eq      @ explicit IT needed for the 2b label\n"
        "2:     strexeq %0, %3, [%4]\n"
        "       movne   %0, #0\n"
        "       teq     %0, #0\n"
        "       bne     1b\n"
        __futex_atomic_ex_table("%5")
        : "=&r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        smp_mb();

        *uval = val;
        return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

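/*
 * UP variant: the exclusive monitors are not needed here, since
 * pagefault_disable() in the callers also disables preemption, so a
 * plain load/store pair cannot be interleaved with another task.
 * TUSER() selects the unprivileged T-variant accessors (ldrt/strt)
 * when CONFIG_CPU_USE_DOMAINS is enabled, so the kernel-mode access
 * is still checked against user-mode permissions.
 */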
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        __asm__ __volatile__(                                   \
        "1:     " TUSER(ldr) "  %1, [%3]\n"                     \
        "       " insn "\n"                                     \
        "2:     " TUSER(str) "  %0, [%3]\n"                     \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret = 0;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     " TUSER(ldr) "  %1, [%4]\n"
        "       teq     %1, %2\n"
        "       it      eq      @ explicit IT needed for the 2b label\n"
        "2:     " TUSER(streq) "        %3, [%4]\n"
        __futex_atomic_ex_table("%5")
        : "+r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");

        *uval = val;
        return ret;
}

#endif /* !SMP */

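/*
 * Decode and run a FUTEX_WAKE_OP operation word. The bit layout
 * matches the FUTEX_OP() macro in <linux/futex.h>:
 *
 *   op     = bits 31..28 (bit 31 doubling as FUTEX_OP_OPARG_SHIFT,
 *            which makes oparg a shift count: 1 << oparg)
 *   cmp    = bits 27..24
 *   oparg  = bits 23..12, sign-extended
 *   cmparg = bits 11..0,  sign-extended
 *
 * Illustrative example (not from this file): an encoded_op built as
 * FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_EQ, 0) atomically adds 1
 * to *uaddr and returns whether the old value equalled 0.
 */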
static inline int
futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        int oldval = 0, ret, tmp;

        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        pagefault_disable();    /* implies preempt_disable() */

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov  %0, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("add  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("orr  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("and  %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("eor  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();     /* subsumes preempt_enable() */

        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
}

#endif /* !(CPU_USE_DOMAINS && SMP) */
#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */