linux/arch/x86/include/asm/futex.h
#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>

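/*
 * Helper macros for futex_atomic_op_inuser() below.  Both perform one
 * atomic read-modify-write on a 32-bit user-space futex word:
 * __futex_atomic_op1 is for instructions that fetch the old value and
 * apply the operation in a single step (xchgl, xaddl), while
 * __futex_atomic_op2 emulates the remaining operations with a
 * load/modify/"lock cmpxchgl" retry loop.  A fault on the user address
 * is routed through the exception table into the .fixup code, which
 * turns it into -EFAULT.
 */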
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)     \
        asm volatile("1:\t" insn "\n"                           \
                     "2:\t.section .fixup,\"ax\"\n"             \
                     "3:\tmov\t%3, %1\n"                        \
                     "\tjmp\t2b\n"                              \
                     "\t.previous\n"                            \
                     _ASM_EXTABLE(1b, 3b)                       \
                     : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
                     : "i" (-EFAULT), "0" (oparg), "1" (0))
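/*
 * Operand map for __futex_atomic_op1 above (an illustrative reading of
 * the constraints): %0 = oldval, pre-loaded with oparg; %1 = ret,
 * pre-loaded with 0; %2 = the user futex word; %3 = the -EFAULT
 * immediate.  So, for example,
 *
 *	__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
 *
 * swaps oparg into *uaddr, leaves the previous contents in oldval and
 * lets the fixup path set ret to -EFAULT if the access faults.
 */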

#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)     \
        asm volatile("1:\tmovl  %2, %0\n"                       \
                     "\tmovl\t%0, %3\n"                         \
                     "\t" insn "\n"                             \
                     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"     \
                     "\tjnz\t1b\n"                              \
                     "3:\t.section .fixup,\"ax\"\n"             \
                     "4:\tmov\t%5, %1\n"                        \
                     "\tjmp\t3b\n"                              \
                     "\t.previous\n"                            \
                     _ASM_EXTABLE(1b, 4b)                       \
                     _ASM_EXTABLE(2b, 4b)                       \
                     : "=&a" (oldval), "=&r" (ret),             \
                       "+m" (*uaddr), "=&r" (tem)               \
                     : "r" (oparg), "i" (-EFAULT), "1" (0))
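/*
 * Operand map for __futex_atomic_op2 above (illustrative): %0 = oldval,
 * fixed to %eax; %1 = ret, pre-loaded with 0; %2 = the user futex word;
 * %3 = tem, a scratch register; %4 = oparg; %5 = the -EFAULT immediate.
 * The sequence loads *uaddr into %eax, copies it to tem, applies insn
 * (e.g. "orl %4, %3") to tem and tries to "lock cmpxchgl" it back,
 * restarting from the load if another task changed the word in between.
 */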

static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;    /* sign-extend bits 23..12 */
        int cmparg = (encoded_op << 20) >> 20;  /* sign-extend bits 11..0 */
        int oldval = 0, ret, tem;

        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
        /* Real i386 machines can only support FUTEX_OP_SET */
        if (op != FUTEX_OP_SET && boot_cpu_data.x86 == 3)
                return -ENOSYS;
#endif

        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
                                   uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ:
                        ret = (oldval == cmparg);
                        break;
                case FUTEX_OP_CMP_NE:
                        ret = (oldval != cmparg);
                        break;
                case FUTEX_OP_CMP_LT:
                        ret = (oldval < cmparg);
                        break;
                case FUTEX_OP_CMP_GE:
                        ret = (oldval >= cmparg);
                        break;
                case FUTEX_OP_CMP_LE:
                        ret = (oldval <= cmparg);
                        break;
                case FUTEX_OP_CMP_GT:
                        ret = (oldval > cmparg);
                        break;
                default:
                        ret = -ENOSYS;
                }
        }
        return ret;
}
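
/*
 * Usage sketch (illustrative, not part of this header): the encoded_op
 * word decoded at the top of futex_atomic_op_inuser() is normally built
 * with the FUTEX_OP() macro from <linux/futex.h>, which packs op into
 * bits 31-28, cmp into bits 27-24, oparg into bits 23-12 and cmparg
 * into bits 11-0; the shifts above undo that packing, with the
 * arithmetic right shifts sign-extending the two 12-bit argument
 * fields.  For example, "set *uaddr to 1 and report whether the old
 * value was 0" could be encoded as
 *
 *	encoded_op = FUTEX_OP(FUTEX_OP_SET, 1, FUTEX_OP_CMP_EQ, 0);
 *
 * A positive return then means the comparison on the old value was
 * true, zero means it was false, and a negative return is -EFAULT or
 * -ENOSYS.
 */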

static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                                                u32 oldval, u32 newval)
{
        int ret = 0;

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
        /* Real i386 machines have no cmpxchg instruction */
        if (boot_cpu_data.x86 == 3)
                return -ENOSYS;
#endif

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        asm volatile("1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
                     "2:\t.section .fixup, \"ax\"\n"
                     "3:\tmov     %3, %0\n"
                     "\tjmp     2b\n"
                     "\t.previous\n"
                     _ASM_EXTABLE(1b, 3b)
                     : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
                     : "i" (-EFAULT), "r" (newval), "1" (oldval)
                     : "memory"
        );

        *uval = oldval;
        return ret;
}
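
/*
 * Caller sketch (an illustration, not code from this file): the generic
 * futex code uses this helper roughly as follows when it must update a
 * user-space futex word atomically:
 *
 *	u32 curval;
 *
 *	if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, oldval, newval))
 *		return -EFAULT;
 *	if (curval != oldval)
 *		goto retry;
 *
 * A non-zero return means the user access faulted; curval differing
 * from oldval means another task changed the word first, so the caller
 * rereads it and retries.
 */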

#endif /* __KERNEL__ */
#endif /* _ASM_X86_FUTEX_H */