linux/arch/x86/include/asm/futex.h
#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
#include <asm/system.h>

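/*
 * __futex_atomic_op1() handles the ops that map to a single atomic
 * instruction (xchgl for FUTEX_OP_SET, lock xaddl for FUTEX_OP_ADD).
 * A fault in the user access is routed by the exception table to the
 * fixup at 3:, which stores -EFAULT in ret and resumes at 2:.
 */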
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)     \
        asm volatile("1:\t" insn "\n"                           \
                     "2:\t.section .fixup,\"ax\"\n"             \
                     "3:\tmov\t%3, %1\n"                        \
                     "\tjmp\t2b\n"                              \
                     "\t.previous\n"                            \
                     _ASM_EXTABLE(1b, 3b)                       \
                     : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
                     : "i" (-EFAULT), "0" (oparg), "1" (0))

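/*
 * __futex_atomic_op2() handles the ops with no single-instruction form
 * (or/andn/xor): load the old value, apply "insn" to the temporary in
 * %3, then commit it with lock cmpxchgl, retrying from the load if the
 * user word changed in the meantime.  A fault at either user access is
 * fixed up to -EFAULT in ret.
 */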
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)     \
        asm volatile("1:\tmovl  %2, %0\n"                       \
                     "\tmovl\t%0, %3\n"                         \
                     "\t" insn "\n"                             \
                     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"     \
                     "\tjnz\t1b\n"                              \
                     "3:\t.section .fixup,\"ax\"\n"             \
                     "4:\tmov\t%5, %1\n"                        \
                     "\tjmp\t3b\n"                              \
                     "\t.previous\n"                            \
                     _ASM_EXTABLE(1b, 4b)                       \
                     _ASM_EXTABLE(2b, 4b)                       \
                     : "=&a" (oldval), "=&r" (ret),             \
                       "+m" (*uaddr), "=&r" (tem)               \
                     : "r" (oparg), "i" (-EFAULT), "1" (0))

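/*
 * Decode and execute one FUTEX_WAKE_OP operation on the user word at
 * uaddr.  The encoding (FUTEX_OP() in linux/futex.h) packs the op and
 * the FUTEX_OP_OPARG_SHIFT flag into the top four bits, the comparison
 * into bits 24-27, and the sign-extended 12-bit oparg and cmparg into
 * bits 12-23 and 0-11.  For example, FUTEX_OP(FUTEX_OP_ADD, 1,
 * FUTEX_OP_CMP_GT, 0) means "add 1, report whether the old value was
 * greater than 0".  Returns the boolean comparison result on success,
 * or a negative error code.
 */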
static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        int oldval = 0, ret, tem;

        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
                return -EFAULT;

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
        /* Real i386 machines can only support FUTEX_OP_SET */
        if (op != FUTEX_OP_SET && boot_cpu_data.x86 == 3)
                return -ENOSYS;
#endif

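        /*
         * The user access runs with page faults disabled: a missing page
         * is not faulted in here but makes the access fail through the
         * exception-table fixups above, so the operation returns -EFAULT
         * and the caller can fault the page in and retry.
         */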
        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
                                   uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

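        /*
         * The atomic op succeeded: report whether the value that was in
         * *uaddr before the operation satisfies the requested comparison
         * against cmparg; this decides whether the second futex of a
         * FUTEX_WAKE_OP call gets woken.
         */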
        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ:
                        ret = (oldval == cmparg);
                        break;
                case FUTEX_OP_CMP_NE:
                        ret = (oldval != cmparg);
                        break;
                case FUTEX_OP_CMP_LT:
                        ret = (oldval < cmparg);
                        break;
                case FUTEX_OP_CMP_GE:
                        ret = (oldval >= cmparg);
                        break;
                case FUTEX_OP_CMP_LE:
                        ret = (oldval <= cmparg);
                        break;
                case FUTEX_OP_CMP_GT:
                        ret = (oldval > cmparg);
                        break;
                default:
                        ret = -ENOSYS;
                }
        }
        return ret;
}

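/*
 * Compare the user word at uaddr with oldval and, if they match, store
 * newval.  Returns the value read from *uaddr, which the caller compares
 * against oldval to see whether the exchange happened, or -EFAULT if the
 * user access faulted.
 */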
static inline int futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval,
                                                int newval)
{

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
        /* Real i386 machines have no cmpxchg instruction */
        if (boot_cpu_data.x86 == 3)
                return -ENOSYS;
#endif

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
                return -EFAULT;

        asm volatile("1:\t" LOCK_PREFIX "cmpxchgl %3, %1\n"
                     "2:\t.section .fixup, \"ax\"\n"
                     "3:\tmov     %2, %0\n"
                     "\tjmp     2b\n"
                     "\t.previous\n"
                     _ASM_EXTABLE(1b, 3b)
                     : "=a" (oldval), "+m" (*uaddr)
                     : "i" (-EFAULT), "r" (newval), "0" (oldval)
                     : "memory"
        );

        return oldval;
}

#endif
#endif /* _ASM_X86_FUTEX_H */