/* linux/arch/arc/include/asm/futex.h */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Vineetg: August 2010: From Android kernel work
 */
  10
  11#ifndef _ASM_FUTEX_H
  12#define _ASM_FUTEX_H
  13
  14#include <linux/futex.h>
  15#include <linux/preempt.h>
  16#include <linux/uaccess.h>
  17#include <asm/errno.h>
  18
#ifdef CONFIG_ARC_HAS_LLSC

/*
 * Atomically read-modify-write the user word at @uaddr.
 *
 * "insn" computes the new value into %0 from the old value (%1) and the
 * operand (%3); it runs inside an llock (1:) / scond (2:) retry loop, so
 * the update is atomic even under SMP.  On success %0 (ret) is set to 0;
 * if either the load at 1: or the store at 2: faults, the __ex_table
 * entries send control to the fixup at 4:, which sets ret to -EFAULT and
 * jumps past the op.  "oldval" receives the previous value of *uaddr.
 *
 * The smp_mb() on both sides provides the full-barrier semantics that
 * futex operations require.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
							\
	smp_mb();					\
	__asm__ __volatile__(				\
	"1:	llock	%1, [%2]		\n"	\
		insn				"\n"	\
	"2:	scond	%0, [%2]		\n"	\
	"	bnz	1b			\n"	\
	"	mov %0, 0			\n"	\
	"3:					\n"	\
	"	.section .fixup,\"ax\"		\n"	\
	"	.align  4			\n"	\
	"4:	mov %0, %4			\n"	\
	"	j   3b				\n"	\
	"	.previous			\n"	\
	"	.section __ex_table,\"a\"	\n"	\
	"	.align  4			\n"	\
	"	.word   1b, 4b			\n"	\
	"	.word   2b, 4b			\n"	\
	"	.previous			\n"	\
							\
	: "=&r" (ret), "=&r" (oldval)			\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
	: "cc", "memory");				\
	smp_mb()					\

#else	/* !CONFIG_ARC_HAS_LLSC */

/*
 * Variant for cores without llock/scond: a plain ld (1:) / st (2:) pair.
 * Atomicity of the read-modify-write is provided by the callers in this
 * file, which wrap the op in preempt_disable()/preempt_enable() when
 * !CONFIG_ARC_HAS_LLSC.  Fault handling mirrors the LLSC version via the
 * same .fixup / __ex_table pattern (ret = -EFAULT on a faulting access).
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
							\
	smp_mb();					\
	__asm__ __volatile__(				\
	"1:	ld	%1, [%2]		\n"	\
		insn				"\n"	\
	"2:	st	%0, [%2]		\n"	\
	"	mov %0, 0			\n"	\
	"3:					\n"	\
	"	.section .fixup,\"ax\"		\n"	\
	"	.align  4			\n"	\
	"4:	mov %0, %4			\n"	\
	"	j   3b				\n"	\
	"	.previous			\n"	\
	"	.section __ex_table,\"a\"	\n"	\
	"	.align  4			\n"	\
	"	.word   1b, 4b			\n"	\
	"	.word   2b, 4b			\n"	\
	"	.previous			\n"	\
							\
	: "=&r" (ret), "=&r" (oldval)			\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
	: "cc", "memory");				\
	smp_mb()					\

#endif
  75
  76static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
  77{
  78        int op = (encoded_op >> 28) & 7;
  79        int cmp = (encoded_op >> 24) & 15;
  80        int oparg = (encoded_op << 8) >> 20;
  81        int cmparg = (encoded_op << 20) >> 20;
  82        int oldval = 0, ret;
  83
  84        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
  85                oparg = 1 << oparg;
  86
  87        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
  88                return -EFAULT;
  89
  90#ifndef CONFIG_ARC_HAS_LLSC
  91        preempt_disable();      /* to guarantee atomic r-m-w of futex op */
  92#endif
  93        pagefault_disable();
  94
  95        switch (op) {
  96        case FUTEX_OP_SET:
  97                __futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
  98                break;
  99        case FUTEX_OP_ADD:
 100                /* oldval = *uaddr; *uaddr += oparg ; ret = *uaddr */
 101                __futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
 102                break;
 103        case FUTEX_OP_OR:
 104                __futex_atomic_op("or  %0, %1, %3", ret, oldval, uaddr, oparg);
 105                break;
 106        case FUTEX_OP_ANDN:
 107                __futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
 108                break;
 109        case FUTEX_OP_XOR:
 110                __futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
 111                break;
 112        default:
 113                ret = -ENOSYS;
 114        }
 115
 116        pagefault_enable();
 117#ifndef CONFIG_ARC_HAS_LLSC
 118        preempt_enable();
 119#endif
 120
 121        if (!ret) {
 122                switch (cmp) {
 123                case FUTEX_OP_CMP_EQ:
 124                        ret = (oldval == cmparg);
 125                        break;
 126                case FUTEX_OP_CMP_NE:
 127                        ret = (oldval != cmparg);
 128                        break;
 129                case FUTEX_OP_CMP_LT:
 130                        ret = (oldval < cmparg);
 131                        break;
 132                case FUTEX_OP_CMP_GE:
 133                        ret = (oldval >= cmparg);
 134                        break;
 135                case FUTEX_OP_CMP_LE:
 136                        ret = (oldval <= cmparg);
 137                        break;
 138                case FUTEX_OP_CMP_GT:
 139                        ret = (oldval > cmparg);
 140                        break;
 141                default:
 142                        ret = -ENOSYS;
 143                }
 144        }
 145        return ret;
 146}
 147
/*
 * cmpxchg of futex (pagefaults disabled by caller)
 * Return 0 for success, -EFAULT otherwise
 *
 * Compare-and-exchange on the user word @uaddr: if *uaddr == @expval,
 * store @newval; either way the observed old value is written to *uval.
 * Note the return is bimodal (0 / -EFAULT) — whether the exchange
 * happened is for the caller to deduce by comparing *uval with @expval.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 expval,
			      u32 newval)
{
	int ret = 0;
	u32 existval;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable();	/* to guarantee atomic r-m-w of futex op */
#endif
	smp_mb();

	/*
	 * LLSC: llock/scond retry loop makes the compare-and-store atomic.
	 * Non-LLSC: plain ld/st, atomic only by virtue of the
	 * preempt_disable() above.  In both variants, "brne" skips the
	 * store when the current value (%1) differs from expval (%2).
	 * A fault at 1: or 2: is routed by the __ex_table to the fixup at
	 * 4:, which sets ret (%0) to -EFAULT.
	 */
	__asm__ __volatile__(
#ifdef CONFIG_ARC_HAS_LLSC
	"1:	llock	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	scond	%3, [%4]		\n"
	"	bnz	1b			\n"
#else
	"1:	ld	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	st	%3, [%4]		\n"
#endif
	"3:	\n"
	"	.section .fixup,\"ax\"	\n"
	"4:	mov %0, %5	\n"
	"	j   3b	\n"
	"	.previous	\n"
	"	.section __ex_table,\"a\"	\n"
	"	.align  4	\n"
	"	.word	1b, 4b	\n"
	"	.word	2b, 4b	\n"
	"	.previous\n"
	: "+&r"(ret), "=&r"(existval)
	: "r"(expval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
	: "cc", "memory");

	smp_mb();

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif
	/* always hand back the value we observed, even on -EFAULT path */
	*uval = existval;
	return ret;
}
 200
 201#endif
 202