linux/arch/s390/include/asm/cmpxchg.h
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

extern void __xchg_called_with_bad_pointer(void);

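/*
 * COMPARE AND SWAP only operates on aligned 4-byte (cs) and 8-byte (csg)
 * operands, so the 1- and 2-byte cases below are emulated on the containing
 * aligned word: the byte offset is turned into a bit shift (s390 is
 * big-endian, hence the "3 ^" / "2 ^"), the affected bits are masked out,
 * the new value is or'ed in, and the whole word is swapped in a cs retry
 * loop.
 */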
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
        unsigned long addr, old;
        int shift;

        switch (size) {
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 4:
                asm volatile(
                        "       l       %0,%3\n"
                        "0:     cs      %0,%2,%3\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) ptr)
                        : "d" (x), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return old;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        "       lg      %0,%3\n"
                        "0:     csg     %0,%2,%3\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=m" (*(long *) ptr)
                        : "d" (x), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return old;
#endif /* CONFIG_64BIT */
        }
        __xchg_called_with_bad_pointer();
        return x;
}

#define xchg(ptr, x)                                                      \
({                                                                        \
        __typeof__(*(ptr)) __ret;                                         \
        __ret = (__typeof__(*(ptr)))                                      \
                __xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
        __ret;                                                            \
})
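
/*
 * Usage sketch (illustrative only, not part of this header; "busy" is a
 * hypothetical flag word):
 *
 *      static unsigned int busy;
 *
 *      if (xchg(&busy, 1) == 0) {
 *              ... this caller atomically claimed the flag ...
 *              xchg(&busy, 0);
 *      }
 */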

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

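/*
 * Tell common code that this architecture provides its own cmpxchg()
 * implementation instead of the asm-generic fallback.
 */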
#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

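/*
 * As in __xchg(), the 1- and 2-byte cases are emulated with a word-sized
 * cs on the containing aligned word.  The retry loop distinguishes the two
 * ways the cs can fail: if only bits outside the addressed byte/halfword
 * changed, the loop retries with the freshly observed word; if the
 * addressed byte/halfword itself no longer matches "old", the loop exits
 * and the mismatch is reported to the caller as a normal compare failure.
 */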
static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
                                      unsigned long new, int size)
{
        unsigned long addr, prev, tmp;
        int shift;

        switch (size) {
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" ((old & 0xff) << shift),
                          "d" ((new & 0xff) << shift),
                          "d" (~(0xff << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" ((old & 0xffff) << shift),
                          "d" ((new & 0xffff) << shift),
                          "d" (~(0xffff << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 4:
                asm volatile(
                        "       cs      %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(int *) ptr)
                        : "0" (old), "d" (new), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return prev;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        "       csg     %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(long *) ptr)
                        : "0" (old), "d" (new), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return prev;
#endif /* CONFIG_64BIT */
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n)                                               \
({                                                                       \
        __typeof__(*(ptr)) __ret;                                        \
        __ret = (__typeof__(*(ptr)))                                     \
                __cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
                          sizeof(*(ptr)));                               \
        __ret;                                                           \
})
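
/*
 * Usage sketch (illustrative only; "counter" is a hypothetical variable):
 * re-read and retry until the swap happens against an unchanged value.
 *
 *      unsigned int old, new;
 *
 *      do {
 *              old = counter;
 *              new = old + 1;
 *      } while (cmpxchg(&counter, old, new) != old);
 */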

#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)                                            \
({                                                                      \
        cmpxchg((ptr), (o), (n));                                       \
})
#else /* CONFIG_64BIT */
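/*
 * On 31 bit the 8-byte case is implemented with COMPARE DOUBLE AND SWAP
 * (cds), which expects the old and new values in even/odd register pairs;
 * passing them as 64-bit register_pair unions makes the compiler allocate
 * such pairs.
 */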
static inline unsigned long long __cmpxchg64(void *ptr,
                                             unsigned long long old,
                                             unsigned long long new)
{
        register_pair rp_old = {.pair = old};
        register_pair rp_new = {.pair = new};

        asm volatile(
                "       cds     %0,%2,%1"
                : "+&d" (rp_old), "+Q" (*(unsigned long long *) ptr)
                : "d" (rp_new)
                : "memory", "cc");
        return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)                            \
({                                                      \
        __typeof__(*(ptr)) __ret;                       \
        __ret = (__typeof__(*(ptr)))                    \
                __cmpxchg64((ptr),                      \
                            (unsigned long long)(o),    \
                            (unsigned long long)(n));   \
        __ret;                                          \
})
#endif /* CONFIG_64BIT */

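/*
 * cds/cdsg compare and swap two adjacent words/doublewords in one
 * instruction.  The asm("2") ... asm("5") register variables pin the old
 * and new values to the even/odd register pairs 2/3 and 4/5 that the
 * instruction requires; ipm/srl extract the condition code so that the
 * macro evaluates to 1 on success and 0 on failure.
 */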
#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)               \
({                                                                      \
        register __typeof__(*(p1)) __old1 asm("2") = (o1);              \
        register __typeof__(*(p2)) __old2 asm("3") = (o2);              \
        register __typeof__(*(p1)) __new1 asm("4") = (n1);              \
        register __typeof__(*(p2)) __new2 asm("5") = (n2);              \
        int cc;                                                         \
        asm volatile(                                                   \
                        insn   " %[old],%[new],%[ptr]\n"                \
                "       ipm     %[cc]\n"                                \
                "       srl     %[cc],28"                               \
                : [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)    \
                : [new] "d" (__new1), "d" (__new2),                     \
                  [ptr] "Q" (*(p1)), "Q" (*(p2))                        \
                : "memory", "cc");                                      \
        !cc;                                                            \
})

#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

extern void __cmpxchg_double_called_with_bad_pointer(void);

#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)                        \
({                                                                      \
        int __ret;                                                      \
        switch (sizeof(*(p1))) {                                        \
        case 4:                                                         \
                __ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2);     \
                break;                                                  \
        case 8:                                                         \
                __ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2);     \
                break;                                                  \
        default:                                                        \
                __cmpxchg_double_called_with_bad_pointer();             \
        }                                                               \
        __ret;                                                          \
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2)                          \
({                                                                      \
        __typeof__(p1) __p1 = (p1);                                     \
        __typeof__(p2) __p2 = (p2);                                     \
        int __ret;                                                      \
        BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));                    \
        BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));                    \
        VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
        if (sizeof(long) == 4)                                          \
                __ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2); \
        else                                                            \
                __ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2); \
        __ret;                                                          \
})
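
/*
 * Usage sketch (illustrative only; "first" and "second" are hypothetical,
 * adjacent long-sized fields of one suitably aligned structure):
 *
 *      if (cmpxchg_double(&first, &second, old1, old2, new1, new2))
 *              ... both words were replaced in one atomic operation ...
 *
 * The two locations must be adjacent in memory, which is what the
 * VM_BUG_ON() above checks.
 */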

#define system_has_cmpxchg_double()     1

#include <asm-generic/cmpxchg-local.h>

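/*
 * COMPARE AND SWAP is interlocked anyway, so the "local" (current CPU only)
 * variants simply reuse __cmpxchg() for the natively supported sizes and
 * fall back to the generic helper for everything else.
 */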
static inline unsigned long __cmpxchg_local(void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        switch (size) {
        case 1:
        case 2:
        case 4:
#ifdef CONFIG_64BIT
        case 8:
#endif
                return __cmpxchg(ptr, old, new, size);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                        \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __cmpxchg_local((ptr), (unsigned long)(o),              \
                                (unsigned long)(n), sizeof(*(ptr)));    \
        __ret;                                                          \
})
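
/*
 * Usage sketch (illustrative only; "stat" is a hypothetical counter that is
 * only ever modified from its owning CPU, so cross-CPU atomicity is not
 * required):
 *
 *      unsigned long old = stat;
 *
 *      cmpxchg_local(&stat, old, old + 1);
 */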

#define cmpxchg64_local(ptr, o, n)      cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */