linux/arch/mips/include/asm/cmpxchg.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

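/*
 * __xchg_u32 - atomically exchange a 32-bit value with *m.
 *
 * Returns the previous value of *m.  One of three implementations is
 * chosen: an LL/SC loop using branch-likely (beqzl) as the R10000
 * errata workaround, a plain LL/SC retry loop, and an
 * interrupt-disabling fallback for CPUs without LL/SC.
 */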
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
        __u32 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     ll      %0, %3                  # xchg_u32      \n"
                "       .set    mips0                                   \n"
                "       move    %2, %z4                                 \n"
                "       .set    arch=r4000                              \n"
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
                : GCC_OFF12_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    arch=r4000                      \n"
                        "       ll      %0, %3          # xchg_u32      \n"
                        "       .set    mips0                           \n"
                        "       move    %2, %z4                         \n"
                        "       .set    arch=r4000                      \n"
                        "       sc      %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
                          "=&r" (dummy)
                        : GCC_OFF12_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}

#ifdef CONFIG_64BIT
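/*
 * __xchg_u64 - atomically exchange a 64-bit value with *m.
 *
 * Same structure as __xchg_u32, but uses the doubleword LL/SC pair
 * (lld/scd).  Only available on 64-bit kernels.
 */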
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
        __u64 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     lld     %0, %3                  # xchg_u64      \n"
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
                : GCC_OFF12_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    arch=r4000                      \n"
                        "       lld     %0, %3          # xchg_u64      \n"
                        "       move    %2, %z4                         \n"
                        "       scd     %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
                          "=&r" (dummy)
                        : GCC_OFF12_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}
#else
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif

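/*
 * Dispatch on operand size.  Unsupported sizes simply return x; the
 * BUILD_BUG_ON in xchg() below is what rejects them at compile time.
 */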
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
        case 4:
                return __xchg_u32(ptr, x);
        case 8:
                return __xchg_u64(ptr, x);
        }

        return x;
}

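/*
 * xchg - exchange a value atomically, with full barrier semantics.
 *
 * The BUILD_BUG_ON rejects operand sizes with bits outside 0xc, which
 * in effect restricts xchg() to 4- and 8-byte objects.
 */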
#define xchg(ptr, x)                                                    \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);                            \
                                                                        \
        ((__typeof__(*(ptr)))                                           \
                __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));     \
})
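
/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *      unsigned int pending = 0;
 *      unsigned int old;
 *
 *      old = xchg(&pending, 1);
 *
 * old receives the previous value of pending; the exchange is atomic
 * and fully ordered on SMP.
 */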

#define __HAVE_ARCH_CMPXCHG 1

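/*
 * __cmpxchg_asm - low-level compare-and-exchange.
 *
 * Loads *m with "ld", compares against old, and stores new with "st"
 * only on a match, retrying if the store-conditional fails.  $at ($1)
 * is used as the scratch register, hence the .set noat.  Returns the
 * value loaded, which equals old exactly on success.
 */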
#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                      \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    arch=r4000                      \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    arch=r4000                      \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqzl   $1, 1b                          \n"     \
                "2:                                             \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)               \
                : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)          \
                : "memory");                                            \
        } else if (kernel_uses_llsc) {                                  \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    arch=r4000                      \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    arch=r4000                      \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqz    $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)               \
                : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)          \
                : "memory");                                            \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

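/*
 * __cmpxchg - size dispatcher for the cmpxchg() family.
 *
 * A 64-bit operand on a 32-bit kernel falls through to the
 * bad-pointer stub above and fails at link time.
 */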
#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)             \
({                                                                      \
        __typeof__(ptr) __ptr = (ptr);                                  \
        __typeof__(*(ptr)) __old = (old);                               \
        __typeof__(*(ptr)) __new = (new);                               \
        __typeof__(*(ptr)) __res = 0;                                   \
                                                                        \
        pre_barrier;                                                    \
                                                                        \
        switch (sizeof(*(__ptr))) {                                     \
        case 4:                                                         \
                __res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
                break;                                                  \
        case 8:                                                         \
                if (sizeof(long) == 8) {                                \
                        __res = __cmpxchg_asm("lld", "scd", __ptr,      \
                                              __old, __new);            \
                        break;                                          \
                }                                                       \
                /* fall through: no 64-bit cmpxchg on 32-bit kernels */ \
        default:                                                        \
                __cmpxchg_called_with_bad_pointer();                    \
                break;                                                  \
        }                                                               \
                                                                        \
        post_barrier;                                                   \
                                                                        \
        __res;                                                          \
})

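/*
 * cmpxchg() wraps __cmpxchg() with full barriers; cmpxchg_local()
 * passes empty barrier arguments and gives no SMP ordering guarantees.
 */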
#define cmpxchg(ptr, old, new)          __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new)    __cmpxchg(ptr, old, new, , )

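/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *      unsigned long owner = 0;
 *
 *      if (cmpxchg(&owner, 0, (unsigned long)current) == 0)
 *              we won the race: owner was 0 and is now current
 *
 * cmpxchg() returns the value it found at the location; the store
 * happened exactly when that value equals the "old" argument.
 */
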
#define cmpxchg64(ptr, o, n)                                            \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
  })

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })
#else
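/*
 * No 64-bit LL/SC on 32-bit kernels: fall back to the generic local
 * version, which disables interrupts and is not SMP-atomic.
 */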
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __ASM_CMPXCHG_H */