linux/arch/mips/include/asm/cmpxchg.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

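/*
 * __xchg_u32 - atomically exchange the 32-bit value at *m with val and
 * return the old value.  Three paths are selected: a branch-likely LL/SC
 * sequence for cores needing the R10000 LLSC workaround, a plain LL/SC
 * retry loop for other LL/SC-capable kernels, and an interrupt-disabling
 * fallback for CPUs without LL/SC.  The surrounding barriers give the
 * operation full ordering on SMP.
 */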
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
        __u32 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     ll      %0, %3                  # xchg_u32      \n"
                "       .set    mips0                                   \n"
                "       move    %2, %z4                                 \n"
                "       .set    arch=r4000                              \n"
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       ll      %0, %3          # xchg_u32      \n"
                        "       .set    mips0                           \n"
                        "       move    %2, %z4                         \n"
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       sc      %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
                          "=&r" (dummy)
                        : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}

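/*
 * __xchg_u64 - 64-bit counterpart of __xchg_u32, using lld/scd.  Only
 * built on 64-bit kernels; on 32-bit kernels it is aliased to an
 * undefined extern so that any caller fails at link time.
 */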
#ifdef CONFIG_64BIT
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
        __u64 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     lld     %0, %3                  # xchg_u64      \n"
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       lld     %0, %3          # xchg_u64      \n"
                        "       move    %2, %z4                         \n"
                        "       scd     %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
                          "=&r" (dummy)
                        : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}
#else
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif

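/*
 * Size dispatcher for xchg().  Unsupported sizes simply return x; the
 * xchg() wrapper below rejects them at compile time.
 */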
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
        case 4:
                return __xchg_u32(ptr, x);
        case 8:
                return __xchg_u64(ptr, x);
        }

        return x;
}

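/*
 * The BUILD_BUG_ON() rejects operand sizes with bits set outside 0xc,
 * which in practice restricts xchg() to 4- and 8-byte objects.
 */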
#define xchg(ptr, x)                                                    \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);                            \
                                                                        \
        ((__typeof__(*(ptr)))                                           \
                __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));     \
})

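/*
 * __cmpxchg_asm - emit a compare-and-exchange sequence.  "ld" and "st"
 * are the load-linked/store-conditional mnemonics to use: "ll"/"sc" for
 * 32-bit operands, "lld"/"scd" for 64-bit ones.  As with xchg, CPUs
 * without LL/SC fall back to disabling interrupts around a plain
 * compare and store.
 */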
#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                      \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    arch=r4000                      \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    arch=r4000                      \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqzl   $1, 1b                          \n"     \
                "2:                                             \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)      \
                : "memory");                                            \
        } else if (kernel_uses_llsc) {                                  \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqz    $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)      \
                : "memory");                                            \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

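/*
 * __cmpxchg - size-dispatching core of cmpxchg().  pre_barrier and
 * post_barrier are expanded before and after the operation, so cmpxchg()
 * can be fully ordered while cmpxchg_local() stays barrier-free.
 */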
#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)             \
({                                                                      \
        __typeof__(ptr) __ptr = (ptr);                                  \
        __typeof__(*(ptr)) __old = (old);                               \
        __typeof__(*(ptr)) __new = (new);                               \
        __typeof__(*(ptr)) __res = 0;                                   \
                                                                        \
        pre_barrier;                                                    \
                                                                        \
        switch (sizeof(*(__ptr))) {                                     \
        case 4:                                                         \
                __res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
                break;                                                  \
        case 8:                                                         \
                if (sizeof(long) == 8) {                                \
                        __res = __cmpxchg_asm("lld", "scd", __ptr,      \
                                           __old, __new);               \
                        break;                                          \
                }                                                       \
        default:                                                        \
                __cmpxchg_called_with_bad_pointer();                    \
                break;                                                  \
        }                                                               \
                                                                        \
        post_barrier;                                                   \
                                                                        \
        __res;                                                          \
})

#define cmpxchg(ptr, old, new)          __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new)    __cmpxchg(ptr, old, new, , )

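/*
 * cmpxchg64()/cmpxchg64_local(): native lld/scd based cmpxchg on 64-bit
 * kernels; 32-bit kernels fall back to the generic, interrupt-disabling
 * local variant.
 */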
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })

#define cmpxchg64(ptr, o, n)                                            \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif

#endif /* __ASM_CMPXCHG_H */