linux/arch/mips/include/asm/cmpxchg.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/llsc.h>
#include <asm/sync.h>
#include <asm/war.h>

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
        __compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for xchg");

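/*
 * Atomically exchange the value at @m with @val using an LL/SC loop built
 * from the @ld/@st instruction pair, retrying until the SC succeeds. CPUs
 * without LL/SC fall back to performing the exchange with local interrupts
 * disabled.
 */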
#define __xchg_asm(ld, st, m, val)                                      \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " __SYNC(full, loongson3_war) "         \n"     \
                "1:     " ld "  %0, %2          # __xchg_asm    \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z3                         \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __SC_BEQZ  "$1, 1b                         \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)                  \
                : __LLSC_CLOBBER);                                      \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                *m = val;                                               \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
                                  unsigned int size);

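/*
 * Dispatch on the operand size: 1 and 2 byte exchanges are emulated out of
 * line by __xchg_small(), 4 byte exchanges use ll/sc directly and 8 byte
 * exchanges use lld/scd, which are only usable on 64-bit kernels.
 */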
static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
        switch (size) {
        case 1:
        case 2:
                return __xchg_small(ptr, x, size);

        case 4:
                return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

        case 8:
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __xchg_called_with_bad_pointer();

                return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

        default:
                return __xchg_called_with_bad_pointer();
        }
}

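/*
 * arch_xchg() is the fully ordered exchange: a full barrier is implied
 * before the LL (either via smp_mb__before_llsc() or via the SYNC that the
 * Loongson3 workaround emits inside __xchg_asm()) and smp_llsc_mb() orders
 * the operation against later accesses.
 */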
#define arch_xchg(ptr, x)                                               \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * In the Loongson3 workaround case __xchg_asm() already        \
         * contains a completion barrier prior to the LL, so we don't   \
         * need to emit an extra one here.                              \
         */                                                             \
        if (__SYNC_loongson3_war == 0)                                  \
                smp_mb__before_llsc();                                  \
                                                                        \
        __res = (__typeof__(*(ptr)))                                    \
                __xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));      \
                                                                        \
        smp_llsc_mb();                                                  \
                                                                        \
        __res;                                                          \
})

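/*
 * LL/SC compare-and-exchange loop: load the current value at @m, branch out
 * if it does not equal @old, otherwise attempt to store @new and retry on SC
 * failure. The value originally observed at @m is returned either way, so
 * callers can tell whether the exchange took place by comparing it against
 * @old. The non-LL/SC fallback again runs with local interrupts disabled.
 */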
#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "       " __SYNC(full, loongson3_war) "         \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __SC_BEQZ  "$1, 1b                         \n"     \
                "       .set    pop                             \n"     \
                "2:     " __SYNC(full, loongson3_war) "         \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)      \
                : __LLSC_CLOBBER);                                      \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
                                     unsigned long new, unsigned int size);

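/*
 * Size dispatch for cmpxchg, mirroring __xchg(): 1 and 2 byte accesses are
 * emulated by __cmpxchg_small(), 4 byte accesses use ll/sc and 8 byte
 * accesses require lld/scd and hence a 64-bit kernel.
 */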
static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
                        unsigned long new, unsigned int size)
{
        switch (size) {
        case 1:
        case 2:
                return __cmpxchg_small(ptr, old, new, size);

        case 4:
                return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
                                     (u32)old, new);

        case 8:
                /* lld/scd are only available for MIPS64 */
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __cmpxchg_called_with_bad_pointer();

                return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
                                     (u64)old, new);

        default:
                return __cmpxchg_called_with_bad_pointer();
        }
}

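/*
 * arch_cmpxchg_local() performs the compare-and-exchange without the
 * smp_mb__before_llsc()/smp_llsc_mb() ordering that arch_cmpxchg() below
 * adds, so it is only suitable where ordering against other CPUs is not
 * required.
 */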
#define arch_cmpxchg_local(ptr, old, new)                               \
        ((__typeof__(*(ptr)))                                           \
                __cmpxchg((ptr),                                        \
                          (unsigned long)(__typeof__(*(ptr)))(old),     \
                          (unsigned long)(__typeof__(*(ptr)))(new),     \
                          sizeof(*(ptr))))

#define arch_cmpxchg(ptr, old, new)                                     \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * In the Loongson3 workaround case __cmpxchg_asm() already     \
         * contains a completion barrier prior to the LL, so we don't   \
         * need to emit an extra one here.                              \
         */                                                             \
        if (__SYNC_loongson3_war == 0)                                  \
                smp_mb__before_llsc();                                  \
                                                                        \
        __res = arch_cmpxchg_local((ptr), (old), (new));                \
                                                                        \
        /*                                                              \
         * In the Loongson3 workaround case __cmpxchg_asm() already     \
         * contains a completion barrier after the SC, so we don't      \
         * need to emit an extra one here.                              \
         */                                                             \
        if (__SYNC_loongson3_war == 0)                                  \
                smp_llsc_mb();                                          \
                                                                        \
        __res;                                                          \
})
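
/*
 * Typical use of arch_cmpxchg(), normally reached via the generic
 * cmpxchg() wrapper, is a read-modify-write retry loop. A minimal
 * sketch; 'counter' is purely illustrative and not defined anywhere
 * in this file:
 *
 *      unsigned int old, prev;
 *
 *      do {
 *              old = READ_ONCE(*counter);
 *              prev = cmpxchg(counter, old, old + 1);
 *      } while (prev != old);
 */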
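
/*
 * cmpxchg64: on 64-bit kernels this is simply cmpxchg() on an 8 byte
 * object. On 32-bit kernels arch_cmpxchg64_local() falls back to the
 * generic interrupt-disabling implementation, while the SMP variant
 * uses lld/scd directly and therefore requires a CPU with 64-bit
 * support even though the kernel itself is 32-bit.
 */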
#ifdef CONFIG_64BIT
#define arch_cmpxchg64_local(ptr, o, n)                                 \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        arch_cmpxchg_local((ptr), (o), (n));                            \
  })

#define arch_cmpxchg64(ptr, o, n)                                       \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        arch_cmpxchg((ptr), (o), (n));                                  \
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

# ifdef CONFIG_SMP

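/*
 * 64-bit cmpxchg on a 32-bit SMP kernel. This still requires a CPU that
 * implements lld/scd (see the cpu_has_64bits check in arch_cmpxchg64()
 * below); the 64-bit operands live in pairs of 32-bit registers, hence the
 * %L/%M operand modifiers and the splitting/combining done in the asm.
 */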
static inline unsigned long __cmpxchg64(volatile void *ptr,
                                        unsigned long long old,
                                        unsigned long long new)
{
        unsigned long long tmp, ret;
        unsigned long flags;

        /*
         * The assembly below has to combine 32 bit values into a 64 bit
         * register, and split 64 bit values from one register into two. If we
         * were to take an interrupt in the middle of this we'd only save the
         * least significant 32 bits of each register & probably clobber the
         * most significant 32 bits of the 64 bit values we're using. In order
         * to avoid this we must disable interrupts.
         */
        local_irq_save(flags);

        asm volatile(
        "       .set    push                            \n"
        "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"
        /* Load 64 bits from ptr */
        "       " __SYNC(full, loongson3_war) "         \n"
        "1:     lld     %L0, %3         # __cmpxchg64   \n"
        /*
         * Split the 64 bit value we loaded into the 2 registers that hold the
         * ret variable.
         */
        "       dsra    %M0, %L0, 32                    \n"
        "       sll     %L0, %L0, 0                     \n"
        /*
         * Compare ret against old, breaking out of the loop if they don't
         * match.
         */
        "       bne     %M0, %M4, 2f                    \n"
        "       bne     %L0, %L4, 2f                    \n"
        /*
         * Combine the 32 bit halves from the 2 registers that hold the new
         * variable into a single 64 bit register.
         */
#  if MIPS_ISA_REV >= 2
        "       move    %L1, %L5                        \n"
        "       dins    %L1, %M5, 32, 32                \n"
#  else
        "       dsll    %L1, %L5, 32                    \n"
        "       dsrl    %L1, %L1, 32                    \n"
        "       .set    noat                            \n"
        "       dsll    $at, %M5, 32                    \n"
        "       or      %L1, %L1, $at                   \n"
        "       .set    at                              \n"
#  endif
        /* Attempt to store new at ptr */
        "       scd     %L1, %2                         \n"
        /* If we failed, loop! */
        "\t" __SC_BEQZ "%L1, 1b                         \n"
        "       .set    pop                             \n"
        "2:     " __SYNC(full, loongson3_war) "         \n"
        : "=&r"(ret),
          "=&r"(tmp),
          "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
        : GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
          "r" (old),
          "r" (new)
        : "memory");

        local_irq_restore(flags);
        return ret;
}

#  define arch_cmpxchg64(ptr, o, n) ({                                  \
        unsigned long long __old = (__typeof__(*(ptr)))(o);             \
        unsigned long long __new = (__typeof__(*(ptr)))(n);             \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * We can only use cmpxchg64 if we know that the CPU supports   \
         * 64-bits, i.e. lld & scd. Our call to __cmpxchg64_unsupported \
         * will cause a build error unless cpu_has_64bits is a          \
         * compile-time constant 1.                                     \
         */                                                             \
        if (cpu_has_64bits && kernel_uses_llsc) {                       \
                smp_mb__before_llsc();                                  \
                __res = __cmpxchg64((ptr), __old, __new);               \
                smp_llsc_mb();                                          \
        } else {                                                        \
                __res = __cmpxchg64_unsupported();                      \
        }                                                               \
                                                                        \
        __res;                                                          \
})

# else /* !CONFIG_SMP */
#  define arch_cmpxchg64(ptr, o, n) arch_cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#endif /* __ASM_CMPXCHG_H */