linux/arch/sparc/include/asm/cmpxchg_64.h
/* 64-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_CMPXCHG__
#define __ARCH_SPARC64_CMPXCHG__

static inline unsigned long xchg32(__volatile__ unsigned int *m, unsigned int val)
{
        unsigned long tmp1, tmp2;

        __asm__ __volatile__(
"       mov             %0, %1\n"
"1:     lduw            [%4], %2\n"
"       cas             [%4], %2, %0\n"
"       cmp             %2, %0\n"
"       bne,a,pn        %%icc, 1b\n"
"        mov            %1, %0\n"
        : "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
        : "0" (val), "r" (m)
        : "cc", "memory");
        return val;
}

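/*
 * A rough C-level sketch of the retry loop above (illustration only;
 * hw_cas() is a hypothetical stand-in for the cas instruction, which
 * stores val only if *m still holds the value just loaded and always
 * hands back what it found in memory):
 *
 *	unsigned int seen;
 *	do {
 *		seen = *m;
 *	} while (hw_cas(m, seen, val) != seen);
 *	return seen;
 *
 * The annulled delay slot (the mov after bne,a,pn) restores val into
 * the swap register before each retry, since cas overwrote it with
 * the value it observed in memory.
 */
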
static inline unsigned long xchg64(__volatile__ unsigned long *m, unsigned long val)
{
        unsigned long tmp1, tmp2;

        __asm__ __volatile__(
"       mov             %0, %1\n"
"1:     ldx             [%4], %2\n"
"       casx            [%4], %2, %0\n"
"       cmp             %2, %0\n"
"       bne,a,pn        %%xcc, 1b\n"
"        mov            %1, %0\n"
        : "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
        : "0" (val), "r" (m)
        : "cc", "memory");
        return val;
}

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr,
                                       int size)
{
        switch (size) {
        case 4:
                return xchg32(ptr, x);
        case 8:
                return xchg64(ptr, x);
        }
        __xchg_called_with_bad_pointer();
        return x;
}

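/*
 * Usage sketch for xchg(): it atomically stores the new value and
 * returns the old one, so a caller can claim a flag word in a single
 * step.  lock_word and claimed() below are hypothetical, purely for
 * illustration:
 *
 *	static unsigned int lock_word;
 *
 *	if (xchg(&lock_word, 1) == 0)
 *		claimed();	(we observed 0 and stored 1)
 *
 * The macro dispatches on sizeof(*(ptr)); any size other than 4 or 8
 * fails at link time via __xchg_called_with_bad_pointer().
 */
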
/*
 * Atomic compare and exchange.  Compare OLD with MEM; if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#include <asm-generic/cmpxchg-local.h>

#define __HAVE_ARCH_CMPXCHG 1

/* The cas/casx instructions compare the word at [%2] with %3 and, if
 * they are equal, store %0 there; either way %0 receives the value
 * that was in memory, which is exactly what cmpxchg() must return.
 */
static inline unsigned long
__cmpxchg_u32(volatile int *m, int old, int new)
{
        __asm__ __volatile__("cas [%2], %3, %0"
                             : "=&r" (new)
                             : "0" (new), "r" (m), "r" (old)
                             : "memory");

        return new;
}

static inline unsigned long
__cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
        __asm__ __volatile__("casx [%2], %3, %0"
                             : "=&r" (new)
                             : "0" (new), "r" (m), "r" (old)
                             : "memory");

        return new;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
        switch (size) {
                case 4:
                        return __cmpxchg_u32(ptr, old, new);
                case 8:
                        return __cmpxchg_u64(ptr, old, new);
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr,o,n)                                                 \
  ({                                                                     \
     __typeof__(*(ptr)) _o_ = (o);                                       \
     __typeof__(*(ptr)) _n_ = (n);                                       \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,           \
                                    (unsigned long)_n_, sizeof(*(ptr))); \
  })

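/*
 * Usage sketch for cmpxchg(): the classic read/compute/retry loop.
 * counter here is hypothetical, purely for illustration:
 *
 *	static unsigned long counter;
 *	unsigned long old, seen;
 *
 *	do {
 *		old = counter;
 *		seen = cmpxchg(&counter, old, old + 1);
 *	} while (seen != old);
 *
 * The increment is only stored when counter still holds old; a
 * concurrent update makes the comparison fail and forces another
 * pass around the loop.
 */
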
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */

static inline unsigned long __cmpxchg_local(volatile void *ptr,
                                      unsigned long old,
                                      unsigned long new, int size)
{
        switch (size) {
        case 4:
        case 8: return __cmpxchg(ptr, old, new, size);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

#define cmpxchg_local(ptr, o, n)                                        \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
                        (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })
#define cmpxchg64(ptr, o, n)    cmpxchg64_local((ptr), (o), (n))
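
/*
 * Usage sketch for cmpxchg64_local(); stamp is hypothetical, purely
 * for illustration:
 *
 *	static u64 stamp;
 *
 *	cmpxchg64_local(&stamp, 0ULL, 1ULL);
 *
 * The BUILD_BUG_ON above rejects any object that is not 8 bytes wide
 * at compile time, rather than silently truncating it.
 */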

#endif /* __ARCH_SPARC64_CMPXCHG__ */