linux/arch/m68k/include/asm/cmpxchg.h
#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>
#include <linux/types.h>	/* u8/u16/u32, used by the !CONFIG_RMW_INSNS path */

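/*
 * __xg() casts a pointer to a dummy struct "large enough" that an "m"
 * constraint on *__xg(ptr) tells gcc the asm may access the pointed-to
 * storage itself, not just the pointer value, so the compiler cannot
 * cache the old memory contents around the asm.
 */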
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

#ifndef CONFIG_RMW_INSNS
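/*
 * Without read-modify-write instructions (CONFIG_RMW_INSNS unset),
 * emulate xchg() by masking interrupts around a plain load/store pair;
 * with a single CPU that is enough to make the sequence atomic with
 * respect to anything that can observe it.
 */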
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
        unsigned long flags, tmp;

        local_irq_save(flags);

        switch (size) {
        case 1:
                tmp = *(u8 *)ptr;
                *(u8 *)ptr = x;
                x = tmp;
                break;
        case 2:
                tmp = *(u16 *)ptr;
                *(u16 *)ptr = x;
                x = tmp;
                break;
        case 4:
                tmp = *(u32 *)ptr;
                *(u32 *)ptr = x;
                x = tmp;
                break;
        default:
                tmp = __invalid_xchg_size(x, ptr, size);
                break;
        }

        local_irq_restore(flags);
        return x;
}
#else
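/*
 * CAS is usable: load the current value, then retry cas until it
 * succeeds.  cas compares %0 against the memory operand and stores %1
 * on a match; on a mismatch it loads the observed value into %0, so %0
 * always ends up holding the previous memory contents.
 */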
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
        case 1:
                __asm__ __volatile__
                        ("moveb %2,%0\n\t"
                         "1:\n\t"
                         "casb %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
        case 2:
                __asm__ __volatile__
                        ("movew %2,%0\n\t"
                         "1:\n\t"
                         "casw %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
        case 4:
                __asm__ __volatile__
                        ("movel %2,%0\n\t"
                         "1:\n\t"
                         "casl %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
        default:
                x = __invalid_xchg_size(x, ptr, size);
                break;
        }
        return x;
}
#endif

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
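/*
 * Illustrative use (a sketch, not part of this header): atomically take
 * over a pending-work word.  "pending_mask" and process_mask() are
 * hypothetical names.
 *
 *	static unsigned long pending_mask;
 *
 *	unsigned long work = xchg(&pending_mask, 0);
 *	if (work)
 *		process_mask(work);
 */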

#include <asm-generic/cmpxchg-local.h>

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
                                            unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
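/*
 * Typical use is a retry loop (sketch only; "counter" is a hypothetical
 * variable):
 *
 *	u32 old, new;
 *
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 *
 * cmpxchg() returning something other than "old" means another context
 * modified "counter" between the read and the update, so the loop
 * recomputes and tries again.
 */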
#ifdef CONFIG_RMW_INSNS
#define __HAVE_ARCH_CMPXCHG     1

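/*
 * A single cas suffices here: on a failed comparison, cas loads the
 * observed memory value into the compare register, which is exactly
 * what __cmpxchg() must return, so no retry loop is needed.
 */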
static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
                                      unsigned long new, int size)
{
        switch (size) {
        case 1:
                __asm__ __volatile__ ("casb %0,%2,%1"
                                      : "=d" (old), "=m" (*(char *)p)
                                      : "d" (new), "0" (old), "m" (*(char *)p));
                break;
        case 2:
                __asm__ __volatile__ ("casw %0,%2,%1"
                                      : "=d" (old), "=m" (*(short *)p)
                                      : "d" (new), "0" (old), "m" (*(short *)p));
                break;
        case 4:
                __asm__ __volatile__ ("casl %0,%2,%1"
                                      : "=d" (old), "=m" (*(int *)p)
                                      : "d" (new), "0" (old), "m" (*(int *)p));
                break;
        default:
                old = __invalid_cmpxchg_size(p, old, new, size);
                break;
        }
        return old;
}

#define cmpxchg(ptr, o, n)                                                  \
        ((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),           \
                        (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg_local(ptr, o, n)                                            \
        ((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),           \
                        (unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64(ptr, o, n)    cmpxchg64_local((ptr), (o), (n))

#else

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                               \
        ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
                        (unsigned long)(n), sizeof(*(ptr))))
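/*
 * Sketch of a cmpxchg_local() user (illustrative only; "local_count" is
 * a hypothetical variable touched only by the local CPU, e.g. from both
 * task and interrupt context):
 *
 *	static unsigned long local_count;
 *	unsigned long old;
 *
 *	do {
 *		old = local_count;
 *	} while (cmpxchg_local(&local_count, old, old + 1) != old);
 */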

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */