linux/arch/h8300/include/asm/cmpxchg.h
#ifndef __ARCH_H8300_CMPXCHG__
#define __ARCH_H8300_CMPXCHG__

#include <linux/irqflags.h>

#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))

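/*
 * __xg() casts ptr to a pointer to an oversized dummy structure so that
 * the "m" asm operand below is treated as covering the whole object,
 * which keeps the compiler from caching the pointed-to value in a
 * register across the asm.
 */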
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

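/*
 * Exchange the value at ptr with x. Atomicity comes from disabling
 * interrupts around the plain load/store pair, which is sufficient
 * because the h8300 port is uniprocessor only.
 */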
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long tmp, flags;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__
			("mov.b %2,%0\n\t"	/* tmp = *ptr */
			 "mov.b %1,%2"		/* *ptr = x */
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("mov.w %2,%0\n\t"
			 "mov.w %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("mov.l %2,%0\n\t"
			 "mov.l %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	default:
		tmp = 0;	/* unsupported operand size */
	}
	local_irq_restore(flags);
	return tmp;
}
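
/*
 * Usage sketch (illustrative, not part of the original header; the
 * variable and handler names are hypothetical). xchg() stores the new
 * value and returns the old contents as a single atomic operation:
 *
 *	static unsigned long pending_events;
 *
 *	unsigned long old = xchg(&pending_events, 0UL);
 *	if (old)
 *		handle_events(old);
 */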

#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic with respect to the
 * current CPU. Always make them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
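
/*
 * Usage sketch (illustrative; 'seq' is a hypothetical variable).
 * cmpxchg_local() stores the new value only if *ptr still equals the
 * expected old value, and always returns the prior contents, so success
 * is detected by comparing the result against the expected value:
 *
 *	static unsigned long seq;
 *
 *	unsigned long cur = seq;
 *	if (cmpxchg_local(&seq, cur, cur + 1) == cur)
 *		(the update succeeded on this CPU)
 */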

#ifndef CONFIG_SMP
/* On a UP kernel the generic, interrupt-disabling cmpxchg() is enough. */
#include <asm-generic/cmpxchg.h>
#endif

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

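/*
 * Usage sketch (illustrative; 'taken' is a hypothetical variable).
 * atomic_xchg() applies xchg() to an atomic_t's counter, e.g. to claim
 * a one-shot flag exactly once:
 *
 *	static atomic_t taken = ATOMIC_INIT(0);
 *
 *	if (atomic_xchg(&taken, 1) == 0)
 *		(first and only claimant proceeds)
 */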
#endif /* __ARCH_H8300_CMPXCHG__ */